diff --git a/.gitmodules b/.gitmodules index be60f421a48..686ec7fb653 100644 --- a/.gitmodules +++ b/.gitmodules @@ -14,6 +14,3 @@ [submodule "src/external/c-reference-signer"] path = src/external/c-reference-signer url = https://github.com/MinaProtocol/c-reference-signer.git -[submodule "src/lib/snarky_js_bindings/snarkyjs"] - path = src/lib/snarkyjs - url = https://github.com/o1-labs/snarkyjs.git diff --git a/CODEOWNERS b/CODEOWNERS index 86ab915ce1a..561a11f7ebe 100644 --- a/CODEOWNERS +++ b/CODEOWNERS @@ -51,7 +51,6 @@ /src/lib/snarky_curves/ @MinaProtocol/crypto-eng-reviewers /src/lib/snarky_field_extensions/ @MinaProtocol/crypto-eng-reviewers /src/lib/snarky_group_map/ @MinaProtocol/crypto-eng-reviewers -/src/lib/snarkyjs @MinaProtocol/product-eng-reviewers /src/lib/snarky_log/ @MinaProtocol/crypto-eng-reviewers /src/lib/unsigned_extended/ @MinaProtocol/crypto-eng-reviewers diff --git a/Makefile b/Makefile index f9d59753d00..57d99932ed3 100644 --- a/Makefile +++ b/Makefile @@ -115,18 +115,6 @@ build_intgtest: ocaml_checks dune build --profile=$(DUNE_PROFILE) src/app/test_executive/test_executive.exe src/app/logproc/logproc.exe $(info Build complete) -snarkyjs: ocaml_checks - $(info Starting Build) - ((ulimit -s 65532) || true) && (ulimit -n 10240 || true) \ - && bash ./src/lib/snarkyjs/src/bindings/scripts/build-snarkyjs-node.sh - $(info Build complete) - -snarkyjs_no_types: ocaml_checks - $(info Starting Build) - ((ulimit -s 65532) || true) && (ulimit -n 10240 || true) \ - && bash ./src/lib/snarkyjs/src/bindings/scripts/build-snarkyjs-node-artifacts.sh - $(info Build complete) - rosetta_lib_encodings: ocaml_checks $(info Starting Build) (ulimit -s 65532 || true) && (ulimit -n 10240 || true) && dune build src/lib/rosetta_lib/test/test_encodings.exe --profile=mainnet diff --git a/buildkite/scripts/build-js-tests.sh b/buildkite/scripts/build-js-tests.sh deleted file mode 100755 index ef33ac983ff..00000000000 --- a/buildkite/scripts/build-js-tests.sh +++ /dev/null @@ -1,17 +0,0 @@ -#!/bin/bash - -set -eo pipefail -source ~/.profile - -echo "Building SnarkyJS.." -make snarkyjs - -echo "Prepare SnarkyJS test module and pack into archive" -npm pack src/lib/snarkyjs -mv o1js-*.tgz o1js.tgz -cd src/lib/snarkyjs/tests/integration -npm i ../../../../../o1js.tgz -cp $(which node) ./node -cd ../../../../.. -tar -chzf snarkyjs_test.tar.gz src/lib/snarkyjs/tests/integration -chmod 777 snarkyjs_test.tar.gz diff --git a/buildkite/scripts/build-snarkyjs-bindings.sh b/buildkite/scripts/build-snarkyjs-bindings.sh deleted file mode 100755 index 5c89e2cd6b1..00000000000 --- a/buildkite/scripts/build-snarkyjs-bindings.sh +++ /dev/null @@ -1,12 +0,0 @@ -#!/bin/bash - -set -eo pipefail -source ~/.profile - -echo "Install NPM dependencies..." -cd src/lib/snarkyjs -npm install --no-progress -cd - - -echo "Build SnarkyJS..." 
-./scripts/update-snarkyjs-bindings.sh diff --git a/buildkite/scripts/rosetta-integration-tests-fast.sh b/buildkite/scripts/rosetta-integration-tests-fast.sh new file mode 100755 index 00000000000..a548163395f --- /dev/null +++ b/buildkite/scripts/rosetta-integration-tests-fast.sh @@ -0,0 +1 @@ +buildkite/scripts/rosetta-integration-tests.sh --mode=minimal diff --git a/buildkite/scripts/rosetta-integration-tests-full.sh b/buildkite/scripts/rosetta-integration-tests-full.sh new file mode 100755 index 00000000000..1f76975d611 --- /dev/null +++ b/buildkite/scripts/rosetta-integration-tests-full.sh @@ -0,0 +1 @@ +buildkite/scripts/rosetta-integration-tests.sh --mode=full diff --git a/buildkite/scripts/rosetta-integration-tests.sh b/buildkite/scripts/rosetta-integration-tests.sh index 1efca1562ef..a1a3f25f051 100755 --- a/buildkite/scripts/rosetta-integration-tests.sh +++ b/buildkite/scripts/rosetta-integration-tests.sh @@ -1,21 +1,22 @@ #!/bin/bash - -# Deploy a sandboxed Mina daemon with an archive and a Rosetta instance. -# Deploy 2 zkApps from https://github.com/MinaProtocol/rosetta-integration-test-zkapps -# to the network, interact with them and add some regular transactions. -# Then run full rosetta-cli tests against the Rosetta instance. - -# NPM and NodeJS are installed through NVM, versions are stored in environment -# variables below. Zkapp-cli is installed globally through NPM, however, to -# ensure compatibility with the daemon, we use o1js pinned in the Mina repo. -# The repo is mounted into the container at /workdir, so we can build o1js from -# that source. It is important to make sure that the zkapp-cli version installed -# is compatible with o1js version used. +set -eox pipefail # These tests use the mina-dev binary, as rosetta-cli assumes we use a testnet. # See https://github.com/coinbase/rosetta-sdk-go/blob/master/keys/signer_pallas.go#L222 -set -eo pipefail +# Defines scope of test. Currently supported are: +# - minimal -> only quick checks (~5 mins) +# - full -> all checks +MODE="minimal" + +while [ $# -gt 0 ]; do + case "$1" in + --mode=*) + MODE="${1#*=}" + ;; + esac + shift +done export MINA_NETWORK=${MINA_NETWORK:=sandbox} export LOG_LEVEL="${LOG_LEVEL:=Info}" @@ -47,43 +48,6 @@ export MINA_CONFIG_FILE=$HOME/${MINA_NETWORK}.json export MINA_CONFIG_DIR="${MINA_CONFIG_DIR:=$HOME/.mina-config}" export MINA_GRAPHQL_PORT=${MINA_GRAPHQL_PORT:=3085} -# Test variables -export ROSETTA_INT_TEST_ZKAPPS_VERSION=${ROSETTA_INT_TEST_ZKAPPS_VERSION:=rosetta-ci-tests} - -# We need a version which is compatible with o1js pinned to the Mina repo. -# Should be set to 'latest' most of the time, but occasionally we might need -# an older one. -export ZKAPP_CLI_VERSION=0.11.0 - -# Nodejs variables -export NVM_VERSION=0.39.3 -export NODE_VERSION=20.6.1 - -# zkApps variables -export ZKAPP_PATH=$HOME/zkapps - -echo "=========================== INSTALLING NPM ===========================" -curl -so- https://raw.githubusercontent.com/nvm-sh/nvm/v${NVM_VERSION}/install.sh | bash &>/dev/null -export NVM_DIR="$HOME/.nvm" -[ -s "$NVM_DIR/nvm.sh" ] && \. "$NVM_DIR/nvm.sh" # This loads nvm -[ -s "$NVM_DIR/bash_completion" ] && \. "$NVM_DIR/bash_completion" # This loads nvm bash_completion - -nvm install $NODE_VERSION -nvm use --delete-prefix $NODE_VERSION - -mkdir ~/.npm-global -npm config set prefix '~/.npm-global' -export PATH=~/.npm-global/bin:$PATH - -# Install zkapp-cli and Typescript compiler. 
-npm install --no-progress --global "zkapp-cli@${ZKAPP_CLI_VERSION}" "typescript@latest" - -# Build o1js so that we can use it later. -pushd /workdir/src/lib/snarkyjs -npm ci -npm run build -popd - # Rosetta CLI variables # Files from ROSETTA_CLI_CONFIG_FILES will be read from # ROSETTA_CONFIGURATION_INPUT_DIR and some placeholders will be @@ -95,10 +59,6 @@ ROSETTA_CLI_MAIN_CONFIG_FILE=${ROSETTA_CLI_MAIN_CONFIG_FILE:="config.json"} # Frequency (in seconds) at which payment operations will be sent TRANSACTION_FREQUENCY=60 -# Fetch zkApps -curl -Ls https://github.com/MinaProtocol/rosetta-integration-test-zkapps/tarball/$ROSETTA_INT_TEST_ZKAPPS_VERSION | tar xz -C /tmp -mv /tmp/MinaProtocol-rosetta-integration-test-zkapps-* $ZKAPP_PATH - # Libp2p Keypair echo "=========================== GENERATING KEYPAIR IN ${MINA_LIBP2P_KEYPAIR_PATH} ===========================" mina-dev libp2p generate-keypair -privkey-path $MINA_LIBP2P_KEYPAIR_PATH @@ -108,9 +68,18 @@ echo "=========================== GENERATING GENESIS LEDGER FOR ${MINA_NETWORK} mkdir -p $MINA_KEYS_PATH mina-dev advanced generate-keypair --privkey-path $MINA_KEYS_PATH/block-producer.key mina-dev advanced generate-keypair --privkey-path $MINA_KEYS_PATH/snark-producer.key +mina-dev advanced generate-keypair --privkey-path $MINA_KEYS_PATH/zkapp-fee-payer.key +mina-dev advanced generate-keypair --privkey-path $MINA_KEYS_PATH/zkapp-sender.key +mina-dev advanced generate-keypair --privkey-path $MINA_KEYS_PATH/zkapp-account.key chmod -R 0700 $MINA_KEYS_PATH BLOCK_PRODUCER_PK=$(cat $MINA_KEYS_PATH/block-producer.key.pub) SNARK_PRODUCER_PK=$(cat $MINA_KEYS_PATH/snark-producer.key.pub) +ZKAPP_FEE_PAYER_KEY=$MINA_KEYS_PATH/zkapp-fee-payer.key +ZKAPP_FEE_PAYER_PUB_KEY=$(cat ${ZKAPP_FEE_PAYER_KEY}.pub) +ZKAPP_SENDER_KEY=$MINA_KEYS_PATH/zkapp-sender.key +ZKAPP_SENDER_PUB_KEY=$(cat ${ZKAPP_SENDER_KEY}.pub) +ZKAPP_ACCOUNT_KEY=$MINA_KEYS_PATH/zkapp-account.key +ZKAPP_ACCOUNT_PUB_KEY=$(cat ${ZKAPP_ACCOUNT_KEY}.pub) mkdir -p $MINA_CONFIG_DIR/wallets/store cp $MINA_KEYS_PATH/block-producer.key $MINA_CONFIG_DIR/wallets/store/$BLOCK_PRODUCER_PK @@ -122,25 +91,15 @@ cat <"$MINA_CONFIG_FILE" "ledger": { "name": "${MINA_NETWORK}", "accounts": [ - { "pk": "${BLOCK_PRODUCER_PK}", "balance": "1000", "delegate": null, "sk": null }, - { "pk": "${SNARK_PRODUCER_PK}", "balance": "2000", "delegate": "${BLOCK_PRODUCER_PK}", "sk": null } + { "pk": "${BLOCK_PRODUCER_PK}", "balance": "101550000.000000000", "delegate": null, "sk": null }, + { "pk": "${SNARK_PRODUCER_PK}", "balance": "605500.000000000", "delegate": "${BLOCK_PRODUCER_PK}", "sk": null }, + { "pk": "${ZKAPP_FEE_PAYER_PUB_KEY}", "balance": "10055.000000000", "delegate": null, "sk": null }, + { "pk": "${ZKAPP_SENDER_PUB_KEY}", "balance": "10055.000000000", "delegate": null, "sk": null }, + { "pk": "${ZKAPP_ACCOUNT_PUB_KEY}", "balance": "10055.000000000", "delegate": null, "sk": null } ] } } EOF -for zkapp_path in ${ZKAPP_PATH}/*/; do - zkapp_path=${zkapp_path%/} - zkapp=$(basename $zkapp_path) - # Generate zkApp account keypair - mina-dev advanced generate-keypair --privkey-path ${MINA_KEYS_PATH}/zkapp-${zkapp}-account.key - # Generate zkApp fee payer keypair - mina-dev advanced generate-keypair --privkey-path ${MINA_KEYS_PATH}/zkapp-${zkapp}-fee-payer.key - zkapp_fee_payer_pk=$(cat $MINA_KEYS_PATH/zkapp-${zkapp}-fee-payer.key.pub) - line="[{ \"pk\": \"${zkapp_fee_payer_pk}\", \"balance\": \"10000\", \"delegate\": null, \"sk\": null }]" - jq ".ledger.accounts |= . 
+ ${line}" $MINA_CONFIG_FILE >${MINA_CONFIG_FILE}.tmp - mv ${MINA_CONFIG_FILE}.tmp $MINA_CONFIG_FILE -done -cat $MINA_CONFIG_FILE | jq . # Substitute placeholders in rosetta-cli configuration ROSETTA_CONFIGURATION_OUTPUT_DIR=/tmp/rosetta-cli-config @@ -160,11 +119,6 @@ done echo "==================== IMPORTING GENESIS ACCOUNTS ======================" mina-dev accounts import --privkey-path $MINA_KEYS_PATH/block-producer.key --config-directory $MINA_CONFIG_DIR mina-dev accounts import --privkey-path $MINA_KEYS_PATH/snark-producer.key --config-directory $MINA_CONFIG_DIR -for zkapp_path in ${ZKAPP_PATH}/*/; do - zkapp_path=${zkapp_path%/} - zkapp=$(basename $zkapp_path) - mina-dev accounts import --privkey-path $MINA_KEYS_PATH/zkapp-${zkapp}-fee-payer.key --config-directory $MINA_CONFIG_DIR -done # Postgres echo "========================= INITIALIZING POSTGRESQL ===========================" @@ -222,100 +176,56 @@ until [ $daemon_status == "Synced" ]; do echo "Daemon Status: ${daemon_status}" done +send_zkapp_txn() { + local GRAPHQL_REQUEST="$1" + local ESCAPED_GRAPHQL_REQUEST="${GRAPHQL_REQUEST//\"/\\\"}" + local ENDPOINT="http://127.0.0.1:${MINA_GRAPHQL_PORT}/graphql" + + curl -X POST \ + -H "Content-Type: application/json" \ + --data "{\"query\":\"$ESCAPED_GRAPHQL_REQUEST\"}" \ + "$ENDPOINT" +} + +echo "========================= ZKAPP ACCOUNT SETTING UP ===========================" +ZKAPP_TXN_QUERY=$(zkapp_test_transaction create-zkapp-account --fee-payer-key ${ZKAPP_FEE_PAYER_KEY} --nonce 0 --sender-key ${ZKAPP_SENDER_KEY} --sender-nonce 0 --receiver-amount 1000 --zkapp-account-key ${ZKAPP_ACCOUNT_KEY} --fee 5 | sed 1,7d) +send_zkapp_txn "${ZKAPP_TXN_QUERY}" + # Unlock Genesis Accounts echo "==================== UNLOCKING GENESIS ACCOUNTS ======================" mina-dev accounts unlock --public-key $BLOCK_PRODUCER_PK mina-dev accounts unlock --public-key $SNARK_PRODUCER_PK -# Start sending payments -send_payments() { +# Start sending value transfer transactions +send_value_transfer_txns() { mina-dev client send-payment -rest-server http://127.0.0.1:${MINA_GRAPHQL_PORT}/graphql -amount 1 -nonce 0 -receiver $BLOCK_PRODUCER_PK -sender $BLOCK_PRODUCER_PK while true; do sleep $TRANSACTION_FREQUENCY mina-dev client send-payment -rest-server http://127.0.0.1:${MINA_GRAPHQL_PORT}/graphql -amount 1 -receiver $BLOCK_PRODUCER_PK -sender $BLOCK_PRODUCER_PK done } -send_payments & - -# Fee payer cache creation -echo "==================== PREPARE FEE PAYER CACHE ======================" -zkapp_fee_payer_pk=$(cat ${MINA_KEYS_PATH}/zkapp-${zkapp}-fee-payer.key.pub) -zkapp_fee_payer_privkey=$(mina-dev advanced dump-keypair --privkey-path "${MINA_KEYS_PATH}/zkapp-${zkapp}-fee-payer.key" | sed -ne "s/Private key: //p") - -mkdir -p /root/.cache/zkapp-cli/keys -echo -e "{\n \"privateKey\": \"${zkapp_fee_payer_privkey}\",\n \"publicKey\": \"${zkapp_fee_payer_pk}\"\n}" >/root/.cache/zkapp-cli/keys/sandbox.json - -# Deploy zkApps -echo "==================== DEPLOYING ZKAPPS ======================" -echo "If this fails, it's likely due to incompatibility between the -o1js and zkapp-cli versions in use." -echo "NOTE: At the moment the daemon still has an old version of - snarkyjs pinned to it, so we cannot use the latest version of - zkapp-cli, which requires o1js. Once the pinned snarkyjs version - gets updated, this build will most likely fail and we will then - need to update the zkapp-cli version used here. This is - unfortunate, but necessary. Please delete this warning once it's - done." 
> /dev/stderr - -deploy_txs=() -for zkapp_path in ${ZKAPP_PATH}/*/; do - zkapp_path=${zkapp_path%/} - zkapp=$(basename $zkapp_path) - echo "Deploying ${zkapp}..." - - zkapp_account_pk=$(cat ${MINA_KEYS_PATH}/zkapp-${zkapp}-account.key.pub) - zkapp_account_privkey=$(mina-dev advanced dump-keypair --privkey-path "${MINA_KEYS_PATH}/zkapp-${zkapp}-account.key" | sed -ne "s/Private key: //p") - - mkdir -p ${zkapp_path}/keys - echo -e "{\n \"privateKey\": \"${zkapp_account_privkey}\",\n \"publicKey\": \"${zkapp_account_pk}\"\n}" >"${zkapp_path}/keys/sandbox.json" - - cat <"${zkapp_path}/config.json" -{ - "version": 1, - "networks": { - "sandbox": { - "url": "http://127.0.0.1:${MINA_GRAPHQL_PORT}/graphql", - "keyPath": "keys/sandbox.json", - "feepayerKeyPath": "/root/.cache/zkapp-cli/keys/sandbox.json", - "feepayerAlias": "sandbox", - "fee": "1" - } - } -} -EOF - cd "$zkapp_path" - npm ci - npm run build - txn=$(zk deploy sandbox -y | sed -ne "s/https:\/\/berkeley.minaexplorer.com\/transaction\///p") - deploy_txs+=txn - cd - - echo "Done." -done - -# TODO: wait until all zkApps deploy txns are included in a block +send_value_transfer_txns & -next_block_time=$(mina-dev client status --json | jq '.next_block_production.timing[1].time' | tr -d '"') -curr_time=$(date +%s%N | cut -b1-13) -sleep_time=$((($next_block_time - $curr_time) / 1000)) -echo "Sleeping for ${sleep_time}s until next block is created..." -sleep ${sleep_time} - -# Start calling zkApp methods -echo "==================== INTERACTING WITH ZKAPPS ======================" -RECEIVER_PK=$BLOCK_PRODUCER_PK -for zkapp_path in ${ZKAPP_PATH}/*/; do - zkapp_path=${zkapp_path%/} - zkapp=$(basename $zkapp_path) - echo "Interacting with ${zkapp}..." - - cd "$zkapp_path" - ./interact.sh sandbox - cd - - echo "Done." -done +# Start sending zkapp transactions +ZKAPP_FEE_PAYER_NONCE=1 +ZKAPP_SENDER_NONCE=1 +ZKAPP_STATE=0 +send_zkapp_transactions() { + while true; do + ZKAPP_TXN_QUERY=$(zkapp_test_transaction transfer-funds-one-receiver --fee-payer-key ${ZKAPP_FEE_PAYER_KEY} --nonce ${ZKAPP_FEE_PAYER_NONCE} --sender-key ${ZKAPP_SENDER_KEY} --sender-nonce ${ZKAPP_SENDER_NONCE} --receiver-amount 1 --fee 5 --receiver ${ZKAPP_ACCOUNT_PUB_KEY} | sed 1,5d) + send_zkapp_txn "${ZKAPP_TXN_QUERY}" + let ZKAPP_FEE_PAYER_NONCE++ + let ZKAPP_SENDER_NONCE++ + + ZKAPP_TXN_QUERY=$(zkapp_test_transaction update-state --fee-payer-key ${ZKAPP_FEE_PAYER_KEY} --nonce ${ZKAPP_FEE_PAYER_NONCE} --zkapp-account-key ${ZKAPP_SENDER_KEY} --zkapp-state ${ZKAPP_STATE} --fee 5 | sed 1,5d) + send_zkapp_txn "${ZKAPP_TXN_QUERY}" + let ZKAPP_FEE_PAYER_NONCE++ + let ZKAPP_STATE++ + done +} +send_zkapp_transactions & -next_block_time=$(mina-dev client status --json | jq '.next_block_production.timing[1].time' | tr -d '"') -curr_time=$(date +%s%N | cut -b1-13) +next_block_time=$(mina-dev client status --json | jq '.next_block_production.timing[1].time' | tr -d '"') curr_time=$(date +%s%N | cut -b1-13) sleep_time=$((($next_block_time - $curr_time) / 1000)) echo "Sleeping for ${sleep_time}s until next block is created..." 
sleep ${sleep_time} @@ -327,11 +237,15 @@ rosetta-cli configuration:validate ${ROSETTA_CONFIGURATION_FILE} echo "========================= ROSETTA CLI: CHECK:SPEC ===========================" rosetta-cli check:spec --all --configuration-file ${ROSETTA_CONFIGURATION_FILE} -echo "========================= ROSETTA CLI: CHECK:CONSTRUCTION ===========================" -rosetta-cli check:construction --configuration-file ${ROSETTA_CONFIGURATION_FILE} +if [[ $MODE == "full" ]]; then + + echo "========================= ROSETTA CLI: CHECK:CONSTRUCTION ===========================" + rosetta-cli check:construction --configuration-file ${ROSETTA_CONFIGURATION_FILE} + + echo "========================= ROSETTA CLI: CHECK:DATA ===========================" + rosetta-cli check:data --configuration-file ${ROSETTA_CONFIGURATION_FILE} -echo "========================= ROSETTA CLI: CHECK:DATA ===========================" -rosetta-cli check:data --configuration-file ${ROSETTA_CONFIGURATION_FILE} + echo "========================= ROSETTA CLI: CHECK:PERF ===========================" + echo "rosetta-cli check:perf" # Will run this command when tests are fully implemented -echo "========================= ROSETTA CLI: CHECK:PERF ===========================" -echo "rosetta-cli check:perf" # Will run this command when tests are fully implemented +fi diff --git a/buildkite/scripts/test-snarkyjs-bindings-minimal.sh b/buildkite/scripts/test-snarkyjs-bindings-minimal.sh deleted file mode 100755 index 784b7847bad..00000000000 --- a/buildkite/scripts/test-snarkyjs-bindings-minimal.sh +++ /dev/null @@ -1,16 +0,0 @@ -#!/bin/bash - -export NODE_OPTIONS="--enable-source-maps --stack-trace-limit=1000" - -set -eo pipefail -source ~/.profile - -echo "Node version:" -node --version - -echo "Build SnarkyJS (w/o TS)..." -make snarkyjs_no_types - -echo "Run bare minimum SnarkyJS tests..." -cd src/lib/snarkyjs -./run-minimal-mina-tests.sh diff --git a/buildkite/scripts/test-snarkyjs-bindings.sh b/buildkite/scripts/test-snarkyjs-bindings.sh deleted file mode 100755 index 10fa4929195..00000000000 --- a/buildkite/scripts/test-snarkyjs-bindings.sh +++ /dev/null @@ -1,19 +0,0 @@ -#!/bin/bash - -export NODE_OPTIONS="--enable-source-maps --stack-trace-limit=1000" - -set -eo pipefail -source ~/.profile - -echo "Node version:" -node --version - -echo "Build SnarkyJS..." -make snarkyjs - -echo "Run SnarkyJS unit tests..." -cd src/lib/snarkyjs -npm run test:unit - -echo "Run additional SnarkyJS tests..." 
-./run-mina-integration-tests.sh diff --git a/buildkite/src/Jobs/Test/RosettaIntegrationTests.dhall b/buildkite/src/Jobs/Test/RosettaIntegrationTests.dhall index 06f2b20473c..eb9b0a297b7 100644 --- a/buildkite/src/Jobs/Test/RosettaIntegrationTests.dhall +++ b/buildkite/src/Jobs/Test/RosettaIntegrationTests.dhall @@ -20,7 +20,8 @@ let dirtyWhen = [ S.strictlyStart (S.contains "src/lib"), S.strictlyStart (S.contains "src/app/archive"), S.exactly "buildkite/src/Jobs/Test/RosettaIntegrationTests" "dhall", - S.exactly "buildkite/scripts/rosetta-integration-tests" "sh" + S.exactly "buildkite/scripts/rosetta-integration-tests" "sh", + S.exactly "buildkite/scripts/rosetta-integration-tests-full" "sh" ] let B/SoftFail = B.definitions/commandStep/properties/soft_fail/Type @@ -41,7 +42,7 @@ Pipeline.build Command.Config::{ commands = [ Cmd.run ("export MINA_DEB_CODENAME=bullseye && source ./buildkite/scripts/export-git-env-vars.sh && echo \\\${MINA_DOCKER_TAG}"), - Cmd.runInDocker Cmd.Docker::{image="gcr.io/o1labs-192920/mina-rosetta:\\\${MINA_DOCKER_TAG}", entrypoint=" --entrypoint buildkite/scripts/rosetta-integration-tests.sh"} "bash" + Cmd.runInDocker Cmd.Docker::{image="gcr.io/o1labs-192920/mina-rosetta:\\\${MINA_DOCKER_TAG}", entrypoint=" --entrypoint buildkite/scripts/rosetta-integration-tests-full.sh"} "bash" ], label = "Rosetta integration tests Bullseye" , key = "rosetta-integration-tests-bullseye" diff --git a/buildkite/src/Jobs/Test/RosettaIntegrationTestsLong.dhall b/buildkite/src/Jobs/Test/RosettaIntegrationTestsLong.dhall new file mode 100644 index 00000000000..c2af7b5be95 --- /dev/null +++ b/buildkite/src/Jobs/Test/RosettaIntegrationTestsLong.dhall @@ -0,0 +1,53 @@ +let Prelude = ../../External/Prelude.dhall +let B = ../../External/Buildkite.dhall + +let Cmd = ../../Lib/Cmds.dhall +let S = ../../Lib/SelectFiles.dhall + +let Pipeline = ../../Pipeline/Dsl.dhall +let PipelineMode = ../../Pipeline/Mode.dhall +let JobSpec = ../../Pipeline/JobSpec.dhall + +let Command = ../../Command/Base.dhall +let RunInToolchain = ../../Command/RunInToolchain.dhall +let Size = ../../Command/Size.dhall +let Libp2p = ../../Command/Libp2pHelperBuild.dhall +let DockerImage = ../../Command/DockerImage.dhall +let DebianVersions = ../../Constants/DebianVersions.dhall + +let dirtyWhen = [ + S.strictlyStart (S.contains "src/app/rosetta"), + S.strictlyStart (S.contains "src/lib"), + S.strictlyStart (S.contains "src/app/archive"), + S.exactly "buildkite/src/Jobs/Test/RosettaIntegrationTests" "dhall", + S.exactly "buildkite/scripts/rosetta-integration-tests" "sh", + S.exactly "buildkite/scripts/rosetta-integration-tests-full" "sh" +] + +let B/SoftFail = B.definitions/commandStep/properties/soft_fail/Type + +in + +Pipeline.build + Pipeline.Config:: + { spec = + JobSpec::{ + dirtyWhen = dirtyWhen, + path = "Test", + name = "RosettaIntegrationTestsLong", + mode = PipelineMode.Type.Stable + } + , steps = [ + Command.build + Command.Config::{ + commands = [ + Cmd.run ("export MINA_DEB_CODENAME=bullseye && source ./buildkite/scripts/export-git-env-vars.sh && echo \\\${MINA_DOCKER_TAG}"), + Cmd.runInDocker Cmd.Docker::{image="gcr.io/o1labs-192920/mina-rosetta:\\\${MINA_DOCKER_TAG}", entrypoint=" --entrypoint buildkite/scripts/rosetta-integration-tests-full.sh"} "bash" + ], + label = "Rosetta integration tests Bullseye Long" + , key = "rosetta-integration-tests-bullseye-long" + , target = Size.Small + , depends_on = [ { name = "MinaArtifactBullseye", key = "rosetta-bullseye-docker-image" } ] + } + ] + } \ No newline at
end of file diff --git a/buildkite/src/Jobs/Test/SnarkyJSTest.dhall b/buildkite/src/Jobs/Test/SnarkyJSTest.dhall deleted file mode 100644 index b2027f9863b..00000000000 --- a/buildkite/src/Jobs/Test/SnarkyJSTest.dhall +++ /dev/null @@ -1,50 +0,0 @@ -let S = ../../Lib/SelectFiles.dhall -let B = ../../External/Buildkite.dhall - -let Pipeline = ../../Pipeline/Dsl.dhall -let PipelineTag = ../../Pipeline/Tag.dhall -let JobSpec = ../../Pipeline/JobSpec.dhall - -let Command = ../../Command/Base.dhall -let RunInToolchain = ../../Command/RunInToolchain.dhall -let Docker = ../../Command/Docker/Type.dhall -let Size = ../../Command/Size.dhall - -let B/SoftFail = B.definitions/commandStep/properties/soft_fail/Type - -let key = "snarkyjs-bindings-test" -in - -Pipeline.build - Pipeline.Config::{ - spec = - JobSpec::{ - dirtyWhen = [ - S.strictlyStart (S.contains "buildkite/src/Jobs/Test/SnarkyJSTest"), - S.strictlyStart (S.contains "buildkite/scripts/test-snarkyjs-bindings.sh"), - S.strictlyStart (S.contains "src/lib") - ], - path = "Test", - name = "SnarkyJSTest", - tags = [ PipelineTag.Type.Long, PipelineTag.Type.Test ] - }, - steps = [ - Command.build - Command.Config::{ - commands = RunInToolchain.runInToolchainBuster ["DUNE_INSTRUMENT_WITH=bisect_ppx", "COVERALLS_TOKEN"] "buildkite/scripts/test-snarkyjs-bindings.sh && buildkite/scripts/upload-partial-coverage-data.sh ${key} dev" - , label = "SnarkyJS unit tests" - , key = key - , target = Size.XLarge - , docker = None Docker.Type - , soft_fail = Some (B/SoftFail.Boolean True) - }, - Command.build - Command.Config::{ - commands = RunInToolchain.runInToolchainBuster ([] : List Text) "buildkite/scripts/test-snarkyjs-bindings-minimal.sh" - , label = "SnarkyJS minimal tests" - , key = "snarkyjs-minimal-test" - , target = Size.XLarge - , docker = None Docker.Type - } - ] - } diff --git a/dockerfiles/stages/1-build-deps b/dockerfiles/stages/1-build-deps index 5202a830a59..65e0f9f748f 100644 --- a/dockerfiles/stages/1-build-deps +++ b/dockerfiles/stages/1-build-deps @@ -25,7 +25,7 @@ ARG GO_CAPNP_VERSION=v3.0.0-alpha.5 # - src/lib/crypto/proof-systems/rust-toolchain.toml ARG RUST_VERSION=1.72 # Nightly Rust Version used for WebAssembly builds -# - src/lib/snarkyjs/src/bindings/kimchi/wasm/rust-toolchain.toml +# - src/lib/crypto/kimchi_bindings/wasm/rust-toolchain.toml ARG RUST_NIGHTLY=2023-09-01 # wasm-pack version ARG WASM_PACK_VERSION=v0.12.1 @@ -39,35 +39,35 @@ ENV DEBIAN_FRONTEND=noninteractive RUN apt-get update --quiet \ && apt-get upgrade --quiet --yes \ && apt-get install --no-install-recommends --quiet --yes \ - libboost-dev \ - libboost-program-options-dev \ - libbz2-dev \ - libcap-dev \ - libffi-dev \ - libgflags-dev \ - libgmp-dev \ - libgmp3-dev \ - libjemalloc-dev \ - liblmdb-dev \ - liblmdb0 \ - libpq-dev \ - libprocps-dev \ - libsodium-dev \ - libssl-dev \ - build-essential \ - ca-certificates \ - capnproto \ - cmake \ - curl \ - file \ - git \ - '(^lld-10$|^lld-11$)' \ - m4 \ - pkg-config \ - rsync \ - sudo \ - unzip \ - zlib1g-dev \ + libboost-dev \ + libboost-program-options-dev \ + libbz2-dev \ + libcap-dev \ + libffi-dev \ + libgflags-dev \ + libgmp-dev \ + libgmp3-dev \ + libjemalloc-dev \ + liblmdb-dev \ + liblmdb0 \ + libpq-dev \ + libprocps-dev \ + libsodium-dev \ + libssl-dev \ + build-essential \ + ca-certificates \ + capnproto \ + cmake \ + curl \ + file \ + git \ + '(^lld-10$|^lld-11$)' \ + m4 \ + pkg-config \ + rsync \ + sudo \ + unzip \ + zlib1g-dev \ && rm -rf /var/lib/apt/lists/* # Symlink image-specific lld version to a 
single lld executable @@ -123,8 +123,8 @@ RUN git clone https://github.com/facebook/rocksdb \ --depth 1 --shallow-submodules \ -b "${ROCKSDB_VERSION}" /rocksdb \ && test $(g++ -dumpversion | cut -c 1 -) -lt 9 \ - && CXXFLAGS='' make -C /rocksdb static_lib PORTABLE=1 -j$(nproc) 2>/dev/null \ - || make -C /rocksdb static_lib PORTABLE=1 -j$(nproc) 2>/dev/null \ + && CXXFLAGS='' make -C /rocksdb static_lib PORTABLE=1 -j$(nproc) 2>/dev/null \ + || make -C /rocksdb static_lib PORTABLE=1 -j$(nproc) 2>/dev/null \ && cp /rocksdb/librocksdb.a /usr/local/lib/librocksdb_coda.a \ && rm -rf /rocksdb \ && strip -S /usr/local/lib/librocksdb_coda.a diff --git a/dockerfiles/stages/3-builder b/dockerfiles/stages/3-builder index 38b36f5d066..1649a5d9e12 100644 --- a/dockerfiles/stages/3-builder +++ b/dockerfiles/stages/3-builder @@ -36,23 +36,25 @@ RUN mkdir ${HOME}/app # --- Build libp2p_helper RUN make libp2p_helper \ - && mv src/app/libp2p_helper/result/bin/libp2p_helper ${HOME}/app/libp2p_helper + && mv src/app/libp2p_helper/result/bin/libp2p_helper ${HOME}/app/libp2p_helper # --- Make rosetta-crucial components and the generate_keypair tool +# ---- Rosetta tests also need zkapp_test_transaction RUN eval $(opam config env) \ && dune build --profile=${DUNE_PROFILE} \ - src/app/cli/src/mina_testnet_signatures.exe \ - src/app/cli/src/mina_mainnet_signatures.exe \ - src/app/archive/archive.exe \ - src/app/archive_blocks/archive_blocks.exe \ - src/app/extract_blocks/extract_blocks.exe \ - src/app/missing_blocks_auditor/missing_blocks_auditor.exe \ - src/app/replayer/replayer.exe \ - src/app/rosetta/rosetta_testnet_signatures.exe \ - src/app/rosetta/rosetta_mainnet_signatures.exe \ - src/app/generate_keypair/generate_keypair.exe \ - src/app/validate_keypair/validate_keypair.exe \ - src/app/rosetta/ocaml-signer/signer.exe \ + src/app/cli/src/mina_testnet_signatures.exe \ + src/app/cli/src/mina_mainnet_signatures.exe \ + src/app/archive/archive.exe \ + src/app/archive_blocks/archive_blocks.exe \ + src/app/extract_blocks/extract_blocks.exe \ + src/app/missing_blocks_auditor/missing_blocks_auditor.exe \ + src/app/replayer/replayer.exe \ + src/app/rosetta/rosetta_testnet_signatures.exe \ + src/app/rosetta/rosetta_mainnet_signatures.exe \ + src/app/generate_keypair/generate_keypair.exe \ + src/app/validate_keypair/validate_keypair.exe \ + src/app/rosetta/ocaml-signer/signer.exe \ + src/app/zkapp_test_transaction/zkapp_test_transaction.exe \ && cp _build/default/src/app/archive_blocks/archive_blocks.exe $HOME/app/mina-archive-blocks \ && cp _build/default/src/app/extract_blocks/extract_blocks.exe $HOME/app/mina-extract-blocks \ && cp _build/default/src/app/missing_blocks_auditor/missing_blocks_auditor.exe $HOME/app/mina-missing-blocks-auditor \ @@ -65,6 +67,7 @@ RUN eval $(opam config env) \ && mv _build/default/src/app/generate_keypair/generate_keypair.exe $HOME/app/mina-generate-keypair \ && mv _build/default/src/app/validate_keypair/validate_keypair.exe $HOME/app/mina-validate-keypair \ && mv _build/default/src/app/rosetta/ocaml-signer/signer.exe $HOME/app/mina-ocaml-signer \ + && mv _build/default/src/app/zkapp_test_transaction/zkapp_test_transaction.exe $HOME/app/zkapp_test_transaction \ && rm -rf _build # --- Clear go module caches to make the container smaller diff --git a/nix/javascript.nix b/nix/javascript.nix index 12791560f42..b6e01dd798c 100644 --- a/nix/javascript.nix +++ b/nix/javascript.nix @@ -23,25 +23,6 @@ in { mv src/*.js $out/share/client_sdk ''; }; - snarky_js = 
nix-npm-buildPackage.buildNpmPackage { - src = ../src/lib/snarkyjs; - preBuild = '' - BINDINGS_PATH=./src/bindings/compiled/node_bindings - mkdir -p "$BINDINGS_PATH" - cp ${plonk_wasm}/nodejs/plonk_wasm* "$BINDINGS_PATH" - cp ${ocamlPackages_mina.mina_client_sdk}/share/snarkyjs_bindings/snarky_js_node*.js "$BINDINGS_PATH" - chmod -R 777 "$BINDINGS_PATH" - - # TODO: deduplicate from ./scripts/build-snarkyjs-node.sh - # better error messages - # TODO: find a less hacky way to make adjustments to jsoo compiler output - # `s` is the jsoo representation of the error message string, and `s.c` is the actual JS string - sed -i 's/function failwith(s){throw \[0,Failure,s\]/function failwith(s){throw globalThis.Error(s.c)/' "$BINDINGS_PATH"/snarky_js_node.bc.js - sed -i 's/function invalid_arg(s){throw \[0,Invalid_argument,s\]/function invalid_arg(s){throw globalThis.Error(s.c)/' "$BINDINGS_PATH"/snarky_js_node.bc.js - sed -i 's/return \[0,Exn,t\]/return globalThis.Error(t.c)/' "$BINDINGS_PATH"/snarky_js_node.bc.js - ''; - npmBuild = "npm run build"; - }; # Jobs/Release/LeaderboardArtifact leaderboard = nix-npm-buildPackage.buildYarnPackage { diff --git a/nix/ocaml.nix b/nix/ocaml.nix index 95d5699d4f0..bf81a1e79f1 100644 --- a/nix/ocaml.nix +++ b/nix/ocaml.nix @@ -297,46 +297,6 @@ let dune exec --profile=dev src/app/reformat/reformat.exe -- -path . -check ''; - # Javascript Client SDK - mina_client_sdk = self.mina-dev.overrideAttrs (_: { - pname = "mina_client_sdk"; - version = "dev"; - src = filtered-src; - - outputs = [ "out" ]; - - checkInputs = [ pkgs.nodejs-16_x ]; - - MINA_VERSION_IMPLEMENTATION = "mina_version.dummy"; - - buildPhase = '' - dune build --display=short \ - src/lib/crypto/kimchi_bindings/js/node_js \ - src/app/client_sdk/client_sdk.bc.js \ - src/lib/snarky_js_bindings/snarky_js_node.bc.js \ - src/lib/snarky_js_bindings/snarky_js_web.bc.js - ''; - - doCheck = true; - checkPhase = '' - node src/app/client_sdk/tests/run_unit_tests.js - - dune build src/app/client_sdk/tests/test_signatures.exe - ./_build/default/src/app/client_sdk/tests/test_signatures.exe > nat.consensus.json - node src/app/client_sdk/tests/test_signatures.js > js.nonconsensus.json - if ! 
diff -q nat.consensus.json js.nonconsensus.json; then - echo "Consensus and JS code generate different signatures"; - exit 1 - fi - ''; - - installPhase = '' - mkdir -p $out/share/client_sdk $out/share/snarkyjs_bindings - mv _build/default/src/app/client_sdk/client_sdk.bc.js $out/share/client_sdk - mv _build/default/src/lib/snarky_js_bindings/snarky_js_*.js $out/share/snarkyjs_bindings - ''; - }); - # Integration test executive test_executive-dev = self.mina-dev.overrideAttrs (oa: { pname = "mina-test_executive"; diff --git a/nix/rust.nix b/nix/rust.nix index 7ac8a553694..13902e91a31 100644 --- a/nix/rust.nix +++ b/nix/rust.nix @@ -100,7 +100,7 @@ in }; kimchi-rust = rustChannelFromToolchainFileOf - ../src/lib/snarkyjs/src/bindings/kimchi/wasm/rust-toolchain.toml; + ../src/lib/crypto/kimchi_bindings/wasm/rust-toolchain.toml; # TODO: raise issue on nixpkgs and remove workaround when fix is applied kimchi-rust-wasm = (final.kimchi-rust.rust.override { @@ -124,7 +124,7 @@ in plonk_wasm = let - lock = ../src/lib/snarkyjs/src/bindings/kimchi/wasm/Cargo.lock; + lock = ../src/lib/crypto/kimchi_bindings/wasm/Cargo.lock; deps = builtins.listToAttrs (map (pkg: { @@ -165,10 +165,10 @@ in pname = "plonk_wasm"; version = "0.1.0"; src = final.lib.sourceByRegex ../src [ - "^lib(/snarkyjs(/src(/bindings(/kimchi(/wasm(/.*)?)?)?)?)?)?$" + "^lib(/crypto(/kimchi_bindings(/wasm(/.*)?)?)?)?$" "^lib(/crypto(/proof-systems(/.*)?)?)?$" ]; - sourceRoot = "source/lib/snarkyjs/src/bindings/kimchi/wasm"; + sourceRoot = "source/lib/crypto/kimchi_bindings/wasm"; nativeBuildInputs = [ final.wasm-pack wasm-bindgen-cli ]; buildInputs = with final; lib.optional stdenv.isDarwin libiconv; cargoLock.lockFile = lock; diff --git a/scripts/build-snarkyjs-node.sh b/scripts/build-snarkyjs-node.sh deleted file mode 100755 index 29cdf807907..00000000000 --- a/scripts/build-snarkyjs-node.sh +++ /dev/null @@ -1 +0,0 @@ -./src/lib/snarkyjs/src/bindings/scripts/build-snarkyjs-node.sh diff --git a/scripts/update-snarkyjs-bindings.sh b/scripts/update-snarkyjs-bindings.sh deleted file mode 100755 index 71e5b0f1587..00000000000 --- a/scripts/update-snarkyjs-bindings.sh +++ /dev/null @@ -1 +0,0 @@ -./src/lib/snarkyjs/src/bindings/scripts/update-snarkyjs-bindings.sh diff --git a/src/app/rosetta/lib/block.ml b/src/app/rosetta/lib/block.ml index 6d9d22329c9..1db0a0f7b4d 100644 --- a/src/app/rosetta/lib/block.ml +++ b/src/app/rosetta/lib/block.ml @@ -998,9 +998,12 @@ module Sql = struct zaub.call_depth, zaub.zkapp_network_precondition_id, zaub.zkapp_account_precondition_id, + zaub.zkapp_valid_while_precondition_id, zaub.use_full_commitment, + zaub.implicit_account_creation_fee, zaub.may_use_token, zaub.authorization_kind, + zaub.verification_key_hash_id, pk.value as account, bzc.status FROM zkapp_commands zc diff --git a/src/app/rosetta/postgresql.conf b/src/app/rosetta/postgresql.conf index 30736eb3be8..965634234d6 100644 --- a/src/app/rosetta/postgresql.conf +++ b/src/app/rosetta/postgresql.conf @@ -3,3 +3,6 @@ max_locks_per_transaction=100 max_pred_locks_per_relation=100 max_pred_locks_per_transaction=5000 max_connections=500 +log_statement='all' +logging_collector='on' +log_directory = 'pg_log' \ No newline at end of file diff --git a/src/dune-project b/src/dune-project index 7ecace1391f..36c27823b6e 100644 --- a/src/dune-project +++ b/src/dune-project @@ -8,6 +8,7 @@ (package (name best_tip_merger)) (package (name best_tip_prover)) (package (name bignum_bigint)) +(package (name bindings_js)) (package (name blake2)) (package (name blockchain_snark))
(package (name block_producer)) diff --git a/src/lib/crypto/kimchi_bindings/js/README.md b/src/lib/crypto/kimchi_bindings/js/README.md new file mode 100644 index 00000000000..8ead124f985 --- /dev/null +++ b/src/lib/crypto/kimchi_bindings/js/README.md @@ -0,0 +1,28 @@ +This library provides a wrapper around the WebAssembly prover code, which +allows `js_of_ocaml` to compile the mina project against the WebAssembly +backend. + +The different versions of the backend are generated in subdirectories; e.g. the +NodeJS backend is generated in `node_js/` and the Web backend is generated +in `web/`. To use a backend, run `dune build backend/plonk_wasm.js` and copy +`backend/plonk_wasm*` to the project directory. + +Note that the backend code is not automatically compiled while linking against +the backend library. You should always manually issue a build command for the +`plonk_wasm.js` for the desired backend to ensure that it has been generated. +For example, to run the nodejs tests in the `test/nodejs` directory you will +need to run + +``` +dune build src/lib/crypto/kimchi_bindings/js/test/nodejs/nodejs_test.bc.js +src/lib/crypto/kimchi_bindings/js/test/nodejs/copy_over.sh +``` + +Similarly, to run the web tests in `test/web`, you can run + +``` +dune build src/lib/crypto/kimchi_bindings/js/test/web/web_test.bc.js +src/lib/crypto/kimchi_bindings/js/test/web/copy_over.sh +``` + +and then visit `http://localhost:8000` from a browser. diff --git a/src/lib/crypto/kimchi_bindings/js/bindings-bigint256.js b/src/lib/crypto/kimchi_bindings/js/bindings-bigint256.js new file mode 100644 index 00000000000..50fa2a00048 --- /dev/null +++ b/src/lib/crypto/kimchi_bindings/js/bindings-bigint256.js @@ -0,0 +1,50 @@ +/* global tsBindings +*/ + +// Provides: caml_bigint_256_of_numeral +// Requires: tsBindings +var caml_bigint_256_of_numeral = tsBindings.caml_bigint_256_of_numeral; + +// Provides: caml_bigint_256_of_decimal_string +// Requires: tsBindings +var caml_bigint_256_of_decimal_string = tsBindings.caml_bigint_256_of_decimal_string; + +// Provides: caml_bigint_256_num_limbs +// Requires: tsBindings +var caml_bigint_256_num_limbs = tsBindings.caml_bigint_256_num_limbs; + +// Provides: caml_bigint_256_bytes_per_limb +// Requires: tsBindings +var caml_bigint_256_bytes_per_limb = tsBindings.caml_bigint_256_bytes_per_limb; + +// Provides: caml_bigint_256_div +// Requires: tsBindings +var caml_bigint_256_div = tsBindings.caml_bigint_256_div; + +// Provides: caml_bigint_256_compare +// Requires: tsBindings +var caml_bigint_256_compare = tsBindings.caml_bigint_256_compare; + +// Provides: caml_bigint_256_print +// Requires: tsBindings +var caml_bigint_256_print = tsBindings.caml_bigint_256_print; + +// Provides: caml_bigint_256_to_string +// Requires: tsBindings +var caml_bigint_256_to_string = tsBindings.caml_bigint_256_to_string; + +// Provides: caml_bigint_256_test_bit +// Requires: tsBindings +var caml_bigint_256_test_bit = tsBindings.caml_bigint_256_test_bit; + +// Provides: caml_bigint_256_to_bytes +// Requires: tsBindings +var caml_bigint_256_to_bytes = tsBindings.caml_bigint_256_to_bytes; + +// Provides: caml_bigint_256_of_bytes +// Requires: tsBindings +var caml_bigint_256_of_bytes = tsBindings.caml_bigint_256_of_bytes; + +// Provides: caml_bigint_256_deep_copy +// Requires: tsBindings +var caml_bigint_256_deep_copy = tsBindings.caml_bigint_256_deep_copy; diff --git a/src/lib/crypto/kimchi_bindings/js/bindings-curve.js b/src/lib/crypto/kimchi_bindings/js/bindings-curve.js new file mode 100644 index
00000000000..18d67a0b28d --- /dev/null +++ b/src/lib/crypto/kimchi_bindings/js/bindings-curve.js @@ -0,0 +1,118 @@ +/* global tsBindings +*/ + +// pallas + +// Provides: caml_pallas_one +// Requires: tsBindings +var caml_pallas_one = tsBindings.caml_pallas_one; + +// Provides: caml_pallas_add +// Requires: tsBindings +var caml_pallas_add = tsBindings.caml_pallas_add; + +// Provides: caml_pallas_sub +// Requires: tsBindings +var caml_pallas_sub = tsBindings.caml_pallas_sub; + +// Provides: caml_pallas_negate +// Requires: tsBindings +var caml_pallas_negate = tsBindings.caml_pallas_negate; + +// Provides: caml_pallas_double +// Requires: tsBindings +var caml_pallas_double = tsBindings.caml_pallas_double; + +// Provides: caml_pallas_scale +// Requires: tsBindings +var caml_pallas_scale = tsBindings.caml_pallas_scale; + +// Provides: caml_pallas_random +// Requires: tsBindings +var caml_pallas_random = tsBindings.caml_pallas_random; + +// Provides: caml_pallas_rng +// Requires: tsBindings +var caml_pallas_rng = tsBindings.caml_pallas_rng; + +// Provides: caml_pallas_endo_base +// Requires: tsBindings +var caml_pallas_endo_base = tsBindings.caml_pallas_endo_base; + +// Provides: caml_pallas_endo_scalar +// Requires: tsBindings +var caml_pallas_endo_scalar = tsBindings.caml_pallas_endo_scalar; + +// Provides: caml_pallas_to_affine +// Requires: tsBindings +var caml_pallas_to_affine = tsBindings.caml_pallas_to_affine; + +// Provides: caml_pallas_of_affine +// Requires: tsBindings +var caml_pallas_of_affine = tsBindings.caml_pallas_of_affine; + +// Provides: caml_pallas_of_affine_coordinates +// Requires: tsBindings +var caml_pallas_of_affine_coordinates = tsBindings.caml_pallas_of_affine_coordinates; + +// Provides: caml_pallas_affine_deep_copy +// Requires: tsBindings +var caml_pallas_affine_deep_copy = tsBindings.caml_pallas_affine_deep_copy; + +// vesta + +// Provides: caml_vesta_one +// Requires: tsBindings +var caml_vesta_one = tsBindings.caml_vesta_one; + +// Provides: caml_vesta_add +// Requires: tsBindings +var caml_vesta_add = tsBindings.caml_vesta_add; + +// Provides: caml_vesta_sub +// Requires: tsBindings +var caml_vesta_sub = tsBindings.caml_vesta_sub; + +// Provides: caml_vesta_negate +// Requires: tsBindings +var caml_vesta_negate = tsBindings.caml_vesta_negate; + +// Provides: caml_vesta_double +// Requires: tsBindings +var caml_vesta_double = tsBindings.caml_vesta_double; + +// Provides: caml_vesta_scale +// Requires: tsBindings +var caml_vesta_scale = tsBindings.caml_vesta_scale; + +// Provides: caml_vesta_random +// Requires: tsBindings +var caml_vesta_random = tsBindings.caml_vesta_random; + +// Provides: caml_vesta_rng +// Requires: tsBindings +var caml_vesta_rng = tsBindings.caml_vesta_rng; + +// Provides: caml_vesta_endo_base +// Requires: tsBindings +var caml_vesta_endo_base = tsBindings.caml_vesta_endo_base; + +// Provides: caml_vesta_endo_scalar +// Requires: tsBindings +var caml_vesta_endo_scalar = tsBindings.caml_vesta_endo_scalar; + +// Provides: caml_vesta_to_affine +// Requires: tsBindings +var caml_vesta_to_affine = tsBindings.caml_vesta_to_affine; + +// Provides: caml_vesta_of_affine +// Requires: tsBindings +var caml_vesta_of_affine = tsBindings.caml_vesta_of_affine; + +// Provides: caml_vesta_of_affine_coordinates +// Requires: tsBindings +var caml_vesta_of_affine_coordinates = tsBindings.caml_vesta_of_affine_coordinates; + +// Provides: caml_vesta_affine_deep_copy +// Requires: tsBindings +var caml_vesta_affine_deep_copy = 
tsBindings.caml_vesta_affine_deep_copy; diff --git a/src/lib/crypto/kimchi_bindings/js/bindings-field.js b/src/lib/crypto/kimchi_bindings/js/bindings-field.js new file mode 100644 index 00000000000..4b93a52cecd --- /dev/null +++ b/src/lib/crypto/kimchi_bindings/js/bindings-field.js @@ -0,0 +1,253 @@ +/* global tsBindings +*/ + +// Provides: caml_pasta_fp_copy +// Requires: tsBindings +var caml_pasta_fp_copy = tsBindings.caml_pasta_fp_copy; + +// Provides: caml_pasta_fp_size_in_bits +// Requires: tsBindings +var caml_pasta_fp_size_in_bits = tsBindings.caml_pasta_fp_size_in_bits; + +// Provides: caml_pasta_fp_size +// Requires: tsBindings +var caml_pasta_fp_size = tsBindings.caml_pasta_fp_size; + +// Provides: caml_pasta_fp_add +// Requires: tsBindings +var caml_pasta_fp_add = tsBindings.caml_pasta_fp_add; + +// Provides: caml_pasta_fp_sub +// Requires: tsBindings +var caml_pasta_fp_sub = tsBindings.caml_pasta_fp_sub; + +// Provides: caml_pasta_fp_negate +// Requires: tsBindings +var caml_pasta_fp_negate = tsBindings.caml_pasta_fp_negate; + +// Provides: caml_pasta_fp_mul +// Requires: tsBindings +var caml_pasta_fp_mul = tsBindings.caml_pasta_fp_mul; + +// Provides: caml_pasta_fp_div +// Requires: tsBindings +var caml_pasta_fp_div = tsBindings.caml_pasta_fp_div; + +// Provides: caml_pasta_fp_inv +// Requires: tsBindings +var caml_pasta_fp_inv = tsBindings.caml_pasta_fp_inv; + +// Provides: caml_pasta_fp_square +// Requires: tsBindings +var caml_pasta_fp_square = tsBindings.caml_pasta_fp_square + +// Provides: caml_pasta_fp_is_square +// Requires: tsBindings +var caml_pasta_fp_is_square = tsBindings.caml_pasta_fp_is_square; + +// Provides: caml_pasta_fp_sqrt +// Requires: tsBindings +var caml_pasta_fp_sqrt = tsBindings.caml_pasta_fp_sqrt; + +// Provides: caml_pasta_fp_of_int +// Requires: tsBindings +var caml_pasta_fp_of_int = tsBindings.caml_pasta_fp_of_int + +// Provides: caml_pasta_fp_to_string +// Requires: tsBindings +var caml_pasta_fp_to_string = tsBindings.caml_pasta_fp_to_string; + +// Provides: caml_pasta_fp_of_string +// Requires: tsBindings +var caml_pasta_fp_of_string = tsBindings.caml_pasta_fp_of_string; + +// Provides: caml_pasta_fp_print +// Requires: tsBindings +var caml_pasta_fp_print = tsBindings.caml_pasta_fp_print; + +// Provides: caml_pasta_fp_mut_add +// Requires: tsBindings +var caml_pasta_fp_mut_add = tsBindings.caml_pasta_fp_mut_add; + +// Provides: caml_pasta_fp_mut_sub +// Requires: tsBindings +var caml_pasta_fp_mut_sub = tsBindings.caml_pasta_fp_mut_sub; + +// Provides: caml_pasta_fp_mut_mul +// Requires: tsBindings +var caml_pasta_fp_mut_mul = tsBindings.caml_pasta_fp_mut_mul; + +// Provides: caml_pasta_fp_mut_square +// Requires: tsBindings +var caml_pasta_fp_mut_square = tsBindings.caml_pasta_fp_mut_square; + +// Provides: caml_pasta_fp_compare +// Requires: tsBindings +var caml_pasta_fp_compare = tsBindings.caml_pasta_fp_compare; + +// Provides: caml_pasta_fp_equal +// Requires: tsBindings +var caml_pasta_fp_equal = tsBindings.caml_pasta_fp_equal; + +// Provides: caml_pasta_fp_random +// Requires: tsBindings +var caml_pasta_fp_random = tsBindings.caml_pasta_fp_random; + +// Provides: caml_pasta_fp_rng +// Requires: tsBindings +var caml_pasta_fp_rng = tsBindings.caml_pasta_fp_rng; + +// Provides: caml_pasta_fp_to_bigint +// Requires: tsBindings +var caml_pasta_fp_to_bigint = tsBindings.caml_pasta_fp_to_bigint; + +// Provides: caml_pasta_fp_of_bigint +// Requires: tsBindings +var caml_pasta_fp_of_bigint = tsBindings.caml_pasta_fp_of_bigint; + +// Provides: 
caml_pasta_fp_two_adic_root_of_unity +// Requires: tsBindings +var caml_pasta_fp_two_adic_root_of_unity = tsBindings.caml_pasta_fp_two_adic_root_of_unity; + +// Provides: caml_pasta_fp_domain_generator +// Requires: tsBindings +var caml_pasta_fp_domain_generator = tsBindings.caml_pasta_fp_domain_generator; + +// Provides: caml_pasta_fp_to_bytes +// Requires: tsBindings +var caml_pasta_fp_to_bytes = tsBindings.caml_pasta_fp_to_bytes; + +// Provides: caml_pasta_fp_of_bytes +// Requires: tsBindings +var caml_pasta_fp_of_bytes = tsBindings.caml_pasta_fp_of_bytes; + +// Provides: caml_pasta_fp_deep_copy +// Requires: tsBindings +var caml_pasta_fp_deep_copy = tsBindings.caml_pasta_fp_deep_copy; + + + + +// Provides: caml_pasta_fq_copy +// Requires: tsBindings +var caml_pasta_fq_copy = tsBindings.caml_pasta_fq_copy; + +// Provides: caml_pasta_fq_size_in_bits +// Requires: tsBindings +var caml_pasta_fq_size_in_bits = tsBindings.caml_pasta_fq_size_in_bits; + +// Provides: caml_pasta_fq_size +// Requires: tsBindings +var caml_pasta_fq_size = tsBindings.caml_pasta_fq_size; + +// Provides: caml_pasta_fq_add +// Requires: tsBindings +var caml_pasta_fq_add = tsBindings.caml_pasta_fq_add; + +// Provides: caml_pasta_fq_sub +// Requires: tsBindings +var caml_pasta_fq_sub = tsBindings.caml_pasta_fq_sub; + +// Provides: caml_pasta_fq_negate +// Requires: tsBindings +var caml_pasta_fq_negate = tsBindings.caml_pasta_fq_negate; + +// Provides: caml_pasta_fq_mul +// Requires: tsBindings +var caml_pasta_fq_mul = tsBindings.caml_pasta_fq_mul; + +// Provides: caml_pasta_fq_div +// Requires: tsBindings +var caml_pasta_fq_div = tsBindings.caml_pasta_fq_div; + +// Provides: caml_pasta_fq_inv +// Requires: tsBindings +var caml_pasta_fq_inv = tsBindings.caml_pasta_fq_inv; + +// Provides: caml_pasta_fq_square +// Requires: tsBindings +var caml_pasta_fq_square = tsBindings.caml_pasta_fq_square + +// Provides: caml_pasta_fq_is_square +// Requires: tsBindings +var caml_pasta_fq_is_square = tsBindings.caml_pasta_fq_is_square; + +// Provides: caml_pasta_fq_sqrt +// Requires: tsBindings +var caml_pasta_fq_sqrt = tsBindings.caml_pasta_fq_sqrt; + +// Provides: caml_pasta_fq_of_int +// Requires: tsBindings +var caml_pasta_fq_of_int = tsBindings.caml_pasta_fq_of_int; + +// Provides: caml_pasta_fq_to_string +// Requires: tsBindings +var caml_pasta_fq_to_string = tsBindings.caml_pasta_fq_to_string; + +// Provides: caml_pasta_fq_of_string +// Requires: tsBindings +var caml_pasta_fq_of_string = tsBindings.caml_pasta_fq_of_string; + +// Provides: caml_pasta_fq_print +// Requires: tsBindings +var caml_pasta_fq_print = tsBindings.caml_pasta_fq_print; + +// Provides: caml_pasta_fq_mut_add +// Requires: tsBindings +var caml_pasta_fq_mut_add = tsBindings.caml_pasta_fq_mut_add; + +// Provides: caml_pasta_fq_mut_sub +// Requires: tsBindings +var caml_pasta_fq_mut_sub = tsBindings.caml_pasta_fq_mut_sub; + +// Provides: caml_pasta_fq_mut_mul +// Requires: tsBindings +var caml_pasta_fq_mut_mul = tsBindings.caml_pasta_fq_mut_mul; + +// Provides: caml_pasta_fq_mut_square +// Requires: tsBindings +var caml_pasta_fq_mut_square = tsBindings.caml_pasta_fq_mut_square; + +// Provides: caml_pasta_fq_compare +// Requires: tsBindings +var caml_pasta_fq_compare = tsBindings.caml_pasta_fq_compare; + +// Provides: caml_pasta_fq_equal +// Requires: tsBindings +var caml_pasta_fq_equal = tsBindings.caml_pasta_fq_equal; + +// Provides: caml_pasta_fq_random +// Requires: tsBindings +var caml_pasta_fq_random = tsBindings.caml_pasta_fq_random; + +// Provides: 
caml_pasta_fq_rng +// Requires: tsBindings +var caml_pasta_fq_rng = tsBindings.caml_pasta_fq_rng; + +// Provides: caml_pasta_fq_to_bigint +// Requires: tsBindings +var caml_pasta_fq_to_bigint = tsBindings.caml_pasta_fq_to_bigint; + +// Provides: caml_pasta_fq_of_bigint +// Requires: tsBindings +var caml_pasta_fq_of_bigint = tsBindings.caml_pasta_fq_of_bigint; + +// Provides: caml_pasta_fq_two_adic_root_of_unity +// Requires: tsBindings +var caml_pasta_fq_two_adic_root_of_unity = tsBindings.caml_pasta_fq_two_adic_root_of_unity; + +// Provides: caml_pasta_fq_domain_generator +// Requires: tsBindings +var caml_pasta_fq_domain_generator = tsBindings.caml_pasta_fq_domain_generator; + +// Provides: caml_pasta_fq_to_bytes +// Requires: tsBindings +var caml_pasta_fq_to_bytes = tsBindings.caml_pasta_fq_to_bytes; + +// Provides: caml_pasta_fq_of_bytes +// Requires: tsBindings +var caml_pasta_fq_of_bytes = tsBindings.caml_pasta_fq_of_bytes; + +// Provides: caml_pasta_fq_deep_copy +// Requires: tsBindings +var caml_pasta_fq_deep_copy = tsBindings.caml_pasta_fq_deep_copy; diff --git a/src/lib/crypto/kimchi_bindings/js/bindings-vector.js b/src/lib/crypto/kimchi_bindings/js/bindings-vector.js new file mode 100644 index 00000000000..9ff2441bc33 --- /dev/null +++ b/src/lib/crypto/kimchi_bindings/js/bindings-vector.js @@ -0,0 +1,45 @@ +/* global tsBindings */ + +// fp + +// Provides: caml_fp_vector_create +// Requires: tsBindings +var caml_fp_vector_create = tsBindings.caml_fp_vector_create; + +// Provides: caml_fp_vector_length +// Requires: tsBindings +var caml_fp_vector_length = tsBindings.caml_fp_vector_length; + +// Provides: caml_fp_vector_emplace_back +// Requires: tsBindings +var caml_fp_vector_emplace_back = tsBindings.caml_fp_vector_emplace_back; + +// Provides: caml_fp_vector_get +// Requires: tsBindings +var caml_fp_vector_get = tsBindings.caml_fp_vector_get; + +// Provides: caml_fp_vector_set +// Requires: tsBindings +var caml_fp_vector_set = tsBindings.caml_fp_vector_set; + +// fq + +// Provides: caml_fq_vector_create +// Requires: tsBindings +var caml_fq_vector_create = tsBindings.caml_fq_vector_create; + +// Provides: caml_fq_vector_length +// Requires: tsBindings +var caml_fq_vector_length = tsBindings.caml_fq_vector_length; + +// Provides: caml_fq_vector_emplace_back +// Requires: tsBindings +var caml_fq_vector_emplace_back = tsBindings.caml_fq_vector_emplace_back; + +// Provides: caml_fq_vector_get +// Requires: tsBindings +var caml_fq_vector_get = tsBindings.caml_fq_vector_get; + +// Provides: caml_fq_vector_set +// Requires: tsBindings +var caml_fq_vector_set = tsBindings.caml_fq_vector_set; diff --git a/src/lib/crypto/kimchi_bindings/js/bindings.js b/src/lib/crypto/kimchi_bindings/js/bindings.js new file mode 100644 index 00000000000..32907233043 --- /dev/null +++ b/src/lib/crypto/kimchi_bindings/js/bindings.js @@ -0,0 +1,1068 @@ +/* global plonk_wasm, caml_jsstring_of_string, caml_string_of_jsstring, + caml_create_bytes, caml_bytes_unsafe_set, caml_bytes_unsafe_get, caml_ml_bytes_length, + UInt64, caml_int64_of_int32 +*/ + +// Provides: tsBindings +var tsBindings = globalThis.__snarkyTsBindings; + +// Provides: tsRustConversion +// Requires: tsBindings, plonk_wasm +var tsRustConversion = tsBindings.rustConversion(plonk_wasm); + +// Provides: tsSrs +// Requires: tsBindings, plonk_wasm +var tsSrs = tsBindings.srs(plonk_wasm); + +// Provides: getTsBindings +// Requires: tsBindings +function getTsBindings() { + return tsBindings; +} + +// Provides: caml_bytes_of_uint8array +// Requires: 
caml_create_bytes, caml_bytes_unsafe_set +var caml_bytes_of_uint8array = function (uint8array) { + var length = uint8array.length; + var ocaml_bytes = caml_create_bytes(length); + for (var i = 0; i < length; i++) { + // No need to convert here: OCaml Char.t is just an int under the hood. + caml_bytes_unsafe_set(ocaml_bytes, i, uint8array[i]); + } + return ocaml_bytes; +}; + +// Provides: caml_bytes_to_uint8array +// Requires: caml_ml_bytes_length, caml_bytes_unsafe_get +var caml_bytes_to_uint8array = function (ocaml_bytes) { + var length = caml_ml_bytes_length(ocaml_bytes); + var bytes = new globalThis.Uint8Array(length); + for (var i = 0; i < length; i++) { + // No need to convert here: OCaml Char.t is just an int under the hood. + bytes[i] = caml_bytes_unsafe_get(ocaml_bytes, i); + } + return bytes; +}; + +// Provides: caml_option_of_maybe_undefined +var caml_option_of_maybe_undefined = function (x) { + if (x === undefined) { + return 0; // None + } else { + return [0, x]; // Some(x) + } +}; + +// Provides: caml_option_to_maybe_undefined +var caml_option_to_maybe_undefined = function (x) { + if (x === 0) { + // None + return undefined; + } else { + return x[1]; + } +}; + +// Provides: free_finalization_registry +var free_finalization_registry = new globalThis.FinalizationRegistry(function ( + instance_representative +) { + instance_representative.free(); +}); + +// Provides: free_on_finalize +// Requires: free_finalization_registry +var free_on_finalize = function (x) { + // This is an unfortunate hack: we're creating a second instance of the + // class to be able to call free on it. We can't pass the value itself, + // since the registry holds a strong reference to the representative value. + // + // However, the class is only really a wrapper around a pointer, with a + // reference to the class' prototype as its __prototype__. + // + // It might seem cleaner to call the destructor here on the pointer + // directly, but unfortunately the destructor name is some mangled internal + // string generated by wasm_bindgen. For now, this is the best, + // least-brittle way to free once the original class instance gets collected. 
+ var instance_representative = x.constructor.__wrap(x.__wbg_ptr); + free_finalization_registry.register(x, instance_representative, x); + return x; +}; + +// srs + +// Provides: caml_fp_srs_create +// Requires: tsSrs +var caml_fp_srs_create = tsSrs.fp.create; + +// Provides: caml_fp_srs_write +// Requires: plonk_wasm, caml_jsstring_of_string +var caml_fp_srs_write = function (append, t, path) { + if (append === 0) { + append = undefined; + } else { + append = append[1]; + } + return plonk_wasm.caml_fp_srs_write(append, t, caml_jsstring_of_string(path)); +}; + +// Provides: caml_fp_srs_read +// Requires: plonk_wasm, caml_jsstring_of_string +var caml_fp_srs_read = function (offset, path) { + if (offset === 0) { + offset = undefined; + } else { + offset = offset[1]; + } + var res = plonk_wasm.caml_fp_srs_read(offset, caml_jsstring_of_string(path)); + if (res) { + return [0, res]; // Some(res) + } else { + return 0; // None + } +}; + +// Provides: caml_fp_srs_lagrange_commitment +// Requires: tsSrs +var caml_fp_srs_lagrange_commitment = tsSrs.fp.lagrangeCommitment; + +// Provides: caml_fp_srs_commit_evaluations +// Requires: plonk_wasm, tsRustConversion +var caml_fp_srs_commit_evaluations = function (t, domain_size, fps) { + var res = plonk_wasm.caml_fp_srs_commit_evaluations( + t, + domain_size, + tsRustConversion.fp.vectorToRust(fps) + ); + return tsRustConversion.fp.polyCommFromRust(res); +}; + +// Provides: caml_fp_srs_b_poly_commitment +// Requires: plonk_wasm, tsRustConversion +var caml_fp_srs_b_poly_commitment = function (srs, chals) { + var res = plonk_wasm.caml_fp_srs_b_poly_commitment( + srs, + tsRustConversion.fieldsToRustFlat(chals) + ); + return tsRustConversion.fp.polyCommFromRust(res); +}; + +// Provides: caml_fp_srs_batch_accumulator_check +// Requires: plonk_wasm, tsRustConversion +var caml_fp_srs_batch_accumulator_check = function (srs, comms, chals) { + var rust_comms = tsRustConversion.fp.pointsToRust(comms); + var rust_chals = tsRustConversion.fp.vectorToRust(chals); + var ok = plonk_wasm.caml_fp_srs_batch_accumulator_check( + srs, + rust_comms, + rust_chals + ); + return ok; +}; + +// Provides: caml_fp_srs_batch_accumulator_generate +// Requires: plonk_wasm, tsRustConversion +var caml_fp_srs_batch_accumulator_generate = function (srs, n_comms, chals) { + var rust_chals = tsRustConversion.fp.vectorToRust(chals); + var rust_comms = plonk_wasm.caml_fp_srs_batch_accumulator_generate( + srs, + n_comms, + rust_chals + ); + return tsRustConversion.fp.pointsFromRust(rust_comms); +}; + +// Provides: caml_fp_srs_h +// Requires: plonk_wasm, tsRustConversion +var caml_fp_srs_h = function (t) { + return tsRustConversion.fp.pointFromRust(plonk_wasm.caml_fp_srs_h(t)); +}; + +// Provides: caml_fp_srs_add_lagrange_basis +// Requires: tsSrs +var caml_fp_srs_add_lagrange_basis = tsSrs.fp.addLagrangeBasis; + +// Provides: caml_fq_srs_create +// Requires: tsSrs +var caml_fq_srs_create = tsSrs.fq.create; + +// Provides: caml_fq_srs_write +// Requires: plonk_wasm, caml_jsstring_of_string +var caml_fq_srs_write = function (append, t, path) { + if (append === 0) { + append = undefined; + } else { + append = append[1]; + } + return plonk_wasm.caml_fq_srs_write(append, t, caml_jsstring_of_string(path)); +}; + +// Provides: caml_fq_srs_read +// Requires: plonk_wasm, caml_jsstring_of_string +var caml_fq_srs_read = function (offset, path) { + if (offset === 0) { + offset = undefined; + } else { + offset = offset[1]; + } + var res = plonk_wasm.caml_fq_srs_read(offset, caml_jsstring_of_string(path)); + 
if (res) { + return [0, res]; // Some(res) + } else { + return 0; // None + } +}; + +// Provides: caml_fq_srs_lagrange_commitment +// Requires: tsSrs +var caml_fq_srs_lagrange_commitment = tsSrs.fq.lagrangeCommitment; + +// Provides: caml_fq_srs_commit_evaluations +// Requires: plonk_wasm, tsRustConversion +var caml_fq_srs_commit_evaluations = function (t, domain_size, fqs) { + var res = plonk_wasm.caml_fq_srs_commit_evaluations( + t, + domain_size, + tsRustConversion.fq.vectorToRust(fqs) + ); + return tsRustConversion.fq.polyCommFromRust(res); +}; + +// Provides: caml_fq_srs_b_poly_commitment +// Requires: plonk_wasm, tsRustConversion +var caml_fq_srs_b_poly_commitment = function (srs, chals) { + var res = plonk_wasm.caml_fq_srs_b_poly_commitment( + srs, + tsRustConversion.fieldsToRustFlat(chals) + ); + return tsRustConversion.fq.polyCommFromRust(res); +}; + +// Provides: caml_fq_srs_batch_accumulator_check +// Requires: plonk_wasm, tsRustConversion +var caml_fq_srs_batch_accumulator_check = function (srs, comms, chals) { + var rust_comms = tsRustConversion.fq.pointsToRust(comms); + var rust_chals = tsRustConversion.fq.vectorToRust(chals); + var ok = plonk_wasm.caml_fq_srs_batch_accumulator_check( + srs, + rust_comms, + rust_chals + ); + return ok; +}; + +// Provides: caml_fq_srs_batch_accumulator_generate +// Requires: plonk_wasm, tsRustConversion +var caml_fq_srs_batch_accumulator_generate = function (srs, comms, chals) { + var rust_chals = tsRustConversion.fq.vectorToRust(chals); + var rust_comms = plonk_wasm.caml_fq_srs_batch_accumulator_generate( + srs, + comms, + rust_chals + ); + return tsRustConversion.fq.pointsFromRust(rust_comms); +}; + +// Provides: caml_fq_srs_h +// Requires: plonk_wasm, tsRustConversion +var caml_fq_srs_h = function (t) { + return tsRustConversion.fq.pointFromRust(plonk_wasm.caml_fq_srs_h(t)); +}; + +// Provides: caml_fq_srs_add_lagrange_basis +// Requires: tsSrs +var caml_fq_srs_add_lagrange_basis = tsSrs.fq.addLagrangeBasis; + +// gate vector + +// Provides: caml_pasta_fp_plonk_gate_vector_create +// Requires: plonk_wasm, free_on_finalize +var caml_pasta_fp_plonk_gate_vector_create = function () { + return free_on_finalize(plonk_wasm.caml_pasta_fp_plonk_gate_vector_create()); +}; + +// Provides: caml_pasta_fp_plonk_gate_vector_add +// Requires: plonk_wasm, tsRustConversion +var caml_pasta_fp_plonk_gate_vector_add = function (v, x) { + return plonk_wasm.caml_pasta_fp_plonk_gate_vector_add( + v, + tsRustConversion.fp.gateToRust(x) + ); +}; + +// Provides: caml_pasta_fp_plonk_gate_vector_get +// Requires: plonk_wasm, tsRustConversion +var caml_pasta_fp_plonk_gate_vector_get = function (v, i) { + return tsRustConversion.fp.gateFromRust( + plonk_wasm.caml_pasta_fp_plonk_gate_vector_get(v, i) + ); +}; + +// Provides: caml_pasta_fp_plonk_gate_vector_len +// Requires: plonk_wasm +var caml_pasta_fp_plonk_gate_vector_len = function (v) { + return plonk_wasm.caml_pasta_fp_plonk_gate_vector_len(v); +}; + +// Provides: caml_pasta_fp_plonk_gate_vector_wrap +// Requires: plonk_wasm, tsRustConversion +var caml_pasta_fp_plonk_gate_vector_wrap = function (v, x, y) { + return plonk_wasm.caml_pasta_fp_plonk_gate_vector_wrap( + v, + tsRustConversion.wireToRust(x), + tsRustConversion.wireToRust(y) + ); +}; + +// Provides: caml_pasta_fp_plonk_gate_vector_digest +// Requires: plonk_wasm, caml_bytes_of_uint8array +var caml_pasta_fp_plonk_gate_vector_digest = function ( + public_input_size, + gate_vector +) { + var uint8array = plonk_wasm.caml_pasta_fp_plonk_gate_vector_digest( + 
public_input_size, + gate_vector + ); + return caml_bytes_of_uint8array(uint8array); +}; + +// Provides: caml_pasta_fp_plonk_circuit_serialize +// Requires: plonk_wasm, caml_string_of_jsstring +var caml_pasta_fp_plonk_circuit_serialize = function ( + public_input_size, + gate_vector +) { + return caml_string_of_jsstring( + plonk_wasm.caml_pasta_fp_plonk_circuit_serialize( + public_input_size, + gate_vector + ) + ); +}; + +// prover index + +// Provides: caml_pasta_fq_plonk_gate_vector_create +// Requires: plonk_wasm, free_on_finalize +var caml_pasta_fq_plonk_gate_vector_create = function () { + return free_on_finalize(plonk_wasm.caml_pasta_fq_plonk_gate_vector_create()); +}; + +// Provides: caml_pasta_fq_plonk_gate_vector_add +// Requires: plonk_wasm, tsRustConversion +var caml_pasta_fq_plonk_gate_vector_add = function (v, x) { + return plonk_wasm.caml_pasta_fq_plonk_gate_vector_add( + v, + tsRustConversion.fq.gateToRust(x) + ); +}; + +// Provides: caml_pasta_fq_plonk_gate_vector_get +// Requires: plonk_wasm, tsRustConversion +var caml_pasta_fq_plonk_gate_vector_get = function (v, i) { + return tsRustConversion.fq.gateFromRust( + plonk_wasm.caml_pasta_fq_plonk_gate_vector_get(v, i) + ); +}; + +// Provides: caml_pasta_fq_plonk_gate_vector_len +// Requires: plonk_wasm +var caml_pasta_fq_plonk_gate_vector_len = function (v) { + return plonk_wasm.caml_pasta_fq_plonk_gate_vector_len(v); +}; + +// Provides: caml_pasta_fq_plonk_gate_vector_wrap +// Requires: plonk_wasm, tsRustConversion +var caml_pasta_fq_plonk_gate_vector_wrap = function (v, x, y) { + return plonk_wasm.caml_pasta_fq_plonk_gate_vector_wrap( + v, + tsRustConversion.wireToRust(x), + tsRustConversion.wireToRust(y) + ); +}; + +// Provides: caml_pasta_fq_plonk_gate_vector_digest +// Requires: plonk_wasm, caml_bytes_of_uint8array +var caml_pasta_fq_plonk_gate_vector_digest = function ( + public_input_size, + gate_vector +) { + var uint8array = plonk_wasm.caml_pasta_fq_plonk_gate_vector_digest( + public_input_size, + gate_vector + ); + return caml_bytes_of_uint8array(uint8array); +}; + +// Provides: caml_pasta_fq_plonk_circuit_serialize +// Requires: plonk_wasm, caml_string_of_jsstring +var caml_pasta_fq_plonk_circuit_serialize = function ( + public_input_size, + gate_vector +) { + return caml_string_of_jsstring( + plonk_wasm.caml_pasta_fq_plonk_circuit_serialize( + public_input_size, + gate_vector + ) + ); +}; + +// Provides: caml_pasta_fp_plonk_index_create +// Requires: plonk_wasm, free_on_finalize, tsRustConversion +var caml_pasta_fp_plonk_index_create = function ( + gates, + public_inputs, + caml_lookup_tables, + caml_runtime_table_cfgs, + prev_challenges, + urs +) { + var wasm_lookup_tables = + tsRustConversion.fp.lookupTablesToRust(caml_lookup_tables); + var wasm_runtime_table_cfgs = tsRustConversion.fp.runtimeTableCfgsToRust( + caml_runtime_table_cfgs + ); + + var t = plonk_wasm.caml_pasta_fp_plonk_index_create( + gates, + public_inputs, + wasm_lookup_tables, + wasm_runtime_table_cfgs, + prev_challenges, + urs + ); + return free_on_finalize(t); +}; + +// Provides: caml_pasta_fp_plonk_index_create_bytecode +// Requires: caml_pasta_fp_plonk_index_create +var caml_pasta_fp_plonk_index_create_bytecode = function ( + gates, + public_inputs, + caml_lookup_tables, + caml_runtime_table_cfgs, + prev_challenges, + urs +) { + return caml_pasta_fp_plonk_index_create( + gates, + public_inputs, + caml_lookup_tables, + caml_runtime_table_cfgs, + prev_challenges, + urs + ); +}; + +// Provides: caml_pasta_fp_plonk_index_max_degree +// Requires: 
plonk_wasm +var caml_pasta_fp_plonk_index_max_degree = + plonk_wasm.caml_pasta_fp_plonk_index_max_degree; + +// Provides: caml_pasta_fp_plonk_index_public_inputs +// Requires: plonk_wasm +var caml_pasta_fp_plonk_index_public_inputs = + plonk_wasm.caml_pasta_fp_plonk_index_public_inputs; + +// Provides: caml_pasta_fp_plonk_index_domain_d1_size +// Requires: plonk_wasm +var caml_pasta_fp_plonk_index_domain_d1_size = + plonk_wasm.caml_pasta_fp_plonk_index_domain_d1_size; + +// Provides: caml_pasta_fp_plonk_index_domain_d4_size +// Requires: plonk_wasm +var caml_pasta_fp_plonk_index_domain_d4_size = + plonk_wasm.caml_pasta_fp_plonk_index_domain_d4_size; + +// Provides: caml_pasta_fp_plonk_index_domain_d8_size +// Requires: plonk_wasm +var caml_pasta_fp_plonk_index_domain_d8_size = + plonk_wasm.caml_pasta_fp_plonk_index_domain_d8_size; + +// Provides: caml_pasta_fp_plonk_index_read +// Requires: plonk_wasm, caml_jsstring_of_string +var caml_pasta_fp_plonk_index_read = function (offset, urs, path) { + if (offset === 0) { + offset = undefined; + } else { + offset = offset[1]; + } + return plonk_wasm.caml_pasta_fp_plonk_index_read( + offset, + urs, + caml_jsstring_of_string(path) + ); +}; + +// Provides: caml_pasta_fp_plonk_index_write +// Requires: plonk_wasm, caml_jsstring_of_string +var caml_pasta_fp_plonk_index_write = function (append, t, path) { + if (append === 0) { + append = undefined; + } else { + append = append[1]; + } + return plonk_wasm.caml_pasta_fp_plonk_index_write( + append, + t, + caml_jsstring_of_string(path) + ); +}; + +// Provides: caml_pasta_fq_plonk_index_create +// Requires: plonk_wasm, free_on_finalize, tsRustConversion +var caml_pasta_fq_plonk_index_create = function ( + gates, + public_inputs, + caml_lookup_tables, + caml_runtime_table_cfgs, + prev_challenges, + urs +) { + var wasm_lookup_tables = + tsRustConversion.fq.lookupTablesToRust(caml_lookup_tables); + var wasm_runtime_table_cfgs = tsRustConversion.fq.runtimeTableCfgsToRust( + caml_runtime_table_cfgs + ); + + return free_on_finalize( + plonk_wasm.caml_pasta_fq_plonk_index_create( + gates, + public_inputs, + wasm_lookup_tables, + wasm_runtime_table_cfgs, + prev_challenges, + urs + ) + ); +}; + +// Provides: caml_pasta_fq_plonk_index_create_bytecode +// Requires: caml_pasta_fq_plonk_index_create +var caml_pasta_fq_plonk_index_create_bytecode = function ( + gates, + public_inputs, + caml_lookup_tables, + caml_runtime_table_cfgs, + prev_challenges, + urs +) { + return caml_pasta_fq_plonk_index_create( + gates, + public_inputs, + caml_lookup_tables, + caml_runtime_table_cfgs, + prev_challenges, + urs + ); +}; + +// Provides: caml_pasta_fq_plonk_index_max_degree +// Requires: plonk_wasm +var caml_pasta_fq_plonk_index_max_degree = + plonk_wasm.caml_pasta_fq_plonk_index_max_degree; + +// Provides: caml_pasta_fq_plonk_index_public_inputs +// Requires: plonk_wasm +var caml_pasta_fq_plonk_index_public_inputs = + plonk_wasm.caml_pasta_fq_plonk_index_public_inputs; + +// Provides: caml_pasta_fq_plonk_index_domain_d1_size +// Requires: plonk_wasm +var caml_pasta_fq_plonk_index_domain_d1_size = + plonk_wasm.caml_pasta_fq_plonk_index_domain_d1_size; + +// Provides: caml_pasta_fq_plonk_index_domain_d4_size +// Requires: plonk_wasm +var caml_pasta_fq_plonk_index_domain_d4_size = + plonk_wasm.caml_pasta_fq_plonk_index_domain_d4_size; + +// Provides: caml_pasta_fq_plonk_index_domain_d8_size +// Requires: plonk_wasm +var caml_pasta_fq_plonk_index_domain_d8_size = + plonk_wasm.caml_pasta_fq_plonk_index_domain_d8_size; + +// Provides: 
caml_pasta_fq_plonk_index_read +// Requires: plonk_wasm, caml_jsstring_of_string +var caml_pasta_fq_plonk_index_read = function (offset, urs, path) { + if (offset === 0) { + offset = undefined; + } else { + offset = offset[1]; + } + return plonk_wasm.caml_pasta_fq_plonk_index_read( + offset, + urs, + caml_jsstring_of_string(path) + ); +}; + +// Provides: caml_pasta_fq_plonk_index_write +// Requires: plonk_wasm, caml_jsstring_of_string +var caml_pasta_fq_plonk_index_write = function (append, t, path) { + if (append === 0) { + append = undefined; + } else { + append = append[1]; + } + return plonk_wasm.caml_pasta_fq_plonk_index_write( + append, + t, + caml_jsstring_of_string(path) + ); +}; + +// verifier index + +// Provides: caml_opt_of_rust +var caml_opt_of_rust = function (value, value_of_rust) { + if (value === undefined) { + return 0; + } else { + return [0, value_of_rust(value)]; + } +}; + +// Provides: caml_opt_to_rust +var caml_opt_to_rust = function (caml_optional_value, to_rust) { + // to_rust expects the parameters of the variant. A `Some vx` is represented + // as [0, vx] + if (caml_optional_value === 0) { + return undefined; + } else { + return to_rust(caml_optional_value[1]); + } +}; + +// Provides: caml_pasta_fp_plonk_verifier_index_create +// Requires: plonk_wasm, tsRustConversion +var caml_pasta_fp_plonk_verifier_index_create = function (x) { + var vk = plonk_wasm.caml_pasta_fp_plonk_verifier_index_create(x); + return tsRustConversion.fp.verifierIndexFromRust(vk); +}; + +// Provides: caml_pasta_fp_plonk_verifier_index_read +// Requires: plonk_wasm, caml_jsstring_of_string, tsRustConversion +var caml_pasta_fp_plonk_verifier_index_read = function (offset, urs, path) { + if (offset === 0) { + offset = undefined; + } else { + offset = offset[1]; + } + return tsRustConversion.fp.verifierIndexFromRust( + plonk_wasm.caml_pasta_fp_plonk_verifier_index_read( + offset, + urs, + caml_jsstring_of_string(path) + ) + ); +}; + +// Provides: caml_pasta_fp_plonk_verifier_index_write +// Requires: plonk_wasm, caml_jsstring_of_string, tsRustConversion +var caml_pasta_fp_plonk_verifier_index_write = function (append, t, path) { + if (append === 0) { + append = undefined; + } else { + append = append[1]; + } + return plonk_wasm.caml_pasta_fp_plonk_verifier_index_write( + append, + tsRustConversion.fp.verifierIndexToRust(t), + caml_jsstring_of_string(path) + ); +}; + +// Provides: caml_pasta_fp_plonk_verifier_index_shifts +// Requires: plonk_wasm, tsRustConversion +var caml_pasta_fp_plonk_verifier_index_shifts = function (log2_size) { + return tsRustConversion.fp.shiftsFromRust( + plonk_wasm.caml_pasta_fp_plonk_verifier_index_shifts(log2_size) + ); +}; + +// Provides: caml_pasta_fp_plonk_verifier_index_dummy +// Requires: plonk_wasm, tsRustConversion +var caml_pasta_fp_plonk_verifier_index_dummy = function () { + var res = plonk_wasm.caml_pasta_fp_plonk_verifier_index_dummy(); + return tsRustConversion.fp.verifierIndexFromRust(res); +}; + +// Provides: caml_pasta_fp_plonk_verifier_index_deep_copy +// Requires: plonk_wasm, tsRustConversion +var caml_pasta_fp_plonk_verifier_index_deep_copy = function (x) { + return tsRustConversion.fp.verifierIndexFromRust( + plonk_wasm.caml_pasta_fp_plonk_verifier_index_deep_copy( + tsRustConversion.fp.verifierIndexToRust(x) + ) + ); +}; + +// Provides: caml_pasta_fq_plonk_verifier_index_create +// Requires: plonk_wasm, tsRustConversion +var caml_pasta_fq_plonk_verifier_index_create = function (x) { + return tsRustConversion.fq.verifierIndexFromRust( + 
plonk_wasm.caml_pasta_fq_plonk_verifier_index_create(x) + ); +}; + +// Provides: caml_pasta_fq_plonk_verifier_index_read +// Requires: plonk_wasm, caml_jsstring_of_string, tsRustConversion +var caml_pasta_fq_plonk_verifier_index_read = function (offset, urs, path) { + if (offset === 0) { + offset = undefined; + } else { + offset = offset[1]; + } + return tsRustConversion.fq.verifierIndexFromRust( + plonk_wasm.caml_pasta_fq_plonk_verifier_index_read( + offset, + urs, + caml_jsstring_of_string(path) + ) + ); +}; + +// Provides: caml_pasta_fq_plonk_verifier_index_write +// Requires: plonk_wasm, caml_jsstring_of_string, tsRustConversion +var caml_pasta_fq_plonk_verifier_index_write = function (append, t, path) { + if (append === 0) { + append = undefined; + } else { + append = append[1]; + } + return plonk_wasm.caml_pasta_fq_plonk_verifier_index_write( + append, + tsRustConversion.fq.verifierIndexToRust(t), + caml_jsstring_of_string(path) + ); +}; + +// Provides: caml_pasta_fq_plonk_verifier_index_shifts +// Requires: plonk_wasm, tsRustConversion +var caml_pasta_fq_plonk_verifier_index_shifts = function (log2_size) { + return tsRustConversion.fq.shiftsFromRust( + plonk_wasm.caml_pasta_fq_plonk_verifier_index_shifts(log2_size) + ); +}; + +// Provides: caml_pasta_fq_plonk_verifier_index_dummy +// Requires: plonk_wasm, tsRustConversion +var caml_pasta_fq_plonk_verifier_index_dummy = function () { + return tsRustConversion.fq.verifierIndexFromRust( + plonk_wasm.caml_pasta_fq_plonk_verifier_index_dummy() + ); +}; + +// Provides: caml_pasta_fq_plonk_verifier_index_deep_copy +// Requires: plonk_wasm, tsRustConversion, tsRustConversion +var caml_pasta_fq_plonk_verifier_index_deep_copy = function (x) { + return tsRustConversion.fq.verifierIndexFromRust( + plonk_wasm.caml_pasta_fq_plonk_verifier_index_deep_copy( + tsRustConversion.fq.verifierIndexToRust(x) + ) + ); +}; + +// proof + +// Provides: caml_pasta_fp_plonk_proof_create +// Requires: plonk_wasm, tsRustConversion +var caml_pasta_fp_plonk_proof_create = function ( + index, + witness_cols, + caml_runtime_tables, + prev_challenges, + prev_sgs +) { + var w = new plonk_wasm.WasmVecVecFp(witness_cols.length - 1); + for (var i = 1; i < witness_cols.length; i++) { + w.push(tsRustConversion.fp.vectorToRust(witness_cols[i])); + } + witness_cols = w; + prev_challenges = tsRustConversion.fp.vectorToRust(prev_challenges); + var wasm_runtime_tables = + tsRustConversion.fp.runtimeTablesToRust(caml_runtime_tables); + prev_sgs = tsRustConversion.fp.pointsToRust(prev_sgs); + var proof = plonk_wasm.caml_pasta_fp_plonk_proof_create( + index, + witness_cols, + wasm_runtime_tables, + prev_challenges, + prev_sgs + ); + return tsRustConversion.fp.proofFromRust(proof); +}; + +// Provides: caml_pasta_fp_plonk_proof_verify +// Requires: plonk_wasm, tsRustConversion +var caml_pasta_fp_plonk_proof_verify = function (index, proof) { + index = tsRustConversion.fp.verifierIndexToRust(index); + proof = tsRustConversion.fp.proofToRust(proof); + return plonk_wasm.caml_pasta_fp_plonk_proof_verify(index, proof); +}; + +// Provides: caml_pasta_fp_plonk_proof_batch_verify +// Requires: plonk_wasm, tsRustConversion +var caml_pasta_fp_plonk_proof_batch_verify = function (indexes, proofs) { + indexes = tsRustConversion.mapMlArrayToRustVector( + indexes, + tsRustConversion.fp.verifierIndexToRust + ); + proofs = tsRustConversion.mapMlArrayToRustVector( + proofs, + tsRustConversion.fp.proofToRust + ); + return plonk_wasm.caml_pasta_fp_plonk_proof_batch_verify(indexes, proofs); +}; + +// 
Provides: caml_pasta_fp_plonk_proof_dummy +// Requires: plonk_wasm, tsRustConversion +var caml_pasta_fp_plonk_proof_dummy = function () { + return tsRustConversion.fp.proofFromRust( + plonk_wasm.caml_pasta_fp_plonk_proof_dummy() + ); +}; + +// Provides: caml_pasta_fp_plonk_proof_deep_copy +// Requires: plonk_wasm, tsRustConversion +var caml_pasta_fp_plonk_proof_deep_copy = function (proof) { + return tsRustConversion.fp.proofFromRust( + plonk_wasm.caml_pasta_fp_plonk_proof_deep_copy( + tsRustConversion.fp.proofToRust(proof) + ) + ); +}; + +// Provides: caml_pasta_fq_plonk_proof_create +// Requires: plonk_wasm, tsRustConversion +var caml_pasta_fq_plonk_proof_create = function ( + index, + witness_cols, + caml_runtime_tables, + prev_challenges, + prev_sgs +) { + var w = new plonk_wasm.WasmVecVecFq(witness_cols.length - 1); + for (var i = 1; i < witness_cols.length; i++) { + w.push(tsRustConversion.fq.vectorToRust(witness_cols[i])); + } + witness_cols = w; + prev_challenges = tsRustConversion.fq.vectorToRust(prev_challenges); + var wasm_runtime_tables = + tsRustConversion.fq.runtimeTablesToRust(caml_runtime_tables); + prev_sgs = tsRustConversion.fq.pointsToRust(prev_sgs); + var proof = plonk_wasm.caml_pasta_fq_plonk_proof_create( + index, + witness_cols, + wasm_runtime_tables, + prev_challenges, + prev_sgs + ); + return tsRustConversion.fq.proofFromRust(proof); +}; + +// Provides: caml_pasta_fq_plonk_proof_verify +// Requires: plonk_wasm, tsRustConversion +var caml_pasta_fq_plonk_proof_verify = function (index, proof) { + index = tsRustConversion.fq.verifierIndexToRust(index); + proof = tsRustConversion.fq.proofToRust(proof); + return plonk_wasm.caml_pasta_fq_plonk_proof_verify(index, proof); +}; + +// Provides: caml_pasta_fq_plonk_proof_batch_verify +// Requires: plonk_wasm, tsRustConversion +var caml_pasta_fq_plonk_proof_batch_verify = function (indexes, proofs) { + indexes = tsRustConversion.mapMlArrayToRustVector( + indexes, + tsRustConversion.fq.verifierIndexToRust + ); + proofs = tsRustConversion.mapMlArrayToRustVector( + proofs, + tsRustConversion.fq.proofToRust + ); + return plonk_wasm.caml_pasta_fq_plonk_proof_batch_verify(indexes, proofs); +}; + +// Provides: caml_pasta_fq_plonk_proof_dummy +// Requires: plonk_wasm, tsRustConversion +var caml_pasta_fq_plonk_proof_dummy = function () { + return tsRustConversion.fq.proofFromRust( + plonk_wasm.caml_pasta_fq_plonk_proof_dummy() + ); +}; + +// Provides: caml_pasta_fq_plonk_proof_deep_copy +// Requires: plonk_wasm, tsRustConversion +var caml_pasta_fq_plonk_proof_deep_copy = function (proof) { + return tsRustConversion.fq.proofFromRust( + plonk_wasm.caml_pasta_fq_plonk_proof_deep_copy( + tsRustConversion.fq.proofToRust(proof) + ) + ); +}; + +// oracles + +// Provides: fp_oracles_create +// Requires: plonk_wasm, tsRustConversion +var fp_oracles_create = function (lgr_comm, verifier_index, proof) { + return tsRustConversion.fp.oraclesFromRust( + plonk_wasm.fp_oracles_create( + tsRustConversion.fp.polyCommsToRust(lgr_comm), + tsRustConversion.fp.verifierIndexToRust(verifier_index), + tsRustConversion.fp.proofToRust(proof) + ) + ); +}; + +// Provides: fp_oracles_create_no_public +// Requires: fp_oracles_create +var fp_oracles_create_no_public = function (lgr_comm, verifier_index, proof) { + return fp_oracles_create(lgr_comm, verifier_index, [0, 0, proof]); +}; + +// Provides: fp_oracles_dummy +// Requires: plonk_wasm, tsRustConversion +var fp_oracles_dummy = function () { + return 
tsRustConversion.fp.oraclesFromRust(plonk_wasm.fp_oracles_dummy()); +}; + +// Provides: fp_oracles_deep_copy +// Requires: plonk_wasm, tsRustConversion +var fp_oracles_deep_copy = function (x) { + return tsRustConversion.fp.oraclesFromRust( + plonk_wasm.fp_oracles_deep_copy(tsRustConversion.fp.oraclesToRust(x)) + ); +}; + +// Provides: fq_oracles_create +// Requires: plonk_wasm, tsRustConversion +var fq_oracles_create = function (lgr_comm, verifier_index, proof) { + return tsRustConversion.fq.oraclesFromRust( + plonk_wasm.fq_oracles_create( + tsRustConversion.fq.polyCommsToRust(lgr_comm), + tsRustConversion.fq.verifierIndexToRust(verifier_index), + tsRustConversion.fq.proofToRust(proof) + ) + ); +}; + +// Provides: fq_oracles_create_no_public +// Requires: fq_oracles_create +var fq_oracles_create_no_public = function (lgr_comm, verifier_index, proof) { + return fq_oracles_create(lgr_comm, verifier_index, [0, 0, proof]); +}; + +// Provides: fq_oracles_dummy +// Requires: plonk_wasm, tsRustConversion +var fq_oracles_dummy = function () { + return tsRustConversion.fq.oraclesFromRust(plonk_wasm.fq_oracles_dummy()); +}; + +// Provides: fq_oracles_deep_copy +// Requires: plonk_wasm, tsRustConversion +var fq_oracles_deep_copy = function (x) { + return tsRustConversion.fq.oraclesFromRust( + plonk_wasm.fq_oracles_deep_copy(tsRustConversion.fq.oraclesToRust(x)) + ); +}; + +// This is fake -- parameters are only needed on the Rust side, so no need to return something meaningful +// Provides: caml_pasta_fp_poseidon_params_create +function caml_pasta_fp_poseidon_params_create() { + return [0]; +} +// Provides: caml_pasta_fq_poseidon_params_create +function caml_pasta_fq_poseidon_params_create() { + return [0]; +} + +// Provides: caml_pasta_fp_poseidon_block_cipher +// Requires: plonk_wasm, tsRustConversion, tsRustConversion +function caml_pasta_fp_poseidon_block_cipher(_fake_params, fp_vector) { + // 1. get permuted field vector from rust + var wasm_flat_vector = plonk_wasm.caml_pasta_fp_poseidon_block_cipher( + tsRustConversion.fp.vectorToRust(fp_vector) + ); + var new_fp_vector = tsRustConversion.fp.vectorFromRust(wasm_flat_vector); + // 2. write back modified field vector to original one + new_fp_vector.forEach(function (a, i) { + fp_vector[i] = a; + }); +} + +// Provides: caml_pasta_fq_poseidon_block_cipher +// Requires: plonk_wasm, tsRustConversion, tsRustConversion +function caml_pasta_fq_poseidon_block_cipher(_fake_params, fq_vector) { + // 1. get permuted field vector from rust + var wasm_flat_vector = plonk_wasm.caml_pasta_fq_poseidon_block_cipher( + tsRustConversion.fq.vectorToRust(fq_vector) + ); + var new_fq_vector = tsRustConversion.fq.vectorFromRust(wasm_flat_vector); + // 2. 
write back modified field vector to original one + new_fq_vector.forEach(function (a, i) { + fq_vector[i] = a; + }); +} + +// Provides: caml_pasta_fp_plonk_proof_example_with_lookup +function caml_pasta_fp_plonk_proof_example_with_lookup() { + // This is only used in the pickles unit tests + throw new Error( + 'Unimplemented caml_pasta_fp_plonk_proof_example_with_lookup' + ); +} + +// Provides: prover_to_json +// Requires: plonk_wasm +var prover_to_json = plonk_wasm.prover_to_json; + +// Provides: integers_uint64_of_uint32 +// Requires: UInt64, caml_int64_of_int32 +function integers_uint64_of_uint32(i) { + // Same as integers_uint64_of_int + return new UInt64(caml_int64_of_int32(i)); +} + +///////////////////////////////////////////////////////////////////////////// +// The *_example_* functions below are only used in the pickles unit tests // +///////////////////////////////////////////////////////////////////////////// + +// Provides: caml_pasta_fp_plonk_proof_example_with_ffadd +function caml_pasta_fp_plonk_proof_example_with_ffadd() { + throw new Error('Unimplemented caml_pasta_fp_plonk_proof_example_with_ffadd'); +} + +// Provides: caml_pasta_fp_plonk_proof_example_with_foreign_field_mul +function caml_pasta_fp_plonk_proof_example_with_foreign_field_mul() { + throw new Error( + 'Unimplemented caml_pasta_fp_plonk_proof_example_with_foreign_field_mul' + ); +} + +// Provides: caml_pasta_fp_plonk_proof_example_with_range_check +function caml_pasta_fp_plonk_proof_example_with_range_check() { + throw new Error( + 'Unimplemented caml_pasta_fp_plonk_proof_example_with_range_check' + ); +} + +// Provides: caml_pasta_fp_plonk_proof_example_with_range_check0 +function caml_pasta_fp_plonk_proof_example_with_range_check0() { + throw new Error( + 'Unimplemented caml_pasta_fp_plonk_proof_example_with_range_check0' + ); +} + +// Provides: caml_pasta_fp_plonk_proof_example_with_rot +function caml_pasta_fp_plonk_proof_example_with_rot() { + throw new Error('Unimplemented caml_pasta_fp_plonk_proof_example_with_rot'); +} + +// Provides: caml_pasta_fp_plonk_proof_example_with_xor +function caml_pasta_fp_plonk_proof_example_with_xor() { + throw new Error('Unimplemented caml_pasta_fp_plonk_proof_example_with_xor'); +} diff --git a/src/lib/crypto/kimchi_bindings/js/dune b/src/lib/crypto/kimchi_bindings/js/dune new file mode 100644 index 00000000000..f8746ce705d --- /dev/null +++ b/src/lib/crypto/kimchi_bindings/js/dune @@ -0,0 +1,14 @@ +(library + (name bindings_js) + (public_name bindings_js) + (js_of_ocaml + (javascript_files + bindings.js + bindings-bigint256.js + bindings-field.js + bindings-curve.js + bindings-vector.js)) + (instrumentation + (backend bisect_ppx)) + (preprocess + (pps ppx_version))) diff --git a/src/lib/crypto/kimchi_bindings/js/node_js/build.sh b/src/lib/crypto/kimchi_bindings/js/node_js/build.sh new file mode 100755 index 00000000000..41a7c57bc5c --- /dev/null +++ b/src/lib/crypto/kimchi_bindings/js/node_js/build.sh @@ -0,0 +1,10 @@ +#!/usr/bin/env bash +set -euo pipefail + +if [[ -z "${PLONK_WASM_NODEJS-}" ]]; then + export RUSTFLAGS="-C target-feature=+atomics,+bulk-memory,+mutable-globals -C link-arg=--no-check-features -C link-arg=--max-memory=4294967296" + # The version should stay in line with the one in kimchi_bindings/wasm/rust-toolchain.toml + rustup run nightly-2023-09-01 wasm-pack build --target nodejs --out-dir ../js/node_js ../../wasm -- -Z build-std=panic_abort,std --features nodejs +else + cp "$PLONK_WASM_NODEJS"/* -R . 
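+  # $PLONK_WASM_NODEJS points at a directory of prebuilt wasm-pack artifacts
+  # (plonk_wasm.js, plonk_wasm_bg.wasm, ...), which are copied in here
+  # instead of rebuilding the wasm from source.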
+fi diff --git a/src/lib/crypto/kimchi_bindings/js/node_js/dune b/src/lib/crypto/kimchi_bindings/js/node_js/dune new file mode 100644 index 00000000000..11b660b56f4 --- /dev/null +++ b/src/lib/crypto/kimchi_bindings/js/node_js/dune @@ -0,0 +1,34 @@ +(library + (name node_backend) + (public_name bindings_js.node_backend) + (js_of_ocaml + (flags + (:include flags.sexp)) + (javascript_files node_backend.js)) + (instrumentation + (backend bisect_ppx)) + (preprocess + (pps ppx_version js_of_ocaml-ppx))) + +(rule + (targets + plonk_wasm_bg.wasm.d.ts + plonk_wasm_bg.wasm + plonk_wasm.d.ts + plonk_wasm.js + flags.sexp) + (deps + ../../wasm/Cargo.toml + ../../wasm/Cargo.lock + (source_tree ../../wasm/src) + (source_tree ../../wasm/.cargo/config) + (source_tree ../../../proof-systems)) + (locks /cargo-lock) ; lock for rustup + (action + (progn + (run chmod -R +w ../../wasm .) + (setenv + CARGO_TARGET_DIR + "%{read:../../../kimchi_bindings/stubs/dune-build-root}/cargo_kimchi_wasm" + (run ./build.sh)) + (write-file flags.sexp "()")))) diff --git a/src/lib/crypto/kimchi_bindings/js/node_js/node_backend.js b/src/lib/crypto/kimchi_bindings/js/node_js/node_backend.js new file mode 100644 index 00000000000..76e22cc82b2 --- /dev/null +++ b/src/lib/crypto/kimchi_bindings/js/node_js/node_backend.js @@ -0,0 +1,2 @@ +// Provides: plonk_wasm +var plonk_wasm = require('./plonk_wasm.js'); diff --git a/src/lib/crypto/kimchi_bindings/js/test/bindings_js_test.ml b/src/lib/crypto/kimchi_bindings/js/test/bindings_js_test.ml new file mode 100644 index 00000000000..f7d9cc49dfd --- /dev/null +++ b/src/lib/crypto/kimchi_bindings/js/test/bindings_js_test.ml @@ -0,0 +1,1189 @@ +open Kimchi_types +open Pasta_bindings +open Kimchi_bindings +open Js_of_ocaml +module Bigint_256 = BigInt256 +module Pasta_fp = Fp +module Pasta_fq = Fq +module Pasta_fp_vector = FieldVectors.Fp +module Pasta_fq_vector = FieldVectors.Fq +module Pasta_pallas = Pallas +module Pasta_vesta = Vesta +module Pasta_fp_urs = Protocol.SRS.Fp +module Pasta_fq_urs = Protocol.SRS.Fq +module Pasta_fp_index = Protocol.Index.Fp +module Pasta_fq_index = Protocol.Index.Fq +module Pasta_fp_verifier_index = Protocol.VerifierIndex.Fp +module Pasta_fq_verifier_index = Protocol.VerifierIndex.Fq + +(* NOTE: For nodejs, we need to manually add the following line to the javascript bindings, after imports['env'] has been declared. 
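+   This is needed because the wasm in node_js/ is built with +atomics and
+   +bulk-memory (see RUSTFLAGS in node_js/build.sh), which requires the
+   module's memory to be a shared WebAssembly.Memory supplied via the env
+   import: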
+ imports['env']['memory'] = new WebAssembly.Memory({initial: 18, maximum: 16384, shared: true}); +*) + +type a = { a : int; b : bool; c : int option option } + +type b = A | B | C | D | Aplus of b | Bplus of int | Cplus of bool + +let _ = + Js.export "testing" + (object%js (_self) + method a = { a = 15; b = true; c = Some (Some 125) } + + method b = { a = 20; b = false; c = Some None } + + method c = { a = 25; b = false; c = None } + + method create i = + match i with + | 0 -> + A + | 1 -> + B + | 2 -> + C + | 3 -> + D + | 4 -> + Aplus A + | 5 -> + Aplus (Aplus (Aplus (Bplus 15))) + | 6 -> + Bplus 20 + | 7 -> + Cplus true + | _ -> + A + + method returnString = "String" + + method returnString2 = + String.concat "" [ "String1"; "String2"; "String3" ] + + method returnBytes = Bytes.of_string "Bytes" + + method returnStringAll = + let bytes = Bytes.make 256 ' ' in + let rec go i = + if i < 256 then ( + Bytes.set bytes i (Char.chr i) ; + go (i + 1) ) + in + go 0 ; Bytes.to_string bytes + + method bytesOfJsString x = Js.to_string x |> Bytes.of_string + + method bytesToJsString x = Bytes.to_string x |> Js.string + end ) + +let _ = + let open Bigint_256 in + Js.export "bigint_256" + (object%js (_self) + method ofNumeral x _len base = of_numeral x _len base + + method ofDecimalString x = of_decimal_string x + + method numLimbs = num_limbs () + + method bytesPerLimb = bytes_per_limb () + + method div x y = div x y + + method compare x y = compare x y + + method print x = print x + + method toString x = to_string x + + method testBit x i = test_bit x i + + method toBytes x = to_bytes x + + method ofBytes x = of_bytes x + + method deepCopy x = deep_copy x + end ) + +let _ = + let open Pasta_fp in + Js.export "pasta_fp" + (object%js (_self) + method sizeInBits = size_in_bits () + + method size = size () + + method add x y = add x y + + method sub x y = sub x y + + method negate x = negate x + + method mul x y = mul x y + + method div x y = div x y + + method inv x = inv x + + method square x = square x + + method sqrt x = sqrt x + + method ofInt x = of_int x + + method toString x = to_string x + + method ofString x = of_string x + + method print x = print x + + method copy x y = copy x y + + method mutAdd x y = mut_add x y + + method mutSub x y = mut_sub x y + + method mutMul x y = mut_sub x y + + method mutSquare x = mut_square x + + method compare x y = compare x y + + method equal x y = equal x y + + method random = random () + + method rng i = rng i + + method toBigint x = to_bigint x + + method ofBigint x = of_bigint x + + method twoAdicRootOfUnity = two_adic_root_of_unity () + + method domainGenerator i = domain_generator i + + method toBytes x = to_bytes x + + method ofBytes x = of_bytes x + + method deepCopy x = deep_copy x + end ) + +let _ = + let open Pasta_fq in + Js.export "pasta_fq" + (object%js (_self) + method sizeInBits = size_in_bits () + + method size = size () + + method add x y = add x y + + method sub x y = sub x y + + method negate x = negate x + + method mul x y = mul x y + + method div x y = div x y + + method inv x = inv x + + method square x = square x + + method sqrt x = sqrt x + + method ofInt x = of_int x + + method toString x = to_string x + + method ofString x = of_string x + + method print x = print x + + method copy x y = copy x y + + method mutAdd x y = mut_add x y + + method mutSub x y = mut_sub x y + + method mutMul x y = mut_sub x y + + method mutSquare x = mut_square x + + method compare x y = compare x y + + method equal x y = equal x y + + method random = 
random () + + method rng i = rng i + + method toBigint x = to_bigint x + + method ofBigint x = of_bigint x + + method twoAdicRootOfUnity = two_adic_root_of_unity () + + method domainGenerator i = domain_generator i + + method toBytes x = to_bytes x + + method ofBytes x = of_bytes x + + method deepCopy x = deep_copy x + end ) + +let _ = + let open Bigint_256 in + Js.export "bigint_256_test" + (object%js (_self) + method run = + let ten = of_numeral "A" 1 16 in + let two = of_numeral "10" 2 2 in + let five = of_numeral "5" 1 10 in + let six = of_decimal_string "6" in + let num_limbs = num_limbs () in + let bytes_per_limb = bytes_per_limb () in + assert (num_limbs * bytes_per_limb * 8 = 256) ; + let two_again = div ten five in + assert (compare ten two > 0) ; + assert (compare two ten < 0) ; + assert (compare two two = 0) ; + assert (compare two two_again = 0) ; + print ten ; + assert (String.equal (to_string ten) "10") ; + assert (String.equal (to_string two_again) "2") ; + assert (test_bit five 0) ; + assert (not (test_bit five 1)) ; + assert (test_bit five 2) ; + let ten_bytes = to_bytes ten in + assert (compare (of_bytes ten_bytes) ten = 0) ; + assert (compare (deep_copy six) six = 0) + end ) + +let _ = + let open Pasta_fp in + Js.export "pasta_fp_test" + (object%js (_self) + method run = + let size_in_bits = size_in_bits () in + assert (size_in_bits = 255) ; + let size = size () in + assert ( + String.equal + (Bigint_256.to_string size) + "28948022309329048855892746252171976963363056481941560715954676764349967630337" ) ; + let one = of_int 1 in + let two = of_string "2" in + let rand1 = random () in + let rand2 = rng 15 in + let ten = of_bigint (Bigint_256.of_decimal_string "10") in + let eleven = of_bigint (Bigint_256.of_decimal_string "11") in + let twenty_one = add ten eleven in + let one_again = sub eleven ten in + let twenty = mul two ten in + let five = div ten two in + assert (String.equal (to_string twenty_one) "21") ; + assert (String.equal (to_string one_again) "1") ; + assert (String.equal (to_string twenty) "20") ; + assert (String.equal (to_string five) "5") ; + assert (equal (of_string "5") five) ; + assert (equal (of_string (to_string five)) five) ; + assert ( + equal twenty_one + ( match sqrt (square twenty_one) with + | Some x -> + x + | None -> + assert false ) ) ; + print twenty_one ; + copy eleven twenty_one ; + assert (equal eleven twenty_one) ; + assert (String.equal (to_string eleven) "21") ; + mut_add one_again ten ; + assert (String.equal (to_string one_again) "11") ; + mut_sub one_again one ; + assert (String.equal (to_string one_again) "10") ; + mut_mul one_again ten ; + assert (String.equal (to_string one_again) "100") ; + mut_square one_again ; + assert (String.equal (to_string one_again) "10000") ; + assert (equal (of_bigint (to_bigint rand1)) rand1) ; + assert (compare (of_bigint (to_bigint rand1)) rand1 = 0) ; + assert (compare one ten < 0) ; + assert (compare ten one > 0) ; + let root_of_unity = two_adic_root_of_unity () in + assert (equal (of_bytes (to_bytes root_of_unity)) root_of_unity) ; + let gen = domain_generator 2 in + assert (equal (of_bytes (to_bytes gen)) gen) ; + assert (equal (deep_copy rand2) rand2) + end ) + +let _ = + let open Pasta_fq in + Js.export "pasta_fq_test" + (object%js (_self) + method run = + let size_in_bits = size_in_bits () in + assert (size_in_bits = 255) ; + let size = size () in + assert ( + String.equal + (Bigint_256.to_string size) + "28948022309329048855892746252171976963363056481941647379679742748393362948097" ) ; + 
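+        (* As in the Pasta_fp test above: [size ()] is the field modulus,
+           returned as a Bigint_256. *)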
let one = of_int 1 in + let two = of_string "2" in + let rand1 = random () in + let rand2 = rng 15 in + let ten = of_bigint (Bigint_256.of_decimal_string "10") in + let eleven = of_bigint (Bigint_256.of_decimal_string "11") in + let twenty_one = add ten eleven in + let one_again = sub eleven ten in + let twenty = mul two ten in + let five = div ten two in + assert (String.equal (to_string twenty_one) "21") ; + assert (String.equal (to_string one_again) "1") ; + assert (String.equal (to_string twenty) "20") ; + assert (String.equal (to_string five) "5") ; + assert (equal (of_string "5") five) ; + assert (equal (of_string (to_string five)) five) ; + assert ( + equal twenty_one + ( match sqrt (square twenty_one) with + | Some x -> + x + | None -> + assert false ) ) ; + print twenty_one ; + copy eleven twenty_one ; + assert (equal eleven twenty_one) ; + assert (String.equal (to_string eleven) "21") ; + mut_add one_again ten ; + assert (String.equal (to_string one_again) "11") ; + mut_sub one_again one ; + assert (String.equal (to_string one_again) "10") ; + mut_mul one_again ten ; + assert (String.equal (to_string one_again) "100") ; + mut_square one_again ; + assert (String.equal (to_string one_again) "10000") ; + assert (equal (of_bigint (to_bigint rand1)) rand1) ; + assert (compare (of_bigint (to_bigint rand1)) rand1 = 0) ; + assert (compare one ten < 0) ; + assert (compare ten one > 0) ; + let root_of_unity = two_adic_root_of_unity () in + assert (equal (of_bytes (to_bytes root_of_unity)) root_of_unity) ; + let gen = domain_generator 2 in + assert (equal (of_bytes (to_bytes gen)) gen) ; + assert (equal (deep_copy rand2) rand2) + end ) + +let _ = + let open Pasta_fp_vector in + Js.export "pasta_fp_vector_test" + (object%js (_self) + method run = + let first = create () in + let second = create () in + assert (length first = 0) ; + assert (length second = 0) ; + assert (not (first == second)) ; + emplace_back first (Pasta_fp.of_int 0) ; + assert (length first = 1) ; + assert (length second = 0) ; + emplace_back second (Pasta_fp.of_int 1) ; + assert (length first = 1) ; + assert (length second = 1) ; + emplace_back first (Pasta_fp.of_int 10) ; + assert (length first = 2) ; + assert (length second = 1) ; + emplace_back first (Pasta_fp.of_int 30) ; + assert (length first = 3) ; + assert (length second = 1) ; + assert (Pasta_fp.equal (Pasta_fp.of_int 0) (get first 0)) ; + assert (Pasta_fp.equal (Pasta_fp.of_int 10) (get first 1)) ; + assert (Pasta_fp.equal (Pasta_fp.of_int 30) (get first 2)) ; + assert (Pasta_fp.equal (Pasta_fp.of_int 1) (get second 0)) + end ) + +let _ = + let open Pasta_fq_vector in + Js.export "pasta_fq_vector_test" + (object%js (_self) + method run = + let first = create () in + let second = create () in + assert (length first = 0) ; + assert (length second = 0) ; + assert (not (first == second)) ; + emplace_back first (Pasta_fq.of_int 0) ; + assert (length first = 1) ; + assert (length second = 0) ; + emplace_back second (Pasta_fq.of_int 1) ; + assert (length first = 1) ; + assert (length second = 1) ; + emplace_back first (Pasta_fq.of_int 10) ; + assert (length first = 2) ; + assert (length second = 1) ; + emplace_back first (Pasta_fq.of_int 30) ; + assert (length first = 3) ; + assert (length second = 1) ; + assert (Pasta_fq.equal (Pasta_fq.of_int 0) (get first 0)) ; + assert (Pasta_fq.equal (Pasta_fq.of_int 10) (get first 1)) ; + assert (Pasta_fq.equal (Pasta_fq.of_int 30) (get first 2)) ; + assert (Pasta_fq.equal (Pasta_fq.of_int 1) (get second 0)) + end ) + +let 
eq_affine ~field_equal x y = + match (x, y) with + | Infinity, Infinity -> + true + | Finite (x1, y1), Finite (x2, y2) -> + field_equal x1 x2 && field_equal y1 y2 + | _ -> + false + +let _ = + let open Pasta_pallas in + Js.export "pasta_pallas_test" + (object%js (_self) + method run = + let eq x y = eq_affine ~field_equal:Pasta_fp.equal x y in + let one_ = one () in + let two = add one_ one_ in + let infinity = sub one_ one_ in + let one_again = sub two one_ in + let neg_one = negate one_ in + let two_again = double one_ in + let two_again_ = scale one_ (Pasta_fq.of_int 2) in + let rand1 = random () in + let rand2 = rng 15 in + let affine_one = to_affine one_ in + let affine_two = to_affine two in + assert (not (eq affine_one affine_two)) ; + let affine_infinity = to_affine infinity in + assert (not (eq affine_one affine_infinity)) ; + assert (not (eq affine_two affine_infinity)) ; + assert (eq affine_infinity Infinity) ; + let affine_neg_one = to_affine neg_one in + assert (not (eq affine_one affine_neg_one)) ; + assert (not (eq affine_two affine_neg_one)) ; + assert (not (eq affine_infinity affine_neg_one)) ; + let affine_one_again = to_affine one_again in + assert (eq affine_one affine_one_again) ; + let affine_two_again = to_affine two_again in + assert (eq affine_two affine_two_again) ; + let affine_two_again_ = to_affine two_again_ in + assert (eq affine_two affine_two_again_) ; + let affine_rand1 = to_affine rand1 in + let affine_rand2 = to_affine rand2 in + let copy_using_of_affine_coordinates pt = + match pt with + | Infinity -> + of_affine Infinity + | Finite (x, y) -> + of_affine_coordinates x y + in + let rand1_again = copy_using_of_affine_coordinates affine_rand1 in + let rand2_again = copy_using_of_affine_coordinates affine_rand2 in + let affine_rand1_again = to_affine rand1_again in + let affine_rand2_again = to_affine rand2_again in + ( match + ( eq affine_rand1 affine_rand2 + , eq affine_rand1_again affine_rand2_again ) + with + | true, true | false, false -> + () + | _ -> + assert false ) ; + assert (eq affine_rand1 affine_rand1_again) ; + assert (eq affine_rand2 affine_rand2_again) ; + assert ( + eq + (to_affine (negate (sub rand1 rand2))) + (to_affine (sub rand2_again rand1_again)) ) ; + let endo_base = endo_base () in + assert ( + String.equal + (Pasta_fp.to_string endo_base) + "20444556541222657078399132219657928148671392403212669005631716460534733845831" ) ; + let endo_scalar = endo_scalar () in + assert ( + String.equal + (Pasta_fq.to_string endo_scalar) + "26005156700822196841419187675678338661165322343552424574062261873906994770353" ) ; + let one_copied = deep_copy affine_one in + assert (eq affine_one one_copied) ; + let infinity_copied = deep_copy affine_infinity in + assert (eq affine_infinity infinity_copied) ; + assert (eq affine_infinity Infinity) ; + let infinity_copied_ = deep_copy Infinity in + assert (eq infinity_copied_ Infinity) + end ) + +let _ = + let open Pasta_vesta in + Js.export "pasta_vesta_test" + (object%js (_self) + method run = + let eq x y = eq_affine ~field_equal:Pasta_fq.equal x y in + let one_ = one () in + let two = add one_ one_ in + let infinity = sub one_ one_ in + let one_again = sub two one_ in + let neg_one = negate one_ in + let two_again = double one_ in + let two_again_ = scale one_ (Pasta_fp.of_int 2) in + let rand1 = random () in + let rand2 = rng 15 in + let affine_one = to_affine one_ in + let affine_two = to_affine two in + assert (not (eq affine_one affine_two)) ; + let affine_infinity = to_affine infinity in + assert 
(not (eq affine_one affine_infinity)) ; + assert (not (eq affine_two affine_infinity)) ; + assert (eq affine_infinity Infinity) ; + let affine_neg_one = to_affine neg_one in + assert (not (eq affine_one affine_neg_one)) ; + assert (not (eq affine_two affine_neg_one)) ; + assert (not (eq affine_infinity affine_neg_one)) ; + let affine_one_again = to_affine one_again in + assert (eq affine_one affine_one_again) ; + let affine_two_again = to_affine two_again in + assert (eq affine_two affine_two_again) ; + let affine_two_again_ = to_affine two_again_ in + assert (eq affine_two affine_two_again_) ; + let affine_rand1 = to_affine rand1 in + let affine_rand2 = to_affine rand2 in + let copy_using_of_affine_coordinates pt = + match pt with + | Infinity -> + of_affine Infinity + | Finite (x, y) -> + of_affine_coordinates x y + in + let rand1_again = copy_using_of_affine_coordinates affine_rand1 in + let rand2_again = copy_using_of_affine_coordinates affine_rand2 in + let affine_rand1_again = to_affine rand1_again in + let affine_rand2_again = to_affine rand2_again in + ( match + ( eq affine_rand1 affine_rand2 + , eq affine_rand1_again affine_rand2_again ) + with + | true, true | false, false -> + () + | _ -> + assert false ) ; + assert (eq affine_rand1 affine_rand1_again) ; + assert (eq affine_rand2 affine_rand2_again) ; + assert ( + eq + (to_affine (negate (sub rand1 rand2))) + (to_affine (sub rand2_again rand1_again)) ) ; + let endo_base = endo_base () in + assert ( + String.equal + (Pasta_fq.to_string endo_base) + "2942865608506852014473558576493638302197734138389222805617480874486368177743" ) ; + let endo_scalar = endo_scalar () in + assert ( + String.equal + (Pasta_fp.to_string endo_scalar) + "8503465768106391777493614032514048814691664078728891710322960303815233784505" ) ; + let one_copied = deep_copy affine_one in + assert (eq affine_one one_copied) ; + let infinity_copied = deep_copy affine_infinity in + assert (eq affine_infinity infinity_copied) ; + assert (eq affine_infinity Infinity) ; + let infinity_copied_ = deep_copy Infinity in + assert (eq infinity_copied_ Infinity) + end ) + +let eq_poly_comm ~field_equal (x : _ poly_comm) (y : _ poly_comm) = + Array.for_all2 (eq_affine ~field_equal) x.unshifted y.unshifted + && Option.equal (eq_affine ~field_equal) x.shifted y.shifted + +module Backend = Kimchi_backend.Pasta.Pallas_based_plonk + +let () = Backend.Keypair.set_urs_info [] + +module Impl = + Snarky_backendless.Snark.Run.Make (Kimchi_backend.Pasta.Pallas_based_plonk) + +let _ = + Js.export "snarky_test" + (object%js (_self) + method run = + let log x = (Js.Unsafe.js_expr "console.log" : _ -> unit) x in + let time label f = + let start = new%js Js_of_ocaml.Js.date_now in + let x = f () in + let stop = new%js Js_of_ocaml.Js.date_now in + log + (Core_kernel.ksprintf Js.string "%s: %f seconds" label + ((stop##getTime -. start##getTime) /. 1000.) 
) ; + x + in + let open Impl in + let main x () = + let rec go i acc = + if i = 0 then acc else go (i - 1) (Field.mul acc acc) + in + let _ = go 1000 x in + () + in + let (pk : Backend.Keypair.t) = + time "generate_keypair" (fun () -> + constraint_system ~input_typ:Typ.field ~return_typ:Typ.unit main + |> Backend.Keypair.create ~prev_challenges:0 ) + in + let (_ : Backend.Keypair.t) = + time "generate_keypair2" (fun () -> + constraint_system ~input_typ:Typ.field ~return_typ:Typ.unit main + |> Backend.Keypair.create ~prev_challenges:0 ) + in + let x = Backend.Field.of_int 2 in + let (pi : Backend.Proof.with_public_evals) = + time "generate witness conv" (fun () -> + Impl.generate_witness_conv ~input_typ:Typ.field + ~return_typ:Typ.unit main + ~f:(fun { Proof_inputs.auxiliary_inputs; public_inputs } () -> + time "create proof" (fun () -> + Backend.Proof.create pk ~auxiliary:auxiliary_inputs + ~primary:public_inputs ) ) + x ) + in + let vk = Backend.Keypair.vk pk in + let vec = Backend.Field.Vector.create () in + Backend.Field.Vector.emplace_back vec x ; + assert (time "verify proof" (fun () -> Backend.Proof.verify pi vk vec)) + end ) + +let _ = + let open Pasta_fp_urs in + Js.export "pasta_fp_urs_test" + (object%js (_self) + method run = + (let time label f = + let start = new%js Js_of_ocaml.Js.date_now in + let x = f () in + let stop = new%js Js_of_ocaml.Js.date_now in + let log x = (Js.Unsafe.js_expr "console.log" : _ -> unit) x in + log + (Core_kernel.ksprintf Js.string "%s: %f seconds" label + ((stop##getTime -. start##getTime) /. 1000.) ) ; + x + in + let n = 131072 in + let log_n = Core_kernel.Int.ceil_log2 n in + let urs = time "create" (fun () -> create n) in + let inputs = + time "inputs" (fun () -> Array.init n (fun i -> Pasta_fp.of_int i)) + in + let _ = time "commit" (fun () -> commit_evaluations urs n inputs) in + let _ = + let xs = Array.init log_n (fun _ -> Pasta_fp.random ()) in + time "b_poly" (fun () -> b_poly_commitment urs xs) + in + () ) ; + let eq_affine x y = eq_affine ~field_equal:Pasta_fq.equal x y in + let eq = eq_poly_comm ~field_equal:Pasta_fq.equal in + let first = create 10 in + let second = create 16 in + let lcomm1 = lagrange_commitment first 8 0 in + let lcomm1_again = lagrange_commitment second 8 0 in + assert (eq lcomm1 lcomm1_again) ; + let inputs = Pasta_fp.[| of_int 1; of_int 2; of_int 3; of_int 4 |] in + let commits = commit_evaluations second 8 inputs in + let commits_again = commit_evaluations second 8 inputs in + assert (eq commits commits_again) ; + let inputs2 = Array.init 64 Pasta_fp.of_int in + let affines = + Array.init 16 (fun i -> + try lcomm1.unshifted.(i) + with _ -> Pasta_vesta.random () |> Pasta_vesta.to_affine ) + in + let res = batch_accumulator_check second affines inputs2 in + assert (res || not res) ; + let h_first = urs_h first in + let h_second = urs_h second in + let h_first_again = Pasta_vesta.deep_copy h_first in + let h_second_again = Pasta_vesta.deep_copy h_second in + assert (eq_affine h_first h_first_again) ; + assert (eq_affine h_second h_second_again) + end ) + +let _ = + let open Pasta_fq_urs in + Js.export "pasta_fq_urs_test" + (object%js (_self) + method run = + let eq_affine x y = eq_affine ~field_equal:Pasta_fp.equal x y in + let eq = eq_poly_comm ~field_equal:Pasta_fp.equal in + let first = create 10 in + let second = create 16 in + let lcomm1 = lagrange_commitment first 8 0 in + let lcomm1_again = lagrange_commitment second 8 0 in + assert (eq lcomm1 lcomm1_again) ; + let inputs = Pasta_fq.[| of_int 1; of_int 2; 
of_int 3; of_int 4 |] in + let commits = commit_evaluations second 8 inputs in + let commits_again = commit_evaluations second 8 inputs in + assert (eq commits commits_again) ; + let inputs2 = Array.init 64 Pasta_fq.of_int in + let affines = + Array.init 16 (fun i -> + try lcomm1.unshifted.(i) + with _ -> Pasta_pallas.random () |> Pasta_pallas.to_affine ) + in + let res = batch_accumulator_check second affines inputs2 in + assert (res || not res) ; + let h_first = urs_h first in + let h_second = urs_h second in + let h_first_again = Pasta_pallas.deep_copy h_first in + let h_second_again = Pasta_pallas.deep_copy h_second in + assert (eq_affine h_first h_first_again) ; + assert (eq_affine h_second h_second_again) + end ) + +let mk_wires typ i (r1, c1) (r2, c2) (r3, c3) coeffs : _ circuit_gate = + { typ + ; wires = + ( { row = r1; col = c1 } + , { row = r2; col = c2 } + , { row = r3; col = c3 } + , { row = i; col = 3 } + , { row = i; col = 4 } + , { row = i; col = 5 } + , { row = i; col = 6 } ) + ; coeffs + } + +let _ = + let open Protocol.Gates.Vector.Fp in + Js.export "pasta_fp_gate_vector_test" + (object%js (_self) + method run = + let vec1 = create () in + let vec2 = create () in + let eq { typ = kind1; wires = wires1; coeffs = c1 } + { typ = kind2; wires = wires2; coeffs = c2 } = + kind1 = kind2 && wires1 = wires2 + && try Array.for_all2 Pasta_fp.equal c1 c2 with _ -> false + in + let assert_eq_or_log ?extra ~loc x y = + if not (eq x y) then ( + let log x = (Js.Unsafe.js_expr "console.log" : _ -> unit) x in + log loc ; + Option.iter log extra ; + log x ; + log y ; + assert false ) + in + let rand_fields i = Array.init i Pasta_fp.rng in + let zero = mk_wires Zero 0 (0, 0) (0, 1) (0, 2) (rand_fields 0) in + let generic = + mk_wires Generic 1 (1, 0) (1, 1) (1, 2) (rand_fields 1) + in + let add1 = + mk_wires CompleteAdd 1 (1, 0) (1, 1) (1, 2) (rand_fields 2) + in + let add2 = + mk_wires CompleteAdd 2 (2, 0) (2, 1) (2, 2) (rand_fields 3) + in + let vbmul1 = + mk_wires VarBaseMul 3 (3, 0) (3, 1) (3, 2) (rand_fields 5) + in + let vbmul2 = + mk_wires VarBaseMul 4 (4, 0) (4, 1) (4, 2) (rand_fields 10) + in + let vbmul3 = + mk_wires VarBaseMul 5 (5, 0) (5, 1) (5, 2) (rand_fields 20) + in + let endomul1 = + mk_wires EndoMul 6 (6, 0) (6, 1) (6, 2) (rand_fields 30) + in + let endomul2 = + mk_wires EndoMul 7 (7, 0) (7, 1) (7, 2) (rand_fields 31) + in + let endomul3 = + mk_wires EndoMul 8 (8, 0) (8, 1) (8, 2) (rand_fields 32) + in + let endomul4 = + mk_wires EndoMul 9 (9, 0) (9, 1) (9, 2) (rand_fields 33) + in + let poseidon = + mk_wires Poseidon 10 (10, 0) (10, 1) (10, 2) (rand_fields 34) + in + let all = + [ zero + ; generic + ; add1 + ; add2 + ; vbmul1 + ; vbmul2 + ; vbmul3 + ; endomul1 + ; endomul2 + ; endomul3 + ; endomul4 + ; poseidon + ] + in + let test_vec vec = + List.iter (add vec) all ; + List.iteri + (fun i x -> assert_eq_or_log ~extra:i ~loc:__LOC__ x (get vec i)) + all ; + let l, r, o, _, _, _, _ = zero.wires in + wrap vec l r ; + assert_eq_or_log ~loc:__LOC__ (get vec 0) + (mk_wires Zero 0 (0, 1) (0, 1) (0, 2) zero.coeffs) ; + wrap vec o l ; + assert_eq_or_log ~loc:__LOC__ (get vec 0) + (mk_wires Zero 0 (0, 1) (0, 1) (0, 0) zero.coeffs) + in + test_vec vec1 ; test_vec vec2 + end ) + +let _ = + let open Protocol.Gates.Vector.Fq in + Js.export "pasta_fq_gate_vector_test" + (object%js (_self) + method run = + let vec1 = create () in + let vec2 = create () in + let eq { typ = kind1; wires = wires1; coeffs = c1 } + { typ = kind2; wires = wires2; coeffs = c2 } = + kind1 = kind2 && wires1 = 
wires2 + && try Array.for_all2 Pasta_fq.equal c1 c2 with _ -> false + in + let rand_fields i = Array.init i Pasta_fq.rng in + let zero = mk_wires Zero 0 (0, 0) (0, 1) (0, 2) (rand_fields 0) in + let generic = + mk_wires Generic 1 (1, 0) (1, 1) (1, 2) (rand_fields 1) + in + let add1 = mk_wires Poseidon 1 (1, 0) (1, 1) (1, 2) (rand_fields 2) in + let add2 = + mk_wires CompleteAdd 2 (2, 0) (2, 1) (2, 2) (rand_fields 3) + in + let vbmul1 = + mk_wires VarBaseMul 3 (3, 0) (3, 1) (3, 2) (rand_fields 5) + in + let vbmul2 = + mk_wires EndoMul 4 (4, 0) (4, 1) (4, 2) (rand_fields 10) + in + let vbmul3 = + mk_wires EndoMulScalar 5 (5, 0) (5, 1) (5, 2) (rand_fields 20) + in + let all = [ zero; generic; add1; add2; vbmul1; vbmul2; vbmul3 ] in + let test_vec vec = + List.iter (add vec) all ; + List.iteri (fun i x -> assert (eq x (get vec i))) all ; + let l, r, o, _, _, _, _ = zero.wires in + wrap vec l r ; + assert ( + eq (get vec 0) (mk_wires Zero 0 (0, 1) (0, 1) (0, 2) zero.coeffs) ) ; + wrap vec o l ; + assert ( + eq (get vec 0) (mk_wires Zero 0 (0, 1) (0, 1) (0, 0) zero.coeffs) ) + in + test_vec vec1 ; test_vec vec2 + end ) + +let _ = + let open Pasta_fp_index in + Js.export "pasta_fp_index_test" + (object%js (_self) + method run = + let gate_vector = + let open Protocol.Gates.Vector.Fp in + let vec = create () in + let fields = Array.map Pasta_fp.of_int in + let zero = mk_wires Zero 0 (0, 0) (0, 1) (0, 2) (fields [||]) in + let generic = + mk_wires Generic 1 (1, 0) (1, 1) (1, 2) + (fields [| 0; 0; 0; 0; 0 |]) + in + let add1 = mk_wires Poseidon 1 (1, 0) (1, 1) (1, 2) (fields [||]) in + let add2 = + mk_wires CompleteAdd 2 (2, 0) (2, 1) (2, 2) (fields [||]) + in + let vbmul1 = + mk_wires VarBaseMul 3 (3, 0) (3, 1) (3, 2) (fields [||]) + in + let vbmul2 = mk_wires EndoMul 4 (4, 0) (4, 1) (4, 2) (fields [||]) in + let vbmul3 = + mk_wires EndoMulScalar 5 (5, 0) (5, 1) (5, 2) (fields [||]) + in + let all = [ zero; generic; add1; add2; vbmul1; vbmul2; vbmul3 ] in + List.iter (add vec) all ; + vec + in + let urs = Pasta_fp_urs.create 16 in + (* TODO(dw) write tests with lookup tables *) + let lookup_tables = [||] in + (* TODO(dw) write tests with runtime tables *) + let runtime_table_cfg = [||] in + let index0 = + create gate_vector 0 lookup_tables runtime_table_cfg 0 urs + in + let index2 = + create gate_vector 2 lookup_tables runtime_table_cfg 0 urs + in + assert (max_degree index0 = 16) ; + assert (max_degree index2 = 16) ; + assert (public_inputs index0 = 0) ; + assert (public_inputs index2 = 2) ; + assert (domain_d1_size index0 = 16) ; + assert (domain_d1_size index2 = 16) ; + assert (domain_d4_size index0 = 64) ; + assert (domain_d4_size index2 = 64) ; + assert (domain_d8_size index0 = 128) ; + assert (domain_d8_size index2 = 128) + end ) + +let _ = + let open Pasta_fq_index in + Js.export "pasta_fq_index_test" + (object%js (_self) + method run = + let gate_vector = + let open Protocol.Gates.Vector.Fq in + let vec = create () in + let rand_fields i = Array.init i Pasta_fq.rng in + let zero = mk_wires Zero 0 (0, 0) (0, 1) (0, 2) (rand_fields 0) in + let generic = + mk_wires Generic 1 (1, 0) (1, 1) (1, 2) (rand_fields 1) + in + let add1 = + mk_wires Poseidon 1 (1, 0) (1, 1) (1, 2) (rand_fields 2) + in + let add2 = + mk_wires CompleteAdd 2 (2, 0) (2, 1) (2, 2) (rand_fields 3) + in + let vbmul1 = + mk_wires VarBaseMul 3 (3, 0) (3, 1) (3, 2) (rand_fields 5) + in + let vbmul2 = + mk_wires EndoMul 4 (4, 0) (4, 1) (4, 2) (rand_fields 10) + in + let vbmul3 = + mk_wires EndoMulScalar 5 (5, 0) (5, 1) (5, 2) 
(rand_fields 20) + in + let all = [ zero; generic; add1; add2; vbmul1; vbmul2; vbmul3 ] in + List.iter (add vec) all ; + vec + in + let urs = Pasta_fq_urs.create 16 in + (* TODO(dw) write tests with lookup tables *) + let lookup_tables = [||] in + (* TODO(dw) write tests with runtime tables *) + let runtime_table_cfg = [||] in + let index0 = + create gate_vector 0 lookup_tables runtime_table_cfg 0 urs + in + let index2 = + create gate_vector 2 lookup_tables runtime_table_cfg 0 urs + in + assert (max_degree index0 = 16) ; + assert (max_degree index2 = 16) ; + assert (public_inputs index0 = 0) ; + assert (public_inputs index2 = 2) ; + assert (domain_d1_size index0 = 16) ; + assert (domain_d1_size index2 = 16) ; + assert (domain_d4_size index0 = 64) ; + assert (domain_d4_size index2 = 64) ; + assert (domain_d8_size index0 = 128) ; + assert (domain_d8_size index2 = 128) + end ) + +let eq_verification_shifts ~field_equal l r = Array.for_all2 field_equal l r + +let verification_evals_to_list + { VerifierIndex.sigma_comm : 'PolyComm array + ; coefficients_comm : 'PolyComm array + ; generic_comm : 'PolyComm + ; psm_comm : 'PolyComm + ; complete_add_comm : 'PolyComm + ; mul_comm : 'PolyComm + ; emul_comm : 'PolyComm + ; endomul_scalar_comm : 'PolyComm + ; xor_comm : 'PolyComm option + ; range_check0_comm : 'PolyComm option + ; range_check1_comm : 'PolyComm option + ; foreign_field_add_comm : 'PolyComm option + ; foreign_field_mul_comm : 'PolyComm option + ; rot_comm : 'PolyComm option + } = + let non_opt_comms = + generic_comm :: psm_comm :: complete_add_comm :: mul_comm :: emul_comm + :: endomul_scalar_comm + :: (Array.append sigma_comm coefficients_comm |> Array.to_list) + in + let opt_comms = + [ xor_comm + ; range_check0_comm + ; range_check1_comm + ; foreign_field_add_comm + ; foreign_field_mul_comm + ; rot_comm + ] + in + List.map Option.some non_opt_comms @ opt_comms + +let eq_verifier_index ~field_equal ~other_field_equal + { VerifierIndex.domain = { log_size_of_group = i1_1; group_gen = f1 } + ; max_poly_size = i1_2 + ; srs = _ + ; evals = evals1 + ; shifts = shifts1 + ; lookup_index = _ + ; public = public1 + ; prev_challenges = prev_challenges1 + } + { VerifierIndex.domain = { log_size_of_group = i2_1; group_gen = f2 } + ; max_poly_size = i2_2 + ; srs = _ + ; evals = evals2 + ; shifts = shifts2 + ; lookup_index = _ + ; public = public2 + ; prev_challenges = prev_challenges2 + } = + i1_1 = i2_1 && field_equal f1 f2 && i1_2 = i2_2 + && List.for_all2 + (fun x y -> + match (x, y) with + | Some x, Some y -> + eq_poly_comm ~field_equal:other_field_equal x y + | None, None -> + true + | _, _ -> + false ) + (verification_evals_to_list evals1) + (verification_evals_to_list evals2) + && eq_verification_shifts ~field_equal shifts1 shifts2 + && public1 = public2 + && prev_challenges1 = prev_challenges2 + +let _ = + let open Pasta_fp_verifier_index in + Js.export "pasta_fp_verifier_index_test" + (object%js (_self) + method run = + let gate_vector = + let open Protocol.Gates.Vector.Fp in + let vec = create () in + let fields = Array.map Pasta_fp.of_int in + let zero = mk_wires Zero 0 (0, 0) (0, 1) (0, 2) (fields [||]) in + let generic = + mk_wires Generic 1 (1, 0) (1, 1) (1, 2) + (fields [| 0; 0; 0; 0; 0 |]) + in + let add1 = mk_wires Poseidon 1 (1, 0) (1, 1) (1, 2) (fields [||]) in + let add2 = + mk_wires CompleteAdd 2 (2, 0) (2, 1) (2, 2) (fields [||]) + in + let vbmul1 = + mk_wires VarBaseMul 3 (3, 0) (3, 1) (3, 2) (fields [||]) + in + let vbmul2 = mk_wires EndoMul 4 (4, 0) (4, 1) (4, 2) (fields 
[||]) in + let vbmul3 = + mk_wires EndoMulScalar 5 (5, 0) (5, 1) (5, 2) (fields [||]) + in + let all = [ zero; generic; add1; add2; vbmul1; vbmul2; vbmul3 ] in + List.iter (add vec) all ; + vec + in + let eq = + eq_verifier_index ~field_equal:Pasta_fp.equal + ~other_field_equal:Pasta_fq.equal + in + let urs = Pasta_fp_urs.create 16 in + (* TODO(dw) write tests with lookup tables *) + let lookup_tables = [||] in + (* TODO(dw) write tests with runtime tables *) + let runtime_table_cfg = [||] in + let index0 = + Pasta_fp_index.create gate_vector 0 lookup_tables runtime_table_cfg 0 + urs + in + let index2 = + Pasta_fp_index.create gate_vector 2 lookup_tables runtime_table_cfg 0 + urs + in + let vindex0_0 = create index0 in + let vindex0_1 = create index0 in + assert (eq vindex0_0 vindex0_1) ; + let vindex2_0 = create index2 in + let vindex2_1 = create index2 in + assert (eq vindex2_0 vindex2_1) ; + let dummy0 = dummy () in + let dummy1 = dummy () in + assert (eq dummy0 dummy1) ; + List.iter + (fun x -> assert (eq (deep_copy x) x)) + [ vindex0_0; vindex2_0; dummy0 ] + end ) + +let _ = + let open Pasta_fq_verifier_index in + Js.export "pasta_fq_verifier_index_test" + (object%js (_self) + method run = + let gate_vector = + let open Protocol.Gates.Vector.Fq in + let vec = create () in + let fields = Array.map Pasta_fq.of_int in + let zero = mk_wires Zero 0 (0, 0) (0, 1) (0, 2) (fields [||]) in + let generic = + mk_wires Generic 1 (1, 0) (1, 1) (1, 2) + (fields [| 0; 0; 0; 0; 0 |]) + in + let add1 = mk_wires Poseidon 1 (1, 0) (1, 1) (1, 2) (fields [||]) in + let add2 = + mk_wires CompleteAdd 2 (2, 0) (2, 1) (2, 2) (fields [||]) + in + let vbmul1 = + mk_wires VarBaseMul 3 (3, 0) (3, 1) (3, 2) (fields [||]) + in + let vbmul2 = mk_wires EndoMul 4 (4, 0) (4, 1) (4, 2) (fields [||]) in + let vbmul3 = + mk_wires EndoMulScalar 5 (5, 0) (5, 1) (5, 2) (fields [||]) + in + let all = [ zero; generic; add1; add2; vbmul1; vbmul2; vbmul3 ] in + List.iter (add vec) all ; + vec + in + let eq = + eq_verifier_index ~field_equal:Pasta_fq.equal + ~other_field_equal:Pasta_fp.equal + in + let urs = Pasta_fq_urs.create 16 in + (* TODO(dw) write tests with lookup tables *) + let lookup_tables = [||] in + (* TODO(dw) write tests with runtime tables *) + let runtime_table_cfg = [||] in + let index0 = + Pasta_fq_index.create gate_vector 0 lookup_tables runtime_table_cfg 0 + urs + in + let index2 = + Pasta_fq_index.create gate_vector 2 lookup_tables runtime_table_cfg 0 + urs + in + let vindex0_0 = create index0 in + let vindex0_1 = create index0 in + assert (eq vindex0_0 vindex0_1) ; + let vindex2_0 = create index2 in + let vindex2_1 = create index2 in + assert (eq vindex2_0 vindex2_1) ; + let dummy0 = dummy () in + let dummy1 = dummy () in + assert (eq dummy0 dummy1) ; + List.iter + (fun x -> assert (eq (deep_copy x) x)) + [ vindex0_0; vindex2_0; dummy0 ] + end ) + +let linkme = () diff --git a/src/lib/crypto/kimchi_bindings/js/test/dune b/src/lib/crypto/kimchi_bindings/js/test/dune new file mode 100644 index 00000000000..4403d1885ad --- /dev/null +++ b/src/lib/crypto/kimchi_bindings/js/test/dune @@ -0,0 +1,25 @@ +(library + (name bindings_js_test) + (js_of_ocaml (flags +toplevel.js +dynlink.js)) + (libraries + base + bindings_js + core_kernel + digestif.ocaml + digestif + integers_stubs_js + js_of_ocaml + kimchi_bindings + kimchi_types + pasta_bindings + kimchi_backend + kimchi_backend.common + kimchi_pasta + kimchi_backend.pasta.basic + kimchi_pasta_constraint_system + mina_metrics.none + run_in_thread.fake + 
snarky.backendless + snarky.intf) + (instrumentation (backend bisect_ppx)) + (preprocess (pps ppx_version js_of_ocaml-ppx))) diff --git a/src/lib/crypto/kimchi_bindings/js/test/nodejs/copy_over.sh b/src/lib/crypto/kimchi_bindings/js/test/nodejs/copy_over.sh new file mode 100755 index 00000000000..e51dc000eb7 --- /dev/null +++ b/src/lib/crypto/kimchi_bindings/js/test/nodejs/copy_over.sh @@ -0,0 +1,4 @@ +#!/bin/bash +chmod -R +w _build/default/src/lib/crypto/kimchi_bindings/js/test/nodejs +cp -t _build/default/src/lib/crypto/kimchi_bindings/js/test/nodejs _build/default/src/lib/crypto/kimchi_bindings/js/node_js/plonk_wasm* +nodejs --experimental-wasm-modules --experimental-modules --experimental-wasm-threads -i -r ./_build/default/src/lib/crypto/kimchi_bindings/js/test/nodejs/nodejs_test.bc.js -e "var bindings = require('./_build/default/src/lib/crypto/kimchi_bindings/js/test/nodejs/nodejs_test.bc.js'); console.log('Bindings attached to global variable \\'bindings\\'')" diff --git a/src/lib/crypto/kimchi_bindings/js/test/nodejs/dune b/src/lib/crypto/kimchi_bindings/js/test/nodejs/dune new file mode 100644 index 00000000000..e65a59b351b --- /dev/null +++ b/src/lib/crypto/kimchi_bindings/js/test/nodejs/dune @@ -0,0 +1,21 @@ +(executable + (name nodejs_test) + (modes js) + (js_of_ocaml + (flags +toplevel.js +dynlink.js)) + (libraries + bindings_js_test + kimchi_bindings + kimchi_types + pasta_bindings + js_of_ocaml + bindings_js + node_backend + logger.fake + pasta_bindings.backend.none) + (link_deps ../../node_js/plonk_wasm.js ../../node_js/plonk_wasm_bg.wasm) + (instrumentation + (backend bisect_ppx)) + (forbidden_libraries core async) + (preprocess + (pps ppx_version js_of_ocaml-ppx))) diff --git a/src/lib/crypto/kimchi_bindings/js/test/nodejs/nodejs_test.ml b/src/lib/crypto/kimchi_bindings/js/test/nodejs/nodejs_test.ml new file mode 100644 index 00000000000..9e2dc1fba1e --- /dev/null +++ b/src/lib/crypto/kimchi_bindings/js/test/nodejs/nodejs_test.ml @@ -0,0 +1 @@ +let () = Bindings_js_test.linkme diff --git a/src/lib/crypto/kimchi_bindings/js/test/web/copy_over.sh b/src/lib/crypto/kimchi_bindings/js/test/web/copy_over.sh new file mode 100755 index 00000000000..1ee3b111d5b --- /dev/null +++ b/src/lib/crypto/kimchi_bindings/js/test/web/copy_over.sh @@ -0,0 +1,5 @@ +#!/bin/bash +chmod -R +w _build/default/src/lib/marlin_plonk_bindings/js/test/web +cp -r -t _build/default/src/lib/marlin_plonk_bindings/js/test/web _build/default/src/lib/marlin_plonk_bindings/js/web/plonk_wasm* _build/default/src/lib/marlin_plonk_bindings/js/web/snippets +cp -t _build/default/src/lib/marlin_plonk_bindings/js/test/web src/lib/marlin_plonk_bindings/js/test/web/{server.py,index.html} +(cd _build/default/src/lib/marlin_plonk_bindings/js/test/web/; ./server.py) diff --git a/src/lib/crypto/kimchi_bindings/js/test/web/dune b/src/lib/crypto/kimchi_bindings/js/test/web/dune new file mode 100644 index 00000000000..f57a005f242 --- /dev/null +++ b/src/lib/crypto/kimchi_bindings/js/test/web/dune @@ -0,0 +1,18 @@ +(executable + (name web_test) + (modes js) + (js_of_ocaml + (flags +toplevel.js +dynlink.js)) + (libraries + bindings_js_test + kimchi_bindings + js_of_ocaml + bindings_js + web_backend + logger.fake + pasta_bindings.backend.none) + (link_deps ../../web/plonk_wasm.js ../../web/plonk_wasm_bg.wasm) + (instrumentation + (backend bisect_ppx)) + (preprocess + (pps ppx_version js_of_ocaml-ppx))) diff --git a/src/lib/crypto/kimchi_bindings/js/test/web/index.html b/src/lib/crypto/kimchi_bindings/js/test/web/index.html new 
file mode 100644 index 00000000000..7716a125c9e --- /dev/null +++ b/src/lib/crypto/kimchi_bindings/js/test/web/index.html @@ -0,0 +1,75 @@ +<!-- hello-wasm example: 75-line test page; only the title is recoverable here, the markup itself is not reproduced -->
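Only the title of this page survives, but the rest of the PR pins down what it has to do: `web_backend.js` resolves the bindings through `globalThis.plonk_wasm`, the web `copy_over.sh` copies `plonk_wasm*` and the wasm-pack `snippets` next to the page, and `server.py` serves it with the cross-origin-isolation headers the threaded build needs. A minimal, hypothetical loader script in that spirit (the file names `plonk_wasm.js` and `web_test.bc.js` come from this PR; the default `init` entry point is assumed from wasm-pack's `--target web` output; this is a sketch, not the shipped `index.html`):

```js
// Hypothetical module script for the web test page; a sketch, not the file added by this PR.
import init, * as plonk_wasm from './plonk_wasm.js';

// Instantiate the wasm-pack output first.
await init();

// web_backend.js looks the bindings up on this global, so publish them before
// the js_of_ocaml bundle starts running.
globalThis.plonk_wasm = plonk_wasm;

// Then load the compiled test executable produced by the test/web dune rules.
const script = document.createElement('script');
script.src = './web_test.bc.js';
document.body.appendChild(script);
```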
+ + + + + diff --git a/src/lib/crypto/kimchi_bindings/js/test/web/server.py b/src/lib/crypto/kimchi_bindings/js/test/web/server.py new file mode 100755 index 00000000000..02902b68651 --- /dev/null +++ b/src/lib/crypto/kimchi_bindings/js/test/web/server.py @@ -0,0 +1,12 @@ +#!/usr/bin/env python3 +from http.server import HTTPServer, SimpleHTTPRequestHandler, test +import sys + +class CORSRequestHandler (SimpleHTTPRequestHandler): + def end_headers (self): + self.send_header('Cross-Origin-Embedder-Policy', 'require-corp') + self.send_header('Cross-Origin-Opener-Policy', 'same-origin') + SimpleHTTPRequestHandler.end_headers(self) + +if __name__ == '__main__': + test(CORSRequestHandler, HTTPServer, port=int(sys.argv[1]) if len(sys.argv) > 1 else 8000) diff --git a/src/lib/crypto/kimchi_bindings/js/test/web/web_test.ml b/src/lib/crypto/kimchi_bindings/js/test/web/web_test.ml new file mode 100644 index 00000000000..9e2dc1fba1e --- /dev/null +++ b/src/lib/crypto/kimchi_bindings/js/test/web/web_test.ml @@ -0,0 +1 @@ +let () = Bindings_js_test.linkme diff --git a/src/lib/crypto/kimchi_bindings/js/web/build.sh b/src/lib/crypto/kimchi_bindings/js/web/build.sh new file mode 100755 index 00000000000..f99564ef81d --- /dev/null +++ b/src/lib/crypto/kimchi_bindings/js/web/build.sh @@ -0,0 +1,10 @@ +#!/usr/bin/env bash +set -euo pipefail + +if [[ -z "${PLONK_WASM_WEB-}" ]]; then + export RUSTFLAGS="-C target-feature=+atomics,+bulk-memory,+mutable-globals -C link-arg=--no-check-features -C link-arg=--max-memory=4294967296" + # The version should stay in line with the one in kimchi_bindings/wasm/rust-toolchain.toml + rustup run nightly-2023-09-01 wasm-pack build --target web --out-dir ../js/web ../../wasm -- -Z build-std=panic_abort,std +else + cp "$PLONK_WASM_WEB"/* -R . +fi diff --git a/src/lib/crypto/kimchi_bindings/js/web/dune b/src/lib/crypto/kimchi_bindings/js/web/dune new file mode 100644 index 00000000000..bcc52fe55d0 --- /dev/null +++ b/src/lib/crypto/kimchi_bindings/js/web/dune @@ -0,0 +1,34 @@ +(library + (name web_backend) + (public_name bindings_js.web_backend) + (js_of_ocaml + (flags + (:include flags.sexp)) + (javascript_files web_backend.js)) + (instrumentation + (backend bisect_ppx)) + (preprocess + (pps ppx_version js_of_ocaml-ppx))) + +(rule + (targets + plonk_wasm_bg.wasm.d.ts + plonk_wasm_bg.wasm + plonk_wasm.d.ts + plonk_wasm.js + flags.sexp) + (deps + ../../wasm/Cargo.toml + ../../wasm/Cargo.lock + (source_tree ../../wasm/src) + (source_tree ../../wasm/.cargo/config) + (source_tree ../../../proof-systems)) + (locks /cargo-lock) ; lock for rustup + (action + (progn + (run chmod -R +w ../../wasm .) 
+ (setenv + CARGO_TARGET_DIR + "%{read:../../../kimchi_bindings/stubs/dune-build-root}/cargo_kimchi_wasm" + (run ./build.sh)) + (write-file flags.sexp "()")))) diff --git a/src/lib/crypto/kimchi_bindings/js/web/web_backend.js b/src/lib/crypto/kimchi_bindings/js/web/web_backend.js new file mode 100644 index 00000000000..53db813af7f --- /dev/null +++ b/src/lib/crypto/kimchi_bindings/js/web/web_backend.js @@ -0,0 +1,2 @@ +// Provides: plonk_wasm +var plonk_wasm = globalThis.plonk_wasm; diff --git a/src/lib/crypto/kimchi_bindings/wasm/.cargo/config b/src/lib/crypto/kimchi_bindings/wasm/.cargo/config new file mode 100644 index 00000000000..9600999e0cb --- /dev/null +++ b/src/lib/crypto/kimchi_bindings/wasm/.cargo/config @@ -0,0 +1,8 @@ +[build] +target = "wasm32-unknown-unknown" + +[target.wasm32-unknown-unknown] +rustflags = ["-C", "target-feature=+atomics,+bulk-memory", "-C", "link-arg=--no-check-features"] + +[unstable] +build-std = ["panic_abort", "std"] diff --git a/src/lib/crypto/kimchi_bindings/wasm/.gitignore b/src/lib/crypto/kimchi_bindings/wasm/.gitignore new file mode 100644 index 00000000000..eb5a316cbd1 --- /dev/null +++ b/src/lib/crypto/kimchi_bindings/wasm/.gitignore @@ -0,0 +1 @@ +target diff --git a/src/lib/crypto/kimchi_bindings/wasm/Cargo.lock b/src/lib/crypto/kimchi_bindings/wasm/Cargo.lock new file mode 100644 index 00000000000..4d70407e2e3 --- /dev/null +++ b/src/lib/crypto/kimchi_bindings/wasm/Cargo.lock @@ -0,0 +1,1475 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. +version = 3 + +[[package]] +name = "ahash" +version = "0.7.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fcb51a0695d8f838b1ee009b3fbf66bda078cd64590202a864a8f3e8c4315c47" +dependencies = [ + "getrandom", + "once_cell", + "version_check", +] + +[[package]] +name = "alga" +version = "0.9.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4f823d037a7ec6ea2197046bafd4ae150e6bc36f9ca347404f46a46823fa84f2" +dependencies = [ + "approx", + "num-complex 0.2.4", + "num-traits", +] + +[[package]] +name = "android-tzdata" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e999941b234f3131b00bc13c22d06e8c5ff726d1b6318ac7eb276997bbb4fef0" + +[[package]] +name = "android_system_properties" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "819e7219dbd41043ac279b19830f2efc897156490d7fd6ea916720117ee66311" +dependencies = [ + "libc", +] + +[[package]] +name = "approx" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f0e60b75072ecd4168020818c0107f2857bb6c4e64252d8d3983f6263b40a5c3" +dependencies = [ + "num-traits", +] + +[[package]] +name = "ark-ec" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dea978406c4b1ca13c2db2373b05cc55429c3575b8b21f1b9ee859aa5b03dd42" +dependencies = [ + "ark-ff", + "ark-serialize", + "ark-std", + "derivative", + "num-traits", + "rayon", + "zeroize", +] + +[[package]] +name = "ark-ff" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6b3235cc41ee7a12aaaf2c575a2ad7b46713a8a50bda2fc3b003a04845c05dd6" +dependencies = [ + "ark-ff-asm", + "ark-ff-macros", + "ark-serialize", + "ark-std", + "derivative", + "num-bigint", + "num-traits", + "paste", + "rayon", + "rustc_version", + "zeroize", +] + +[[package]] +name = "ark-ff-asm" +version = "0.3.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "db02d390bf6643fb404d3d22d31aee1c4bc4459600aef9113833d17e786c6e44" +dependencies = [ + "quote", + "syn 1.0.109", +] + +[[package]] +name = "ark-ff-macros" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "db2fd794a08ccb318058009eefdf15bcaaaaf6f8161eb3345f907222bac38b20" +dependencies = [ + "num-bigint", + "num-traits", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "ark-poly" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7b0f78f47537c2f15706db7e98fe64cc1711dbf9def81218194e17239e53e5aa" +dependencies = [ + "ark-ff", + "ark-serialize", + "ark-std", + "derivative", + "hashbrown 0.11.2", + "rayon", +] + +[[package]] +name = "ark-serialize" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1d6c2b318ee6e10f8c2853e73a83adc0ccb88995aa978d8a3408d492ab2ee671" +dependencies = [ + "ark-serialize-derive", + "ark-std", + "digest 0.9.0", +] + +[[package]] +name = "ark-serialize-derive" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8dd4e5f0bf8285d5ed538d27fab7411f3e297908fd93c62195de8bee3f199e82" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "ark-std" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1df2c09229cbc5a028b1d70e00fdb2acee28b1055dfb5ca73eea49c5a25c4e7c" +dependencies = [ + "num-traits", + "rand", + "rayon", +] + +[[package]] +name = "array-init" +version = "2.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3d62b7694a562cdf5a74227903507c56ab2cc8bdd1f781ed5cb4cf9c9f810bfc" + +[[package]] +name = "autocfg" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa" + +[[package]] +name = "base64" +version = "0.13.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9e1b586273c5702936fe7b7d6896644d8be71e6314cfe09d3167c95f712589e8" + +[[package]] +name = "bcs" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4bd3ffe8b19a604421a5d461d4a70346223e535903fbc3067138bddbebddcf77" +dependencies = [ + "serde", + "thiserror", +] + +[[package]] +name = "blake2" +version = "0.10.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "46502ad458c9a52b69d4d4d32775c788b7a1b85e8bc9d482d92250fc0e3f8efe" +dependencies = [ + "digest 0.10.7", +] + +[[package]] +name = "block-buffer" +version = "0.10.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3078c7629b62d3f0439517fa394996acacc5cbc91c5a20d8c658e77abd503a71" +dependencies = [ + "generic-array", +] + +[[package]] +name = "bumpalo" +version = "3.14.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f30e7476521f6f8af1a1c4c0b8cc94f0bee37d91763d0ca2665f299b6cd8aec" + +[[package]] +name = "byteorder" +version = "1.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "14c189c53d098945499cdfa7ecc63567cf3886b3332b312a5b4585d8d3a6a610" + +[[package]] +name = "cc" +version = "1.0.83" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f1174fb0b6ec23863f8b971027804a42614e347eafb0a95bf0b12cdae21fc4d0" +dependencies = [ + "libc", +] + +[[package]] +name = 
"cfg-if" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" + +[[package]] +name = "chrono" +version = "0.4.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f2c685bad3eb3d45a01354cedb7d5faa66194d1d58ba6e267a8de788f79db38" +dependencies = [ + "android-tzdata", + "iana-time-zone", + "num-traits", + "serde", + "windows-targets", +] + +[[package]] +name = "console_error_panic_hook" +version = "0.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a06aeb73f470f66dcdbf7223caeebb85984942f22f1adb2a088cf9668146bbbc" +dependencies = [ + "cfg-if", + "wasm-bindgen", +] + +[[package]] +name = "core-foundation-sys" +version = "0.8.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e496a50fda8aacccc86d7529e2c1e0892dbd0f898a6b5645b5561b89c3210efa" + +[[package]] +name = "cpufeatures" +version = "0.2.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a17b76ff3a4162b0b27f354a0c87015ddad39d35f9c0c36607a3bdd175dde1f1" +dependencies = [ + "libc", +] + +[[package]] +name = "crossbeam-deque" +version = "0.8.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ce6fd6f855243022dcecf8702fef0c297d4338e226845fe067f6341ad9fa0cef" +dependencies = [ + "cfg-if", + "crossbeam-epoch", + "crossbeam-utils", +] + +[[package]] +name = "crossbeam-epoch" +version = "0.9.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ae211234986c545741a7dc064309f67ee1e5ad243d0e48335adc0484d960bcc7" +dependencies = [ + "autocfg", + "cfg-if", + "crossbeam-utils", + "memoffset", + "scopeguard", +] + +[[package]] +name = "crossbeam-utils" +version = "0.8.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5a22b2d63d4d1dc0b7f1b6b2747dd0088008a9be28b6ddf0b1e7d335e3037294" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "crypto-common" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3" +dependencies = [ + "generic-array", + "typenum", +] + +[[package]] +name = "darling" +version = "0.13.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a01d95850c592940db9b8194bc39f4bc0e89dee5c4265e4b1807c34a9aba453c" +dependencies = [ + "darling_core 0.13.4", + "darling_macro 0.13.4", +] + +[[package]] +name = "darling" +version = "0.20.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0209d94da627ab5605dcccf08bb18afa5009cfbef48d8a8b7d7bdbc79be25c5e" +dependencies = [ + "darling_core 0.20.3", + "darling_macro 0.20.3", +] + +[[package]] +name = "darling_core" +version = "0.13.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "859d65a907b6852c9361e3185c862aae7fafd2887876799fa55f5f99dc40d610" +dependencies = [ + "fnv", + "ident_case", + "proc-macro2", + "quote", + "strsim", + "syn 1.0.109", +] + +[[package]] +name = "darling_core" +version = "0.20.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "177e3443818124b357d8e76f53be906d60937f0d3a90773a664fa63fa253e621" +dependencies = [ + "fnv", + "ident_case", + "proc-macro2", + "quote", + "strsim", + "syn 2.0.20", +] + +[[package]] +name = "darling_macro" +version = "0.13.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"9c972679f83bdf9c42bd905396b6c3588a843a17f0f16dfcfa3e2c5d57441835" +dependencies = [ + "darling_core 0.13.4", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "darling_macro" +version = "0.20.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "836a9bbc7ad63342d6d6e7b815ccab164bc77a2d95d84bc3117a8c0d5c98e2d5" +dependencies = [ + "darling_core 0.20.3", + "quote", + "syn 2.0.20", +] + +[[package]] +name = "deranged" +version = "0.3.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f2696e8a945f658fd14dc3b87242e6b80cd0f36ff04ea560fa39082368847946" +dependencies = [ + "serde", +] + +[[package]] +name = "derivative" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fcc3dd5e9e9c0b295d6e1e4d811fb6f157d5ffd784b8d202fc62eac8035a770b" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "digest" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d3dd60d1080a57a05ab032377049e0591415d2b31afd7028356dbf3cc6dcb066" +dependencies = [ + "generic-array", +] + +[[package]] +name = "digest" +version = "0.10.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292" +dependencies = [ + "block-buffer", + "crypto-common", + "subtle", +] + +[[package]] +name = "disjoint-set" +version = "0.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d102f1a462fdcdddce88d6d46c06c074a2d2749b262230333726b06c52bb7585" + +[[package]] +name = "either" +version = "1.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a26ae43d7bcc3b814de94796a5e736d4029efb0ee900c12e2d54c993ad1a1e07" + +[[package]] +name = "fnv" +version = "1.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" + +[[package]] +name = "generic-array" +version = "0.14.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a" +dependencies = [ + "typenum", + "version_check", +] + +[[package]] +name = "getrandom" +version = "0.2.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "be4136b2a15dd319360be1c07d9933517ccf0be8f16bf62a3bee4f0d618df427" +dependencies = [ + "cfg-if", + "js-sys", + "libc", + "wasi", + "wasm-bindgen", +] + +[[package]] +name = "groupmap" +version = "0.1.0" +dependencies = [ + "ark-ec", + "ark-ff", + "rand", +] + +[[package]] +name = "hashbrown" +version = "0.11.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ab5ef0d4909ef3724cc8cce6ccc8572c5c817592e9285f5464f8e86f8bd3726e" +dependencies = [ + "ahash", +] + +[[package]] +name = "hashbrown" +version = "0.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888" + +[[package]] +name = "heck" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8" + +[[package]] +name = "hermit-abi" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d77f7ec81a6d05a3abb01ab6eb7590f6083d08449fe5a1c8b1e620283546ccb7" + +[[package]] +name = "hex" +version = "0.4.3" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70" +dependencies = [ + "serde", +] + +[[package]] +name = "iana-time-zone" +version = "0.1.57" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2fad5b825842d2b38bd206f3e81d6957625fd7f0a361e345c30e01a0ae2dd613" +dependencies = [ + "android_system_properties", + "core-foundation-sys", + "iana-time-zone-haiku", + "js-sys", + "wasm-bindgen", + "windows", +] + +[[package]] +name = "iana-time-zone-haiku" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f31827a206f56af32e590ba56d5d2d085f558508192593743f16b2306495269f" +dependencies = [ + "cc", +] + +[[package]] +name = "ident_case" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39" + +[[package]] +name = "indexmap" +version = "1.9.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bd070e393353796e801d209ad339e89596eb4c8d430d18ede6a1cced8fafbd99" +dependencies = [ + "autocfg", + "hashbrown 0.12.3", + "serde", +] + +[[package]] +name = "internal-tracing" +version = "0.1.0" + +[[package]] +name = "itertools" +version = "0.10.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b0fd2260e829bddf4cb6ea802289de2f86d6a7a690192fbe91b3f46e0f2c8473" +dependencies = [ + "either", +] + +[[package]] +name = "itoa" +version = "1.0.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "af150ab688ff2122fcef229be89cb50dd66af9e01a4ff320cc137eecc9bacc38" + +[[package]] +name = "js-sys" +version = "0.3.62" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "68c16e1bfd491478ab155fd8b4896b86f9ede344949b641e61501e07c2b8b4d5" +dependencies = [ + "wasm-bindgen", +] + +[[package]] +name = "kimchi" +version = "0.1.0" +dependencies = [ + "ark-ec", + "ark-ff", + "ark-poly", + "ark-serialize", + "blake2", + "disjoint-set", + "groupmap", + "hex", + "internal-tracing", + "itertools", + "mina-curves", + "mina-poseidon", + "num-bigint", + "num-derive", + "num-integer", + "num-traits", + "o1-utils", + "once_cell", + "poly-commitment", + "rand", + "rand_core", + "rayon", + "rmp-serde", + "serde", + "serde_with 1.14.0", + "strum", + "strum_macros", + "thiserror", + "turshi", + "wasm-bindgen", +] + +[[package]] +name = "libc" +version = "0.2.148" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9cdc71e17332e86d2e1d38c1f99edcb6288ee11b815fb1a4b049eaa2114d369b" + +[[package]] +name = "libm" +version = "0.2.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f7012b1bbb0719e1097c47611d3898568c546d597c2e74d66f6087edd5233ff4" + +[[package]] +name = "log" +version = "0.4.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b5e6163cb8c49088c2c36f57875e58ccd8c87c7427f7fbd50ea6710b2f3f2e8f" + +[[package]] +name = "matrixmultiply" +version = "0.3.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7574c1cf36da4798ab73da5b215bbf444f50718207754cb522201d78d1cd0ff2" +dependencies = [ + "autocfg", + "rawpointer", +] + +[[package]] +name = "memchr" +version = "2.6.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f665ee40bc4a3c5590afb1e9677db74a508659dfd71e126420da8274909a0167" + +[[package]] +name = "memoffset" +version = 
"0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5a634b1c61a95585bd15607c6ab0c4e5b226e695ff2800ba0cdccddf208c406c" +dependencies = [ + "autocfg", +] + +[[package]] +name = "mina-curves" +version = "0.1.0" +dependencies = [ + "ark-ec", + "ark-ff", +] + +[[package]] +name = "mina-poseidon" +version = "0.1.0" +dependencies = [ + "ark-ec", + "ark-ff", + "ark-poly", + "mina-curves", + "o1-utils", + "once_cell", + "rand", + "rayon", + "serde", + "serde_with 1.14.0", +] + +[[package]] +name = "ndarray" +version = "0.15.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "adb12d4e967ec485a5f71c6311fe28158e9d6f4bc4a447b474184d0f91a8fa32" +dependencies = [ + "matrixmultiply", + "num-complex 0.4.4", + "num-integer", + "num-traits", + "rawpointer", +] + +[[package]] +name = "num-bigint" +version = "0.4.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "608e7659b5c3d7cba262d894801b9ec9d00de989e8a82bd4bef91d08da45cdc0" +dependencies = [ + "autocfg", + "num-integer", + "num-traits", + "rand", + "serde", +] + +[[package]] +name = "num-complex" +version = "0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6b19411a9719e753aff12e5187b74d60d3dc449ec3f4dc21e3989c3f554bc95" +dependencies = [ + "autocfg", + "num-traits", +] + +[[package]] +name = "num-complex" +version = "0.4.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1ba157ca0885411de85d6ca030ba7e2a83a28636056c7c699b07c8b6f7383214" +dependencies = [ + "num-traits", +] + +[[package]] +name = "num-derive" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "876a53fff98e03a936a674b29568b0e605f06b29372c2489ff4de23f1949743d" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "num-integer" +version = "0.1.45" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "225d3389fb3509a24c93f5c29eb6bde2586b98d9f016636dff58d7c6f7569cd9" +dependencies = [ + "autocfg", + "num-traits", +] + +[[package]] +name = "num-traits" +version = "0.2.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f30b0abd723be7e2ffca1272140fac1a2f084c77ec3e123c192b66af1ee9e6c2" +dependencies = [ + "autocfg", + "libm", +] + +[[package]] +name = "num_cpus" +version = "1.16.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4161fcb6d602d4d2081af7c3a45852d875a03dd337a6bfdd6e06407b61342a43" +dependencies = [ + "hermit-abi", + "libc", +] + +[[package]] +name = "o1-utils" +version = "0.1.0" +dependencies = [ + "ark-ec", + "ark-ff", + "ark-poly", + "ark-serialize", + "bcs", + "hex", + "num-bigint", + "num-integer", + "num-traits", + "rand", + "rand_core", + "rayon", + "serde", + "serde_with 1.14.0", + "sha2", + "thiserror", +] + +[[package]] +name = "once_cell" +version = "1.18.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dd8b5dd2ae5ed71462c540258bedcb51965123ad7e7ccf4b9a8cafaa4a63576d" + +[[package]] +name = "paste" +version = "1.0.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "de3145af08024dea9fa9914f381a17b8fc6034dfb00f3a84013f7ff43f29ed4c" + +[[package]] +name = "pest" +version = "2.7.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c022f1e7b65d6a24c0dbbd5fb344c66881bc01f3e5ae74a1c8100f2f985d98a4" +dependencies = [ + "memchr", + "thiserror", + "ucd-trie", +] + 
+[[package]] +name = "plonk_wasm" +version = "0.1.0" +dependencies = [ + "ark-ec", + "ark-ff", + "ark-poly", + "ark-serialize", + "array-init", + "base64", + "console_error_panic_hook", + "getrandom", + "groupmap", + "js-sys", + "kimchi", + "libc", + "mina-curves", + "mina-poseidon", + "num-bigint", + "o1-utils", + "once_cell", + "paste", + "poly-commitment", + "proc-macro2", + "rand", + "rayon", + "rmp-serde", + "serde", + "serde-wasm-bindgen", + "serde_json", + "serde_with 2.3.3", + "spmc", + "sprs", + "wasm-bindgen", + "wasm-bindgen-test", + "web-sys", +] + +[[package]] +name = "poly-commitment" +version = "0.1.0" +dependencies = [ + "ark-ec", + "ark-ff", + "ark-poly", + "ark-serialize", + "blake2", + "groupmap", + "itertools", + "mina-curves", + "mina-poseidon", + "o1-utils", + "once_cell", + "rand", + "rand_core", + "rayon", + "rmp-serde", + "serde", + "serde_with 1.14.0", + "thiserror", +] + +[[package]] +name = "ppv-lite86" +version = "0.2.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de" + +[[package]] +name = "proc-macro2" +version = "1.0.60" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dec2b086b7a862cf4de201096214fa870344cf922b2b30c167badb3af3195406" +dependencies = [ + "unicode-ident", +] + +[[package]] +name = "quote" +version = "1.0.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1b9ab9c7eadfd8df19006f1cf1a4aed13540ed5cbc047010ece5826e10825488" +dependencies = [ + "proc-macro2", +] + +[[package]] +name = "rand" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" +dependencies = [ + "libc", + "rand_chacha", + "rand_core", +] + +[[package]] +name = "rand_chacha" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" +dependencies = [ + "ppv-lite86", + "rand_core", +] + +[[package]] +name = "rand_core" +version = "0.6.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" +dependencies = [ + "getrandom", +] + +[[package]] +name = "rawpointer" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "60a357793950651c4ed0f3f52338f53b2f809f32d83a07f72909fa13e4c6c1e3" + +[[package]] +name = "rayon" +version = "1.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c27db03db7734835b3f53954b534c91069375ce6ccaa2e065441e07d9b6cdb1" +dependencies = [ + "either", + "rayon-core", +] + +[[package]] +name = "rayon-core" +version = "1.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5ce3fb6ad83f861aac485e76e1985cd109d9a3713802152be56c3b1f0e0658ed" +dependencies = [ + "crossbeam-deque", + "crossbeam-utils", +] + +[[package]] +name = "rmp" +version = "0.8.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f9860a6cc38ed1da53456442089b4dfa35e7cedaa326df63017af88385e6b20" +dependencies = [ + "byteorder", + "num-traits", + "paste", +] + +[[package]] +name = "rmp-serde" +version = "1.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bffea85eea980d8a74453e5d02a8d93028f3c34725de143085a844ebe953258a" +dependencies = [ + "byteorder", + "rmp", + "serde", +] + 
+[[package]] +name = "rustc_version" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f0dfe2087c51c460008730de8b57e6a320782fbfb312e1f4d520e6c6fae155ee" +dependencies = [ + "semver", +] + +[[package]] +name = "rustversion" +version = "1.0.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7ffc183a10b4478d04cbbbfc96d0873219d962dd5accaff2ffbd4ceb7df837f4" + +[[package]] +name = "ryu" +version = "1.0.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1ad4cc8da4ef723ed60bced201181d83791ad433213d8c24efffda1eec85d741" + +[[package]] +name = "scoped-tls" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e1cf6437eb19a8f4a6cc0f7dca544973b0b78843adbfeb3683d1a94a0024a294" + +[[package]] +name = "scopeguard" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" + +[[package]] +name = "semver" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f301af10236f6df4160f7c3f04eec6dbc70ace82d23326abad5edee88801c6b6" +dependencies = [ + "semver-parser", +] + +[[package]] +name = "semver-parser" +version = "0.10.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "00b0bef5b7f9e0df16536d3961cfb6e84331c065b4066afb39768d0e319411f7" +dependencies = [ + "pest", +] + +[[package]] +name = "serde" +version = "1.0.164" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9e8c8cf938e98f769bc164923b06dce91cea1751522f46f8466461af04c9027d" +dependencies = [ + "serde_derive", +] + +[[package]] +name = "serde-wasm-bindgen" +version = "0.4.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3b4c031cd0d9014307d82b8abf653c0290fbdaeb4c02d00c63cf52f728628bf" +dependencies = [ + "js-sys", + "serde", + "wasm-bindgen", +] + +[[package]] +name = "serde_derive" +version = "1.0.164" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d9735b638ccc51c28bf6914d90a2e9725b377144fc612c49a611fddd1b631d68" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.20", +] + +[[package]] +name = "serde_json" +version = "1.0.99" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "46266871c240a00b8f503b877622fe33430b3c7d963bdc0f2adc511e54a1eae3" +dependencies = [ + "itoa", + "ryu", + "serde", +] + +[[package]] +name = "serde_with" +version = "1.14.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "678b5a069e50bf00ecd22d0cd8ddf7c236f68581b03db652061ed5eb13a312ff" +dependencies = [ + "serde", + "serde_with_macros 1.5.2", +] + +[[package]] +name = "serde_with" +version = "2.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "07ff71d2c147a7b57362cead5e22f772cd52f6ab31cfcd9edcd7f6aeb2a0afbe" +dependencies = [ + "base64", + "chrono", + "hex", + "indexmap", + "serde", + "serde_json", + "serde_with_macros 2.3.3", + "time", +] + +[[package]] +name = "serde_with_macros" +version = "1.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e182d6ec6f05393cc0e5ed1bf81ad6db3a8feedf8ee515ecdd369809bcce8082" +dependencies = [ + "darling 0.13.4", + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "serde_with_macros" +version = "2.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "881b6f881b17d13214e5d494c939ebab463d01264ce1811e9d4ac3a882e7695f" +dependencies = [ + "darling 0.20.3", + "proc-macro2", + "quote", + "syn 2.0.20", +] + +[[package]] +name = "sha2" +version = "0.10.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "793db75ad2bcafc3ffa7c68b215fee268f537982cd901d132f89c6343f3a3dc8" +dependencies = [ + "cfg-if", + "cpufeatures", + "digest 0.10.7", +] + +[[package]] +name = "smallvec" +version = "1.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "942b4a808e05215192e39f4ab80813e599068285906cc91aa64f923db842bd5a" + +[[package]] +name = "spmc" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "02a8428da277a8e3a15271d79943e80ccc2ef254e78813a166a08d65e4c3ece5" + +[[package]] +name = "sprs" +version = "0.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "88bab60b0a18fb9b3e0c26e92796b3c3a278bf5fa4880f5ad5cc3bdfb843d0b1" +dependencies = [ + "alga", + "ndarray", + "num-complex 0.4.4", + "num-traits", + "num_cpus", + "rayon", + "smallvec", +] + +[[package]] +name = "strsim" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623" + +[[package]] +name = "strum" +version = "0.24.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "063e6045c0e62079840579a7e47a355ae92f60eb74daaf156fb1e84ba164e63f" + +[[package]] +name = "strum_macros" +version = "0.24.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e385be0d24f186b4ce2f9982191e7101bb737312ad61c1f2f984f34bcf85d59" +dependencies = [ + "heck", + "proc-macro2", + "quote", + "rustversion", + "syn 1.0.109", +] + +[[package]] +name = "subtle" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "81cdd64d312baedb58e21336b31bc043b77e01cc99033ce76ef539f78e965ebc" + +[[package]] +name = "syn" +version = "1.0.109" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] +name = "syn" +version = "2.0.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fcb8d4cebc40aa517dfb69618fa647a346562e67228e2236ae0042ee6ac14775" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] +name = "thiserror" +version = "1.0.40" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "978c9a314bd8dc99be594bc3c175faaa9794be04a5a5e153caba6915336cebac" +dependencies = [ + "thiserror-impl", +] + +[[package]] +name = "thiserror-impl" +version = "1.0.40" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f9456a42c5b0d803c8cd86e73dd7cc9edd429499f37a3550d286d5e86720569f" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.20", +] + +[[package]] +name = "time" +version = "0.3.26" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a79d09ac6b08c1ab3906a2f7cc2e81a0e27c7ae89c63812df75e52bef0751e07" +dependencies = [ + "deranged", + "itoa", + "serde", + "time-core", + "time-macros", +] + +[[package]] +name = "time-core" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7300fbefb4dadc1af235a9cef3737cea692a9d97e1b9cbcd4ebdae6f8868e6fb" + 
+[[package]] +name = "time-macros" +version = "0.2.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "75c65469ed6b3a4809d987a41eb1dc918e9bc1d92211cbad7ae82931846f7451" +dependencies = [ + "time-core", +] + +[[package]] +name = "turshi" +version = "0.1.0" +dependencies = [ + "ark-ff", + "hex", + "o1-utils", +] + +[[package]] +name = "typenum" +version = "1.17.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "42ff0bf0c66b8238c6f3b578df37d0b7848e55df8577b3f74f92a69acceeb825" + +[[package]] +name = "ucd-trie" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ed646292ffc8188ef8ea4d1e0e0150fb15a5c2e12ad9b8fc191ae7a8a7f3c4b9" + +[[package]] +name = "unicode-ident" +version = "1.0.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b" + +[[package]] +name = "version_check" +version = "0.9.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f" + +[[package]] +name = "wasi" +version = "0.11.0+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" + +[[package]] +name = "wasm-bindgen" +version = "0.2.85" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5b6cb788c4e39112fbe1822277ef6fb3c55cd86b95cb3d3c4c1c9597e4ac74b4" +dependencies = [ + "cfg-if", + "wasm-bindgen-macro", +] + +[[package]] +name = "wasm-bindgen-backend" +version = "0.2.85" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "35e522ed4105a9d626d885b35d62501b30d9666283a5c8be12c14a8bdafe7822" +dependencies = [ + "bumpalo", + "log", + "once_cell", + "proc-macro2", + "quote", + "syn 2.0.20", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-futures" +version = "0.4.35" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "083abe15c5d88556b77bdf7aef403625be9e327ad37c62c4e4129af740168163" +dependencies = [ + "cfg-if", + "js-sys", + "wasm-bindgen", + "web-sys", +] + +[[package]] +name = "wasm-bindgen-macro" +version = "0.2.85" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "358a79a0cb89d21db8120cbfb91392335913e4890665b1a7981d9e956903b434" +dependencies = [ + "quote", + "wasm-bindgen-macro-support", +] + +[[package]] +name = "wasm-bindgen-macro-support" +version = "0.2.85" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4783ce29f09b9d93134d41297aded3a712b7b979e9c6f28c32cb88c973a94869" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.20", + "wasm-bindgen-backend", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-shared" +version = "0.2.85" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a901d592cafaa4d711bc324edfaff879ac700b19c3dfd60058d2b445be2691eb" + +[[package]] +name = "wasm-bindgen-test" +version = "0.3.35" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b27e15b4a3030b9944370ba1d8cec6f21f66a1ad4fd14725c5685600460713ec" +dependencies = [ + "console_error_panic_hook", + "js-sys", + "scoped-tls", + "wasm-bindgen", + "wasm-bindgen-futures", + "wasm-bindgen-test-macro", +] + +[[package]] +name = "wasm-bindgen-test-macro" +version = "0.3.35" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "1dbaa9b9a574eac00c4f3a9c4941ac051f07632ecd0484a8588abd95af6b99d2" +dependencies = [ + "proc-macro2", + "quote", +] + +[[package]] +name = "web-sys" +version = "0.3.62" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "16b5f940c7edfdc6d12126d98c9ef4d1b3d470011c47c76a6581df47ad9ba721" +dependencies = [ + "js-sys", + "wasm-bindgen", +] + +[[package]] +name = "windows" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e686886bc078bc1b0b600cac0147aadb815089b6e4da64016cbd754b6342700f" +dependencies = [ + "windows-targets", +] + +[[package]] +name = "windows-targets" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a2fa6e2155d7247be68c096456083145c183cbbbc2764150dda45a87197940c" +dependencies = [ + "windows_aarch64_gnullvm", + "windows_aarch64_msvc", + "windows_i686_gnu", + "windows_i686_msvc", + "windows_x86_64_gnu", + "windows_x86_64_gnullvm", + "windows_x86_64_msvc", +] + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc" + +[[package]] +name = "windows_i686_gnu" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e" + +[[package]] +name = "windows_i686_msvc" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538" + +[[package]] +name = "zeroize" +version = "1.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2a0956f1ba7c7909bfb66c2e9e4124ab6f6482560f6628b5aaeba39207c9aad9" +dependencies = [ + "zeroize_derive", +] + +[[package]] +name = "zeroize_derive" +version = "1.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ce36e65b0d2999d2aafac989fb249189a141aee1f53c612c1f37d72631959f69" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.20", +] diff --git a/src/lib/crypto/kimchi_bindings/wasm/Cargo.toml b/src/lib/crypto/kimchi_bindings/wasm/Cargo.toml new file mode 100644 index 00000000000..65cde660060 --- /dev/null +++ b/src/lib/crypto/kimchi_bindings/wasm/Cargo.toml @@ -0,0 +1,77 @@ +[package] +name = "plonk_wasm" +version = "0.1.0" +authors = ["opensource@o1labs.org"] +description = "WASM stubs for plonk proof systems" +repository = "https://github.com/MinaProtocol/mina" +license = "MIT/Apache-2.0" +edition = "2018" + +[lib] +name 
= "plonk_wasm" +crate-type = ["cdylib"] + +################################# Dependencies ################################ + +[dependencies] +# Strictly enforcing 0.2.84. It seems that 0.2.87 generates unwanted behavior. +# See https://github.com/MinaProtocol/mina/issues/14253 +wasm-bindgen = { version = "=0.2.85" } +console_error_panic_hook = { version = "0.1.6" } +web-sys = { version = "0.3.35", features = ["Window", "Document", "HtmlElement", "Text", "Node", "Element" ] } + +once_cell = "1.10.0" +libc = { version = "0.2.0" } + +# arkworks +ark-ff = { version = "0.3.0", features = ["parallel"] } +ark-serialize = "0.3.0" +ark-ec = { version = "0.3.0", features = ["parallel"] } +ark-poly = { version = "0.3.0", features = ["parallel"] } + +# proof-systems +poly-commitment = { path = "../../proof-systems/poly-commitment" } +groupmap = { path = "../../proof-systems/groupmap" } +mina-curves = { path = "../../proof-systems/curves" } +o1-utils = { path = "../../proof-systems/utils" } +mina-poseidon = { path = "../../proof-systems/poseidon" } +kimchi = { path = "../../proof-systems/kimchi", features = ["wasm_types"] } + +array-init = "2.0.0" +base64 = "0.13.0" +getrandom = { version = "0.2", features = ["js"] } +num-bigint = { version = "0.4.0" } +paste = "1.0.5" +rand = { version = "0.8.0" } +rayon = { version = "1" } +rmp-serde = "1.0.0" +sprs = { version = "0.11.0" } +spmc = {version = "0.3.0"} +serde = "1.0.130" +serde_json = "1.0" +serde_with = "2.0.1" +serde-wasm-bindgen = "0.4" +js-sys = "0.3" +# Version for proc-macro2 needs to be 1.0.60+ to be compatible with newer rust versions +# https://github.com/rust-lang/rust/issues/113152 +proc-macro2 = { version = "=1.0.60", features=["default", "proc-macro"] } + +[dev-dependencies] +wasm-bindgen-test = "0.3.0" + +[profile.release] +debug = true + +[features] +nodejs = [] + +[package.metadata.wasm-pack.profile.release] +wasm-opt = false +#wasm-opt = ["-O4", "--detect-features", "--enable-mutable-globals" ] +#wasm-opt = ["-O4", "--enable-mutable-globals"] + +[build] +rustflags = ["-C", "target-feature=+atomics,+bulk-memory", "-C", "link-arg=--no-check-features"] + +[target.wasm32-unknown-unknown] +rustflags = ["-C", "target-feature=+atomics,+bulk-memory", "-C", "link-arg=--no-check-features"] diff --git a/src/lib/crypto/kimchi_bindings/wasm/README.md b/src/lib/crypto/kimchi_bindings/wasm/README.md new file mode 100644 index 00000000000..4cf6353e13f --- /dev/null +++ b/src/lib/crypto/kimchi_bindings/wasm/README.md @@ -0,0 +1,34 @@ +# Kimchi WASM + +This code allows us to compile parts of Kimchi into [Web Assembly (WASM)](https://webassembly.org/). + +## Requirements + +For this to work, you will need to install the following dependencies: + +- [wasm-pack](https://rustwasm.github.io/wasm-pack/installer/) +- [wasm-bindgen-cli](https://rustwasm.github.io/docs/wasm-bindgen/reference/cli.html) (optional) + +## Usage + +To build for nodejs: + +```console +$ wasm-pack build --mode no-install --target nodejs --out-dir ./nodejs ./. -- --features nodejs +``` + +To build for web browsers: + +```console +$ wasm-pack build --mode no-install --target web --out-dir ./web ./. 
+``` + +Note that optimized versions of these commands are available in: + +- [/src/lib/crypto/kimchi_bindings/js/node_js/build.sh](/src/lib/crypto/kimchi_bindings/js/node_js/build.sh) (also called from the `dune` file in the same folder) +- [/src/lib/crypto/kimchi_bindings/js/web/build.sh](/src/lib/crypto/kimchi_bindings/js/web/build.sh) (also called from the `dune` file in the same folder) + +## Resources + +- [Rust WASM book](https://rustwasm.github.io/docs/book/game-of-life/hello-world.html) +- [WASM-bindgen book](https://rustwasm.github.io/docs/wasm-bindgen/) diff --git a/src/lib/crypto/kimchi_bindings/wasm/dune b/src/lib/crypto/kimchi_bindings/wasm/dune new file mode 100644 index 00000000000..1e073fbe82f --- /dev/null +++ b/src/lib/crypto/kimchi_bindings/wasm/dune @@ -0,0 +1,3 @@ +(data_only_dirs src) + +(dirs :standard \ target) diff --git a/src/lib/crypto/kimchi_bindings/wasm/rust-toolchain.toml b/src/lib/crypto/kimchi_bindings/wasm/rust-toolchain.toml new file mode 100644 index 00000000000..b04d9639e65 --- /dev/null +++ b/src/lib/crypto/kimchi_bindings/wasm/rust-toolchain.toml @@ -0,0 +1,6 @@ +# A nightly is necessary to enable the use of #![feature] +[toolchain] +# This should stay in line with the versions in +# - kimchi_bindings/js/node_js/build.sh +# - kimchi_bindings/js/web/build.sh +channel = "nightly-2023-09-01" # roughly matches 1.72 diff --git a/src/lib/crypto/kimchi_bindings/wasm/src/arkworks/bigint_256.rs b/src/lib/crypto/kimchi_bindings/wasm/src/arkworks/bigint_256.rs new file mode 100644 index 00000000000..6c94d5b500d --- /dev/null +++ b/src/lib/crypto/kimchi_bindings/wasm/src/arkworks/bigint_256.rs @@ -0,0 +1,139 @@ +use ark_ff::{BigInteger as ark_BigInteger, BigInteger256, FromBytes, ToBytes}; +use ark_serialize::{CanonicalDeserialize, CanonicalSerialize}; +use num_bigint::BigUint; +use std::cmp::Ordering::{Equal, Greater, Less}; +use std::convert::TryInto; +use wasm_bindgen::convert::{FromWasmAbi, IntoWasmAbi}; +use wasm_bindgen::prelude::*; + +// +// Handy constants +// + +const BIGINT256_NUM_BITS: i32 = 256; +const BIGINT256_LIMB_BITS: i32 = 64; +const BIGINT256_LIMB_BYTES: i32 = BIGINT256_LIMB_BITS / 8; +const BIGINT256_NUM_LIMBS: i32 = + (BIGINT256_NUM_BITS + BIGINT256_LIMB_BITS - 1) / BIGINT256_LIMB_BITS; +const BIGINT256_NUM_BYTES: usize = (BIGINT256_NUM_LIMBS as usize) * 8; + +pub struct WasmBigInteger256(pub BigInteger256); + +impl wasm_bindgen::describe::WasmDescribe for WasmBigInteger256 { + fn describe() { + <Vec<u8> as wasm_bindgen::describe::WasmDescribe>::describe() + } +} + +impl FromWasmAbi for WasmBigInteger256 { + type Abi = <Vec<u8> as FromWasmAbi>::Abi; + unsafe fn from_abi(js: Self::Abi) -> Self { + let bytes: Vec<u8> = FromWasmAbi::from_abi(js); + WasmBigInteger256(BigInteger256(FromBytes::read(bytes.as_slice()).unwrap())) + } +} + +impl IntoWasmAbi for WasmBigInteger256 { + type Abi = <Vec<u8> as FromWasmAbi>::Abi; + fn into_abi(self) -> Self::Abi { + let mut bytes: Vec<u8> = vec![]; + self.0.write(&mut bytes).unwrap(); + bytes.into_abi() + } +} + +pub fn to_biguint(x: &BigInteger256) -> BigUint { + let x_ = x.0.as_ptr() as *const u8; + let x_ = unsafe { std::slice::from_raw_parts(x_, BIGINT256_NUM_BYTES) }; + num_bigint::BigUint::from_bytes_le(x_) +} + +pub fn of_biguint(x: &BigUint) -> BigInteger256 { + let mut bytes = x.to_bytes_le(); + bytes.resize(BIGINT256_NUM_BYTES, 0); + let limbs = bytes.as_ptr(); + let limbs = limbs as *const [u64; BIGINT256_NUM_LIMBS as usize]; + let limbs = unsafe { &(*limbs) }; + BigInteger256(*limbs) +} + +#[wasm_bindgen] +pub fn 
caml_bigint_256_of_numeral(s: String, _len: u32, base: u32) -> WasmBigInteger256 { + match BigUint::parse_bytes(&s.into_bytes(), base) { + Some(data) => WasmBigInteger256(of_biguint(&data)), + None => panic!("caml_bigint_256_of_numeral"), + } +} + +#[wasm_bindgen] +pub fn caml_bigint_256_of_decimal_string(s: String) -> WasmBigInteger256 { + match BigUint::parse_bytes(&s.into_bytes(), 10) { + Some(data) => WasmBigInteger256(of_biguint(&data)), + None => panic!("caml_bigint_256_of_decimal_string"), + } +} + +#[wasm_bindgen] +pub fn caml_bigint_256_num_limbs() -> i32 { + BIGINT256_NUM_LIMBS +} + +#[wasm_bindgen] +pub fn caml_bigint_256_bytes_per_limb() -> i32 { + BIGINT256_LIMB_BYTES +} + +#[wasm_bindgen] +pub fn caml_bigint_256_div(x: WasmBigInteger256, y: WasmBigInteger256) -> WasmBigInteger256 { + let res: BigUint = to_biguint(&x.0) / to_biguint(&y.0); + WasmBigInteger256(of_biguint(&res)) +} + +#[wasm_bindgen] +pub fn caml_bigint_256_compare(x: WasmBigInteger256, y: WasmBigInteger256) -> i8 { + match x.0.cmp(&y.0) { + Less => -1, + Equal => 0, + Greater => 1, + } +} + +#[wasm_bindgen] +pub fn caml_bigint_256_print(x: WasmBigInteger256) { + println!("{}", to_biguint(&x.0)); +} + +#[wasm_bindgen] +pub fn caml_bigint_256_to_string(x: WasmBigInteger256) -> String { + to_biguint(&x.0).to_string() +} + +#[wasm_bindgen] +pub fn caml_bigint_256_test_bit(x: WasmBigInteger256, i: i32) -> bool { + match i.try_into() { + Ok(i) => x.0.get_bit(i), + Err(_) => panic!("caml_bigint_256_test_bit"), + } +} + +#[wasm_bindgen] +pub fn caml_bigint_256_to_bytes(x: WasmBigInteger256) -> Vec { + let mut serialized_bytes = vec![]; + x.0.serialize(&mut serialized_bytes) + .expect("serialize failed"); + serialized_bytes +} + +#[wasm_bindgen] +pub fn caml_bigint_256_of_bytes(x: &[u8]) -> WasmBigInteger256 { + let len = std::mem::size_of::(); + if x.len() != len { + panic!("caml_bigint_256_of_bytes"); + }; + WasmBigInteger256(BigInteger256::deserialize(&mut &x[..]).expect("deserialization error")) +} + +#[wasm_bindgen] +pub fn caml_bigint_256_deep_copy(x: WasmBigInteger256) -> WasmBigInteger256 { + x +} diff --git a/src/lib/crypto/kimchi_bindings/wasm/src/arkworks/group_affine.rs b/src/lib/crypto/kimchi_bindings/wasm/src/arkworks/group_affine.rs new file mode 100644 index 00000000000..0d6dc1cfcca --- /dev/null +++ b/src/lib/crypto/kimchi_bindings/wasm/src/arkworks/group_affine.rs @@ -0,0 +1,204 @@ +use crate::arkworks::pasta_fp::WasmPastaFp; +use crate::arkworks::pasta_fq::WasmPastaFq; +use mina_curves::pasta::{ + curves::{ + pallas::G_GENERATOR_X as GeneratorPallasX, pallas::G_GENERATOR_Y as GeneratorPallasY, + vesta::G_GENERATOR_X as GeneratorVestaX, vesta::G_GENERATOR_Y as GeneratorVestaY, + }, + Pallas as AffinePallas, Vesta as AffineVesta, +}; +use wasm_bindgen::prelude::*; + +// +// handy types +// + +#[wasm_bindgen] +#[derive(Clone, Copy, Debug)] +pub struct WasmGPallas { + pub x: WasmPastaFp, + pub y: WasmPastaFp, + pub infinity: bool, +} + +#[wasm_bindgen] +#[derive(Clone, Copy, Debug)] +pub struct WasmGVesta { + pub x: WasmPastaFq, + pub y: WasmPastaFq, + pub infinity: bool, +} + +// Conversions from/to AffineVesta + +impl From for WasmGVesta { + fn from(point: AffineVesta) -> Self { + WasmGVesta { + x: point.x.into(), + y: point.y.into(), + infinity: point.infinity, + } + } +} + +impl From<&AffineVesta> for WasmGVesta { + fn from(point: &AffineVesta) -> Self { + WasmGVesta { + x: point.x.into(), + y: point.y.into(), + infinity: point.infinity, + } + } +} + +impl From for AffineVesta { + fn from(point: 
WasmGVesta) -> Self { + AffineVesta::new(point.x.into(), point.y.into(), point.infinity) + } +} + +impl From<&WasmGVesta> for AffineVesta { + fn from(point: &WasmGVesta) -> Self { + AffineVesta::new(point.x.into(), point.y.into(), point.infinity) + } +} + +// Conversion from/to AffinePallas + +impl From for WasmGPallas { + fn from(point: AffinePallas) -> Self { + WasmGPallas { + x: point.x.into(), + y: point.y.into(), + infinity: point.infinity, + } + } +} + +impl From<&AffinePallas> for WasmGPallas { + fn from(point: &AffinePallas) -> Self { + WasmGPallas { + x: point.x.into(), + y: point.y.into(), + infinity: point.infinity, + } + } +} + +impl From for AffinePallas { + fn from(point: WasmGPallas) -> Self { + AffinePallas::new(point.x.into(), point.y.into(), point.infinity) + } +} + +impl From<&WasmGPallas> for AffinePallas { + fn from(point: &WasmGPallas) -> Self { + AffinePallas::new(point.x.into(), point.y.into(), point.infinity) + } +} + +#[wasm_bindgen] +pub fn caml_pallas_affine_one() -> WasmGPallas { + WasmGPallas { + x: WasmPastaFp::from(GeneratorPallasX), + y: WasmPastaFp::from(GeneratorPallasY), + infinity: false, + } +} + +#[wasm_bindgen] +pub fn caml_vesta_affine_one() -> WasmGVesta { + WasmGVesta { + x: WasmPastaFq::from(GeneratorVestaX), + y: WasmPastaFq::from(GeneratorVestaY), + infinity: false, + } +} + +/* +#[wasm_bindgen] +pub fn caml_pasta_pallas_one() -> WasmPallasGProjective { + ProjectivePallas::prime_subgroup_generator().into() +} + +#[wasm_bindgen] +pub fn caml_pasta_pallas_add( + x: &WasmPallasGProjective, + y: &WasmPallasGProjective, +) -> WasmPallasGProjective { + (**x + **y).into() +} + +#[wasm_bindgen] +pub fn caml_pasta_pallas_sub( + x: &WasmPallasGProjective, + y: &WasmPallasGProjective, +) -> WasmPallasGProjective { + (**x - **y).into() +} + +#[wasm_bindgen] +pub fn caml_pasta_pallas_negate(x: &WasmPallasGProjective) -> WasmPallasGProjective { + (-(**x)).into() +} + +#[wasm_bindgen] +pub fn caml_pasta_pallas_double(x: &WasmPallasGProjective) -> WasmPallasGProjective { + (x.double()).into() +} + +#[wasm_bindgen] +pub fn caml_pasta_pallas_scale(x: &WasmPallasGProjective, y: WasmPastaFq) -> WasmPallasGProjective { + (x.mul(y.0)).into() +} + +#[wasm_bindgen] +pub fn caml_pasta_pallas_random() -> WasmPallasGProjective { + let rng = &mut rand_core::OsRng; + WasmPallasGProjective(UniformRand::rand(rng)) +} + +#[wasm_bindgen] +pub fn caml_pasta_pallas_rng(i: u32) -> WasmPallasGProjective { + let i: u64 = i.into(); + let mut rng: StdRng = rand::SeedableRng::seed_from_u64(i); + WasmPallasGProjective(UniformRand::rand(&mut rng)) +} + +#[wasm_bindgen] +pub fn caml_pasta_pallas_endo_base() -> WasmPastaFp { + let (endo_q, _endo_r) = poly_commitment::srs::endos::(); + WasmPastaFp(endo_q) +} + +#[wasm_bindgen] +pub fn caml_pasta_pallas_endo_scalar() -> WasmPastaFq { + let (_endo_q, endo_r) = poly_commitment::srs::endos::(); + WasmPastaFq(endo_r) +} + +#[wasm_bindgen] +pub fn caml_pasta_pallas_to_affine(x: &WasmPallasGProjective) -> WasmPallasGAffine { + Into::<&GProjective>::into(x).into_affine().into() +} + +#[wasm_bindgen] +pub fn caml_pasta_pallas_of_affine(x: &WasmPallasGAffine) -> WasmPallasGProjective { + Into::::into(x).into_projective().into() +} + +#[wasm_bindgen] +pub fn caml_pasta_pallas_of_affine_coordinates(x: WasmPastaFp, y: WasmPastaFp) -> WasmPallasGProjective { + GProjective::new(x.0, y.0, Fp::one()).into() +} + +#[wasm_bindgen] +pub fn caml_pasta_pallas_affine_deep_copy(x: &WasmPallasGAffine) -> WasmPallasGAffine { + x.clone() +} + +#[wasm_bindgen] +pub 
fn caml_pasta_pallas_affine_one() -> WasmPallasGAffine { + GAffine::prime_subgroup_generator().into() +} +*/ diff --git a/src/lib/crypto/kimchi_bindings/wasm/src/arkworks/group_projective.rs b/src/lib/crypto/kimchi_bindings/wasm/src/arkworks/group_projective.rs new file mode 100644 index 00000000000..0af3d3c80ae --- /dev/null +++ b/src/lib/crypto/kimchi_bindings/wasm/src/arkworks/group_projective.rs @@ -0,0 +1,192 @@ +use std::ops::{Add, Deref, Neg, Sub}; +use wasm_bindgen::prelude::*; + +use mina_curves::pasta::{ProjectivePallas, ProjectiveVesta}; + +// Pallas +#[wasm_bindgen] +#[derive(Clone, Copy)] +pub struct WasmPallasGProjective(ProjectivePallas); + +impl AsRef for WasmPallasGProjective { + fn as_ref(&self) -> &WasmPallasGProjective { + self + } +} + +impl Deref for WasmPallasGProjective { + type Target = ProjectivePallas; + + fn deref(&self) -> &Self::Target { + &self.0 + } +} + +// Handy implementations + +impl From for WasmPallasGProjective { + fn from(x: ProjectivePallas) -> Self { + WasmPallasGProjective(x) + } +} + +impl From<&ProjectivePallas> for WasmPallasGProjective { + fn from(x: &ProjectivePallas) -> Self { + WasmPallasGProjective(*x) + } +} + +impl From for ProjectivePallas { + fn from(x: WasmPallasGProjective) -> Self { + x.0 + } +} + +impl From<&WasmPallasGProjective> for ProjectivePallas { + fn from(x: &WasmPallasGProjective) -> Self { + x.0 + } +} + +impl Add for WasmPallasGProjective { + type Output = Self; + + fn add(self, other: Self) -> Self { + Self(self.0 + other.0) + } +} + +impl Add for &WasmPallasGProjective { + type Output = WasmPallasGProjective; + + fn add(self, other: Self) -> Self::Output { + WasmPallasGProjective(self.0 + other.0) + } +} + +impl Sub for WasmPallasGProjective { + type Output = WasmPallasGProjective; + + fn sub(self, other: Self) -> Self::Output { + WasmPallasGProjective(self.0 - other.0) + } +} + +impl Sub for &WasmPallasGProjective { + type Output = WasmPallasGProjective; + + fn sub(self, other: Self) -> Self::Output { + WasmPallasGProjective(self.0 - other.0) + } +} + +impl Neg for WasmPallasGProjective { + type Output = WasmPallasGProjective; + + fn neg(self) -> Self::Output { + WasmPallasGProjective(-self.0) + } +} + +impl Neg for &WasmPallasGProjective { + type Output = WasmPallasGProjective; + + fn neg(self) -> Self::Output { + WasmPallasGProjective(-self.0) + } +} + +// Vesta + +#[wasm_bindgen] +#[derive(Clone, Copy)] +pub struct WasmVestaGProjective(ProjectiveVesta); + +impl AsRef for WasmVestaGProjective { + fn as_ref(&self) -> &WasmVestaGProjective { + self + } +} + +impl Deref for WasmVestaGProjective { + type Target = ProjectiveVesta; + + fn deref(&self) -> &Self::Target { + &self.0 + } +} + +// +// Handy implementations +// + +impl From for WasmVestaGProjective { + fn from(x: ProjectiveVesta) -> Self { + WasmVestaGProjective(x) + } +} + +impl From<&ProjectiveVesta> for WasmVestaGProjective { + fn from(x: &ProjectiveVesta) -> Self { + WasmVestaGProjective(*x) + } +} + +impl From for ProjectiveVesta { + fn from(x: WasmVestaGProjective) -> Self { + x.0 + } +} + +impl From<&WasmVestaGProjective> for ProjectiveVesta { + fn from(x: &WasmVestaGProjective) -> Self { + x.0 + } +} + +impl Add for WasmVestaGProjective { + type Output = Self; + + fn add(self, other: Self) -> Self { + Self(self.0 + other.0) + } +} +impl Add for &WasmVestaGProjective { + type Output = WasmVestaGProjective; + + fn add(self, other: Self) -> Self::Output { + WasmVestaGProjective(self.0 + other.0) + } +} + +impl Sub for WasmVestaGProjective { + type Output = 
WasmVestaGProjective; + + fn sub(self, other: Self) -> Self::Output { + WasmVestaGProjective(self.0 - other.0) + } +} + +impl Sub for &WasmVestaGProjective { + type Output = WasmVestaGProjective; + + fn sub(self, other: Self) -> Self::Output { + WasmVestaGProjective(self.0 - other.0) + } +} + +impl Neg for WasmVestaGProjective { + type Output = WasmVestaGProjective; + + fn neg(self) -> Self::Output { + WasmVestaGProjective(-self.0) + } +} + +impl Neg for &WasmVestaGProjective { + type Output = WasmVestaGProjective; + + fn neg(self) -> Self::Output { + WasmVestaGProjective(-self.0) + } +} diff --git a/src/lib/crypto/kimchi_bindings/wasm/src/arkworks/mod.rs b/src/lib/crypto/kimchi_bindings/wasm/src/arkworks/mod.rs new file mode 100644 index 00000000000..b1d5af216c6 --- /dev/null +++ b/src/lib/crypto/kimchi_bindings/wasm/src/arkworks/mod.rs @@ -0,0 +1,32 @@ +//! This module contains wrapper types to Arkworks types. +//! To use Arkwork types in OCaml, you have to convert to these types, +//! and convert back from them to use them in Rust. +//! +//! For example: +//! +//! ``` +//! use marlin_plonk_bindings::arkworks::CamlBiginteger256; +//! use ark_ff::BigInteger256; +//! +//! #[ocaml::func] +//! pub fn caml_add(x: CamlBigInteger256, y: CamlBigInteger256) -> CamlBigInteger256 { +//! let x: BigInteger256 = x.into(); +//! let y: BigInteger256 = y.into(); +//! (x + y).into() +//! } +//! ``` +//! + +pub mod bigint_256; +pub mod group_affine; +pub mod group_projective; +pub mod pasta_fp; +pub mod pasta_fq; + +// re-export what's important + +pub use bigint_256::WasmBigInteger256; +pub use group_affine::{WasmGPallas, WasmGVesta}; +pub use group_projective::{WasmPallasGProjective, WasmVestaGProjective}; +pub use pasta_fp::WasmPastaFp; +pub use pasta_fq::WasmPastaFq; diff --git a/src/lib/crypto/kimchi_bindings/wasm/src/arkworks/pasta_fp.rs b/src/lib/crypto/kimchi_bindings/wasm/src/arkworks/pasta_fp.rs new file mode 100644 index 00000000000..06914e15f8b --- /dev/null +++ b/src/lib/crypto/kimchi_bindings/wasm/src/arkworks/pasta_fp.rs @@ -0,0 +1,244 @@ +use crate::arkworks::bigint_256::{self, WasmBigInteger256}; +use ark_ff::{ + fields::{Field, FpParameters, PrimeField, SquareRootField}, + FftField, One, UniformRand, Zero, +}; +use ark_ff::{FromBytes, ToBytes}; +use ark_poly::{EvaluationDomain, Radix2EvaluationDomain as Domain}; +use mina_curves::pasta::{fields::fp::FpParameters as Fp_params, Fp}; +use num_bigint::BigUint; +use rand::rngs::StdRng; +use std::cmp::Ordering::{Equal, Greater, Less}; +use wasm_bindgen::convert::{FromWasmAbi, IntoWasmAbi, OptionFromWasmAbi, OptionIntoWasmAbi}; +use wasm_bindgen::prelude::*; + +#[repr(C)] +#[derive(Clone, Copy, Debug)] +pub struct WasmPastaFp(pub Fp); + +impl crate::wasm_flat_vector::FlatVectorElem for WasmPastaFp { + const FLATTENED_SIZE: usize = std::mem::size_of::(); + fn flatten(self) -> Vec { + let mut bytes: Vec = Vec::with_capacity(Self::FLATTENED_SIZE); + self.0.write(&mut bytes).unwrap(); + bytes + } + fn unflatten(flat: Vec) -> Self { + WasmPastaFp(FromBytes::read(flat.as_slice()).unwrap()) + } +} + +impl From for WasmPastaFp { + fn from(x: Fp) -> Self { + WasmPastaFp(x) + } +} + +impl From for Fp { + fn from(x: WasmPastaFp) -> Self { + x.0 + } +} + +impl<'a> From<&'a WasmPastaFp> for &'a Fp { + fn from(x: &'a WasmPastaFp) -> Self { + &x.0 + } +} + +impl wasm_bindgen::describe::WasmDescribe for WasmPastaFp { + fn describe() { + as wasm_bindgen::describe::WasmDescribe>::describe() + } +} + +impl FromWasmAbi for WasmPastaFp { + type Abi = as 
FromWasmAbi>::Abi; + unsafe fn from_abi(js: Self::Abi) -> Self { + let bytes: Vec = FromWasmAbi::from_abi(js); + WasmPastaFp(FromBytes::read(bytes.as_slice()).unwrap()) + } +} + +impl IntoWasmAbi for WasmPastaFp { + type Abi = as FromWasmAbi>::Abi; + fn into_abi(self) -> Self::Abi { + let mut bytes: Vec = vec![]; + self.0.write(&mut bytes).unwrap(); + bytes.into_abi() + } +} + +impl OptionIntoWasmAbi for WasmPastaFp { + fn none() -> Self::Abi { + as OptionIntoWasmAbi>::none() + } +} + +impl OptionFromWasmAbi for WasmPastaFp { + fn is_none(abi: &Self::Abi) -> bool { + as OptionFromWasmAbi>::is_none(abi) + } +} + +#[wasm_bindgen] +pub fn caml_pasta_fp_size_in_bits() -> isize { + Fp_params::MODULUS_BITS as isize +} + +#[wasm_bindgen] +pub fn caml_pasta_fp_size() -> WasmBigInteger256 { + WasmBigInteger256(Fp_params::MODULUS) +} + +#[wasm_bindgen] +pub fn caml_pasta_fp_add(x: WasmPastaFp, y: WasmPastaFp) -> WasmPastaFp { + WasmPastaFp(x.0 + y.0) +} + +#[wasm_bindgen] +pub fn caml_pasta_fp_sub(x: WasmPastaFp, y: WasmPastaFp) -> WasmPastaFp { + WasmPastaFp(x.0 - y.0) +} + +#[wasm_bindgen] +pub fn caml_pasta_fp_negate(x: WasmPastaFp) -> WasmPastaFp { + WasmPastaFp(-x.0) +} + +#[wasm_bindgen] +pub fn caml_pasta_fp_mul(x: WasmPastaFp, y: WasmPastaFp) -> WasmPastaFp { + WasmPastaFp(x.0 * y.0) +} + +#[wasm_bindgen] +pub fn caml_pasta_fp_div(x: WasmPastaFp, y: WasmPastaFp) -> WasmPastaFp { + WasmPastaFp(x.0 / y.0) +} + +#[wasm_bindgen] +pub fn caml_pasta_fp_inv(x: WasmPastaFp) -> Option { + x.0.inverse().map(WasmPastaFp) +} + +#[wasm_bindgen] +pub fn caml_pasta_fp_square(x: WasmPastaFp) -> WasmPastaFp { + WasmPastaFp(x.0.square()) +} + +#[wasm_bindgen] +pub fn caml_pasta_fp_is_square(x: WasmPastaFp) -> bool { + let s = x.0.pow(Fp_params::MODULUS_MINUS_ONE_DIV_TWO); + s.is_zero() || s.is_one() +} + +#[wasm_bindgen] +pub fn caml_pasta_fp_sqrt(x: WasmPastaFp) -> Option { + x.0.sqrt().map(WasmPastaFp) +} + +#[wasm_bindgen] +pub fn caml_pasta_fp_of_int(i: i32) -> WasmPastaFp { + WasmPastaFp(Fp::from(i as u64)) +} + +#[wasm_bindgen] +pub fn caml_pasta_fp_to_string(x: WasmPastaFp) -> String { + bigint_256::to_biguint(&x.0.into_repr()).to_string() +} + +#[wasm_bindgen] +pub fn caml_pasta_fp_of_string(s: String) -> Result { + let biguint = BigUint::parse_bytes(s.as_bytes(), 10) + .ok_or(JsValue::from_str("caml_pasta_fp_of_string"))?; + + match Fp::from_repr(bigint_256::of_biguint(&biguint)) { + Some(x) => Ok(x.into()), + None => Err(JsValue::from_str("caml_pasta_fp_of_string")), + } +} + +#[wasm_bindgen] +pub fn caml_pasta_fp_print(x: WasmPastaFp) { + println!("{}", bigint_256::to_biguint(&(x.0.into_repr()))); +} + +#[wasm_bindgen] +pub fn caml_pasta_fp_compare(x: WasmPastaFp, y: WasmPastaFp) -> i32 { + match x.0.cmp(&y.0) { + Less => -1, + Equal => 0, + Greater => 1, + } +} + +#[wasm_bindgen] +pub fn caml_pasta_fp_equal(x: WasmPastaFp, y: WasmPastaFp) -> bool { + x.0 == y.0 +} + +#[wasm_bindgen] +pub fn caml_pasta_fp_random() -> WasmPastaFp { + WasmPastaFp(UniformRand::rand(&mut rand::thread_rng())) +} + +#[wasm_bindgen] +pub fn caml_pasta_fp_rng(i: i32) -> WasmPastaFp { + // We only care about entropy here, so we force a conversion i32 -> u32. 
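    // Seeding `StdRng` via `seed_from_u64` makes the result deterministic for a
    // given `i`: handy for reproducible values in the bindings, but not a source of
    // cryptographic randomness (use `caml_pasta_fp_random` for fresh entropy).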
+ let i: u64 = (i as u32).into(); + let mut rng: StdRng = rand::SeedableRng::seed_from_u64(i); + WasmPastaFp(UniformRand::rand(&mut rng)) +} + +#[wasm_bindgen] +pub fn caml_pasta_fp_to_bigint(x: WasmPastaFp) -> WasmBigInteger256 { + WasmBigInteger256(x.0.into_repr()) +} + +#[wasm_bindgen] +pub fn caml_pasta_fp_of_bigint(x: WasmBigInteger256) -> Result { + match Fp::from_repr(x.0) { + Some(x) => Ok(x.into()), + None => Err(JsValue::from_str("caml_pasta_fp_of_bigint")), + } +} + +#[wasm_bindgen] +pub fn caml_pasta_fp_two_adic_root_of_unity() -> WasmPastaFp { + WasmPastaFp(FftField::two_adic_root_of_unity()) +} + +#[wasm_bindgen] +pub fn caml_pasta_fp_domain_generator(log2_size: i32) -> WasmPastaFp { + match Domain::new(1 << log2_size) { + Some(x) => WasmPastaFp(x.group_gen), + None => panic!("caml_pasta_fp_domain_generator"), + } +} + +#[wasm_bindgen] +pub fn caml_pasta_fp_to_bytes(x: WasmPastaFp) -> Vec { + let len = std::mem::size_of::(); + let mut str: Vec = Vec::with_capacity(len); + str.resize(len, 0); + let str_as_fp: *mut Fp = str.as_mut_ptr().cast::(); + unsafe { + *str_as_fp = x.0; + } + str +} + +#[wasm_bindgen] +pub fn caml_pasta_fp_of_bytes(x: &[u8]) -> WasmPastaFp { + let len = std::mem::size_of::(); + if x.len() != len { + panic!("caml_pasta_fp_of_bytes"); + }; + let x = unsafe { *(x.as_ptr() as *const Fp) }; + WasmPastaFp(x) +} + +#[wasm_bindgen] +pub fn caml_pasta_fp_deep_copy(x: WasmPastaFp) -> WasmPastaFp { + x +} diff --git a/src/lib/crypto/kimchi_bindings/wasm/src/arkworks/pasta_fq.rs b/src/lib/crypto/kimchi_bindings/wasm/src/arkworks/pasta_fq.rs new file mode 100644 index 00000000000..32d9ff78701 --- /dev/null +++ b/src/lib/crypto/kimchi_bindings/wasm/src/arkworks/pasta_fq.rs @@ -0,0 +1,244 @@ +use crate::arkworks::bigint_256::{self, WasmBigInteger256}; +use ark_ff::{ + fields::{Field, FpParameters, PrimeField, SquareRootField}, + FftField, One, UniformRand, Zero, +}; +use ark_ff::{FromBytes, ToBytes}; +use ark_poly::{EvaluationDomain, Radix2EvaluationDomain as Domain}; +use mina_curves::pasta::{fields::fq::FqParameters as Fq_params, Fq}; +use num_bigint::BigUint; +use rand::rngs::StdRng; +use std::cmp::Ordering::{Equal, Greater, Less}; +use wasm_bindgen::convert::{FromWasmAbi, IntoWasmAbi, OptionFromWasmAbi, OptionIntoWasmAbi}; +use wasm_bindgen::prelude::*; + +#[repr(C)] +#[derive(Clone, Copy, Debug)] +pub struct WasmPastaFq(pub Fq); + +impl crate::wasm_flat_vector::FlatVectorElem for WasmPastaFq { + const FLATTENED_SIZE: usize = std::mem::size_of::(); + fn flatten(self) -> Vec { + let mut bytes: Vec = Vec::with_capacity(Self::FLATTENED_SIZE); + self.0.write(&mut bytes).unwrap(); + bytes + } + fn unflatten(flat: Vec) -> Self { + WasmPastaFq(FromBytes::read(flat.as_slice()).unwrap()) + } +} + +impl From for WasmPastaFq { + fn from(x: Fq) -> Self { + WasmPastaFq(x) + } +} + +impl From for Fq { + fn from(x: WasmPastaFq) -> Self { + x.0 + } +} + +impl<'a> From<&'a WasmPastaFq> for &'a Fq { + fn from(x: &'a WasmPastaFq) -> Self { + &x.0 + } +} + +impl wasm_bindgen::describe::WasmDescribe for WasmPastaFq { + fn describe() { + as wasm_bindgen::describe::WasmDescribe>::describe() + } +} + +impl FromWasmAbi for WasmPastaFq { + type Abi = as FromWasmAbi>::Abi; + unsafe fn from_abi(js: Self::Abi) -> Self { + let bytes: Vec = FromWasmAbi::from_abi(js); + WasmPastaFq(FromBytes::read(bytes.as_slice()).unwrap()) + } +} + +impl IntoWasmAbi for WasmPastaFq { + type Abi = as FromWasmAbi>::Abi; + fn into_abi(self) -> Self::Abi { + let mut bytes: Vec = vec![]; + self.0.write(&mut 
bytes).unwrap(); + bytes.into_abi() + } +} + +impl OptionIntoWasmAbi for WasmPastaFq { + fn none() -> Self::Abi { + as OptionIntoWasmAbi>::none() + } +} + +impl OptionFromWasmAbi for WasmPastaFq { + fn is_none(abi: &Self::Abi) -> bool { + as OptionFromWasmAbi>::is_none(abi) + } +} + +#[wasm_bindgen] +pub fn caml_pasta_fq_size_in_bits() -> isize { + Fq_params::MODULUS_BITS as isize +} + +#[wasm_bindgen] +pub fn caml_pasta_fq_size() -> WasmBigInteger256 { + WasmBigInteger256(Fq_params::MODULUS) +} + +#[wasm_bindgen] +pub fn caml_pasta_fq_add(x: WasmPastaFq, y: WasmPastaFq) -> WasmPastaFq { + WasmPastaFq(x.0 + y.0) +} + +#[wasm_bindgen] +pub fn caml_pasta_fq_sub(x: WasmPastaFq, y: WasmPastaFq) -> WasmPastaFq { + WasmPastaFq(x.0 - y.0) +} + +#[wasm_bindgen] +pub fn caml_pasta_fq_negate(x: WasmPastaFq) -> WasmPastaFq { + WasmPastaFq(-x.0) +} + +#[wasm_bindgen] +pub fn caml_pasta_fq_mul(x: WasmPastaFq, y: WasmPastaFq) -> WasmPastaFq { + WasmPastaFq(x.0 * y.0) +} + +#[wasm_bindgen] +pub fn caml_pasta_fq_div(x: WasmPastaFq, y: WasmPastaFq) -> WasmPastaFq { + WasmPastaFq(x.0 / y.0) +} + +#[wasm_bindgen] +pub fn caml_pasta_fq_inv(x: WasmPastaFq) -> Option { + x.0.inverse().map(WasmPastaFq) +} + +#[wasm_bindgen] +pub fn caml_pasta_fq_square(x: WasmPastaFq) -> WasmPastaFq { + WasmPastaFq(x.0.square()) +} + +#[wasm_bindgen] +pub fn caml_pasta_fq_is_square(x: WasmPastaFq) -> bool { + let s = x.0.pow(Fq_params::MODULUS_MINUS_ONE_DIV_TWO); + s.is_zero() || s.is_one() +} + +#[wasm_bindgen] +pub fn caml_pasta_fq_sqrt(x: WasmPastaFq) -> Option { + x.0.sqrt().map(WasmPastaFq) +} + +#[wasm_bindgen] +pub fn caml_pasta_fq_of_int(i: i32) -> WasmPastaFq { + WasmPastaFq(Fq::from(i as u64)) +} + +#[wasm_bindgen] +pub fn caml_pasta_fq_to_string(x: WasmPastaFq) -> String { + bigint_256::to_biguint(&x.0.into_repr()).to_string() +} + +#[wasm_bindgen] +pub fn caml_pasta_fq_of_string(s: String) -> Result { + let biguint = BigUint::parse_bytes(s.as_bytes(), 10) + .ok_or(JsValue::from_str("caml_pasta_fq_of_string"))?; + + match Fq::from_repr(bigint_256::of_biguint(&biguint)) { + Some(x) => Ok(x.into()), + None => Err(JsValue::from_str("caml_pasta_fq_of_string")), + } +} + +#[wasm_bindgen] +pub fn caml_pasta_fq_print(x: WasmPastaFq) { + println!("{}", bigint_256::to_biguint(&(x.0.into_repr()))); +} + +#[wasm_bindgen] +pub fn caml_pasta_fq_compare(x: WasmPastaFq, y: WasmPastaFq) -> i32 { + match x.0.cmp(&y.0) { + Less => -1, + Equal => 0, + Greater => 1, + } +} + +#[wasm_bindgen] +pub fn caml_pasta_fq_equal(x: WasmPastaFq, y: WasmPastaFq) -> bool { + x.0 == y.0 +} + +#[wasm_bindgen] +pub fn caml_pasta_fq_random() -> WasmPastaFq { + WasmPastaFq(UniformRand::rand(&mut rand::thread_rng())) +} + +#[wasm_bindgen] +pub fn caml_pasta_fq_rng(i: i32) -> WasmPastaFq { + // We only care about entropy here, so we force a conversion i32 -> u32. 
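    // As in `caml_pasta_fp_rng`: `seed_from_u64` yields a deterministic value per
    // `i`, not cryptographically secure randomness.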
+ let i: u64 = (i as u32).into(); + let mut rng: StdRng = rand::SeedableRng::seed_from_u64(i); + WasmPastaFq(UniformRand::rand(&mut rng)) +} + +#[wasm_bindgen] +pub fn caml_pasta_fq_to_bigint(x: WasmPastaFq) -> WasmBigInteger256 { + WasmBigInteger256(x.0.into_repr()) +} + +#[wasm_bindgen] +pub fn caml_pasta_fq_of_bigint(x: WasmBigInteger256) -> Result { + match Fq::from_repr(x.0) { + Some(x) => Ok(x.into()), + None => Err(JsValue::from_str("caml_pasta_fq_of_bigint")), + } +} + +#[wasm_bindgen] +pub fn caml_pasta_fq_two_adic_root_of_unity() -> WasmPastaFq { + WasmPastaFq(FftField::two_adic_root_of_unity()) +} + +#[wasm_bindgen] +pub fn caml_pasta_fq_domain_generator(log2_size: i32) -> WasmPastaFq { + match Domain::new(1 << log2_size) { + Some(x) => WasmPastaFq(x.group_gen), + None => panic!("caml_pasta_fq_domain_generator"), + } +} + +#[wasm_bindgen] +pub fn caml_pasta_fq_to_bytes(x: WasmPastaFq) -> Vec { + let len = std::mem::size_of::(); + let mut str: Vec = Vec::with_capacity(len); + str.resize(len, 0); + let str_as_fq: *mut Fq = str.as_mut_ptr().cast::(); + unsafe { + *str_as_fq = x.0; + } + str +} + +#[wasm_bindgen] +pub fn caml_pasta_fq_of_bytes(x: &[u8]) -> WasmPastaFq { + let len = std::mem::size_of::(); + if x.len() != len { + panic!("caml_pasta_fq_of_bytes"); + }; + let x = unsafe { *(x.as_ptr() as *const Fq) }; + WasmPastaFq(x) +} + +#[wasm_bindgen] +pub fn caml_pasta_fq_deep_copy(x: WasmPastaFq) -> WasmPastaFq { + x +} diff --git a/src/lib/crypto/kimchi_bindings/wasm/src/circuit.rs b/src/lib/crypto/kimchi_bindings/wasm/src/circuit.rs new file mode 100644 index 00000000000..6d4f389f672 --- /dev/null +++ b/src/lib/crypto/kimchi_bindings/wasm/src/circuit.rs @@ -0,0 +1,36 @@ +use ark_ff::PrimeField; +use kimchi::circuits::constraints::ConstraintSystem; +use kimchi::circuits::gate::CircuitGate; +use mina_curves::pasta::Fp; +use serde::Serialize; +use wasm_bindgen::prelude::wasm_bindgen; + +use crate::pasta_fp_plonk_index::WasmPastaFpPlonkIndex; + +#[derive(Serialize)] +struct Circuit +where + F: PrimeField, +{ + public_input_size: usize, + #[serde(bound = "CircuitGate: Serialize")] + gates: Vec>, +} + +impl From<&ConstraintSystem> for Circuit +where + F: PrimeField, +{ + fn from(cs: &ConstraintSystem) -> Self { + Circuit { + public_input_size: cs.public, + gates: cs.gates.clone(), + } + } +} + +#[wasm_bindgen] +pub fn prover_to_json(prover_index: &WasmPastaFpPlonkIndex) -> String { + let circuit: Circuit = (&prover_index.0.cs).into(); + serde_json::to_string(&circuit).expect("couldn't serialize constraints") +} diff --git a/src/lib/crypto/kimchi_bindings/wasm/src/gate_vector.rs b/src/lib/crypto/kimchi_bindings/wasm/src/gate_vector.rs new file mode 100644 index 00000000000..ef9bc93da4e --- /dev/null +++ b/src/lib/crypto/kimchi_bindings/wasm/src/gate_vector.rs @@ -0,0 +1,198 @@ +//! A GateVector: this is used to represent a list of gates. + +use crate::wasm_flat_vector::WasmFlatVector; +use kimchi::circuits::{ + gate::GateType, + gate::{Circuit, CircuitGate}, + wires::Wire, +}; +use o1_utils::hasher::CryptoDigest; +use wasm_bindgen::prelude::*; + +use paste::paste; + +#[wasm_bindgen] +#[derive(Clone, Copy, Debug)] +pub struct WasmGateWires( + pub Wire, + pub Wire, + pub Wire, + pub Wire, + pub Wire, + pub Wire, + pub Wire, +); + +#[wasm_bindgen] +impl WasmGateWires { + #[wasm_bindgen(constructor)] + pub fn new(w0: Wire, w1: Wire, w2: Wire, w3: Wire, w4: Wire, w5: Wire, w6: Wire) -> Self { + WasmGateWires(w0, w1, w2, w3, w4, w5, w6) + } +} + +macro_rules! 
impl_gate_vector { + ($name: ident, + $WasmF: ty, + $F: ty, + $field_name: ident) => { + paste! { + #[wasm_bindgen] + pub struct []( + #[wasm_bindgen(skip)] pub Vec>); + pub type WasmGateVector = []; + + #[wasm_bindgen] + pub struct [] { + pub typ: GateType, // type of the gate + pub wires: WasmGateWires, // gate wires + #[wasm_bindgen(skip)] pub coeffs: Vec<$WasmF>, // constraints vector + } + + #[wasm_bindgen] + impl [] { + #[wasm_bindgen(constructor)] + pub fn new( + typ: GateType, + wires: WasmGateWires, + coeffs: WasmFlatVector<$WasmF>) -> Self { + Self { + typ, + wires, + coeffs: coeffs.into(), + } + } + } + + impl From> for [] + { + fn from(cg: CircuitGate<$F>) -> Self { + Self { + typ: cg.typ, + wires: WasmGateWires( + cg.wires[0], + cg.wires[1], + cg.wires[2], + cg.wires[3], + cg.wires[4], + cg.wires[5], + cg.wires[6]), + coeffs: cg.coeffs.into_iter().map(Into::into).collect(), + } + } + } + + impl From<&CircuitGate<$F>> for [] + { + fn from(cg: &CircuitGate<$F>) -> Self { + Self { + typ: cg.typ, + wires: WasmGateWires( + cg.wires[0], + cg.wires[1], + cg.wires[2], + cg.wires[3], + cg.wires[4], + cg.wires[5], + cg.wires[6]), + coeffs: cg.coeffs.clone().into_iter().map(Into::into).collect(), + } + } + } + + impl From<[]> for CircuitGate<$F> + { + fn from(ccg: []) -> Self { + Self { + typ: ccg.typ, + wires: [ + ccg.wires.0, + ccg.wires.1, + ccg.wires.2, + ccg.wires.3, + ccg.wires.4, + ccg.wires.5, + ccg.wires.6 + ], + coeffs: ccg.coeffs.into_iter().map(Into::into).collect(), + } + } + } + + #[wasm_bindgen] + pub fn []() -> WasmGateVector { + [](Vec::new()) + } + + #[wasm_bindgen] + pub fn []( + v: &mut WasmGateVector, + gate: [], + ) { + let gate: CircuitGate<$F> = gate.into(); + v.0.push(gate); + } + + #[wasm_bindgen] + pub fn []( + v: &WasmGateVector, + i: i32, + ) -> [] { + (&(v.0)[i as usize]).into() + } + + #[wasm_bindgen] + pub fn []( + v: &WasmGateVector, + ) -> usize { + v.0.len() + } + + #[wasm_bindgen] + pub fn []( + v: &mut WasmGateVector, + t: Wire, + h: Wire, + ) { + (v.0)[t.row as usize].wires[t.col as usize] = h.into(); + } + + #[wasm_bindgen] + pub fn []( + public_input_size: usize, + v: &WasmGateVector + ) -> Box<[u8]> { + Circuit::new(public_input_size, &(v.0)).digest().to_vec().into_boxed_slice() + } + + #[wasm_bindgen] + pub fn []( + public_input_size: usize, + v: &WasmGateVector + ) -> String { + let circuit = Circuit::new(public_input_size, &v.0); + serde_json::to_string(&circuit).expect("couldn't serialize constraints") + } + } + }; +} + +pub mod fp { + use super::*; + use crate::arkworks::WasmPastaFp as WasmF; + use mina_curves::pasta::Fp as F; + + impl_gate_vector!(fp, WasmF, F, Fp); +} + +// +// Fq +// + +pub mod fq { + use super::*; + use crate::arkworks::WasmPastaFq as WasmF; + use mina_curves::pasta::Fq as F; + + impl_gate_vector!(fq, WasmF, F, Fq); +} diff --git a/src/lib/crypto/kimchi_bindings/wasm/src/lib.rs b/src/lib/crypto/kimchi_bindings/wasm/src/lib.rs new file mode 100644 index 00000000000..3f994cc2caa --- /dev/null +++ b/src/lib/crypto/kimchi_bindings/wasm/src/lib.rs @@ -0,0 +1,114 @@ +#![feature(get_mut_unchecked)] +//! The Marlin_plonk_stubs crate exports some functionalities +//! and structures from the following the Rust crates to OCaml: +//! +//! * [Marlin](https://github.com/o1-labs/marlin), +//! a PLONK implementation. +//! * [Arkworks](http://arkworks.rs/), +//! a math library that Marlin builds on top of. +//! 
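//!
//! As a minimal sketch (illustrative only, not a doctest; in practice these
//! exports are driven from the JavaScript side), the `u32` signalling helpers
//! defined below can be exercised like this:
//!
//! ```ignore
//! let ptr = create_zero_u32_ptr();          // heap-allocate a zeroed u32
//! set_u32_ptr(ptr, 42);                     // volatile write
//! assert_eq!(wait_until_non_zero(ptr), 42); // spin until a non-zero value is read
//! free_u32_ptr(ptr);                        // reclaim the allocation
//! ```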
+ +use wasm_bindgen::prelude::*; + +mod wasm_flat_vector; +mod wasm_vector; + +#[wasm_bindgen] +extern "C" { + pub fn alert(s: &str); +} + +#[wasm_bindgen] +pub fn greet(name: &str) { + alert(&format!("Hello, {name}!")); +} + +#[wasm_bindgen] +extern "C" { + #[wasm_bindgen(js_namespace = console)] + fn log(s: &str); +} + +// produces a warning, but can be useful +// macro_rules! console_log { +// ($($t:tt)*) => (crate::log(&format_args!($($t)*).to_string())) +// } + +#[wasm_bindgen] +pub fn console_log(s: &str) { + log(s); +} + +#[wasm_bindgen] +pub fn create_zero_u32_ptr() -> *mut u32 { + Box::into_raw(std::boxed::Box::new(0)) +} + +#[wasm_bindgen] +pub fn free_u32_ptr(ptr: *mut u32) { + let _drop_me = unsafe { std::boxed::Box::from_raw(ptr) }; +} + +#[wasm_bindgen] +pub fn set_u32_ptr(ptr: *mut u32, arg: u32) { + // The rust docs explicitly forbid using this for cross-thread syncronization. Oh well, we + // don't have anything better. As long as it works in practice, we haven't upset the undefined + // behavior dragons. + unsafe { + std::ptr::write_volatile(ptr, arg); + } +} + +#[allow(unreachable_code)] +#[wasm_bindgen] +pub fn wait_until_non_zero(ptr: *const u32) -> u32 { + // The rust docs explicitly forbid using this for cross-thread syncronization. Oh well, we + // don't have anything better. As long as it works in practice, we haven't upset the undefined + // behavior dragons. + loop { + let contents = unsafe { std::ptr::read_volatile(ptr) }; + if contents != 0 { + return contents; + } + } + unreachable!(); +} + +pub mod rayon; + +/// Arkworks types +pub mod arkworks; + +/// Utils +pub mod urs_utils; // TODO: move this logic to proof-systems + +/// Vectors +pub mod gate_vector; + +pub mod poly_comm; +/// Curves +pub mod projective; + +/// SRS +pub mod srs; + +/// Indexes +pub mod pasta_fp_plonk_index; +pub mod pasta_fq_plonk_index; + +/// Verifier indexes/keys +pub mod plonk_verifier_index; + +/// Oracles +pub mod oracles; + +/// Proofs +pub mod plonk_proof; + +/// Poseidon +pub mod poseidon; + +// exposes circuit for inspection +pub mod circuit; + +pub mod wasm_ocaml_serde; diff --git a/src/lib/crypto/kimchi_bindings/wasm/src/oracles.rs b/src/lib/crypto/kimchi_bindings/wasm/src/oracles.rs new file mode 100644 index 00000000000..96a17baa432 --- /dev/null +++ b/src/lib/crypto/kimchi_bindings/wasm/src/oracles.rs @@ -0,0 +1,325 @@ +use kimchi::circuits::scalars::RandomOracles; +use kimchi::proof::ProverProof; +use kimchi::verifier_index::VerifierIndex as DlogVerifierIndex; +use mina_poseidon::{ + self, + constants::PlonkSpongeConstantsKimchi, + sponge::{DefaultFqSponge, DefaultFrSponge}, + FqSponge, +}; +use paste::paste; +use poly_commitment::commitment::{shift_scalar, PolyComm}; +use poly_commitment::evaluation_proof::OpeningProof; +use poly_commitment::SRS; +use wasm_bindgen::prelude::*; +// use wasm_bindgen::convert::{IntoWasmAbi, FromWasmAbi}; +use crate::wasm_vector::WasmVector; +// use crate::wasm_flat_vector::WasmFlatVector; +use ark_ff::{One, Zero}; + +// +// CamlOracles +// + +// +// Implementation +// + +macro_rules! impl_oracles { + ($WasmF: ty, + $F: ty, + $WasmG: ty, + $G: ty, + $WasmPolyComm: ty, + $WasmProverProof: ty, + $index: ty, + $curve_params: ty, + $field_name: ident) => { + + paste! 
{ + use crate::wasm_flat_vector::WasmFlatVector; + use mina_poseidon::sponge::ScalarChallenge; + + #[wasm_bindgen] + #[derive(Clone, Copy)] + pub struct [] { + pub joint_combiner_chal: Option<$WasmF>, + pub joint_combiner: Option<$WasmF>, + pub beta: $WasmF, + pub gamma: $WasmF, + pub alpha_chal: $WasmF, + pub alpha: $WasmF, + pub zeta: $WasmF, + pub v: $WasmF, + pub u: $WasmF, + pub zeta_chal: $WasmF, + pub v_chal: $WasmF, + pub u_chal: $WasmF, + } + type WasmRandomOracles = []; + + #[wasm_bindgen] + impl [] { + #[allow(clippy::too_many_arguments)] + #[wasm_bindgen(constructor)] + pub fn new( + joint_combiner_chal: Option<$WasmF>, + joint_combiner: Option<$WasmF>, + beta: $WasmF, + gamma: $WasmF, + alpha_chal: $WasmF, + alpha: $WasmF, + zeta: $WasmF, + v: $WasmF, + u: $WasmF, + zeta_chal: $WasmF, + v_chal: $WasmF, + u_chal: $WasmF) -> Self { + Self { + joint_combiner_chal, + joint_combiner, + beta, + gamma, + alpha_chal, + alpha, + zeta, + v, + u, + zeta_chal, + v_chal, + u_chal, + } + } + } + + impl From> for WasmRandomOracles + { + fn from(ro: RandomOracles<$F>) -> Self { + Self { + joint_combiner_chal: ro.joint_combiner.as_ref().map(|x| x.0.0.into()), + joint_combiner: ro.joint_combiner.as_ref().map(|x| x.1.into()), + beta: ro.beta.into(), + gamma: ro.gamma.into(), + alpha_chal: ro.alpha_chal.0.into(), + alpha: ro.alpha.into(), + zeta: ro.zeta.into(), + v: ro.v.into(), + u: ro.u.into(), + zeta_chal: ro.zeta_chal.0.into(), + v_chal: ro.v_chal.0.into(), + u_chal: ro.u_chal.0.into(), + } + } + } + + impl Into> for WasmRandomOracles + { + fn into(self) -> RandomOracles<$F> { + let joint_combiner = + match (self.joint_combiner_chal, self.joint_combiner) { + (Some(joint_combiner_chal), Some(joint_combiner)) => { + Some((ScalarChallenge(joint_combiner_chal.into()), joint_combiner.into())) + }, + _ => None + }; + RandomOracles { + joint_combiner, + beta: self.beta.into(), + gamma: self.gamma.into(), + alpha_chal: ScalarChallenge(self.alpha_chal.into()), + alpha: self.alpha.into(), + zeta: self.zeta.into(), + v: self.v.into(), + u: self.u.into(), + zeta_chal: ScalarChallenge(self.zeta_chal.into()), + v_chal: ScalarChallenge(self.v_chal.into()), + u_chal: ScalarChallenge(self.u_chal.into()), + } + } + } + + #[wasm_bindgen] + #[derive(Clone)] + pub struct [] { + pub o: [], + pub p_eval0: $WasmF, + pub p_eval1: $WasmF, + #[wasm_bindgen(skip)] + pub opening_prechallenges: WasmFlatVector<$WasmF>, + pub digest_before_evaluations: $WasmF, + } + + #[wasm_bindgen] + impl [] { + #[wasm_bindgen(constructor)] + pub fn new( + o: WasmRandomOracles, + p_eval0: $WasmF, + p_eval1: $WasmF, + opening_prechallenges: WasmFlatVector<$WasmF>, + digest_before_evaluations: $WasmF) -> Self { + Self {o, p_eval0, p_eval1, opening_prechallenges, digest_before_evaluations} + } + + #[wasm_bindgen(getter)] + pub fn opening_prechallenges(&self) -> WasmFlatVector<$WasmF> { + self.opening_prechallenges.clone() + } + + #[wasm_bindgen(setter)] + pub fn set_opening_prechallenges(&mut self, x: WasmFlatVector<$WasmF>) { + self.opening_prechallenges = x; + } + } + + #[wasm_bindgen] + pub fn [<$F:snake _oracles_create>]( + lgr_comm: WasmVector<$WasmPolyComm>, // the bases to commit polynomials + index: $index, // parameters + proof: $WasmProverProof, // the final proof (contains public elements at the beginning) + ) -> Result<[], JsError> { + // conversions + let result = crate::rayon::run_in_pool(|| { + let index: DlogVerifierIndex<$G, OpeningProof<$G>> = index.into(); + + let lgr_comm: Vec> = lgr_comm + .into_iter() + 
.take(proof.public.len()) + .map(Into::into) + .collect(); + let lgr_comm_refs: Vec<_> = lgr_comm.iter().collect(); + + let p_comm = PolyComm::<$G>::multi_scalar_mul( + &lgr_comm_refs, + &proof + .public + .iter() + .map(|a| a.clone().into()) + .map(|s: $F| -s) + .collect::>(), + ); + let p_comm = { + index + .srs() + .mask_custom( + p_comm.clone(), + &p_comm.map(|_| $F::one()), + ) + .unwrap() + .commitment + }; + + let (proof, public_input): (ProverProof<$G, OpeningProof<$G>>, Vec<$F>) = proof.into(); + + let oracles_result = + proof.oracles::< + DefaultFqSponge<$curve_params, PlonkSpongeConstantsKimchi>, + DefaultFrSponge<$F, PlonkSpongeConstantsKimchi> + >(&index, &p_comm, Some(&public_input)); + let oracles_result = match oracles_result { + Err(e) => { + return Err(format!("oracles_create: {}", e)); + } + Ok(cs) => cs, + }; + + let (mut sponge, combined_inner_product, p_eval, digest, oracles) = ( + oracles_result.fq_sponge, + oracles_result.combined_inner_product, + oracles_result.public_evals, + oracles_result.digest, + oracles_result.oracles, + ); + + sponge.absorb_fr(&[shift_scalar::<$G>(combined_inner_product)]); + + let opening_prechallenges = proof + .proof + .prechallenges(&mut sponge) + .into_iter() + .map(|x| x.0.into()) + .collect(); + + Ok((oracles, p_eval, opening_prechallenges, digest)) + }); + + match result { + Ok((oracles, p_eval, opening_prechallenges, digest)) => Ok([] { + o: oracles.into(), + p_eval0: p_eval[0][0].into(), + p_eval1: p_eval[1][0].into(), + opening_prechallenges, + digest_before_evaluations: digest.into() + }), + Err(err) => Err(JsError::new(&err)) + } + } + + #[wasm_bindgen] + pub fn [<$F:snake _oracles_dummy>]() -> [] { + [] { + o: RandomOracles::<$F>::default().into(), + p_eval0: $F::zero().into(), + p_eval1: $F::zero().into(), + opening_prechallenges: vec![].into(), + digest_before_evaluations: $F::zero().into(), + } + } + + #[wasm_bindgen] + pub fn [<$F:snake _oracles_deep_copy>]( + x: $WasmProverProof, + ) -> $WasmProverProof { + x + } + } + } +} + +// +// +// + +pub mod fp { + use super::*; + use crate::{ + arkworks::WasmPastaFp, plonk_proof::fp::WasmFpProverProof as WasmProverProof, + plonk_verifier_index::fp::WasmFpPlonkVerifierIndex as WasmPlonkVerifierIndex, + poly_comm::vesta::WasmFpPolyComm as WasmPolyComm, + }; + use mina_curves::pasta::{Fp, Vesta as GAffine, VestaParameters}; + + impl_oracles!( + WasmPastaFp, + Fp, + WasmGVesta, + GAffine, + WasmPolyComm, + WasmProverProof, + WasmPlonkVerifierIndex, + VestaParameters, + Fp + ); +} + +pub mod fq { + use super::*; + use crate::{ + arkworks::WasmPastaFq, plonk_proof::fq::WasmFqProverProof as WasmProverProof, + plonk_verifier_index::fq::WasmFqPlonkVerifierIndex as WasmPlonkVerifierIndex, + poly_comm::pallas::WasmFqPolyComm as WasmPolyComm, + }; + use mina_curves::pasta::{Fq, Pallas as GAffine, PallasParameters}; + + impl_oracles!( + WasmPastaFq, + Fq, + WasmGPallas, + GAffine, + WasmPolyComm, + WasmProverProof, + WasmPlonkVerifierIndex, + PallasParameters, + Fq + ); +} diff --git a/src/lib/crypto/kimchi_bindings/wasm/src/pasta_fp_plonk_index.rs b/src/lib/crypto/kimchi_bindings/wasm/src/pasta_fp_plonk_index.rs new file mode 100644 index 00000000000..2d1f9432448 --- /dev/null +++ b/src/lib/crypto/kimchi_bindings/wasm/src/pasta_fp_plonk_index.rs @@ -0,0 +1,305 @@ +use ark_poly::EvaluationDomain; +use kimchi::circuits::lookup::runtime_tables::RuntimeTableCfg; + +use crate::arkworks::WasmPastaFp; +use crate::gate_vector::fp::WasmGateVector; +use crate::srs::fp::WasmFpSrs as WasmSrs; +use 
crate::wasm_flat_vector::WasmFlatVector; +use crate::wasm_vector::{fp::*, WasmVector}; +use kimchi::circuits::lookup::tables::LookupTable; +use kimchi::circuits::{constraints::ConstraintSystem, gate::CircuitGate}; +use kimchi::linearization::expr_linearization; +use kimchi::poly_commitment::evaluation_proof::OpeningProof; +use kimchi::prover_index::ProverIndex; +use mina_curves::pasta::{Fp, Pallas as GAffineOther, Vesta as GAffine, VestaParameters}; +use mina_poseidon::{constants::PlonkSpongeConstantsKimchi, sponge::DefaultFqSponge}; +use serde::{Deserialize, Serialize}; +use std::{ + fs::{File, OpenOptions}, + io::{BufReader, BufWriter, Seek, SeekFrom::Start}, +}; +use wasm_bindgen::prelude::*; + +// +// CamlPastaFpPlonkIndex (custom type) +// + +/// Boxed so that we don't store large proving indexes in the OCaml heap. +#[wasm_bindgen] +pub struct WasmPastaFpPlonkIndex( + #[wasm_bindgen(skip)] pub Box>>, +); + +// This should mimic LookupTable structure +#[wasm_bindgen] +pub struct WasmPastaFpLookupTable { + #[wasm_bindgen(skip)] + pub id: i32, + #[wasm_bindgen(skip)] + pub data: WasmVecVecFp, +} + +// Converter from WasmPastaFpLookupTable to LookupTable, used by the binding +// below. +impl From for LookupTable { + fn from(wasm_lt: WasmPastaFpLookupTable) -> LookupTable { + LookupTable { + id: wasm_lt.id.into(), + data: wasm_lt.data.0, + } + } +} + +// JS constructor for js/bindings.js +#[wasm_bindgen] +impl WasmPastaFpLookupTable { + #[wasm_bindgen(constructor)] + pub fn new(id: i32, data: WasmVecVecFp) -> WasmPastaFpLookupTable { + WasmPastaFpLookupTable { id, data } + } +} + +// Runtime table config + +#[wasm_bindgen] +pub struct WasmPastaFpRuntimeTableCfg { + #[wasm_bindgen(skip)] + pub id: i32, + #[wasm_bindgen(skip)] + pub first_column: WasmFlatVector, +} + +// JS constructor for js/bindings.js +#[wasm_bindgen] +impl WasmPastaFpRuntimeTableCfg { + #[wasm_bindgen(constructor)] + pub fn new(id: i32, first_column: WasmFlatVector) -> Self { + Self { id, first_column } + } +} + +impl From for RuntimeTableCfg { + fn from(wasm_rt_table_cfg: WasmPastaFpRuntimeTableCfg) -> Self { + Self { + id: wasm_rt_table_cfg.id, + first_column: wasm_rt_table_cfg + .first_column + .into_iter() + .map(Into::into) + .collect(), + } + } +} + +// CamlPastaFpPlonkIndex methods +// + +// Change js/web/worker-spec.js accordingly +#[wasm_bindgen] +pub fn caml_pasta_fp_plonk_index_create( + gates: &WasmGateVector, + public_: i32, + lookup_tables: WasmVector, + runtime_table_cfgs: WasmVector, + prev_challenges: i32, + srs: &WasmSrs, +) -> Result { + console_error_panic_hook::set_once(); + let index = crate::rayon::run_in_pool(|| { + // flatten the permutation information (because OCaml has a different way of keeping track of permutations) + let gates: Vec<_> = gates + .0 + .iter() + .map(|gate| CircuitGate:: { + typ: gate.typ, + wires: gate.wires, + coeffs: gate.coeffs.clone(), + }) + .collect(); + + let rust_runtime_table_cfgs: Vec> = + runtime_table_cfgs.into_iter().map(Into::into).collect(); + + let rust_lookup_tables: Vec> = + lookup_tables.into_iter().map(Into::into).collect(); + + // create constraint system + let cs = match ConstraintSystem::::create(gates) + .public(public_ as usize) + .prev_challenges(prev_challenges as usize) + .lookup(rust_lookup_tables) + .runtime(if rust_runtime_table_cfgs.is_empty() { + None + } else { + Some(rust_runtime_table_cfgs) + }) + .build() + { + Err(_) => { + return Err("caml_pasta_fp_plonk_index_create: could not create constraint system"); + } + Ok(cs) => cs, + }; + + // 
endo + let (endo_q, _endo_r) = poly_commitment::srs::endos::(); + + // Unsafe if we are in a multi-core ocaml + { + let ptr: &mut poly_commitment::srs::SRS = + unsafe { &mut *(std::sync::Arc::as_ptr(&srs.0) as *mut _) }; + ptr.add_lagrange_basis(cs.domain.d1); + } + + let mut index = + ProverIndex::>::create(cs, endo_q, srs.0.clone()); + // Compute and cache the verifier index digest + index.compute_verifier_index_digest::>(); + Ok(index) + }); + + // create index + match index { + Ok(index) => Ok(WasmPastaFpPlonkIndex(Box::new(index))), + Err(str) => Err(JsError::new(str)), + } +} + +#[wasm_bindgen] +pub fn caml_pasta_fp_plonk_index_max_degree(index: &WasmPastaFpPlonkIndex) -> i32 { + index.0.srs.max_degree() as i32 +} + +#[wasm_bindgen] +pub fn caml_pasta_fp_plonk_index_public_inputs(index: &WasmPastaFpPlonkIndex) -> i32 { + index.0.cs.public as i32 +} + +#[wasm_bindgen] +pub fn caml_pasta_fp_plonk_index_domain_d1_size(index: &WasmPastaFpPlonkIndex) -> i32 { + index.0.cs.domain.d1.size() as i32 +} + +#[wasm_bindgen] +pub fn caml_pasta_fp_plonk_index_domain_d4_size(index: &WasmPastaFpPlonkIndex) -> i32 { + index.0.cs.domain.d4.size() as i32 +} + +#[wasm_bindgen] +pub fn caml_pasta_fp_plonk_index_domain_d8_size(index: &WasmPastaFpPlonkIndex) -> i32 { + index.0.cs.domain.d8.size() as i32 +} + +#[wasm_bindgen] +pub fn caml_pasta_fp_plonk_index_decode( + bytes: &[u8], + srs: &WasmSrs, +) -> Result { + let mut deserializer = rmp_serde::Deserializer::new(bytes); + let mut index = + ProverIndex::>::deserialize(&mut deserializer) + .map_err(|e| JsError::new(&format!("caml_pasta_fp_plonk_index_decode: {}", e)))?; + + index.srs = srs.0.clone(); + let (linearization, powers_of_alpha) = + expr_linearization(Some(&index.cs.feature_flags), true, 3); + index.linearization = linearization; + index.powers_of_alpha = powers_of_alpha; + + Ok(WasmPastaFpPlonkIndex(Box::new(index))) +} + +#[wasm_bindgen] +pub fn caml_pasta_fp_plonk_index_encode(index: &WasmPastaFpPlonkIndex) -> Result, JsError> { + let mut buffer = Vec::new(); + let mut serializer = rmp_serde::Serializer::new(&mut buffer); + index + .0 + .serialize(&mut serializer) + .map_err(|e| JsError::new(&format!("caml_pasta_fp_plonk_index_encode: {}", e)))?; + Ok(buffer) +} + +#[wasm_bindgen] +pub fn caml_pasta_fp_plonk_index_read( + offset: Option, + srs: &WasmSrs, + path: String, +) -> Result { + // read from file + let file = match File::open(path) { + Err(_) => return Err(JsValue::from_str("caml_pasta_fp_plonk_index_read")), + Ok(file) => file, + }; + let mut r = BufReader::new(file); + + // optional offset in file + if let Some(offset) = offset { + r.seek(Start(offset as u64)) + .map_err(|err| JsValue::from_str(&format!("caml_pasta_fp_plonk_index_read: {err}")))?; + } + + // deserialize the index + let mut t = ProverIndex::>::deserialize( + &mut rmp_serde::Deserializer::new(r), + ) + .map_err(|err| JsValue::from_str(&format!("caml_pasta_fp_plonk_index_read: {err}")))?; + t.srs = srs.0.clone(); + let (linearization, powers_of_alpha) = expr_linearization(Some(&t.cs.feature_flags), true, 3); + t.linearization = linearization; + t.powers_of_alpha = powers_of_alpha; + + // + Ok(WasmPastaFpPlonkIndex(Box::new(t))) +} + +#[wasm_bindgen] +pub fn caml_pasta_fp_plonk_index_write( + append: Option, + index: &WasmPastaFpPlonkIndex, + path: String, +) -> Result<(), JsValue> { + let file = OpenOptions::new() + .append(append.unwrap_or(true)) + .open(path) + .map_err(|_| JsValue::from_str("caml_pasta_fp_plonk_index_write"))?; + let w = BufWriter::new(file); + 
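    // Serialize the prover index as MessagePack (rmp-serde) through the buffered
    // writer. Note that with `append(true)` and no `create(true)`, `open` fails if
    // the target file does not already exist.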
index + .0 + .serialize(&mut rmp_serde::Serializer::new(w)) + .map_err(|e| JsValue::from_str(&format!("caml_pasta_fp_plonk_index_read: {e}"))) +} + +#[wasm_bindgen] +pub fn caml_pasta_fp_plonk_index_serialize(index: &WasmPastaFpPlonkIndex) -> String { + let serialized = rmp_serde::to_vec(&index.0).unwrap(); + base64::encode(serialized) +} + +// helpers + +fn format_field(f: &Fp) -> String { + // TODO this could be much nicer, should end up as "1", "-1", "0" etc + format!("{f}") +} + +pub fn format_circuit_gate(i: usize, gate: &CircuitGate) -> String { + let coeffs = gate + .coeffs + .iter() + .map(format_field) + .collect::>() + .join("\n"); + let wires = gate + .wires + .iter() + .enumerate() + .filter(|(j, wire)| wire.row != i || wire.col != *j) + .map(|(j, wire)| format!("({}, {}) --> ({}, {})", i, j, wire.row, wire.col)) + .collect::>() + .join("\n"); + format!( + "c[{}][{:?}]:\nconstraints\n{}\nwires\n{}\n", + i, gate.typ, coeffs, wires + ) +} diff --git a/src/lib/crypto/kimchi_bindings/wasm/src/pasta_fq_plonk_index.rs b/src/lib/crypto/kimchi_bindings/wasm/src/pasta_fq_plonk_index.rs new file mode 100644 index 00000000000..911dbcf9b08 --- /dev/null +++ b/src/lib/crypto/kimchi_bindings/wasm/src/pasta_fq_plonk_index.rs @@ -0,0 +1,276 @@ +use ark_poly::EvaluationDomain; +use kimchi::circuits::lookup::runtime_tables::RuntimeTableCfg; + +use crate::arkworks::WasmPastaFq; +use crate::gate_vector::fq::WasmGateVector; +use crate::srs::fq::WasmFqSrs as WasmSrs; +use crate::wasm_flat_vector::WasmFlatVector; +use crate::wasm_vector::{fq::*, WasmVector}; +use kimchi::circuits::lookup::tables::LookupTable; +use kimchi::circuits::{constraints::ConstraintSystem, gate::CircuitGate}; +use kimchi::linearization::expr_linearization; +use kimchi::poly_commitment::evaluation_proof::OpeningProof; +use kimchi::prover_index::ProverIndex; +use mina_curves::pasta::{Fq, Pallas as GAffine, PallasParameters, Vesta as GAffineOther}; +use mina_poseidon::{constants::PlonkSpongeConstantsKimchi, sponge::DefaultFqSponge}; +use serde::{Deserialize, Serialize}; +use std::{ + fs::{File, OpenOptions}, + io::{BufReader, BufWriter, Seek, SeekFrom::Start}, +}; +use wasm_bindgen::prelude::*; + +// +// CamlPastaFqPlonkIndex (custom type) +// + +/// Boxed so that we don't store large proving indexes in the OCaml heap. 
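/// This is the `Fq`/Pallas counterpart of `WasmPastaFpPlonkIndex` from
/// `pasta_fp_plonk_index.rs`.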
+#[wasm_bindgen] +pub struct WasmPastaFqPlonkIndex( + #[wasm_bindgen(skip)] pub Box>>, +); + +#[wasm_bindgen] +pub struct WasmPastaFqLookupTable { + #[wasm_bindgen(skip)] + pub id: i32, + #[wasm_bindgen(skip)] + pub data: WasmVecVecFq, +} + +impl From for LookupTable { + fn from(wasm_lt: WasmPastaFqLookupTable) -> LookupTable { + LookupTable { + id: wasm_lt.id.into(), + data: wasm_lt.data.0, + } + } +} + +// JS constructor for js/bindings.js +#[wasm_bindgen] +impl WasmPastaFqLookupTable { + #[wasm_bindgen(constructor)] + pub fn new(id: i32, data: WasmVecVecFq) -> WasmPastaFqLookupTable { + WasmPastaFqLookupTable { id, data } + } +} + +// Runtime table config + +#[wasm_bindgen] +pub struct WasmPastaFqRuntimeTableCfg { + #[wasm_bindgen(skip)] + pub id: i32, + #[wasm_bindgen(skip)] + pub first_column: WasmFlatVector, +} + +impl From for RuntimeTableCfg { + fn from(wasm_rt_cfg: WasmPastaFqRuntimeTableCfg) -> Self { + Self { + id: wasm_rt_cfg.id, + first_column: wasm_rt_cfg + .first_column + .into_iter() + .map(Into::into) + .collect(), + } + } +} + +// JS constructor for js/bindings.js +#[wasm_bindgen] +impl WasmPastaFqRuntimeTableCfg { + #[wasm_bindgen(constructor)] + pub fn new(id: i32, first_column: WasmFlatVector) -> Self { + Self { id, first_column } + } +} + +// +// CamlPastaFqPlonkIndex methods +// + +// Change js/web/worker-spec.js accordingly +#[wasm_bindgen] +pub fn caml_pasta_fq_plonk_index_create( + gates: &WasmGateVector, + public_: i32, + lookup_tables: WasmVector, + runtime_table_cfgs: WasmVector, + prev_challenges: i32, + srs: &WasmSrs, +) -> Result { + console_error_panic_hook::set_once(); + let index = crate::rayon::run_in_pool(|| { + // flatten the permutation information (because OCaml has a different way of keeping track of permutations) + let gates: Vec<_> = gates + .0 + .iter() + .map(|gate| CircuitGate:: { + typ: gate.typ, + wires: gate.wires, + coeffs: gate.coeffs.clone(), + }) + .collect(); + + let rust_runtime_table_cfgs: Vec> = + runtime_table_cfgs.into_iter().map(Into::into).collect(); + + let rust_lookup_tables: Vec> = + lookup_tables.into_iter().map(Into::into).collect(); + + // create constraint system + let cs = match ConstraintSystem::::create(gates) + .public(public_ as usize) + .prev_challenges(prev_challenges as usize) + .lookup(rust_lookup_tables) + .runtime(if rust_runtime_table_cfgs.is_empty() { + None + } else { + Some(rust_runtime_table_cfgs) + }) + .build() + { + Err(_) => { + return Err("caml_pasta_fq_plonk_index_create: could not create constraint system"); + } + Ok(cs) => cs, + }; + + // endo + let (endo_q, _endo_r) = poly_commitment::srs::endos::(); + + // Unsafe if we are in a multi-core ocaml + { + let ptr: &mut poly_commitment::srs::SRS = + unsafe { &mut *(std::sync::Arc::as_ptr(&srs.0) as *mut _) }; + ptr.add_lagrange_basis(cs.domain.d1); + } + + let mut index = + ProverIndex::>::create(cs, endo_q, srs.0.clone()); + // Compute and cache the verifier index digest + index.compute_verifier_index_digest::>(); + + Ok(index) + }); + + // create index + match index { + Ok(index) => Ok(WasmPastaFqPlonkIndex(Box::new(index))), + Err(str) => Err(JsError::new(str)), + } +} + +#[wasm_bindgen] +pub fn caml_pasta_fq_plonk_index_max_degree(index: &WasmPastaFqPlonkIndex) -> i32 { + index.0.srs.max_degree() as i32 +} + +#[wasm_bindgen] +pub fn caml_pasta_fq_plonk_index_public_inputs(index: &WasmPastaFqPlonkIndex) -> i32 { + index.0.cs.public as i32 +} + +#[wasm_bindgen] +pub fn caml_pasta_fq_plonk_index_domain_d1_size(index: &WasmPastaFqPlonkIndex) -> i32 { + 
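    // Size of the base evaluation domain `d1`, returned as `i32` for the JS caller
    // (the `d4`/`d8` accessors below do the same for the larger domains).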
index.0.cs.domain.d1.size() as i32 +} + +#[wasm_bindgen] +pub fn caml_pasta_fq_plonk_index_domain_d4_size(index: &WasmPastaFqPlonkIndex) -> i32 { + index.0.cs.domain.d4.size() as i32 +} + +#[wasm_bindgen] +pub fn caml_pasta_fq_plonk_index_domain_d8_size(index: &WasmPastaFqPlonkIndex) -> i32 { + index.0.cs.domain.d8.size() as i32 +} + +#[wasm_bindgen] +pub fn caml_pasta_fq_plonk_index_decode( + bytes: &[u8], + srs: &WasmSrs, +) -> Result { + let mut deserializer = rmp_serde::Deserializer::new(bytes); + let mut index = + ProverIndex::>::deserialize(&mut deserializer) + .map_err(|e| JsError::new(&format!("caml_pasta_fq_plonk_index_decode: {}", e)))?; + + index.srs = srs.0.clone(); + let (linearization, powers_of_alpha) = + expr_linearization(Some(&index.cs.feature_flags), true, 3); + index.linearization = linearization; + index.powers_of_alpha = powers_of_alpha; + + Ok(WasmPastaFqPlonkIndex(Box::new(index))) +} + +#[wasm_bindgen] +pub fn caml_pasta_fq_plonk_index_encode(index: &WasmPastaFqPlonkIndex) -> Result, JsError> { + let mut buffer = Vec::new(); + let mut serializer = rmp_serde::Serializer::new(&mut buffer); + index + .0 + .serialize(&mut serializer) + .map_err(|e| JsError::new(&format!("caml_pasta_fq_plonk_index_encode: {}", e)))?; + Ok(buffer) +} + +#[wasm_bindgen] +pub fn caml_pasta_fq_plonk_index_read( + offset: Option, + srs: &WasmSrs, + path: String, +) -> Result { + // read from file + let file = match File::open(path) { + Err(_) => return Err(JsValue::from_str("caml_pasta_fq_plonk_index_read")), + Ok(file) => file, + }; + let mut r = BufReader::new(file); + + // optional offset in file + if let Some(offset) = offset { + r.seek(Start(offset as u64)) + .map_err(|err| JsValue::from_str(&format!("caml_pasta_fq_plonk_index_read: {err}")))?; + } + + // deserialize the index + let mut t = ProverIndex::>::deserialize( + &mut rmp_serde::Deserializer::new(r), + ) + .map_err(|err| JsValue::from_str(&format!("caml_pasta_fq_plonk_index_read: {err}")))?; + t.srs = srs.0.clone(); + let (linearization, powers_of_alpha) = expr_linearization(Some(&t.cs.feature_flags), true, 3); + t.linearization = linearization; + t.powers_of_alpha = powers_of_alpha; + + // + Ok(WasmPastaFqPlonkIndex(Box::new(t))) +} + +#[wasm_bindgen] +pub fn caml_pasta_fq_plonk_index_write( + append: Option, + index: &WasmPastaFqPlonkIndex, + path: String, +) -> Result<(), JsValue> { + let file = OpenOptions::new() + .append(append.unwrap_or(true)) + .open(path) + .map_err(|_| JsValue::from_str("caml_pasta_fq_plonk_index_write"))?; + let w = BufWriter::new(file); + index + .0 + .serialize(&mut rmp_serde::Serializer::new(w)) + .map_err(|e| JsValue::from_str(&format!("caml_pasta_fq_plonk_index_read: {e}"))) +} + +#[wasm_bindgen] +pub fn caml_pasta_fq_plonk_index_serialize(index: &WasmPastaFqPlonkIndex) -> String { + let serialized = rmp_serde::to_vec(&index.0).unwrap(); + base64::encode(serialized) +} diff --git a/src/lib/crypto/kimchi_bindings/wasm/src/plonk_proof.rs b/src/lib/crypto/kimchi_bindings/wasm/src/plonk_proof.rs new file mode 100644 index 00000000000..d035816288d --- /dev/null +++ b/src/lib/crypto/kimchi_bindings/wasm/src/plonk_proof.rs @@ -0,0 +1,915 @@ +// use kimchi::circuits::expr::{Linearization, PolishToken, Variable, Column}; +// use kimchi::circuits::gate::{GateType, CurrOrNext}; +use crate::wasm_flat_vector::WasmFlatVector; +use crate::wasm_vector::fp::WasmVecVecFp; +use crate::wasm_vector::fq::WasmVecVecFq; +use crate::wasm_vector::WasmVector; +use paste::paste; +use std::convert::TryInto; +use 
wasm_bindgen::prelude::*; +// use std::sync::Arc; +// use poly_commitment::srs::SRS; +use kimchi::circuits::lookup::runtime_tables::RuntimeTable; +// use kimchi::index::{expr_linearization, VerifierIndex as DlogVerifierIndex}; +// use ark_poly::{EvaluationDomain, Radix2EvaluationDomain as Domain}; +use ark_ec::AffineCurve; +use ark_ff::One; +use array_init::array_init; +use kimchi::circuits::wires::COLUMNS; +use kimchi::verifier::Context; +use std::array; +// use std::path::Path; +use groupmap::GroupMap; +use kimchi::proof::{ + LookupCommitments, PointEvaluations, ProofEvaluations, ProverCommitments, ProverProof, + RecursionChallenge, +}; +use kimchi::prover_index::ProverIndex; +use kimchi::verifier::batch_verify; +use mina_poseidon::{ + constants::PlonkSpongeConstantsKimchi, + sponge::{DefaultFqSponge, DefaultFrSponge}, +}; +use poly_commitment::{ + commitment::{CommitmentCurve, PolyComm}, + evaluation_proof::OpeningProof, +}; +use serde::{Deserialize, Serialize}; + +#[wasm_bindgen] +extern "C" { + #[wasm_bindgen(js_namespace = console)] + fn log(s: &str); +} + +macro_rules! impl_proof { + ( + $name: ident, + $WasmG: ty, + $G: ty, + $WasmF: ty, + $F: ty, + $WasmPolyComm: ty, + $WasmSrs: ty, + $GOther: ty, + $FrSpongeParams: path, + $FqSpongeParams: path, + $WasmIndex: ty, + $WasmVerifierIndex: ty, + $field_name: ident + ) => { + paste! { + type WasmVecVecF = []; + + #[derive(Clone)] + pub struct []( + ProofEvaluations>> + ); + type WasmProofEvaluations = []; + + impl wasm_bindgen::describe::WasmDescribe for WasmProofEvaluations { + fn describe() { + ::describe() + } + } + + impl wasm_bindgen::convert::FromWasmAbi for WasmProofEvaluations { + type Abi = ::Abi; + unsafe fn from_abi(js: Self::Abi) -> Self { + let js: JsValue = wasm_bindgen::convert::FromWasmAbi::from_abi(js); + Self( + ProofEvaluations::deserialize( + crate::wasm_ocaml_serde::de::Deserializer::from(js), + ) + .unwrap(), + ) + } + } + + impl wasm_bindgen::convert::IntoWasmAbi for WasmProofEvaluations { + type Abi = ::Abi; + fn into_abi(self) -> Self::Abi { + let js = self + .0 + .serialize(&crate::wasm_ocaml_serde::ser::Serializer::new()) + .unwrap(); + wasm_bindgen::convert::IntoWasmAbi::into_abi(js) + } + } + + impl From<&WasmProofEvaluations> for ProofEvaluations>> { + fn from(x: &WasmProofEvaluations) -> Self { + x.0.clone() + } + } + + impl From for ProofEvaluations>> { + fn from(x: WasmProofEvaluations) -> Self { + x.0 + } + } + + impl From<&ProofEvaluations>>> for WasmProofEvaluations { + fn from(x: &ProofEvaluations>>) -> Self { + Self(x.clone()) + } + } + + impl From>>> for WasmProofEvaluations { + fn from(x: ProofEvaluations>>) -> Self { + Self(x) + } + } + + #[wasm_bindgen] + #[derive(Clone)] + pub struct [] + { + #[wasm_bindgen(skip)] + pub sorted: WasmVector<$WasmPolyComm>, + #[wasm_bindgen(skip)] + pub aggreg: $WasmPolyComm, + #[wasm_bindgen(skip)] + pub runtime: Option<$WasmPolyComm>, + } + + type WasmLookupCommitments = []; + + #[wasm_bindgen] + impl [] { + #[wasm_bindgen(constructor)] + pub fn new( + sorted: WasmVector<$WasmPolyComm>, + aggreg: $WasmPolyComm, + runtime: Option<$WasmPolyComm>) -> Self { + WasmLookupCommitments { sorted, aggreg, runtime } + } + + #[wasm_bindgen(getter)] + pub fn sorted(&self) -> WasmVector<$WasmPolyComm> { + self.sorted.clone() + } + + #[wasm_bindgen(getter)] + pub fn aggreg(&self) -> $WasmPolyComm { + self.aggreg.clone() + } + + #[wasm_bindgen(getter)] + pub fn runtime(&self) -> Option<$WasmPolyComm> { + self.runtime.clone() + } + + #[wasm_bindgen(setter)] + pub fn 
set_sorted(&mut self, s: WasmVector<$WasmPolyComm>) { + self.sorted = s + } + + #[wasm_bindgen(setter)] + pub fn set_aggreg(&mut self, a: $WasmPolyComm) { + self.aggreg = a + } + + #[wasm_bindgen(setter)] + pub fn set_runtime(&mut self, r: Option<$WasmPolyComm>) { + self.runtime = r + } + } + + + impl From<&LookupCommitments<$G>> for WasmLookupCommitments { + fn from(x: &LookupCommitments<$G>) -> Self { + WasmLookupCommitments { + sorted: x.sorted.iter().map(Into::into).collect(), + aggreg: x.aggreg.clone().into(), + runtime: x.runtime.clone().map(Into::into) + } + } + } + + impl From> for WasmLookupCommitments { + fn from(x: LookupCommitments<$G>) -> Self { + WasmLookupCommitments { + sorted: x.sorted.into_iter().map(Into::into).collect(), + aggreg: x.aggreg.into(), + runtime: x.runtime.map(Into::into) + } + } + } + + impl From<&WasmLookupCommitments> for LookupCommitments<$G> { + fn from(x: &WasmLookupCommitments) -> Self { + LookupCommitments { + sorted: x.sorted.iter().map(Into::into).collect(), + aggreg: x.aggreg.clone().into(), + runtime: x.runtime.clone().map(Into::into) + } + } + } + + impl From for LookupCommitments<$G> { + fn from(x: WasmLookupCommitments) -> Self { + LookupCommitments { + sorted: x.sorted.into_iter().map(Into::into).collect(), + aggreg: x.aggreg.into(), + runtime: x.runtime.map(Into::into) + } + } + } + + #[wasm_bindgen] + #[derive(Clone)] + pub struct [] + { + #[wasm_bindgen(skip)] + pub w_comm: WasmVector<$WasmPolyComm>, + #[wasm_bindgen(skip)] + pub z_comm: $WasmPolyComm, + #[wasm_bindgen(skip)] + pub t_comm: $WasmPolyComm, + #[wasm_bindgen(skip)] + pub lookup: Option, + } + type WasmProverCommitments = []; + + #[wasm_bindgen] + impl [] { + #[wasm_bindgen(constructor)] + pub fn new( + w_comm: WasmVector<$WasmPolyComm>, + z_comm: $WasmPolyComm, + t_comm: $WasmPolyComm, + lookup: Option + ) -> Self { + WasmProverCommitments { w_comm, z_comm, t_comm, lookup } + } + + #[wasm_bindgen(getter)] + pub fn w_comm(&self) -> WasmVector<$WasmPolyComm> { + self.w_comm.clone() + } + #[wasm_bindgen(getter)] + pub fn z_comm(&self) -> $WasmPolyComm { + self.z_comm.clone() + } + #[wasm_bindgen(getter)] + pub fn t_comm(&self) -> $WasmPolyComm { + self.t_comm.clone() + } + + #[wasm_bindgen(getter)] + pub fn lookup(&self) -> Option { + self.lookup.clone() + } + + #[wasm_bindgen(setter)] + pub fn set_w_comm(&mut self, x: WasmVector<$WasmPolyComm>) { + self.w_comm = x + } + #[wasm_bindgen(setter)] + pub fn set_z_comm(&mut self, x: $WasmPolyComm) { + self.z_comm = x + } + #[wasm_bindgen(setter)] + pub fn set_t_comm(&mut self, x: $WasmPolyComm) { + self.t_comm = x + } + + #[wasm_bindgen(setter)] + pub fn set_lookup(&mut self, l: Option) { + self.lookup = l + } + } + + impl From<&ProverCommitments<$G>> for WasmProverCommitments { + fn from(x: &ProverCommitments<$G>) -> Self { + WasmProverCommitments { + w_comm: x.w_comm.iter().map(Into::into).collect(), + z_comm: x.z_comm.clone().into(), + t_comm: x.t_comm.clone().into(), + lookup: x.lookup.clone().map(Into::into) + } + } + } + + impl From> for WasmProverCommitments { + fn from(x: ProverCommitments<$G>) -> Self { + WasmProverCommitments { + w_comm: x.w_comm.iter().map(Into::into).collect(), + z_comm: x.z_comm.into(), + t_comm: x.t_comm.into(), + lookup: x.lookup.map(Into::into), + } + } + } + + impl From<&WasmProverCommitments> for ProverCommitments<$G> { + fn from(x: &WasmProverCommitments) -> Self { + ProverCommitments { + w_comm: array_init(|i| x.w_comm[i].clone().into()), + z_comm: x.z_comm.clone().into(), + t_comm: 
x.t_comm.clone().into(), + lookup: x.lookup.clone().map(Into::into), + } + } + } + + impl From for ProverCommitments<$G> { + fn from(x: WasmProverCommitments) -> Self { + ProverCommitments { + w_comm: array_init(|i| (&x.w_comm[i]).into()), + z_comm: x.z_comm.into(), + t_comm: x.t_comm.into(), + lookup: x.lookup.map(Into::into), + } + } + } + + #[wasm_bindgen] + #[derive(Clone, Debug)] + pub struct [] { + #[wasm_bindgen(skip)] + pub lr_0: WasmVector<$WasmG>, // vector of rounds of L commitments + #[wasm_bindgen(skip)] + pub lr_1: WasmVector<$WasmG>, // vector of rounds of R commitments + #[wasm_bindgen(skip)] + pub delta: $WasmG, + pub z1: $WasmF, + pub z2: $WasmF, + #[wasm_bindgen(skip)] + pub sg: $WasmG, + } + type WasmOpeningProof = []; + + #[wasm_bindgen] + impl [] { + #[wasm_bindgen(constructor)] + pub fn new( + lr_0: WasmVector<$WasmG>, + lr_1: WasmVector<$WasmG>, + delta: $WasmG, + z1: $WasmF, + z2: $WasmF, + sg: $WasmG) -> Self { + WasmOpeningProof { lr_0, lr_1, delta, z1, z2, sg } + } + + #[wasm_bindgen(getter)] + pub fn lr_0(&self) -> WasmVector<$WasmG> { + self.lr_0.clone() + } + #[wasm_bindgen(getter)] + pub fn lr_1(&self) -> WasmVector<$WasmG> { + self.lr_1.clone() + } + #[wasm_bindgen(getter)] + pub fn delta(&self) -> $WasmG { + self.delta.clone() + } + #[wasm_bindgen(getter)] + pub fn sg(&self) -> $WasmG { + self.sg.clone() + } + + #[wasm_bindgen(setter)] + pub fn set_lr_0(&mut self, lr_0: WasmVector<$WasmG>) { + self.lr_0 = lr_0 + } + #[wasm_bindgen(setter)] + pub fn set_lr_1(&mut self, lr_1: WasmVector<$WasmG>) { + self.lr_1 = lr_1 + } + #[wasm_bindgen(setter)] + pub fn set_delta(&mut self, delta: $WasmG) { + self.delta = delta + } + #[wasm_bindgen(setter)] + pub fn set_sg(&mut self, sg: $WasmG) { + self.sg = sg + } + } + + impl From<&WasmOpeningProof> for OpeningProof<$G> { + fn from(x: &WasmOpeningProof) -> Self { + OpeningProof { + lr: x.lr_0.clone().into_iter().zip(x.lr_1.clone().into_iter()).map(|(x, y)| (x.into(), y.into())).collect(), + delta: x.delta.clone().into(), + z1: x.z1.into(), + z2: x.z2.into(), + sg: x.sg.clone().into(), + } + } + } + + impl From for OpeningProof<$G> { + fn from(x: WasmOpeningProof) -> Self { + let WasmOpeningProof {lr_0, lr_1, delta, z1, z2, sg} = x; + OpeningProof { + lr: lr_0.into_iter().zip(lr_1.into_iter()).map(|(x, y)| (x.into(), y.into())).collect(), + delta: delta.into(), + z1: z1.into(), + z2: z2.into(), + sg: sg.into(), + } + } + } + + impl From<&OpeningProof<$G>> for WasmOpeningProof { + fn from(x: &OpeningProof<$G>) -> Self { + let (lr_0, lr_1) = x.lr.clone().into_iter().map(|(x, y)| (x.into(), y.into())).unzip(); + WasmOpeningProof { + lr_0, + lr_1, + delta: x.delta.clone().into(), + z1: x.z1.into(), + z2: x.z2.into(), + sg: x.sg.clone().into(), + } + } + } + + impl From> for WasmOpeningProof { + fn from(x: OpeningProof<$G>) -> Self { + let (lr_0, lr_1) = x.lr.clone().into_iter().map(|(x, y)| (x.into(), y.into())).unzip(); + WasmOpeningProof { + lr_0, + lr_1, + delta: x.delta.clone().into(), + z1: x.z1.into(), + z2: x.z2.into(), + sg: x.sg.clone().into(), + } + } + } + + #[wasm_bindgen] + pub struct [] { + #[wasm_bindgen(skip)] + pub commitments: WasmProverCommitments, + #[wasm_bindgen(skip)] + pub proof: WasmOpeningProof, + // OCaml doesn't have sized arrays, so we have to convert to a tuple.. 
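+            // The recursion challenges are likewise split into two parallel fields
+            // further down (prev_challenges_scalars / prev_challenges_comms),
+            // presumably because a Vec of (scalars, commitment) pairs cannot be
+            // handed across the wasm boundary as-is.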
+ #[wasm_bindgen(skip)] + pub evals: WasmProofEvaluations, + pub ft_eval1: $WasmF, + #[wasm_bindgen(skip)] + pub public: WasmFlatVector<$WasmF>, + #[wasm_bindgen(skip)] + pub prev_challenges_scalars: Vec>, + #[wasm_bindgen(skip)] + pub prev_challenges_comms: WasmVector<$WasmPolyComm>, + } + type WasmProverProof = []; + + impl From<(&ProverProof<$G, OpeningProof<$G>>, &Vec<$F>)> for WasmProverProof { + fn from((x, public): (&ProverProof<$G, OpeningProof<$G>>, &Vec<$F>)) -> Self { + let (scalars, comms) = + x.prev_challenges + .iter() + .map(|RecursionChallenge { chals, comm }| { + (chals.clone().into(), comm.into()) + }) + .unzip(); + WasmProverProof { + commitments: x.commitments.clone().into(), + proof: x.proof.clone().into(), + evals: x.evals.clone().into(), + ft_eval1: x.ft_eval1.clone().into(), + public: public.clone().into_iter().map(Into::into).collect(), + prev_challenges_scalars: scalars, + prev_challenges_comms: comms, + } + } + } + + impl From<(ProverProof<$G, OpeningProof<$G>>, Vec<$F>)> for WasmProverProof { + fn from((x, public): (ProverProof<$G, OpeningProof<$G>>, Vec<$F>)) -> Self { + let ProverProof {ft_eval1, commitments, proof, evals , prev_challenges} = x; + let (scalars, comms) = + prev_challenges + .into_iter() + .map(|RecursionChallenge { chals, comm }| (chals.into(), comm.into())) + .unzip(); + WasmProverProof { + commitments: commitments.into(), + proof: proof.into(), + evals: evals.into(), + ft_eval1: ft_eval1.clone().into(), + public: public.into_iter().map(Into::into).collect(), + prev_challenges_scalars: scalars, + prev_challenges_comms: comms, + } + } + } + + impl From<&WasmProverProof> for (ProverProof<$G, OpeningProof<$G>>, Vec<$F>) { + fn from(x: &WasmProverProof) -> Self { + let proof = ProverProof { + commitments: x.commitments.clone().into(), + proof: x.proof.clone().into(), + evals: x.evals.clone().into(), + prev_challenges: + (&x.prev_challenges_scalars) + .into_iter() + .zip((&x.prev_challenges_comms).into_iter()) + .map(|(chals, comm)| { + RecursionChallenge { + chals: chals.clone(), + comm: comm.into(), + } + }) + .collect(), + ft_eval1: x.ft_eval1.clone().into() + }; + let public = x.public.clone().into_iter().map(Into::into).collect(); + (proof, public) + } + } + + impl From for (ProverProof<$G, OpeningProof<$G>>, Vec<$F>) { + fn from(x: WasmProverProof) -> Self { + let proof =ProverProof { + commitments: x.commitments.into(), + proof: x.proof.into(), + evals: x.evals.into(), + prev_challenges: + (x.prev_challenges_scalars) + .into_iter() + .zip((x.prev_challenges_comms).into_iter()) + .map(|(chals, comm)| { + RecursionChallenge { + chals: chals.into(), + comm: comm.into(), + } + }) + .collect(), + ft_eval1: x.ft_eval1.into() + }; + let public = x.public.into_iter().map(Into::into).collect(); + (proof, public) + } + } + + #[wasm_bindgen] + impl [] { + #[wasm_bindgen(constructor)] + pub fn new( + commitments: WasmProverCommitments, + proof: WasmOpeningProof, + evals: WasmProofEvaluations, + ft_eval1: $WasmF, + public_: WasmFlatVector<$WasmF>, + prev_challenges_scalars: WasmVecVecF, + prev_challenges_comms: WasmVector<$WasmPolyComm>) -> Self { + WasmProverProof { + commitments, + proof, + evals, + ft_eval1, + public: public_, + prev_challenges_scalars: prev_challenges_scalars.0, + prev_challenges_comms, + } + } + + #[wasm_bindgen(getter)] + pub fn commitments(&self) -> WasmProverCommitments { + self.commitments.clone() + } + #[wasm_bindgen(getter)] + pub fn proof(&self) -> WasmOpeningProof { + self.proof.clone() + } + #[wasm_bindgen(getter)] + pub fn 
evals(&self) -> WasmProofEvaluations { + self.evals.clone() + } + #[wasm_bindgen(getter)] + pub fn public_(&self) -> WasmFlatVector<$WasmF> { + self.public.clone() + } + #[wasm_bindgen(getter)] + pub fn prev_challenges_scalars(&self) -> WasmVecVecF { + [](self.prev_challenges_scalars.clone()) + } + #[wasm_bindgen(getter)] + pub fn prev_challenges_comms(&self) -> WasmVector<$WasmPolyComm> { + self.prev_challenges_comms.clone() + } + + #[wasm_bindgen(setter)] + pub fn set_commitments(&mut self, commitments: WasmProverCommitments) { + self.commitments = commitments + } + #[wasm_bindgen(setter)] + pub fn set_proof(&mut self, proof: WasmOpeningProof) { + self.proof = proof + } + #[wasm_bindgen(setter)] + pub fn set_evals(&mut self, evals: WasmProofEvaluations) { + self.evals = evals + } + #[wasm_bindgen(setter)] + pub fn set_public_(&mut self, public_: WasmFlatVector<$WasmF>) { + self.public = public_ + } + #[wasm_bindgen(setter)] + pub fn set_prev_challenges_scalars(&mut self, prev_challenges_scalars: WasmVecVecF) { + self.prev_challenges_scalars = prev_challenges_scalars.0 + } + #[wasm_bindgen(setter)] + pub fn set_prev_challenges_comms(&mut self, prev_challenges_comms: WasmVector<$WasmPolyComm>) { + self.prev_challenges_comms = prev_challenges_comms + } + + #[wasm_bindgen] + pub fn serialize(&self) -> String { + let (proof, _public_input) = self.into(); + let serialized = rmp_serde::to_vec(&proof).unwrap(); + base64::encode(serialized) + } + } + + #[wasm_bindgen] + pub struct [] { + id: i32, + data: WasmFlatVector<$WasmF> + } + type WasmRuntimeTable = []; + + #[wasm_bindgen] + impl [] { + #[wasm_bindgen(constructor)] + pub fn new(id: i32, data: WasmFlatVector<$WasmF>) -> WasmRuntimeTable { + WasmRuntimeTable {id, data} + } + } + + impl From<[]> for RuntimeTable<$F> { + fn from(wasm_rt: WasmRuntimeTable) -> RuntimeTable<$F> { + RuntimeTable { + id: wasm_rt.id.into(), + data: wasm_rt.data.into_iter().map(Into::into).collect() + } + } + } + + #[wasm_bindgen] + pub fn [<$name:snake _create>]( + index: &$WasmIndex, + witness: WasmVecVecF, + wasm_runtime_tables: WasmVector, + prev_challenges: WasmFlatVector<$WasmF>, + prev_sgs: WasmVector<$WasmG>, + ) -> Result { + console_error_panic_hook::set_once(); + let (maybe_proof, public_input) = crate::rayon::run_in_pool(|| { + { + let ptr: &mut poly_commitment::srs::SRS<$G> = + unsafe { &mut *(std::sync::Arc::as_ptr(&index.0.as_ref().srs) as *mut _) }; + ptr.add_lagrange_basis(index.0.as_ref().cs.domain.d1); + } + let prev: Vec> = { + if prev_challenges.is_empty() { + Vec::new() + } else { + let challenges_per_sg = prev_challenges.len() / prev_sgs.len(); + prev_sgs + .into_iter() + .map(Into::<$G>::into) + .enumerate() + .map(|(i, sg)| { + let chals = + prev_challenges[(i * challenges_per_sg)..(i + 1) * challenges_per_sg] + .iter() + .map(|a| a.clone().into()) + .collect(); + let comm = PolyComm::<$G> { + unshifted: vec![sg], + shifted: None, + }; + RecursionChallenge { chals, comm } + }) + .collect() + } + }; + + let rust_runtime_tables: Vec> = wasm_runtime_tables.into_iter().map(Into::into).collect(); + + let witness: [Vec<_>; COLUMNS] = witness.0 + .try_into() + .expect("the witness should be a column of 15 vectors"); + + let index: &ProverIndex<$G, OpeningProof<$G>> = &index.0.as_ref(); + + let public_input = witness[0][0..index.cs.public].to_vec(); + + // Release the runtime lock so that other threads can run using it while we generate the proof. 
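+                    // With the witness, runtime tables and recursion challenges in
+                    // hand, set up the group map and run the prover itself;
+                    // create_recursive is instantiated with the same Fq-/Fr-sponge
+                    // types that the verifier entry points below use.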
+ let group_map = GroupMap::<_>::setup(); + let maybe_proof = ProverProof::create_recursive::< + DefaultFqSponge<_, PlonkSpongeConstantsKimchi>, + DefaultFrSponge<_, PlonkSpongeConstantsKimchi>, + >(&group_map, witness, &rust_runtime_tables, index, prev, None); + (maybe_proof, public_input) + }); + + return match maybe_proof { + Ok(proof) => Ok((proof, public_input).into()), + Err(err) => Err(JsError::from(err)) + } + } + + #[wasm_bindgen] + pub fn [<$name:snake _verify>]( + index: $WasmVerifierIndex, + proof: WasmProverProof, + ) -> bool { + crate::rayon::run_in_pool(|| { + let group_map = <$G as CommitmentCurve>::Map::setup(); + let verifier_index = &index.into(); + let (proof, public_input) = &proof.into(); + batch_verify::< + $G, + DefaultFqSponge<_, PlonkSpongeConstantsKimchi>, + DefaultFrSponge<_, PlonkSpongeConstantsKimchi>, + OpeningProof<$G> + >( + &group_map, + &[Context { verifier_index, proof, public_input }] + ).is_ok() + }) + } + + #[wasm_bindgen] + pub struct [](Vec>>); + + #[wasm_bindgen] + impl [] { + #[wasm_bindgen(constructor)] + pub fn create(n: i32) -> Self { + [](Vec::with_capacity(n as usize)) + } + + #[wasm_bindgen] + pub fn push(&mut self, x: WasmVector<$WasmPolyComm>) { + self.0.push(x.into_iter().map(Into::into).collect()) + } + } + + #[wasm_bindgen] + pub fn [<$name:snake _batch_verify>]( + indexes: WasmVector<$WasmVerifierIndex>, + proofs: WasmVector, + ) -> bool { + crate::rayon::run_in_pool(|| { + let ts: Vec<_> = indexes + .into_iter() + .zip(proofs.into_iter()) + .map(|(index, proof)| (index.into(), proof.into())) + .collect(); + let ts: Vec<_> = ts.iter().map(|(verifier_index, (proof, public_input))| Context { verifier_index, proof, public_input}).collect(); + let group_map = GroupMap::<_>::setup(); + + batch_verify::< + $G, + DefaultFqSponge<_, PlonkSpongeConstantsKimchi>, + DefaultFrSponge<_, PlonkSpongeConstantsKimchi>, + OpeningProof<$G> + >(&group_map, &ts) + .is_ok() + }) + } + + #[wasm_bindgen] + pub fn [<$name:snake _dummy>]() -> WasmProverProof { + fn comm() -> PolyComm<$G> { + let g = $G::prime_subgroup_generator(); + PolyComm { + shifted: Some(g), + unshifted: vec![g, g, g], + } + } + + let prev = RecursionChallenge { + chals: vec![$F::one(), $F::one()], + comm: comm(), + }; + let prev_challenges = vec![prev.clone(), prev.clone(), prev.clone()]; + + let g = $G::prime_subgroup_generator(); + let proof = OpeningProof { + lr: vec![(g, g), (g, g), (g, g)], + z1: $F::one(), + z2: $F::one(), + delta: g, + sg: g, + }; + let eval = || PointEvaluations { + zeta: vec![$F::one()], + zeta_omega: vec![$F::one()], + }; + let evals = ProofEvaluations { + w: array_init(|_| eval()), + coefficients: array_init(|_| eval()), + z: eval(), + s: array_init(|_| eval()), + generic_selector: eval(), + poseidon_selector: eval(), + complete_add_selector: eval(), + mul_selector: eval(), + emul_selector: eval(), + endomul_scalar_selector: eval(), + range_check0_selector: None, + range_check1_selector: None, + foreign_field_add_selector: None, + foreign_field_mul_selector: None, + xor_selector: None, + rot_selector: None, + lookup_aggregation: None, + lookup_table: None, + lookup_sorted: array::from_fn(|_| None), + runtime_lookup_table: None, + runtime_lookup_table_selector: None, + xor_lookup_selector: None, + lookup_gate_lookup_selector: None, + range_check_lookup_selector: None, + foreign_field_mul_lookup_selector: None, + public: None, + }; + + let dlogproof = ProverProof { + commitments: ProverCommitments { + w_comm: array_init(|_| comm()), + z_comm: comm(), + t_comm: 
comm(), + lookup: None, + }, + proof, + evals, + ft_eval1: $F::one(), + prev_challenges, + }; + + let public = vec![$F::one(), $F::one()]; + (dlogproof, public).into() + } + + #[wasm_bindgen] + pub fn [<$name:snake _deep_copy>]( + x: WasmProverProof + ) -> WasmProverProof { + x + } + } + } +} + +pub mod fp { + use super::*; + use crate::arkworks::{WasmGVesta, WasmPastaFp}; + use crate::pasta_fp_plonk_index::WasmPastaFpPlonkIndex; + use crate::plonk_verifier_index::fp::WasmFpPlonkVerifierIndex as WasmPlonkVerifierIndex; + use crate::poly_comm::vesta::WasmFpPolyComm as WasmPolyComm; + use mina_curves::pasta::{Fp, Vesta as GAffine}; + + impl_proof!( + caml_pasta_fp_plonk_proof, + WasmGVesta, + GAffine, + WasmPastaFp, + Fp, + WasmPolyComm, + WasmSrs, + GAffineOther, + mina_poseidon::pasta::fp_kimchi, + mina_poseidon::pasta::fq_kimchi, + WasmPastaFpPlonkIndex, + WasmPlonkVerifierIndex, + Fp + ); +} + +pub mod fq { + use super::*; + use crate::arkworks::{WasmGPallas, WasmPastaFq}; + use crate::pasta_fq_plonk_index::WasmPastaFqPlonkIndex; + use crate::plonk_verifier_index::fq::WasmFqPlonkVerifierIndex as WasmPlonkVerifierIndex; + use crate::poly_comm::pallas::WasmFqPolyComm as WasmPolyComm; + use mina_curves::pasta::{Fq, Pallas as GAffine}; + + impl_proof!( + caml_pasta_fq_plonk_proof, + WasmGPallas, + GAffine, + WasmPastaFq, + Fq, + WasmPolyComm, + WasmSrs, + GAffineOther, + mina_poseidon::pasta::fq_kimchi, + mina_poseidon::pasta::fp_kimchi, + WasmPastaFqPlonkIndex, + WasmPlonkVerifierIndex, + Fq + ); +} diff --git a/src/lib/crypto/kimchi_bindings/wasm/src/plonk_verifier_index.rs b/src/lib/crypto/kimchi_bindings/wasm/src/plonk_verifier_index.rs new file mode 100644 index 00000000000..7bdf8d2a401 --- /dev/null +++ b/src/lib/crypto/kimchi_bindings/wasm/src/plonk_verifier_index.rs @@ -0,0 +1,1059 @@ +use crate::wasm_vector::WasmVector; +use ark_ec::AffineCurve; +use ark_ff::One; +use ark_poly::{EvaluationDomain, Radix2EvaluationDomain as Domain}; +use array_init::array_init; +use kimchi::circuits::{ + constraints::FeatureFlags, + lookup::index::LookupSelectors, + lookup::lookups::{LookupFeatures, LookupInfo, LookupPatterns}, + polynomials::permutation::{permutation_vanishing_polynomial, zk_w, Shifts}, + wires::{COLUMNS, PERMUTS}, +}; +use kimchi::linearization::expr_linearization; +use kimchi::poly_commitment::evaluation_proof::OpeningProof; +use kimchi::verifier_index::{LookupVerifierIndex, VerifierIndex as DlogVerifierIndex}; +use paste::paste; +use poly_commitment::commitment::PolyComm; +use poly_commitment::srs::SRS; +use std::path::Path; +use std::sync::Arc; +use wasm_bindgen::prelude::*; + +macro_rules! impl_verification_key { + ( + $name: ident, + $WasmG: ty, + $G: ty, + $WasmF: ty, + $F: ty, + $WasmPolyComm: ty, + $WasmSrs: ty, + $GOther: ty, + $FrSpongeParams: path, + $FqSpongeParams: path, + $WasmIndex: ty, + $field_name: ident + ) => { + paste! 
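+        // paste! splices $field_name into identifiers (roughly
+        // [<Wasm $field_name:camel Domain>] and friends), so a single macro body
+        // mints both the Fp and Fq flavours of every wrapper type below.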
{ + #[wasm_bindgen] + #[derive(Clone, Copy)] + pub struct [] { + pub log_size_of_group: i32, + pub group_gen: $WasmF, + } + type WasmDomain = []; + + #[wasm_bindgen] + impl []{ + #[wasm_bindgen(constructor)] + pub fn new(log_size_of_group: i32, group_gen: $WasmF) -> Self { + WasmDomain {log_size_of_group, group_gen} + } + } + + #[wasm_bindgen] + #[derive(Clone)] + pub struct [] { + #[wasm_bindgen(skip)] + pub sigma_comm: WasmVector<$WasmPolyComm>, + #[wasm_bindgen(skip)] + pub coefficients_comm: WasmVector<$WasmPolyComm>, + #[wasm_bindgen(skip)] + pub generic_comm: $WasmPolyComm, + #[wasm_bindgen(skip)] + pub psm_comm: $WasmPolyComm, + #[wasm_bindgen(skip)] + pub complete_add_comm: $WasmPolyComm, + #[wasm_bindgen(skip)] + pub mul_comm: $WasmPolyComm, + #[wasm_bindgen(skip)] + pub emul_comm: $WasmPolyComm, + #[wasm_bindgen(skip)] + pub endomul_scalar_comm: $WasmPolyComm, + #[wasm_bindgen(skip)] + pub xor_comm: Option<$WasmPolyComm>, + #[wasm_bindgen(skip)] + pub range_check0_comm: Option<$WasmPolyComm>, + #[wasm_bindgen(skip)] + pub range_check1_comm: Option<$WasmPolyComm>, + #[wasm_bindgen(skip)] + pub foreign_field_add_comm: Option<$WasmPolyComm>, + #[wasm_bindgen(skip)] + pub foreign_field_mul_comm: Option<$WasmPolyComm>, + #[wasm_bindgen(skip)] + pub rot_comm: Option<$WasmPolyComm> + } + + type WasmPlonkVerificationEvals = []; + + + #[wasm_bindgen] + impl [] { + #[allow(clippy::too_many_arguments)] + #[wasm_bindgen(constructor)] + pub fn new( + sigma_comm: WasmVector<$WasmPolyComm>, + coefficients_comm: WasmVector<$WasmPolyComm>, + generic_comm: &$WasmPolyComm, + psm_comm: &$WasmPolyComm, + complete_add_comm: &$WasmPolyComm, + mul_comm: &$WasmPolyComm, + emul_comm: &$WasmPolyComm, + endomul_scalar_comm: &$WasmPolyComm, + xor_comm: Option<$WasmPolyComm>, + range_check0_comm: Option<$WasmPolyComm>, + range_check1_comm: Option<$WasmPolyComm>, + foreign_field_add_comm: Option<$WasmPolyComm>, + foreign_field_mul_comm: Option<$WasmPolyComm>, + rot_comm: Option<$WasmPolyComm>, + ) -> Self { + WasmPlonkVerificationEvals { + sigma_comm: sigma_comm.clone(), + coefficients_comm: coefficients_comm.clone(), + generic_comm: generic_comm.clone(), + psm_comm: psm_comm.clone(), + complete_add_comm: complete_add_comm.clone(), + mul_comm: mul_comm.clone(), + emul_comm: emul_comm.clone(), + endomul_scalar_comm: endomul_scalar_comm.clone(), + xor_comm: xor_comm.clone(), + range_check0_comm: range_check0_comm.clone(), + range_check1_comm: range_check1_comm.clone(), + foreign_field_mul_comm: foreign_field_mul_comm.clone(), + foreign_field_add_comm: foreign_field_add_comm.clone(), + rot_comm: rot_comm.clone(), + } + } + + #[wasm_bindgen(getter)] + pub fn sigma_comm(&self) -> WasmVector<$WasmPolyComm> { + self.sigma_comm.clone() + } + + #[wasm_bindgen(setter)] + pub fn set_sigma_comm(&mut self, x: WasmVector<$WasmPolyComm>) { + self.sigma_comm = x; + } + + #[wasm_bindgen(getter)] + pub fn coefficients_comm(&self) -> WasmVector<$WasmPolyComm> { + self.coefficients_comm.clone() + } + + #[wasm_bindgen(setter)] + pub fn set_coefficients_comm(&mut self, x: WasmVector<$WasmPolyComm>) { + self.coefficients_comm = x; + } + + #[wasm_bindgen(getter)] + pub fn generic_comm(&self) -> $WasmPolyComm { + self.generic_comm.clone() + } + + #[wasm_bindgen(setter)] + pub fn set_generic_comm(&mut self, x: $WasmPolyComm) { + self.generic_comm = x; + } + + #[wasm_bindgen(getter)] + pub fn psm_comm(&self) -> $WasmPolyComm { + self.psm_comm.clone() + } + + #[wasm_bindgen(setter)] + pub fn set_psm_comm(&mut self, x: $WasmPolyComm) { + 
self.psm_comm = x; + } + + #[wasm_bindgen(getter)] + pub fn complete_add_comm(&self) -> $WasmPolyComm { + self.complete_add_comm.clone() + } + + #[wasm_bindgen(setter)] + pub fn set_complete_add_comm(&mut self, x: $WasmPolyComm) { + self.complete_add_comm = x; + } + + #[wasm_bindgen(getter)] + pub fn mul_comm(&self) -> $WasmPolyComm { + self.mul_comm.clone() + } + + #[wasm_bindgen(setter)] + pub fn set_mul_comm(&mut self, x: $WasmPolyComm) { + self.mul_comm = x; + } + + #[wasm_bindgen(getter)] + pub fn emul_comm(&self) -> $WasmPolyComm { + self.emul_comm.clone() + } + + #[wasm_bindgen(setter)] + pub fn set_emul_comm(&mut self, x: $WasmPolyComm) { + self.emul_comm = x; + } + + #[wasm_bindgen(getter)] + pub fn endomul_scalar_comm(&self) -> $WasmPolyComm { + self.endomul_scalar_comm.clone() + } + + #[wasm_bindgen(setter)] + pub fn set_endomul_scalar_comm(&mut self, x: $WasmPolyComm) { + self.endomul_scalar_comm = x; + } + + #[wasm_bindgen(getter)] + pub fn xor_comm(&self) -> Option<$WasmPolyComm> { + self.xor_comm.clone() + } + + #[wasm_bindgen(setter)] + pub fn set_xor_comm(&mut self, x: Option<$WasmPolyComm>) { + self.xor_comm = x; + } + + #[wasm_bindgen(getter)] + pub fn rot_comm(&self) -> Option<$WasmPolyComm> { + self.rot_comm.clone() + } + + #[wasm_bindgen(setter)] + pub fn set_rot_comm(&mut self, x: Option<$WasmPolyComm>) { + self.rot_comm = x; + } + + #[wasm_bindgen(getter)] + pub fn range_check0_comm(&self) -> Option<$WasmPolyComm> { + self.range_check0_comm.clone() + } + + #[wasm_bindgen(setter)] + pub fn set_range_check0_comm(&mut self, x: Option<$WasmPolyComm>) { + self.range_check0_comm = x; + } + + #[wasm_bindgen(getter)] + pub fn range_check1_comm(&self) -> Option<$WasmPolyComm> { + self.range_check1_comm.clone() + } + + #[wasm_bindgen(setter)] + pub fn set_range_check1_comm(&mut self, x: Option<$WasmPolyComm>) { + self.range_check1_comm = x; + } + + #[wasm_bindgen(getter)] + pub fn foreign_field_add_comm(&self) -> Option<$WasmPolyComm> { + self.foreign_field_add_comm.clone() + } + + #[wasm_bindgen(setter)] + pub fn set_foreign_field_add_comm(&mut self, x: Option<$WasmPolyComm>) { + self.foreign_field_add_comm = x; + } + + #[wasm_bindgen(getter)] + pub fn foreign_field_mul_comm(&self) -> Option<$WasmPolyComm> { + self.foreign_field_mul_comm.clone() + } + + #[wasm_bindgen(setter)] + pub fn set_foreign_field_mul_comm(&mut self, x: Option<$WasmPolyComm>) { + self.foreign_field_mul_comm = x; + } + + } + + #[derive(Clone, Copy)] + #[wasm_bindgen] + pub struct [] { + pub s0: $WasmF, + pub s1: $WasmF, + pub s2: $WasmF, + pub s3: $WasmF, + pub s4: $WasmF, + pub s5: $WasmF, + pub s6: $WasmF, + } + type WasmShifts = []; + + #[wasm_bindgen] + impl [] { + #[wasm_bindgen(constructor)] + pub fn new( + s0: $WasmF, + s1: $WasmF, + s2: $WasmF, + s3: $WasmF, + s4: $WasmF, + s5: $WasmF, + s6: $WasmF + ) -> Self { + Self { s0, s1, s2, s3, s4, s5, s6} + } + } + + #[wasm_bindgen] + #[derive(Clone)] + pub struct [] { + #[wasm_bindgen(skip)] + pub xor: Option<$WasmPolyComm>, + #[wasm_bindgen(skip)] + pub lookup : Option<$WasmPolyComm>, + #[wasm_bindgen(skip)] + pub range_check: Option<$WasmPolyComm>, + #[wasm_bindgen(skip)] + pub ffmul: Option<$WasmPolyComm>, + } + + type WasmLookupSelectors = []; + + impl From for LookupSelectors> { + fn from(x: WasmLookupSelectors) -> Self { + Self { + xor: x.xor.map(Into::into), + lookup: x.lookup.map(Into::into), + range_check: x.range_check.map(Into::into), + ffmul: x.ffmul.map(Into::into), + } + } + } + + impl From<&WasmLookupSelectors> for LookupSelectors> { + 
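+                // All four optional selector commitments (xor, lookup, range_check,
+                // ffmul) are converted field by field; the by-reference impls clone,
+                // the by-value impls move.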
fn from(x: &WasmLookupSelectors) -> Self { + Self { + xor: x.xor.clone().map(Into::into), + lookup: x.lookup.clone().map(Into::into), + range_check: x.range_check.clone().map(Into::into), + ffmul: x.ffmul.clone().map(Into::into), + } + } + } + + impl From<&LookupSelectors>> for WasmLookupSelectors { + fn from(x: &LookupSelectors>) -> Self { + Self { + xor: x.xor.clone().map(Into::into), + lookup: x.lookup.clone().map(Into::into), + range_check: x.range_check.clone().map(Into::into), + ffmul: x.ffmul.clone().map(Into::into), + } + } + } + + impl From>> for WasmLookupSelectors { + fn from(x: LookupSelectors>) -> Self { + Self { + xor: x.xor.clone().map(Into::into), + lookup: x.lookup.clone().map(Into::into), + range_check: x.range_check.clone().map(Into::into), + ffmul: x.ffmul.clone().map(Into::into), + } + } + } + + #[wasm_bindgen] + impl [] { + #[wasm_bindgen(constructor)] + pub fn new( + xor: Option<$WasmPolyComm>, + lookup: Option<$WasmPolyComm>, + range_check: Option<$WasmPolyComm>, + ffmul: Option<$WasmPolyComm> + ) -> Self { + Self { + xor, + lookup, + range_check, + ffmul + } + } + + #[wasm_bindgen(getter)] + pub fn xor(&self) -> Option<$WasmPolyComm> { + self.xor.clone() + } + + #[wasm_bindgen(setter)] + pub fn set_xor(&mut self, x: Option<$WasmPolyComm>) { + self.xor = x + } + + #[wasm_bindgen(getter)] + pub fn lookup(&self) -> Option<$WasmPolyComm> { + self.lookup.clone() + } + + #[wasm_bindgen(setter)] + pub fn set_lookup(&mut self, x: Option<$WasmPolyComm>) { + self.lookup = x + } + + #[wasm_bindgen(getter)] + pub fn ffmul(&self) -> Option<$WasmPolyComm> { + self.ffmul.clone() + } + + #[wasm_bindgen(setter)] + pub fn set_ffmul(&mut self, x: Option<$WasmPolyComm>) { + self.ffmul = x + } + + #[wasm_bindgen(getter)] + pub fn range_check(&self) -> Option<$WasmPolyComm> { + self.range_check.clone() + } + + #[wasm_bindgen(setter)] + pub fn set_range_check(&mut self, x: Option<$WasmPolyComm>) { + self.range_check = x + } + } + + #[wasm_bindgen] + #[derive(Clone)] + pub struct [] { + pub joint_lookup_used: bool, + + #[wasm_bindgen(skip)] + pub lookup_table: WasmVector<$WasmPolyComm>, + + #[wasm_bindgen(skip)] + pub lookup_selectors: WasmLookupSelectors, + + #[wasm_bindgen(skip)] + pub table_ids: Option<$WasmPolyComm>, + + #[wasm_bindgen(skip)] + pub lookup_info: LookupInfo, + + #[wasm_bindgen(skip)] + pub runtime_tables_selector: Option<$WasmPolyComm>, + } + + type WasmLookupVerifierIndex = []; + + impl From<&LookupVerifierIndex<$G>> for WasmLookupVerifierIndex { + fn from(x: &LookupVerifierIndex<$G>) -> Self { + Self { + joint_lookup_used: x.joint_lookup_used.into(), + lookup_table: x.lookup_table.clone().iter().map(Into::into).collect(), + lookup_selectors: x.lookup_selectors.clone().into(), + table_ids: x.table_ids.clone().map(Into::into), + lookup_info: x.lookup_info.clone(), + runtime_tables_selector: x.runtime_tables_selector.clone().map(Into::into) + } + } + } + + impl From> for WasmLookupVerifierIndex { + fn from(x: LookupVerifierIndex<$G>) -> Self { + Self { + joint_lookup_used: x.joint_lookup_used.into(), + lookup_table: x.lookup_table.iter().map(Into::into).collect(), + lookup_selectors: x.lookup_selectors.into(), + table_ids: x.table_ids.map(Into::into), + lookup_info: x.lookup_info, + runtime_tables_selector: x.runtime_tables_selector.map(Into::into) + } + } + } + + + impl From<&WasmLookupVerifierIndex> for LookupVerifierIndex<$G> { + fn from(x: &WasmLookupVerifierIndex) -> Self { + Self { + joint_lookup_used: x.joint_lookup_used.into(), + lookup_table: 
x.lookup_table.clone().iter().map(Into::into).collect(), + lookup_selectors: x.lookup_selectors.clone().into(), + table_ids: x.table_ids.clone().map(Into::into), + lookup_info: x.lookup_info, + runtime_tables_selector: x.runtime_tables_selector.clone().map(Into::into) + } + } + } + + impl From for LookupVerifierIndex<$G> { + fn from(x: WasmLookupVerifierIndex) -> Self { + Self { + joint_lookup_used: x.joint_lookup_used.into(), + lookup_table: x.lookup_table.iter().map(Into::into).collect(), + lookup_selectors: x.lookup_selectors.into(), + table_ids: x.table_ids.map(Into::into), + lookup_info: x.lookup_info, + runtime_tables_selector: x.runtime_tables_selector.map(Into::into) + } + } + } + + #[wasm_bindgen] + impl [] { + #[wasm_bindgen(constructor)] + pub fn new( + joint_lookup_used: bool, + lookup_table: WasmVector<$WasmPolyComm>, + lookup_selectors: WasmLookupSelectors, + table_ids: Option<$WasmPolyComm>, + lookup_info: &LookupInfo, + runtime_tables_selector: Option<$WasmPolyComm> + ) -> WasmLookupVerifierIndex { + WasmLookupVerifierIndex { + joint_lookup_used, + lookup_table, + lookup_selectors, + table_ids, + lookup_info: lookup_info.clone(), + runtime_tables_selector + } + } + + #[wasm_bindgen(getter)] + pub fn lookup_table(&self) -> WasmVector<$WasmPolyComm> { + self.lookup_table.clone() + } + + #[wasm_bindgen(setter)] + pub fn set_lookup_table(&mut self, x: WasmVector<$WasmPolyComm>) { + self.lookup_table = x + } + + #[wasm_bindgen(getter)] + pub fn lookup_selectors(&self) -> WasmLookupSelectors { + self.lookup_selectors.clone() + } + + #[wasm_bindgen(setter)] + pub fn set_lookup_selectors(&mut self, x: WasmLookupSelectors) { + self.lookup_selectors = x + } + + #[wasm_bindgen(getter)] + pub fn table_ids(&self) -> Option<$WasmPolyComm>{ + self.table_ids.clone() + } + + #[wasm_bindgen(setter)] + pub fn set_table_ids(&mut self, x: Option<$WasmPolyComm>) { + self.table_ids = x + } + + #[wasm_bindgen(getter)] + pub fn lookup_info(&self) -> LookupInfo { + self.lookup_info.clone() + } + + #[wasm_bindgen(setter)] + pub fn set_lookup_info(&mut self, x: LookupInfo) { + self.lookup_info = x + } + + #[wasm_bindgen(getter)] + pub fn runtime_tables_selector(&self) -> Option<$WasmPolyComm> { + self.runtime_tables_selector.clone() + } + + #[wasm_bindgen(setter)] + pub fn set_runtime_tables_selector(&mut self, x: Option<$WasmPolyComm>) { + self.runtime_tables_selector = x + } + } + + #[wasm_bindgen] + #[derive(Clone)] + pub struct [] { + pub domain: WasmDomain, + pub max_poly_size: i32, + pub public_: i32, + pub prev_challenges: i32, + #[wasm_bindgen(skip)] + pub srs: $WasmSrs, + #[wasm_bindgen(skip)] + pub evals: WasmPlonkVerificationEvals, + pub shifts: WasmShifts, + #[wasm_bindgen(skip)] + pub lookup_index: Option, + } + type WasmPlonkVerifierIndex = []; + + #[wasm_bindgen] + impl [] { + #[wasm_bindgen(constructor)] + pub fn new( + domain: &WasmDomain, + max_poly_size: i32, + public_: i32, + prev_challenges: i32, + srs: &$WasmSrs, + evals: &WasmPlonkVerificationEvals, + shifts: &WasmShifts, + lookup_index: Option, + ) -> Self { + WasmPlonkVerifierIndex { + domain: domain.clone(), + max_poly_size, + public_, + prev_challenges, + srs: srs.clone(), + evals: evals.clone(), + shifts: shifts.clone(), + lookup_index: lookup_index.clone(), + } + } + + #[wasm_bindgen(getter)] + pub fn srs(&self) -> $WasmSrs { + self.srs.clone() + } + + #[wasm_bindgen(setter)] + pub fn set_srs(&mut self, x: $WasmSrs) { + self.srs = x + } + + #[wasm_bindgen(getter)] + pub fn evals(&self) -> WasmPlonkVerificationEvals { + 
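+                // wasm-bindgen getters have to return owned values, which is why
+                // every accessor on these index types hands back a clone.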
self.evals.clone() + } + + #[wasm_bindgen(setter)] + pub fn set_evals(&mut self, x: WasmPlonkVerificationEvals) { + self.evals = x + } + + #[wasm_bindgen(getter)] + pub fn lookup_index(&self) -> Option { + self.lookup_index.clone() + } + + #[wasm_bindgen(setter)] + pub fn set_lookup_index(&mut self, li: Option) { + self.lookup_index = li + } + } + + pub fn to_wasm<'a>( + srs: &Arc>, + vi: DlogVerifierIndex<$G, OpeningProof<$G>>, + ) -> WasmPlonkVerifierIndex { + WasmPlonkVerifierIndex { + domain: WasmDomain { + log_size_of_group: vi.domain.log_size_of_group as i32, + group_gen: vi.domain.group_gen.into(), + }, + max_poly_size: vi.max_poly_size as i32, + public_: vi.public as i32, + prev_challenges: vi.prev_challenges as i32, + srs: srs.into(), + evals: WasmPlonkVerificationEvals { + sigma_comm: IntoIterator::into_iter(vi.sigma_comm).map(From::from).collect(), + coefficients_comm: IntoIterator::into_iter(vi.coefficients_comm).map(From::from).collect(), + generic_comm: vi.generic_comm.into(), + psm_comm: vi.psm_comm.into(), + complete_add_comm: vi.complete_add_comm.into(), + mul_comm: vi.mul_comm.into(), + emul_comm: vi.emul_comm.into(), + endomul_scalar_comm: vi.endomul_scalar_comm.into(), + xor_comm: vi.xor_comm.map(|v| v.into()), + range_check0_comm: vi.range_check0_comm.map(|v| v.into()), + range_check1_comm: vi.range_check1_comm.map(|v| v.into()), + foreign_field_add_comm: vi.foreign_field_add_comm.map(|v| v.into()), + foreign_field_mul_comm: vi.foreign_field_mul_comm.map(|v| v.into()), + rot_comm: vi.rot_comm.map(|v| v.into()) + }, + shifts: + WasmShifts { + s0: vi.shift[0].into(), + s1: vi.shift[1].into(), + s2: vi.shift[2].into(), + s3: vi.shift[3].into(), + s4: vi.shift[4].into(), + s5: vi.shift[5].into(), + s6: vi.shift[6].into(), + }, + lookup_index: vi.lookup_index.map(Into::into), + } + } + + /* pub fn to_wasm_copy<'a>( + srs: &Arc>, + vi: &DlogVerifierIndex, + ) -> WasmPlonkVerifierIndex { + WasmPlonkVerifierIndex { + domain: WasmDomain { + log_size_of_group: vi.domain.log_size_of_group as i32, + group_gen: vi.domain.group_gen.clone().into(), + }, + max_poly_size: vi.max_poly_size as i32, + srs: srs.clone().into(), + evals: WasmPlonkVerificationEvals { + sigma_comm: vi.sigma_comm.iter().map(From::from).collect(), + coefficients_comm: vi.coefficients_comm.iter().map(From::from).collect(), + generic_comm: vi.generic_comm.clone().into(), + psm_comm: vi.psm_comm.clone().into(), + complete_add_comm: vi.complete_add_comm.clone().into(), + mul_comm: vi.mul_comm.clone().into(), + emul_comm: vi.emul_comm.clone().into(), + endomul_scalar_comm: vi.endomul_scalar_comm.clone().into(), + }, + shifts: + WasmShifts { + s0: vi.shift[0].clone().into(), + s1: vi.shift[1].clone().into(), + s2: vi.shift[2].clone().into(), + s3: vi.shift[3].clone().into(), + s4: vi.shift[4].clone().into(), + s5: vi.shift[5].clone().into(), + s6: vi.shift[6].clone().into(), + }, + linearization: [](Box::new(vi.linearization.clone())), + } + } */ + + fn compute_feature_flags(index: &WasmPlonkVerifierIndex) -> FeatureFlags { + let xor = index.evals.xor_comm.is_some(); + let range_check0 = index.evals.range_check0_comm.is_some(); + let range_check1 = index.evals.range_check1_comm.is_some(); + let foreign_field_add = index.evals.foreign_field_add_comm.is_some(); + let foreign_field_mul = index.evals.foreign_field_mul_comm.is_some(); + let rot = index.evals.rot_comm.is_some(); + + let lookup = index + .lookup_index.as_ref() + .map_or(false, |li| li.lookup_info.features.patterns.lookup); + + // TODO + let runtime_tables = 
false; + + let patterns = LookupPatterns { + xor, + lookup, + range_check: range_check0 || range_check1 || rot, + foreign_field_mul: foreign_field_mul, + }; + + FeatureFlags { + range_check0, + range_check1, + foreign_field_add, + foreign_field_mul, + xor, + rot, + lookup_features: LookupFeatures { + patterns, + joint_lookup_used: patterns.joint_lookups_used(), + uses_runtime_tables: runtime_tables, + }, + } + } + + pub fn of_wasm( + index: WasmPlonkVerifierIndex, + ) -> (DlogVerifierIndex>, Arc>) { + let max_poly_size = index.max_poly_size; + let public_ = index.public_; + let prev_challenges = index.prev_challenges; + let log_size_of_group = index.domain.log_size_of_group; + let srs = &index.srs; + let evals = &index.evals; + let shifts = &index.shifts; + + /* + let urs_copy = Rc::clone(&*urs); + let urs_copy_outer = Rc::clone(&*urs); + let srs = { + // We know that the underlying value is still alive, because we never convert any of our + // Rc<_>s into weak pointers. + SRSValue::Ref(unsafe { &*Rc::into_raw(urs_copy) }) + }; */ + let (endo_q, _endo_r) = poly_commitment::srs::endos::<$GOther>(); + let domain = Domain::<$F>::new(1 << log_size_of_group).unwrap(); + + let feature_flags = compute_feature_flags(&index); + let (linearization, powers_of_alpha) = expr_linearization(Some(&feature_flags), true, 3); + + let index = + DlogVerifierIndex { + domain, + + sigma_comm: array_init(|i| (&evals.sigma_comm[i]).into()), + generic_comm: (&evals.generic_comm).into(), + coefficients_comm: array_init(|i| (&evals.coefficients_comm[i]).into()), + + psm_comm: (&evals.psm_comm).into(), + + complete_add_comm: (&evals.complete_add_comm).into(), + mul_comm: (&evals.mul_comm).into(), + emul_comm: (&evals.emul_comm).into(), + + endomul_scalar_comm: (&evals.endomul_scalar_comm).into(), + xor_comm: (&evals.xor_comm).as_ref().map(Into::into), + range_check0_comm: (&evals.range_check0_comm).as_ref().map(Into::into), + range_check1_comm: (&evals.range_check1_comm).as_ref().map(Into::into), + foreign_field_add_comm: (&evals.foreign_field_add_comm).as_ref().map(Into::into), + foreign_field_mul_comm: (&evals.foreign_field_mul_comm).as_ref().map(Into::into), + rot_comm: (&evals.rot_comm).as_ref().map(Into::into), + + w: { + let res = once_cell::sync::OnceCell::new(); + res.set(zk_w(domain, 3)).unwrap(); + res + }, + endo: endo_q, + max_poly_size: max_poly_size as usize, + public: public_ as usize, + prev_challenges: prev_challenges as usize, + permutation_vanishing_polynomial_m: { + let res = once_cell::sync::OnceCell::new(); + res.set(permutation_vanishing_polynomial(domain, 3)).unwrap(); + res + }, + shift: [ + shifts.s0.into(), + shifts.s1.into(), + shifts.s2.into(), + shifts.s3.into(), + shifts.s4.into(), + shifts.s5.into(), + shifts.s6.into() + ], + srs: { + Arc::clone(&srs.0) + }, + + zk_rows: 3, + + linearization, + powers_of_alpha, + lookup_index: index.lookup_index.map(Into::into), + }; + (index, srs.0.clone()) + } + + impl From for DlogVerifierIndex<$G, OpeningProof<$G>> { + fn from(index: WasmPlonkVerifierIndex) -> Self { + of_wasm(index).0 + } + } + + pub fn read_raw( + offset: Option, + srs: &$WasmSrs, + path: String, + ) -> Result>, JsValue> { + let path = Path::new(&path); + let (endo_q, _endo_r) = poly_commitment::srs::endos::(); + DlogVerifierIndex::<$G, OpeningProof<$G>>::from_file( + srs.0.clone(), + path, + offset.map(|x| x as u64), + endo_q, + ).map_err(|e| JsValue::from_str(format!("read_raw: {}", e).as_str())) + } + + #[wasm_bindgen] + pub fn [<$name:snake _read>]( + offset: Option, + srs: 
&$WasmSrs, + path: String, + ) -> Result { + let vi = read_raw(offset, srs, path)?; + Ok(to_wasm(srs, vi.into())) + } + + #[wasm_bindgen] + pub fn [<$name:snake _write>]( + append: Option, + index: WasmPlonkVerifierIndex, + path: String, + ) -> Result<(), JsValue> { + let index: DlogVerifierIndex<$G, OpeningProof<$G>> = index.into(); + let path = Path::new(&path); + index.to_file(path, append).map_err(|e| { + println!("{}", e); + JsValue::from_str("caml_pasta_fp_plonk_verifier_index_raw_read") + }) + } + + // TODO understand what serialization format we need + + // #[wasm_bindgen] + // pub fn [<$name:snake _serialize>]( + // index: WasmPlonkVerifierIndex, + // ) -> Box<[u8]> { + // let index: DlogVerifierIndex<$G> = index.into(); + // rmp_serde::to_vec(&index).unwrap().into_boxed_slice() + // } + + // #[wasm_bindgen] + // pub fn [<$name:snake _deserialize>]( + // srs: &$WasmSrs, + // index: Box<[u8]>, + // ) -> WasmPlonkVerifierIndex { + // let mut vi: DlogVerifierIndex<$G> = rmp_serde::from_slice(&index).unwrap(); + // vi.linearization = expr_linearization(vi.domain, false, false, None); + // return to_wasm(srs, vi.into()) + // } + + #[wasm_bindgen] + pub fn [<$name:snake _serialize>]( + index: WasmPlonkVerifierIndex, + ) -> String { + let index: DlogVerifierIndex<$G, OpeningProof<$G>> = index.into(); + serde_json::to_string(&index).unwrap() + } + + #[wasm_bindgen] + pub fn [<$name:snake _deserialize>]( + srs: &$WasmSrs, + index: String, + ) -> Result { + let vi: Result>, serde_json::Error> = serde_json::from_str(&index); + match vi { + Ok(vi) => Ok(to_wasm(srs, vi)), + Err(e) => Err(JsError::new(&(e.to_string()))), + } + } + + #[wasm_bindgen] + pub fn [<$name:snake _create>]( + index: &$WasmIndex, + ) -> WasmPlonkVerifierIndex { + { + let ptr: &mut poly_commitment::srs::SRS = + unsafe { &mut *(std::sync::Arc::as_ptr(&index.0.as_ref().srs) as *mut _) }; + ptr.add_lagrange_basis(index.0.as_ref().cs.domain.d1); + } + let verifier_index = index.0.as_ref().verifier_index(); + to_wasm(&index.0.as_ref().srs, verifier_index) + } + + #[wasm_bindgen] + pub fn [<$name:snake _shifts>](log2_size: i32) -> WasmShifts { + let domain = Domain::<$F>::new(1 << log2_size).unwrap(); + let shifts = Shifts::new(&domain); + let s = shifts.shifts(); + WasmShifts { + s0: s[0].clone().into(), + s1: s[1].clone().into(), + s2: s[2].clone().into(), + s3: s[3].clone().into(), + s4: s[4].clone().into(), + s5: s[5].clone().into(), + s6: s[6].clone().into(), + } + } + + #[wasm_bindgen] + pub fn [<$name:snake _dummy>]() -> WasmPlonkVerifierIndex { + fn comm() -> $WasmPolyComm { + let g: $WasmG = $G::prime_subgroup_generator().into(); + $WasmPolyComm { + shifted: None, + unshifted: vec![g].into(), + } + } + fn vec_comm(num: usize) -> WasmVector<$WasmPolyComm> { + (0..num).map(|_| comm()).collect() + } + + WasmPlonkVerifierIndex { + domain: WasmDomain { + log_size_of_group: 1, + group_gen: $F::one().into(), + }, + max_poly_size: 0, + public_: 0, + prev_challenges: 0, + srs: $WasmSrs(Arc::new(SRS::create(0))), + evals: WasmPlonkVerificationEvals { + sigma_comm: vec_comm(PERMUTS), + coefficients_comm: vec_comm(COLUMNS), + generic_comm: comm(), + psm_comm: comm(), + complete_add_comm: comm(), + mul_comm: comm(), + emul_comm: comm(), + endomul_scalar_comm: comm(), + xor_comm: None, + range_check0_comm: None, + range_check1_comm: None, + foreign_field_add_comm: None, + foreign_field_mul_comm: None, + rot_comm: None, + }, + shifts: + WasmShifts { + s0: $F::one().into(), + s1: $F::one().into(), + s2: $F::one().into(), + s3: 
$F::one().into(), + s4: $F::one().into(), + s5: $F::one().into(), + s6: $F::one().into(), + }, + lookup_index: None + } + } + + #[wasm_bindgen] + pub fn [<$name:snake _deep_copy>]( + x: &WasmPlonkVerifierIndex, + ) -> WasmPlonkVerifierIndex { + x.clone() + } + + } + } +} + +pub mod fp { + use super::*; + use crate::arkworks::{WasmGVesta, WasmPastaFp}; + use crate::pasta_fp_plonk_index::WasmPastaFpPlonkIndex; + use crate::poly_comm::vesta::WasmFpPolyComm as WasmPolyComm; + use crate::srs::fp::WasmFpSrs; + use mina_curves::pasta::{Fp, Pallas as GAffineOther, Vesta as GAffine}; + + impl_verification_key!( + caml_pasta_fp_plonk_verifier_index, + WasmGVesta, + GAffine, + WasmPastaFp, + Fp, + WasmPolyComm, + WasmFpSrs, + GAffineOther, + mina_poseidon::pasta::fp_kimchi, + mina_poseidon::pasta::fq_kimchi, + WasmPastaFpPlonkIndex, + Fp + ); +} + +pub mod fq { + use super::*; + use crate::arkworks::{WasmGPallas, WasmPastaFq}; + use crate::pasta_fq_plonk_index::WasmPastaFqPlonkIndex; + use crate::poly_comm::pallas::WasmFqPolyComm as WasmPolyComm; + use crate::srs::fq::WasmFqSrs; + use mina_curves::pasta::{Fq, Pallas as GAffine, Vesta as GAffineOther}; + + impl_verification_key!( + caml_pasta_fq_plonk_verifier_index, + WasmGPallas, + GAffine, + WasmPastaFq, + Fq, + WasmPolyComm, + WasmFqSrs, + GAffineOther, + mina_poseidon::pasta::fq_kimchi, + mina_poseidon::pasta::fp_kimchi, + WasmPastaFqPlonkIndex, + Fq + ); +} diff --git a/src/lib/crypto/kimchi_bindings/wasm/src/poly_comm.rs b/src/lib/crypto/kimchi_bindings/wasm/src/poly_comm.rs new file mode 100644 index 00000000000..3338c0f23f3 --- /dev/null +++ b/src/lib/crypto/kimchi_bindings/wasm/src/poly_comm.rs @@ -0,0 +1,105 @@ +use crate::wasm_vector::WasmVector; +use paste::paste; +macro_rules! impl_poly_comm { + ( + $WasmG: ty, + $G: ty, + $field_name: ident + /* + $CamlScalarField: ty, + $BaseField: ty, + $CamlBaseField: ty, + $Projective: ty */ + ) => { + paste! 
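+        // A polynomial commitment here is a vector of `unshifted` curve points plus
+        // an optional `shifted` point; the wrapper below mirrors
+        // poly_commitment::commitment::PolyComm field for field.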
{ + use wasm_bindgen::prelude::*; + use poly_commitment::commitment::PolyComm; + + #[wasm_bindgen] + #[derive(Clone)] + pub struct [] { + #[wasm_bindgen(skip)] + pub unshifted: WasmVector<$WasmG>, + pub shifted: Option<$WasmG>, + } + + type WasmPolyComm = []; + + #[wasm_bindgen] + impl [] { + #[wasm_bindgen(constructor)] + pub fn new(unshifted: WasmVector<$WasmG>, shifted: Option<$WasmG>) -> Self { + WasmPolyComm { unshifted, shifted } + } + + #[wasm_bindgen(getter)] + pub fn unshifted(&self) -> WasmVector<$WasmG> { + self.unshifted.clone() + } + + #[wasm_bindgen(setter)] + pub fn set_unshifted(&mut self, x: WasmVector<$WasmG>) { + self.unshifted = x + } + } + + impl From> for WasmPolyComm { + fn from(x: PolyComm<$G>) -> Self { + let PolyComm {unshifted, shifted} = x; + let unshifted: Vec<$WasmG> = + unshifted.into_iter().map(|x| x.into()).collect(); + WasmPolyComm { + unshifted: unshifted.into(), + shifted: shifted.map(|x| x.into()), + } + } + } + + impl From<&PolyComm<$G>> for WasmPolyComm { + fn from(x: &PolyComm<$G>) -> Self { + let unshifted: Vec<$WasmG> = + x.unshifted.iter().map(|x| x.into()).collect(); + WasmPolyComm { + unshifted: unshifted.into(), + shifted: x.shifted.map(|x| x.into()), + } + } + } + + impl From for PolyComm<$G> { + fn from(x: WasmPolyComm) -> Self { + let WasmPolyComm {unshifted, shifted} = x; + PolyComm { + unshifted: (*unshifted).iter().map(|x| { (*x).into() }).collect(), + shifted: shifted.map(|x| x.into()), + } + } + } + + impl From<&WasmPolyComm> for PolyComm<$G> { + fn from(x: &WasmPolyComm) -> Self { + PolyComm { + unshifted: x.unshifted.iter().map(|x| { (*x).into() }).collect(), + shifted: x.shifted.map(|x| x.into()), + } + } + } + } + }; +} + +pub mod pallas { + use super::*; + use crate::arkworks::group_affine::WasmGPallas; + use mina_curves::pasta::Pallas as GAffine; + + impl_poly_comm!(WasmGPallas, GAffine, Fq); +} + +pub mod vesta { + use super::*; + use crate::arkworks::group_affine::WasmGVesta; + use mina_curves::pasta::Vesta as GAffine; + + impl_poly_comm!(WasmGVesta, GAffine, Fp); +} diff --git a/src/lib/crypto/kimchi_bindings/wasm/src/poseidon.rs b/src/lib/crypto/kimchi_bindings/wasm/src/poseidon.rs new file mode 100644 index 00000000000..58878602cc1 --- /dev/null +++ b/src/lib/crypto/kimchi_bindings/wasm/src/poseidon.rs @@ -0,0 +1,44 @@ +use mina_curves::pasta::{Fp, Fq}; +use mina_poseidon::{constants::PlonkSpongeConstantsKimchi, permutation::poseidon_block_cipher}; +use wasm_bindgen::prelude::*; + +use crate::{ + arkworks::{WasmPastaFp, WasmPastaFq}, + wasm_flat_vector::WasmFlatVector, +}; + +// fp + +#[wasm_bindgen] +pub fn caml_pasta_fp_poseidon_block_cipher( + state: WasmFlatVector, +) -> WasmFlatVector { + let mut state_vec: Vec = state.into_iter().map(Into::into).collect(); + poseidon_block_cipher::( + &mina_poseidon::pasta::fp_kimchi::params(), + &mut state_vec, + ); + state_vec + .iter() + .map(|f| WasmPastaFp(*f)) + .collect::>() + .into() +} + +// fq + +#[wasm_bindgen] +pub fn caml_pasta_fq_poseidon_block_cipher( + state: WasmFlatVector, +) -> WasmFlatVector { + let mut state_vec: Vec = state.into_iter().map(Into::into).collect(); + poseidon_block_cipher::( + &mina_poseidon::pasta::fq_kimchi::params(), + &mut state_vec, + ); + state_vec + .iter() + .map(|f| WasmPastaFq(*f)) + .collect::>() + .into() +} diff --git a/src/lib/crypto/kimchi_bindings/wasm/src/projective.rs b/src/lib/crypto/kimchi_bindings/wasm/src/projective.rs new file mode 100644 index 00000000000..12ba103da0e --- /dev/null +++ 
b/src/lib/crypto/kimchi_bindings/wasm/src/projective.rs @@ -0,0 +1,152 @@ +use ark_ec::{AffineCurve, ProjectiveCurve}; +use ark_ff::UniformRand; +use paste::paste; +use rand::rngs::StdRng; + +use wasm_bindgen::prelude::*; + +macro_rules! impl_projective { + ($name: ident, + $GroupProjective: ty, + $CamlG: ty, + $CamlScalarField: ty, + $BaseField: ty, + $CamlBaseField: ty, + $Projective: ty) => { + + paste! { + #[wasm_bindgen] + pub fn []() -> $GroupProjective { + $Projective::prime_subgroup_generator().into() + } + + #[wasm_bindgen] + pub fn []( + x: &$GroupProjective, + y: &$GroupProjective, + ) -> $GroupProjective { + x.as_ref() + y.as_ref() + } + + #[wasm_bindgen] + pub fn []( + x: &$GroupProjective, + y: &$GroupProjective, + ) -> $GroupProjective { + x.as_ref() - y.as_ref() + } + + #[wasm_bindgen] + pub fn []( + x: &$GroupProjective, + ) -> $GroupProjective { + -(*x.as_ref()) + } + + #[wasm_bindgen] + pub fn []( + x: &$GroupProjective, + ) -> $GroupProjective { + x.as_ref().double().into() + } + + #[wasm_bindgen] + pub fn []( + x: &$GroupProjective, + y: $CamlScalarField, + ) -> $GroupProjective { + let y: ark_ff::BigInteger256 = y.0.into(); + x.as_ref().mul(&y).into() + } + + #[wasm_bindgen] + pub fn []() -> $GroupProjective { + let rng = &mut rand::rngs::OsRng; + let proj: $Projective = UniformRand::rand(rng); + proj.into() + } + + #[wasm_bindgen] + pub fn [](i: u32) -> $GroupProjective { + // We only care about entropy here, so we force a conversion i32 -> u32. + let i: u64 = (i as u32).into(); + let mut rng: StdRng = rand::SeedableRng::seed_from_u64(i); + let proj: $Projective = UniformRand::rand(&mut rng); + proj.into() + } + + #[wasm_bindgen] + pub extern "C" fn []() -> $CamlBaseField { + let (endo_q, _endo_r) = poly_commitment::srs::endos::(); + endo_q.into() + } + + #[wasm_bindgen] + pub extern "C" fn []() -> $CamlScalarField { + let (_endo_q, endo_r) = poly_commitment::srs::endos::(); + endo_r.into() + } + + #[wasm_bindgen] + pub fn []( + x: &$GroupProjective + ) -> $CamlG { + x.as_ref().into_affine().into() + } + + #[wasm_bindgen] + pub fn [](x: $CamlG) -> $GroupProjective { + Into::::into(x).into_projective().into() + } + + #[wasm_bindgen] + pub fn [](x: $CamlBaseField, y: $CamlBaseField) -> $GroupProjective { + let res = $Projective::new(x.into(), y.into(), <$BaseField as ark_ff::One>::one()); + res.into() + } + + #[wasm_bindgen] + pub fn [](x: $CamlG) -> $CamlG { + x + } + } + } +} + +pub mod pallas { + use super::*; + use crate::arkworks::group_affine::WasmGPallas; + use crate::arkworks::group_projective::WasmPallasGProjective; + use crate::arkworks::pasta_fp::WasmPastaFp; + use crate::arkworks::pasta_fq::WasmPastaFq; + use mina_curves::pasta::{Fp, Pallas as GAffine, ProjectivePallas}; + + impl_projective!( + pallas, + WasmPallasGProjective, + WasmGPallas, + WasmPastaFq, + Fp, + WasmPastaFp, + ProjectivePallas + ); +} + +pub mod vesta { + use super::*; + use crate::arkworks::group_affine::WasmGVesta; + use crate::arkworks::group_projective::WasmVestaGProjective; + use crate::arkworks::pasta_fp::WasmPastaFp; + use crate::arkworks::pasta_fq::WasmPastaFq; + use mina_curves::pasta::{Fq, ProjectiveVesta, Vesta as GAffine}; + + impl_projective!( + vesta, + WasmVestaGProjective, + WasmGVesta, + WasmPastaFp, + Fq, + WasmPastaFq, + ProjectiveVesta + ); +} diff --git a/src/lib/crypto/kimchi_bindings/wasm/src/rayon.rs b/src/lib/crypto/kimchi_bindings/wasm/src/rayon.rs new file mode 100644 index 00000000000..35caae1fb58 --- /dev/null +++ 
b/src/lib/crypto/kimchi_bindings/wasm/src/rayon.rs @@ -0,0 +1,155 @@ +/* + * Copyright 2022 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +use js_sys::JsString; +use js_sys::Promise; +use spmc::{channel, Receiver, Sender}; +use wasm_bindgen::prelude::*; + +static mut THREAD_POOL: Option = None; + +pub fn run_in_pool(op: OP) -> R +where + OP: FnOnce() -> R + Send, + R: Send, +{ + let pool = unsafe { THREAD_POOL.as_ref().unwrap() }; + pool.install(op) +} + +#[wasm_bindgen] +#[doc(hidden)] +pub struct PoolBuilder { + num_threads: usize, + sender: Sender, + receiver: Receiver, +} + +#[cfg(not(feature = "nodejs"))] +#[wasm_bindgen] +extern "C" { + #[wasm_bindgen(js_name = startWorkers)] + fn start_workers(module: JsValue, memory: JsValue, builder: PoolBuilder) -> Promise; +} +#[cfg(feature = "nodejs")] +#[wasm_bindgen] +extern "C" { + #[wasm_bindgen(js_name = startWorkers)] + fn start_workers(module: JsString, memory: JsValue, builder: PoolBuilder) -> Promise; +} +#[wasm_bindgen] +extern "C" { + #[wasm_bindgen(js_name = terminateWorkers)] + fn terminate_workers() -> Promise; +} + +#[wasm_bindgen] +impl PoolBuilder { + fn new(num_threads: usize) -> Self { + let (sender, receiver) = channel(); + Self { + num_threads, + sender, + receiver, + } + } + + #[wasm_bindgen(js_name = numThreads)] + pub fn num_threads(&self) -> usize { + self.num_threads + } + + pub fn receiver(&self) -> *const Receiver { + &self.receiver + } + + // This should be called by the JS side once all the Workers are spawned. + // Important: it must take `self` by reference, otherwise + // `start_worker_thread` will try to receive a message on a moved value. + pub fn build(&mut self) { + unsafe { + THREAD_POOL = Some( + rayon::ThreadPoolBuilder::new() + .num_threads(self.num_threads) + // We could use postMessage here instead of Rust channels, + // but currently we can't due to a Chrome bug that will cause + // the main thread to lock up before it even sends the message: + // https://bugs.chromium.org/p/chromium/issues/detail?id=1075645 + .spawn_handler(move |thread| { + // Note: `send` will return an error if there are no receivers. + // We can use it because all the threads are spawned and ready to accept + // messages by the time we call `build()` to instantiate spawn handler. 
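+ // Each pre-spawned worker receives this `ThreadBuilder` in `wbg_rayon_start_worker` below and runs it.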
+ self.sender.send(thread).unwrap_throw(); + Ok(()) + }) + .build() + .unwrap_throw(), + ) + } + } +} + +#[cfg(feature = "nodejs")] +#[wasm_bindgen(js_name = initThreadPool)] +#[doc(hidden)] +pub fn init_thread_pool(num_threads: usize, worker_source: JsString) -> Promise { + start_workers( + worker_source, + wasm_bindgen::memory(), + PoolBuilder::new(num_threads), + ) +} + +#[cfg(not(feature = "nodejs"))] +#[wasm_bindgen(js_name = initThreadPool)] +#[doc(hidden)] +pub fn init_thread_pool(num_threads: usize) -> Promise { + start_workers( + wasm_bindgen::module(), + wasm_bindgen::memory(), + PoolBuilder::new(num_threads), + ) +} + +#[wasm_bindgen(js_name = exitThreadPool)] +#[doc(hidden)] +pub fn exit_thread_pool() -> Promise { + unsafe { + let promise = terminate_workers(); + THREAD_POOL = None; + promise + } +} + +#[wasm_bindgen] +#[allow(clippy::not_unsafe_ptr_arg_deref)] +#[doc(hidden)] +pub fn wbg_rayon_start_worker(receiver: *const Receiver) +where + // Statically assert that it's safe to accept `Receiver` from another thread. + Receiver: Sync, +{ + // This is safe, because we know it came from a reference to PoolBuilder, + // allocated on the heap by wasm-bindgen and dropped only once all the + // threads are running. + // + // The only way to violate safety is if someone externally calls + // `exports.wbg_rayon_start_worker(garbageValue)`, but then no Rust tools + // would prevent us from issues anyway. + let receiver = unsafe { &*receiver }; + // Wait for a task (`ThreadBuilder`) on the channel, and, once received, + // start executing it. + // + // On practice this will start running Rayon's internal event loop. + receiver.recv().unwrap_throw().run(); +} diff --git a/src/lib/crypto/kimchi_bindings/wasm/src/srs.rs b/src/lib/crypto/kimchi_bindings/wasm/src/srs.rs new file mode 100644 index 00000000000..4e7699bc765 --- /dev/null +++ b/src/lib/crypto/kimchi_bindings/wasm/src/srs.rs @@ -0,0 +1,385 @@ +use crate::wasm_flat_vector::WasmFlatVector; +use crate::wasm_vector::WasmVector; +use ark_poly::UVPolynomial; +use ark_poly::{univariate::DensePolynomial, EvaluationDomain, Evaluations}; +use paste::paste; +use poly_commitment::SRS as ISRS; +use poly_commitment::{commitment::b_poly_coefficients, srs::SRS}; +use serde::{Deserialize, Serialize}; +use std::ops::Deref; +use std::{ + fs::{File, OpenOptions}, + io::{BufReader, BufWriter, Seek, SeekFrom::Start}, + sync::Arc, +}; +use wasm_bindgen::prelude::*; + +macro_rules! impl_srs { + ($name: ident, + $WasmF: ty, + $WasmG: ty, + $F: ty, + $G: ty, + $WasmPolyComm: ty, + $field_name: ident) => { + paste! 
{ + #[wasm_bindgen] + #[derive(Clone)] + pub struct []( + #[wasm_bindgen(skip)] + pub Arc>); + + impl Deref for [] { + type Target = Arc>; + + fn deref(&self) -> &Self::Target { &self.0 } + } + + impl From>> for [] { + fn from(x: Arc>) -> Self { + [](x) + } + } + + impl From<&Arc>> for [] { + fn from(x: &Arc>) -> Self { + [](x.clone()) + } + } + + impl From<[]> for Arc> { + fn from(x: []) -> Self { + x.0 + } + } + + impl From<&[]> for Arc> { + fn from(x: &[]) -> Self { + x.0.clone() + } + } + + impl<'a> From<&'a []> for &'a Arc> { + fn from(x: &'a []) -> Self { + &x.0 + } + } + + #[wasm_bindgen] + pub fn [<$name:snake _create>](depth: i32) -> [] { + Arc::new(SRS::create(depth as usize)).into() + } + + #[wasm_bindgen] + pub fn [<$name:snake _add_lagrange_basis>]( + srs: &[], + log2_size: i32, + ) { + crate::rayon::run_in_pool(|| { + let ptr: &mut poly_commitment::srs::SRS<$G> = + unsafe { &mut *(std::sync::Arc::as_ptr(&srs) as *mut _) }; + let domain = EvaluationDomain::<$F>::new(1 << (log2_size as usize)).expect("invalid domain size"); + ptr.add_lagrange_basis(domain); + }); + } + + #[wasm_bindgen] + pub fn [<$name:snake _write>]( + append: Option, + srs: &[], + path: String, + ) -> Result<(), JsValue> { + let file = OpenOptions::new() + .append(append.unwrap_or(true)) + .open(path) + .map_err(|err| { + JsValue::from_str(format!("caml_pasta_fp_urs_write: {}", err).as_str()) + })?; + let file = BufWriter::new(file); + + srs.0.serialize(&mut rmp_serde::Serializer::new(file)) + .map_err(|e| JsValue::from_str(format!("caml_pasta_fp_urs_write: {}", e).as_str())) + } + + #[wasm_bindgen] + pub fn [<$name:snake _read>]( + offset: Option, + path: String, + ) -> Result]>, JsValue> { + let file = File::open(path).map_err(|err| { + JsValue::from_str(format!("caml_pasta_fp_urs_read: {}", err).as_str()) + })?; + let mut reader = BufReader::new(file); + + if let Some(offset) = offset { + reader.seek(Start(offset as u64)).map_err(|err| { + JsValue::from_str(format!("caml_pasta_fp_urs_read: {}", err).as_str()) + })?; + } + + // TODO: shouldn't we just error instead of returning None? + let srs = match SRS::<$G>::deserialize(&mut rmp_serde::Deserializer::new(reader)) { + Ok(srs) => srs, + Err(_) => return Ok(None), + }; + + Ok(Some(Arc::new(srs).into())) + } + + #[wasm_bindgen] + pub fn [<$name:snake _lagrange_commitment>]( + srs: &[], + domain_size: i32, + i: i32, + ) -> Result<$WasmPolyComm, JsValue> { + let x_domain = EvaluationDomain::<$F>::new(domain_size as usize).ok_or_else(|| { + JsValue::from_str("caml_pasta_fp_urs_lagrange_commitment") + })?; + crate::rayon::run_in_pool(|| { + // We're single-threaded, so it's safe to grab this pointer as mutable. + // Do not try this at home. 
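+ // As in the `_add_lagrange_basis` binding above, we mutate the SRS behind the `Arc` so the Lagrange basis is cached before it is read back just below.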
+ let ptr: &mut poly_commitment::srs::SRS<$G> = + unsafe { &mut *(std::sync::Arc::as_ptr(&srs) as *mut _) }; + ptr.add_lagrange_basis(x_domain); + }); + + Ok(srs.lagrange_bases[&x_domain.size()][i as usize].clone().into()) + } + + #[wasm_bindgen] + pub fn [<$name:snake _commit_evaluations>]( + srs: &[], + domain_size: i32, + evals: WasmFlatVector<$WasmF>, + ) -> Result<$WasmPolyComm, JsValue> { + let x_domain = EvaluationDomain::<$F>::new(domain_size as usize).ok_or_else(|| { + JsValue::from_str("caml_pasta_fp_urs_commit_evaluations") + })?; + + let evals = evals.into_iter().map(Into::into).collect(); + let p = Evaluations::<$F>::from_vec_and_domain(evals, x_domain).interpolate(); + + Ok(srs.commit_non_hiding(&p, 1, None).into()) + } + + #[wasm_bindgen] + pub fn [<$name:snake _b_poly_commitment>]( + srs: &[], + chals: WasmFlatVector<$WasmF>, + ) -> Result<$WasmPolyComm, JsValue> { + let result = crate::rayon::run_in_pool(|| { + let chals: Vec<$F> = chals.into_iter().map(Into::into).collect(); + let coeffs = b_poly_coefficients(&chals); + let p = DensePolynomial::<$F>::from_coefficients_vec(coeffs); + srs.commit_non_hiding(&p, 1, None) + }); + Ok(result.into()) + } + + #[wasm_bindgen] + pub fn [<$name:snake _batch_accumulator_check>]( + srs: &[], + comms: WasmVector<$WasmG>, + chals: WasmFlatVector<$WasmF>, + ) -> bool { + crate::rayon::run_in_pool(|| { + let comms: Vec<_> = comms.into_iter().map(Into::into).collect(); + let chals: Vec<_> = chals.into_iter().map(Into::into).collect(); + crate::urs_utils::batch_dlog_accumulator_check(&srs, &comms, &chals) + }) + } + + #[wasm_bindgen] + pub fn [<$name:snake _batch_accumulator_generate>]( + srs: &[], + comms: i32, + chals: WasmFlatVector<$WasmF>, + ) -> WasmVector<$WasmG> { + crate::urs_utils::batch_dlog_accumulator_generate::<$G>( + &srs, + comms as usize, + &chals.into_iter().map(From::from).collect(), + ).into_iter().map(Into::into).collect() + } + + #[wasm_bindgen] + pub fn [<$name:snake _h>](srs: &[]) -> $WasmG { + srs.h.into() + } + } + } +} + +// +// Fp +// + +pub mod fp { + use std::collections::HashMap; + + use super::*; + use crate::arkworks::{WasmGVesta as WasmG, WasmPastaFp}; + use crate::poly_comm::vesta::WasmFpPolyComm as WasmPolyComm; + use mina_curves::pasta::{Fp, Vesta as G}; + use poly_commitment::PolyComm; + + impl_srs!(caml_fp_srs, WasmPastaFp, WasmG, Fp, G, WasmPolyComm, Fp); + + #[wasm_bindgen] + pub fn caml_fp_srs_create_parallel(depth: i32) -> WasmFpSrs { + crate::rayon::run_in_pool(|| Arc::new(SRS::::create_parallel(depth as usize)).into()) + } + + // return the cloned srs in a form that we can store on the js side + #[wasm_bindgen] + pub fn caml_fp_srs_get(srs: &WasmFpSrs) -> WasmVector { + // return a vector which consists of h, then all the gs + let mut h_and_gs: Vec = vec![srs.0.h.clone().into()]; + h_and_gs.extend(srs.0.g.iter().map(|x: &G| WasmG::from(x.clone()))); + h_and_gs.into() + } + + // set the srs from a vector of h and gs + #[wasm_bindgen] + pub fn caml_fp_srs_set(h_and_gs: WasmVector) -> WasmFpSrs { + // return a vector which consists of h, then all the gs + let mut h_and_gs: Vec = h_and_gs.into_iter().map(|x| x.into()).collect(); + let h = h_and_gs.remove(0); + let g = h_and_gs; + let srs = SRS:: { + h, + g, + lagrange_bases: HashMap::new(), + }; + Arc::new(srs).into() + } + + // maybe get lagrange commitment + #[wasm_bindgen] + pub fn caml_fp_srs_maybe_lagrange_commitment( + srs: &WasmFpSrs, + domain_size: i32, + i: i32, + ) -> Option { + let bases = srs.0.lagrange_bases.get(&(domain_size as usize)); 
+ bases.map(|bases| bases[i as usize].clone().into()) + } + + // set entire lagrange basis from input + #[wasm_bindgen] + pub fn caml_fp_srs_set_lagrange_basis( + srs: &WasmFpSrs, + domain_size: i32, + input_bases: WasmVector, + ) { + let bases: Vec> = input_bases.into_iter().map(Into::into).collect(); + + // add to srs + let ptr: &mut poly_commitment::srs::SRS = + unsafe { &mut *(std::sync::Arc::as_ptr(&srs) as *mut _) }; + ptr.lagrange_bases.insert(domain_size as usize, bases); + } + + // compute & add lagrange basis internally, return the entire basis + #[wasm_bindgen] + pub fn caml_fp_srs_get_lagrange_basis( + srs: &WasmFpSrs, + domain_size: i32, + ) -> WasmVector { + // compute lagrange basis + crate::rayon::run_in_pool(|| { + let ptr: &mut poly_commitment::srs::SRS = + unsafe { &mut *(std::sync::Arc::as_ptr(&srs) as *mut _) }; + let domain = + EvaluationDomain::::new(domain_size as usize).expect("invalid domain size"); + ptr.add_lagrange_basis(domain); + }); + let bases = &srs.0.lagrange_bases[&(domain_size as usize)]; + bases.into_iter().map(Into::into).collect() + } +} + +pub mod fq { + use std::collections::HashMap; + + use super::*; + use crate::arkworks::{WasmGPallas as WasmG, WasmPastaFq}; + use crate::poly_comm::pallas::WasmFqPolyComm as WasmPolyComm; + use mina_curves::pasta::{Fq, Pallas as G}; + use poly_commitment::PolyComm; + + impl_srs!(caml_fq_srs, WasmPastaFq, WasmG, Fq, G, WasmPolyComm, Fq); + + #[wasm_bindgen] + pub fn caml_fq_srs_create_parallel(depth: i32) -> WasmFqSrs { + crate::rayon::run_in_pool(|| Arc::new(SRS::::create_parallel(depth as usize)).into()) + } + + // return the cloned srs in a form that we can store on the js side + #[wasm_bindgen] + pub fn caml_fq_srs_get(srs: &WasmFqSrs) -> WasmVector { + // return a vector which consists of h, then all the gs + let mut h_and_gs: Vec = vec![srs.0.h.clone().into()]; + h_and_gs.extend(srs.0.g.iter().map(|x: &G| WasmG::from(x.clone()))); + h_and_gs.into() + } + + // set the srs from a vector of h and gs + #[wasm_bindgen] + pub fn caml_fq_srs_set(h_and_gs: WasmVector) -> WasmFqSrs { + // return a vector which consists of h, then all the gs + let mut h_and_gs: Vec = h_and_gs.into_iter().map(|x| x.into()).collect(); + let h = h_and_gs.remove(0); + let g = h_and_gs; + let srs = SRS:: { + h, + g, + lagrange_bases: HashMap::new(), + }; + Arc::new(srs).into() + } + + // maybe get lagrange commitment + #[wasm_bindgen] + pub fn caml_fq_srs_maybe_lagrange_commitment( + srs: &WasmFqSrs, + domain_size: i32, + i: i32, + ) -> Option { + let bases = srs.0.lagrange_bases.get(&(domain_size as usize)); + bases.map(|bases| bases[i as usize].clone().into()) + } + + // set entire lagrange basis from input + #[wasm_bindgen] + pub fn caml_fq_srs_set_lagrange_basis( + srs: &WasmFqSrs, + domain_size: i32, + input_bases: WasmVector, + ) { + let bases: Vec> = input_bases.into_iter().map(Into::into).collect(); + + // add to srs + let ptr: &mut poly_commitment::srs::SRS = + unsafe { &mut *(std::sync::Arc::as_ptr(&srs) as *mut _) }; + ptr.lagrange_bases.insert(domain_size as usize, bases); + } + + // compute & add lagrange basis internally, return the entire basis + #[wasm_bindgen] + pub fn caml_fq_srs_get_lagrange_basis( + srs: &WasmFqSrs, + domain_size: i32, + ) -> WasmVector { + // compute lagrange basis + crate::rayon::run_in_pool(|| { + let ptr: &mut poly_commitment::srs::SRS = + unsafe { &mut *(std::sync::Arc::as_ptr(&srs) as *mut _) }; + let domain = + EvaluationDomain::::new(domain_size as usize).expect("invalid domain size"); + 
ptr.add_lagrange_basis(domain); + }); + let bases = &srs.0.lagrange_bases[&(domain_size as usize)]; + bases.into_iter().map(Into::into).collect() + } +} diff --git a/src/lib/crypto/kimchi_bindings/wasm/src/urs_utils.rs b/src/lib/crypto/kimchi_bindings/wasm/src/urs_utils.rs new file mode 100644 index 00000000000..4eb79e6ea99 --- /dev/null +++ b/src/lib/crypto/kimchi_bindings/wasm/src/urs_utils.rs @@ -0,0 +1,101 @@ +use ark_ec::{msm::VariableBaseMSM, ProjectiveCurve}; +use ark_ff::{batch_inversion, One, PrimeField, UniformRand, Zero}; +use poly_commitment::{ + commitment::{b_poly_coefficients, CommitmentCurve}, + srs::SRS, +}; +use rayon::prelude::*; + +// TODO: Not compatible with variable rounds +pub fn batch_dlog_accumulator_check( + urs: &SRS, + comms: &[G], + chals: &[G::ScalarField], +) -> bool { + let k = comms.len(); + + if k == 0 { + assert_eq!(chals.len(), 0); + return true; + } + + let rounds = chals.len() / k; + assert_eq!(chals.len() % rounds, 0); + + let rs = { + let r = G::ScalarField::rand(&mut rand::rngs::OsRng); + let mut rs = vec![G::ScalarField::one(); k]; + for i in 1..k { + rs[i] = r * rs[i - 1]; + } + rs + }; + + let mut points = urs.g.clone(); + let n = points.len(); + points.extend(comms); + + let mut scalars = vec![G::ScalarField::zero(); n]; + scalars.extend(&rs[..]); + + let chal_invs = { + let mut cs = chals.to_vec(); + batch_inversion(&mut cs); + cs + }; + + let termss: Vec<_> = chals + .par_iter() + .zip(chal_invs) + .chunks(rounds) + .zip(rs) + .map(|(chunk, r)| { + let chals: Vec<_> = chunk.iter().map(|(c, _)| **c).collect(); + let mut s = b_poly_coefficients(&chals); + s.iter_mut().for_each(|c| *c *= &r); + s + }) + .collect(); + + for terms in termss { + assert_eq!(terms.len(), n); + for i in 0..n { + scalars[i] -= &terms[i]; + } + } + + let scalars: Vec<_> = scalars.iter().map(|x| x.into_repr()).collect(); + VariableBaseMSM::multi_scalar_mul(&points, &scalars) == G::Projective::zero() +} + +pub fn batch_dlog_accumulator_generate( + urs: &SRS, + num_comms: usize, + chals: &Vec, +) -> Vec { + let k = num_comms; + + if k == 0 { + assert_eq!(chals.len(), 0); + return vec![]; + } + + let rounds = chals.len() / k; + assert_eq!(chals.len() % rounds, 0); + + let comms: Vec<_> = chals + .into_par_iter() + .chunks(rounds) + .map(|chals| { + let chals: Vec = chals.into_iter().copied().collect(); + let scalars: Vec<_> = b_poly_coefficients(&chals) + .into_iter() + .map(|x| x.into_repr()) + .collect(); + let points: Vec<_> = urs.g.clone(); + VariableBaseMSM::multi_scalar_mul(&points, &scalars).into_affine() + }) + .collect(); + + comms +} diff --git a/src/lib/crypto/kimchi_bindings/wasm/src/wasm_flat_vector.rs b/src/lib/crypto/kimchi_bindings/wasm/src/wasm_flat_vector.rs new file mode 100644 index 00000000000..82b9bc0a0e8 --- /dev/null +++ b/src/lib/crypto/kimchi_bindings/wasm/src/wasm_flat_vector.rs @@ -0,0 +1,150 @@ +//! The flat vector is a vector of fixed-size elements that we want to expose directly to js-of-ocaml +//! (so that we can access a `Vec` cheaply, +//! by just passing a pointer to a continuous memory region instead of copying. +//! The wasmvector is a normal heap-allocated vector, +//! where we leave it on the rust heap and just keep a pointer around. +//! We use flat for fields, normal for gates etc. +//! +//! Accessing Rust vector values is not the same as accessing an array. +//! Each indexing (e.g. `some_vec[3]`) is costly as it is implemented as a function call. +//! 
Knowing that, plus the fact that field elements are implemented as `[u32; 8]`, we know that we incur the cost of following several pointers. +//! To decrease that cost, we flatten such arrays, going from something like +//! +//! ```ignore +//! [[a0, a1, ..., a7], [b0, b1, ..., b7], ...] +//! ``` +//! +//! to a flattened vector like: +//! +//! ```ignore +//! [a0, a1, ..., a7, b0, b1, ..., b7, ...] +//! ``` + +use wasm_bindgen::convert::{FromWasmAbi, IntoWasmAbi, OptionFromWasmAbi, OptionIntoWasmAbi}; + +use std::convert::From; +use std::ops::Deref; + +#[derive(Clone, Debug)] +pub struct WasmFlatVector(Vec); + +pub trait FlatVectorElem { + const FLATTENED_SIZE: usize; + fn flatten(self) -> Vec; + fn unflatten(flat: Vec) -> Self; +} + +impl Deref for WasmFlatVector { + type Target = Vec; + + fn deref(&self) -> &Self::Target { + &self.0 + } +} + +impl From> for WasmFlatVector { + fn from(x: Vec) -> Self { + WasmFlatVector(x) + } +} + +impl From> for Vec { + fn from(x: WasmFlatVector) -> Self { + x.0 + } +} + +impl<'a, T> From<&'a WasmFlatVector> for &'a Vec { + fn from(x: &'a WasmFlatVector) -> Self { + &x.0 + } +} + +impl std::iter::IntoIterator for WasmFlatVector { + type Item = as std::iter::IntoIterator>::Item; + type IntoIter = as std::iter::IntoIterator>::IntoIter; + fn into_iter(self) -> Self::IntoIter { + self.0.into_iter() + } +} + +impl<'a, T> std::iter::IntoIterator for &'a WasmFlatVector { + type Item = <&'a Vec as std::iter::IntoIterator>::Item; + type IntoIter = <&'a Vec as std::iter::IntoIterator>::IntoIter; + fn into_iter(self) -> Self::IntoIter { + self.0.iter() + } +} + +impl std::iter::FromIterator for WasmFlatVector { + fn from_iter(iter: I) -> WasmFlatVector + where + I: IntoIterator, + { + WasmFlatVector(std::iter::FromIterator::from_iter(iter)) + } +} + +impl std::default::Default for WasmFlatVector { + fn default() -> Self { + WasmFlatVector(std::default::Default::default()) + } +} + +impl std::iter::Extend for WasmFlatVector { + fn extend(&mut self, iter: I) + where + I: IntoIterator, + { + self.0.extend(iter) + } +} + +impl wasm_bindgen::describe::WasmDescribe for WasmFlatVector { + fn describe() { + as wasm_bindgen::describe::WasmDescribe>::describe() + } +} + +impl FromWasmAbi for WasmFlatVector { + type Abi = as FromWasmAbi>::Abi; + unsafe fn from_abi(js: Self::Abi) -> Self { + let data: Vec = FromWasmAbi::from_abi(js); + let mut res: Vec = Vec::with_capacity(data.len() / T::FLATTENED_SIZE); + + let mut buf = Vec::with_capacity(T::FLATTENED_SIZE); + for x in data.into_iter() { + assert!(buf.len() < T::FLATTENED_SIZE); + buf.push(x); + if buf.len() >= T::FLATTENED_SIZE { + res.push(T::unflatten(buf)); + buf = Vec::with_capacity(T::FLATTENED_SIZE); + } + } + assert_eq!(buf.len(), 0); + WasmFlatVector(res) + } +} + +impl OptionFromWasmAbi for WasmFlatVector { + fn is_none(x: &Self::Abi) -> bool { + as OptionFromWasmAbi>::is_none(x) + } +} + +impl IntoWasmAbi for WasmFlatVector { + type Abi = as FromWasmAbi>::Abi; + fn into_abi(self) -> Self::Abi { + let mut data: Vec = Vec::with_capacity(self.0.len() * T::FLATTENED_SIZE); + for x in self.0.into_iter() { + data.extend(x.flatten().into_iter()); + } + IntoWasmAbi::into_abi(data) + } +} + +impl OptionIntoWasmAbi for WasmFlatVector { + fn none() -> Self::Abi { + as OptionIntoWasmAbi>::none() + } +} diff --git a/src/lib/crypto/kimchi_bindings/wasm/src/wasm_ocaml_serde/de.rs b/src/lib/crypto/kimchi_bindings/wasm/src/wasm_ocaml_serde/de.rs new file mode 100644 index 00000000000..d990e856a13 --- /dev/null +++ 
b/src/lib/crypto/kimchi_bindings/wasm/src/wasm_ocaml_serde/de.rs @@ -0,0 +1,304 @@ +use js_sys::{Array, ArrayBuffer, Number, Uint8Array}; +use serde::de::value::SeqDeserializer; +use serde::de::{self, Error as _, IntoDeserializer}; +use wasm_bindgen::{JsCast, JsValue, UnwrapThrowExt}; + +use super::{Error, Result}; + +struct SeqAccess { + iter: js_sys::IntoIter, +} + +impl<'de> de::SeqAccess<'de> for SeqAccess { + type Error = Error; + + fn next_element_seed>( + &mut self, + seed: T, + ) -> Result> { + Ok(match self.iter.next().transpose()? { + Some(value) => Some(seed.deserialize(Deserializer::from(value))?), + None => None, + }) + } +} + +struct ObjectAccess { + data: Array, + fields: std::slice::Iter<'static, &'static str>, + idx: u32, + next_value: Option, +} + +impl ObjectAccess { + fn new(data: Array, fields: &'static [&'static str]) -> Self { + // We start the index at 1, due to some js-of-ocaml expecting the first element to be 0 + // this is due to OCaml implementation details. + Self { + data, + idx: 1, + fields: fields.iter(), + next_value: None, + } + } +} + +fn str_deserializer(s: &str) -> de::value::StrDeserializer { + de::IntoDeserializer::into_deserializer(s) +} + +impl<'de> de::MapAccess<'de> for ObjectAccess { + type Error = Error; + + fn next_key_seed>(&mut self, seed: K) -> Result> { + debug_assert!(self.next_value.is_none()); + + match self.fields.next() { + None => Ok(None), + Some(field) => { + self.next_value = Some(Deserializer::from(self.data.get(self.idx))); + self.idx += 1; + Ok(Some(seed.deserialize(str_deserializer(field))?)) + } + } + } + + fn next_value_seed>(&mut self, seed: V) -> Result { + seed.deserialize(self.next_value.take().unwrap_throw()) + } +} + +pub struct Deserializer(JsValue); + +// Can't use `serde_wasm_bindgen::de::Deserializer`, its `value` field is private. +impl From for Deserializer { + fn from(value: JsValue) -> Self { + Self(value) + } +} + +// Ideally this would be implemented for `JsValue` instead, but we can't because +// of the orphan rule. +impl<'de> IntoDeserializer<'de, Error> for Deserializer { + type Deserializer = Self; + + fn into_deserializer(self) -> Self::Deserializer { + self + } +} + +impl Deserializer { + fn as_bytes(&self) -> Option> { + if let Some(v) = self.0.dyn_ref::() { + Some(v.to_vec()) + } else { + /* We can hit this case when the values have come from the non-serde conversions. 
*/ + self.0 + .dyn_ref::() + .map(|v| Uint8Array::new(v).to_vec()) + } + } +} + +impl<'de> de::Deserializer<'de> for Deserializer { + type Error = Error; + + fn deserialize_any>(self, _: V) -> Result { + Err(Error::custom( + "Inferring the serialized type is not implemented", + )) + } + + fn deserialize_unit>(self, visitor: V) -> Result { + visitor.visit_unit() + } + + fn deserialize_unit_struct>( + self, + _: &'static str, + _: V, + ) -> Result { + Err(Error::custom( + "Deserializing unit structs is not implemented", + )) + } + + fn deserialize_bool>(self, visitor: V) -> Result { + let x = self.0.unchecked_into::().as_f64().unwrap() as u32; + visitor.visit_bool(x != 0) + } + + fn deserialize_f32>(self, visitor: V) -> Result { + visitor.visit_f32(self.0.unchecked_into::().as_f64().unwrap() as f32) + } + + fn deserialize_f64>(self, visitor: V) -> Result { + visitor.visit_f64(self.0.unchecked_into::().as_f64().unwrap()) + } + + fn deserialize_identifier>(self, _: V) -> Result { + Err(Error::custom("Deserializing strings is not implemented")) + } + + fn deserialize_str>(self, _: V) -> Result { + Err(Error::custom("Deserializing strings is not implemented")) + } + + fn deserialize_string>(self, _: V) -> Result { + Err(Error::custom("Deserializing strings is not implemented")) + } + + fn deserialize_i8>(self, visitor: V) -> Result { + visitor.visit_i8(self.0.unchecked_into::().as_f64().unwrap() as i8) + } + + fn deserialize_i16>(self, visitor: V) -> Result { + visitor.visit_i16(self.0.unchecked_into::().as_f64().unwrap() as i16) + } + + fn deserialize_i32>(self, visitor: V) -> Result { + visitor.visit_i32(self.0.unchecked_into::().as_f64().unwrap() as i32) + } + + fn deserialize_u8>(self, visitor: V) -> Result { + visitor.visit_u8(self.0.unchecked_into::().as_f64().unwrap() as u8) + } + + fn deserialize_u16>(self, visitor: V) -> Result { + visitor.visit_u16(self.0.unchecked_into::().as_f64().unwrap() as u16) + } + + fn deserialize_u32>(self, visitor: V) -> Result { + visitor.visit_u32(self.0.unchecked_into::().as_f64().unwrap() as u32) + } + + fn deserialize_i64>(self, _visitor: V) -> Result { + Err(Error::custom("Deserializing i64 is not implemented")) + } + + fn deserialize_u64>(self, _visitor: V) -> Result { + Err(Error::custom("Deserializing u64 is not implemented")) + } + + fn deserialize_i128>(self, _visitor: V) -> Result { + Err(Error::custom("Deserializing i128 is not implemented")) + } + + fn deserialize_u128>(self, _visitor: V) -> Result { + Err(Error::custom("Deserializing u128 is not implemented")) + } + + fn deserialize_char>(self, visitor: V) -> Result { + visitor.visit_char((self.0.unchecked_into::().as_f64().unwrap() as u8) as char) + } + + // Serde can deserialize `visit_unit` into `None`, but can't deserialize arbitrary value + // as `Some`, so we need to provide own simple implementation. 
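+ // js-of-ocaml represents `None` as the integer 0 and `Some x` as the array `[0, x]` (mirroring `serialize_none`/`serialize_some` in ser.rs), so any JS array here is treated as `Some`.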
+ fn deserialize_option>(self, visitor: V) -> Result { + if let Ok(arr) = self.0.dyn_into::() { + visitor.visit_some(Into::::into(arr.get(1))) + } else { + visitor.visit_none() + } + } + + fn deserialize_newtype_struct>( + self, + _name: &'static str, + _visitor: V, + ) -> Result { + Err(Error::custom( + "Deserializing newtype structus is not implemented", + )) + } + + fn deserialize_seq>(self, visitor: V) -> Result { + let arr = self.0.unchecked_into::(); + visitor.visit_seq(SeqDeserializer::new( + arr.iter().skip(1).map(Deserializer::from), + )) + } + + fn deserialize_tuple>(self, _len: usize, visitor: V) -> Result { + self.deserialize_seq(visitor) + } + + fn deserialize_tuple_struct>( + self, + _name: &'static str, + _len: usize, + _visitor: V, + ) -> Result { + Err(Error::custom( + "Deserializing tuple structs is not implemented", + )) + } + + fn deserialize_map>(self, _visitor: V) -> Result { + Err(Error::custom("Deserializing maps is not implemented")) + } + + fn deserialize_struct>( + self, + _name: &'static str, + fields: &'static [&'static str], + visitor: V, + ) -> Result { + let arr = self.0.unchecked_into::(); + visitor.visit_map(ObjectAccess::new(arr, fields)) + } + + fn deserialize_enum>( + self, + _: &'static str, + _: &'static [&'static str], + _: V, + ) -> Result { + Err(Error::custom("Deserializing enums is not implemented")) + } + + fn deserialize_ignored_any>(self, visitor: V) -> Result { + visitor.visit_unit() + } + + fn deserialize_bytes>(self, visitor: V) -> Result { + self.deserialize_byte_buf(visitor) + } + + fn deserialize_byte_buf>(self, visitor: V) -> Result { + if let Some(bytes) = self.as_bytes() { + visitor.visit_byte_buf(bytes) + } else { + Err(Error::custom("Type error while deserializing bytes")) + } + } + + #[inline] + fn is_human_readable(&self) -> bool { + false + } +} + +impl<'de> de::VariantAccess<'de> for Deserializer { + type Error = Error; + + fn unit_variant(self) -> Result<()> { + Err(Error::custom("Deserializing variants is not implemented")) + } + + fn newtype_variant_seed>(self, _: T) -> Result { + Err(Error::custom("Deserializing variants is not implemented")) + } + + fn tuple_variant>(self, _: usize, _: V) -> Result { + Err(Error::custom("Deserializing variants is not implemented")) + } + + fn struct_variant>( + self, + _: &'static [&'static str], + _: V, + ) -> Result { + Err(Error::custom("Deserializing variants is not implemented")) + } +} diff --git a/src/lib/crypto/kimchi_bindings/wasm/src/wasm_ocaml_serde/mod.rs b/src/lib/crypto/kimchi_bindings/wasm/src/wasm_ocaml_serde/mod.rs new file mode 100644 index 00000000000..320dfd7b319 --- /dev/null +++ b/src/lib/crypto/kimchi_bindings/wasm/src/wasm_ocaml_serde/mod.rs @@ -0,0 +1,33 @@ +//! This module constructs a serde serializer (and deserializer) to convert Rust structures to (and from) Js types expected by js-of-ocaml. +//! js-of-ocaml expects arrays of values instead of objects, so a Rust structure like: +//! +//! ```ignore +//! { a: F, b: Vec, c: SomeType } +//! ``` +//! +//! must be converted to an array that looks like this: +//! +//! ```ignore +//! // notice the leading 0, which is an artifact of OCaml's memory layout and how js-of-ocaml is implemented. +//! [0, a, b, c] +//! ``` +//! See the following example on how to use it: +//! +//! ``` +//! #[derive(serde::Serialize, serde::Deserialize)] +//! struct Thing { a: usize, b: u32 } +//! +//! let serializer = crate::wasm_ocaml_serde::ser::Serializer::new(); +//! let thing = Thing { a: 5, b: 6 }; +//! 
let js_value = serde::Serialize::serialize(thing, &mut serializer).unwrap(); +//! assert_eq!(format!("{}", js_value), "[0, 5, 6]"); +//! ``` + +use wasm_bindgen::prelude::*; + +pub mod de; +pub mod ser; + +pub use serde_wasm_bindgen::Error; + +pub type Result = core::result::Result; diff --git a/src/lib/crypto/kimchi_bindings/wasm/src/wasm_ocaml_serde/ser.rs b/src/lib/crypto/kimchi_bindings/wasm/src/wasm_ocaml_serde/ser.rs new file mode 100644 index 00000000000..ab4a4f9e95e --- /dev/null +++ b/src/lib/crypto/kimchi_bindings/wasm/src/wasm_ocaml_serde/ser.rs @@ -0,0 +1,319 @@ +use js_sys::{Array, Uint8Array}; +use serde::ser::{self, Error as _, Serialize}; +use wasm_bindgen::prelude::*; + +use super::{Error, Result}; + +pub struct ErrorSerializer; + +impl ser::SerializeTupleVariant for ErrorSerializer { + type Ok = JsValue; + type Error = Error; + + fn serialize_field(&mut self, _: &T) -> Result<()> { + Err(Error::custom("Serializing variants is not implemented")) + } + + fn end(self) -> Result { + Err(Error::custom("Serializing variants is not implemented")) + } +} + +impl ser::SerializeStructVariant for ErrorSerializer { + type Ok = JsValue; + type Error = Error; + + fn serialize_field(&mut self, _: &'static str, _: &T) -> Result<()> { + Err(Error::custom("Serializing variants is not implemented")) + } + + fn end(self) -> Result { + Err(Error::custom("Serializing variants is not implemented")) + } +} + +pub struct ArraySerializer<'s> { + serializer: &'s Serializer, + res: Array, + idx: u32, +} + +impl<'s> ArraySerializer<'s> { + pub fn new(serializer: &'s Serializer) -> Self { + let res = Array::new(); + res.set(0, JsValue::from(0u32)); + Self { + serializer, + res, + idx: 1, + } + } +} + +impl ser::SerializeSeq for ArraySerializer<'_> { + type Ok = JsValue; + type Error = Error; + + fn serialize_element(&mut self, value: &T) -> Result<()> { + self.res.set(self.idx, value.serialize(self.serializer)?); + self.idx += 1; + Ok(()) + } + + fn end(self) -> Result { + Ok(self.res.into()) + } +} + +impl ser::SerializeTuple for ArraySerializer<'_> { + type Ok = JsValue; + type Error = Error; + + fn serialize_element(&mut self, value: &T) -> Result<()> { + ser::SerializeSeq::serialize_element(self, value) + } + + fn end(self) -> Result { + Ok(self.res.into()) + } +} + +impl ser::SerializeTupleStruct for ArraySerializer<'_> { + type Ok = JsValue; + type Error = Error; + + fn serialize_field(&mut self, value: &T) -> Result<()> { + ser::SerializeTuple::serialize_element(self, value) + } + + fn end(self) -> Result { + Ok(self.res.into()) + } +} + +impl ser::SerializeMap for ErrorSerializer { + type Ok = JsValue; + type Error = Error; + + fn serialize_key(&mut self, _: &T) -> Result<()> { + Err(Error::custom("Serializing maps is not implemented")) + } + + fn serialize_value(&mut self, _: &T) -> Result<()> { + Err(Error::custom("Serializing maps is not implemented")) + } + + fn end(self) -> Result { + Err(Error::custom("Serializing maps is not implemented")) + } +} + +impl ser::SerializeStruct for ArraySerializer<'_> { + type Ok = JsValue; + type Error = Error; + + fn serialize_field(&mut self, _: &'static str, value: &T) -> Result<()> { + ser::SerializeSeq::serialize_element(self, value) + } + + fn end(self) -> Result { + Ok(self.res.into()) + } +} + +#[derive(Default)] +pub struct Serializer(serde_wasm_bindgen::Serializer); + +impl Serializer { + pub fn new() -> Self { + Self(serde_wasm_bindgen::Serializer::new()) + } +} + +impl<'s> ser::Serializer for &'s Serializer { + type Ok = JsValue; + type Error 
= Error; + + type SerializeSeq = ArraySerializer<'s>; + type SerializeTuple = ArraySerializer<'s>; + type SerializeTupleStruct = ArraySerializer<'s>; + type SerializeTupleVariant = ErrorSerializer; + type SerializeMap = ErrorSerializer; + type SerializeStruct = ArraySerializer<'s>; + type SerializeStructVariant = ErrorSerializer; + + #[inline] + fn is_human_readable(&self) -> bool { + false + } + + fn serialize_bool(self, v: bool) -> Result { + if v { + self.0.serialize_u32(1) + } else { + self.0.serialize_u32(0) + } + } + + fn serialize_i8(self, v: i8) -> Result { + self.0.serialize_i8(v) + } + + fn serialize_i16(self, v: i16) -> Result { + self.0.serialize_i16(v) + } + + fn serialize_i32(self, v: i32) -> Result { + self.0.serialize_i32(v) + } + + fn serialize_u8(self, v: u8) -> Result { + self.0.serialize_u8(v) + } + + fn serialize_u16(self, v: u16) -> Result { + self.0.serialize_u16(v) + } + + fn serialize_u32(self, v: u32) -> Result { + self.0.serialize_u32(v) + } + + fn serialize_f32(self, v: f32) -> Result { + self.0.serialize_f32(v) + } + + fn serialize_f64(self, v: f64) -> Result { + self.0.serialize_f64(v) + } + + fn serialize_str(self, _: &str) -> Result { + /* The bindings call caml_string_of_jsstring; not clear what to do here without digging in + further. */ + Err(Error::custom("Serializing strings is not implemented")) + } + + fn serialize_i64(self, _: i64) -> Result { + /* Custom type in OCaml */ + Err(Error::custom("Serializing i64 is not implemented")) + } + + fn serialize_u64(self, _: u64) -> Result { + /* Custom type in OCaml */ + Err(Error::custom("Serializing u64 is not implemented")) + } + + fn serialize_i128(self, _: i128) -> Result { + /* Custom type in OCaml */ + Err(Error::custom("Serializing i128 is not implemented")) + } + + fn serialize_u128(self, _: u128) -> Result { + /* Custom type in OCaml */ + Err(Error::custom("Serializing u128 is not implemented")) + } + + fn serialize_char(self, v: char) -> Result { + self.0.serialize_u32(v as u32) + } + + fn serialize_bytes(self, v: &[u8]) -> Result { + Ok(JsValue::from(Uint8Array::new( + unsafe { Uint8Array::view(v) }.as_ref(), + ))) + } + + fn serialize_none(self) -> Result { + self.0.serialize_u32(0) + } + + fn serialize_some(self, value: &T) -> Result { + let res = Array::new(); + res.set(0, JsValue::from(0u32)); + res.set(1, value.serialize(self)?); + Ok(res.into()) + } + + fn serialize_unit(self) -> Result { + self.0.serialize_u32(0) + } + + fn serialize_unit_struct(self, _: &'static str) -> Result { + Err(Error::custom("Serializing unit structs is not implemented")) + } + + fn serialize_unit_variant(self, _: &'static str, _: u32, _: &'static str) -> Result { + Err(Error::custom( + "Serializing unit variants is not implemented", + )) + } + + fn serialize_newtype_struct(self, _: &'static str, _: &T) -> Result { + Err(Error::custom( + "Serializing newtype structs is not implemented", + )) + } + + fn serialize_newtype_variant( + self, + _: &'static str, + _: u32, + _: &'static str, + _: &T, + ) -> Result { + Err(Error::custom( + "Serializing newtype variants is not implemented", + )) + } + + // TODO: Figure out if there is a way to detect and serialise `Set` differently. 
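+ // Sequences, tuples and structs below all become a JS `Array` whose first element is the 0 tag that js-of-ocaml expects for blocks (see `ArraySerializer::new`).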
+ fn serialize_seq(self, _: Option) -> Result { + Ok(ArraySerializer::new(self)) + } + + fn serialize_tuple(self, _: usize) -> Result { + Ok(ArraySerializer::new(self)) + } + + fn serialize_tuple_struct( + self, + _: &'static str, + _: usize, + ) -> Result { + Err(Error::custom( + "Serializing tuple structs is not implemented", + )) + } + + fn serialize_tuple_variant( + self, + _: &'static str, + _: u32, + _: &'static str, + _: usize, + ) -> Result { + Err(Error::custom( + "Serializing tuple variants is not implemented", + )) + } + + fn serialize_map(self, _: Option) -> Result { + Err(Error::custom("Serializing maps is not implemented")) + } + + fn serialize_struct(self, _: &'static str, _: usize) -> Result { + Ok(ArraySerializer::new(self)) + } + + fn serialize_struct_variant( + self, + _: &'static str, + _: u32, + _: &'static str, + _: usize, + ) -> Result { + Err(Error::custom( + "Serializing struct variants is not implemented", + )) + } +} diff --git a/src/lib/crypto/kimchi_bindings/wasm/src/wasm_vector.rs b/src/lib/crypto/kimchi_bindings/wasm/src/wasm_vector.rs new file mode 100644 index 00000000000..2ee0f6ef27a --- /dev/null +++ b/src/lib/crypto/kimchi_bindings/wasm/src/wasm_vector.rs @@ -0,0 +1,166 @@ +use crate::wasm_flat_vector::WasmFlatVector; +use paste::paste; +use std::convert::From; +use std::ops::Deref; +use wasm_bindgen::convert::{FromWasmAbi, IntoWasmAbi, OptionFromWasmAbi, OptionIntoWasmAbi}; +use wasm_bindgen::prelude::*; + +#[derive(Clone, Debug)] +pub struct WasmVector(Vec); + +impl Deref for WasmVector { + type Target = Vec; + + fn deref(&self) -> &Self::Target { + &self.0 + } +} + +impl From> for WasmVector { + fn from(x: Vec) -> Self { + WasmVector(x) + } +} + +impl From> for Vec { + fn from(x: WasmVector) -> Self { + x.0 + } +} + +impl<'a, T> From<&'a WasmVector> for &'a Vec { + fn from(x: &'a WasmVector) -> Self { + &x.0 + } +} + +impl std::iter::IntoIterator for WasmVector { + type Item = as std::iter::IntoIterator>::Item; + type IntoIter = as std::iter::IntoIterator>::IntoIter; + fn into_iter(self) -> Self::IntoIter { + self.0.into_iter() + } +} + +impl<'a, T> std::iter::IntoIterator for &'a WasmVector { + type Item = <&'a Vec as std::iter::IntoIterator>::Item; + type IntoIter = <&'a Vec as std::iter::IntoIterator>::IntoIter; + fn into_iter(self) -> Self::IntoIter { + self.0.iter() + } +} + +impl std::iter::FromIterator for WasmVector { + fn from_iter(iter: I) -> WasmVector + where + I: IntoIterator, + { + WasmVector(std::iter::FromIterator::from_iter(iter)) + } +} + +impl std::default::Default for WasmVector { + fn default() -> Self { + WasmVector(std::default::Default::default()) + } +} + +impl std::iter::Extend for WasmVector { + fn extend(&mut self, iter: I) + where + I: IntoIterator, + { + self.0.extend(iter) + } +} + +impl wasm_bindgen::describe::WasmDescribe for WasmVector { + fn describe() { + as wasm_bindgen::describe::WasmDescribe>::describe() + } +} + +impl> FromWasmAbi for WasmVector { + type Abi = as FromWasmAbi>::Abi; + unsafe fn from_abi(js: Self::Abi) -> Self { + let pointers: Vec = FromWasmAbi::from_abi(js); + WasmVector( + pointers + .into_iter() + .map(|x| FromWasmAbi::from_abi(x)) + .collect(), + ) + } +} + +impl> OptionFromWasmAbi for WasmVector { + fn is_none(x: &Self::Abi) -> bool { + as OptionFromWasmAbi>::is_none(x) + } +} + +impl> IntoWasmAbi for WasmVector { + type Abi = as FromWasmAbi>::Abi; + fn into_abi(self) -> Self::Abi { + let pointers: Vec = self + .0 + .into_iter() + .map(|x| IntoWasmAbi::into_abi(x)) + .collect(); + 
IntoWasmAbi::into_abi(pointers) + } +} + +impl> OptionIntoWasmAbi for WasmVector { + fn none() -> Self::Abi { + as OptionIntoWasmAbi>::none() + } +} + +macro_rules! impl_vec_vec_fp { + ( $F:ty, $WasmF:ty ) => { + paste! { + #[wasm_bindgen] + pub struct [](#[wasm_bindgen(skip)] pub Vec>); + + #[wasm_bindgen] + impl [] { + #[wasm_bindgen(constructor)] + pub fn create(n: i32) -> Self { + [](Vec::with_capacity(n as usize)) + } + + #[wasm_bindgen] + pub fn push(&mut self, x: WasmFlatVector<$WasmF>) { + self.0.push(x.into_iter().map(Into::into).collect()) + } + + #[wasm_bindgen] + pub fn get(&self, i: i32) -> WasmFlatVector<$WasmF> { + self.0[i as usize].clone().into_iter().map(Into::into).collect() + } + + #[wasm_bindgen] + pub fn set(&mut self, i: i32, x: WasmFlatVector<$WasmF>) { + self.0[i as usize] = x.into_iter().map(Into::into).collect() + } + } + } + }; +} + +pub mod fp { + use super::*; + use crate::arkworks::WasmPastaFp; + use mina_curves::pasta::Fp; + + impl_vec_vec_fp!(Fp, WasmPastaFp); +} + +pub mod fq { + use super::*; + use crate::arkworks::WasmPastaFq; + use mina_curves::pasta::Fq; + + impl_vec_vec_fp!(Fq, WasmPastaFq); +} diff --git a/src/lib/o1js_stub/README.md b/src/lib/o1js_stub/README.md new file mode 100644 index 00000000000..f11b6aaecb4 --- /dev/null +++ b/src/lib/o1js_stub/README.md @@ -0,0 +1,26 @@ +# `o1js-stub` + +## Overview + +The primary purpose of the `o1js-stub` module is to maintain the compatibility of the Mina core. This stub module, which contains only a dune file to compile Mina dependencies, replicates the dependency structure of [`o1js`](https://github.com/o1-labs/o1js), specifically the [OCaml](https://github.com/o1-labs/o1js-bindings/blob/main/ocaml/lib/dune) dependencies but without the complexity of the full implementation. + +## Rationale + +In the development of Mina, there have been instances where the JavaScript build broke unexpectedly. This was often due to the inadvertent addition of popular OCaml libraries that are incompatible with JavaScript compilation. To prevent such scenarios and ensure the robustness of the Mina core in a JS environment, `o1js-stub` plays a crucial role. + +## Key Features + +- **Dependency Mirror:** `o1js-stub` mirrors the dependency profile of `o1js` module. This ensures that any dependencies which could break JS compilation are flagged early. + +- **Continuous Integration Checks:** In the Mina CI pipeline, `o1js-stub` is subjected to compilation checks using Dune. This is a critical step to verify that the module, along with its dependencies, remains JS-compilable. + +## Integration in Mina CI + +The integration of `o1js-stub` in the Mina Continuous Integration (CI) process serves as a gatekeeper to prevent the introduction of JS-incompatible dependencies into the Mina core. During CI, the module is compiled, and any failure in this process would indicate potential compatibility issues, thereby safeguarding the JS build. + +## Usage + +For Mina developers, the introduction of `o1js-stub` should be largely transparent. However, it's important to be aware of its existence, especially when considering the addition or modification of dependencies that might affect the JavaScript build. + +Developers should ensure that any changes in the dependencies of modules related to `o1js-stub` do not introduce JS-incompatibility. Any such changes should be thoroughly tested locally and will be automatically verified in the CI pipeline. 
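+
+A quick local check is to build the stub's JavaScript target with dune before pushing, the same check that CI performs. This is a minimal sketch: the target name (`o1js_stub.bc.js`, derived from the `(modes js)` stanza in the stub's dune file) and the `dev` profile are assumptions; adjust them to your setup.
+
+```sh
+# Compile the stub (and therefore the mirrored Mina dependencies) with js_of_ocaml.
+# Any JS-incompatible dependency should surface here as a build failure.
+dune build --profile=dev src/lib/o1js_stub/o1js_stub.bc.js
+```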
+ diff --git a/src/lib/o1js_stub/dune b/src/lib/o1js_stub/dune new file mode 100644 index 00000000000..9797ae6413b --- /dev/null +++ b/src/lib/o1js_stub/dune @@ -0,0 +1,78 @@ +(executable + (name o1js_stub) + (modes js) + (flags (-g)) + (link_flags + (-noautolink -g)) + (link_deps + ../crypto/kimchi_bindings/js/node_js/plonk_wasm.js + ../crypto/kimchi_bindings/js/node_js/plonk_wasm_bg.wasm) + (libraries + ;; opam libraries ;; + core_kernel + base + base.caml + integers + sexplib0 + yojson + ppx_deriving_yojson.runtime + ;; local libraries ;; + mina_wire_types + mina_base + mina_base.import + snarky.backendless + h_list + pickles + pickles.backend + pickles_base + pickles.limb_vector + pickles_types + kimchi_backend + kimchi_backend.pasta + kimchi_backend.pasta.basic + kimchi_backend.pasta.constraint_system + kimchi_backend.common + kimchi_bindings + kimchi_types + pasta_bindings + base58_check + block_time + currency + data_hash_lib + hash_prefixes + fields_derivers + fields_derivers_zkapps + genesis_constants + mina_numbers + mina_signature_kind + mina_transaction + mina_transaction_logic + random_oracle + random_oracle_input + sgn + signature_lib + snark_keys_header + snark_params + sponge + tuple_lib + unsigned_extended + with_hash + ;; js-specific libraries ;; + js_of_ocaml + bindings_js + integers_stubs_js + zarith_stubs_js + node_backend + ;; js-specific overrides ;; + cache_dir.fake + digestif.ocaml + logger.fake + mina_metrics.none + promise.js + promise.js_helpers + run_in_thread.fake) + (instrumentation + (backend bisect_ppx)) + (forbidden_libraries async core re2 ctypes) + (preprocess + (pps ppx_custom_printf ppx_version js_of_ocaml-ppx))) diff --git a/src/lib/o1js_stub/o1js_stub.ml b/src/lib/o1js_stub/o1js_stub.ml new file mode 100644 index 00000000000..e69de29bb2d diff --git a/src/lib/snarkyjs b/src/lib/snarkyjs deleted file mode 160000 index cbe61315c6a..00000000000 --- a/src/lib/snarkyjs +++ /dev/null @@ -1 +0,0 @@ -Subproject commit cbe61315c6aa8c94ae2de5b92db29f551002d62c