From d3aad252fcecb4e366d48c167e080daff2c48d50 Mon Sep 17 00:00:00 2001 From: leon chou Date: Mon, 29 Sep 2025 15:44:37 -0400 Subject: [PATCH] added o1js_bindings --- src/dune-project | 1 + src/lib/o1js_bindings/artifacts/.gitignore | 2 + src/lib/o1js_bindings/dune | 44 + src/lib/o1js_bindings/jsoo_exports/dune | 107 ++ .../o1js_bindings/jsoo_exports/o1js_node.ml | 3 + .../o1js_bindings/jsoo_exports/o1js_web.ml | 1 + .../o1js_bindings/jsoo_exports/overrides.js | 75 ++ src/lib/o1js_bindings/lib/consistency_test.ml | 444 +++++++++ src/lib/o1js_bindings/lib/dune | 72 ++ src/lib/o1js_bindings/lib/local_ledger.ml | 288 ++++++ .../o1js_bindings/lib/o1js_bindings_lib.ml | 20 + .../o1js_bindings/lib/o1js_bindings_lib.mli | 3 + src/lib/o1js_bindings/lib/pickles_bindings.ml | 916 ++++++++++++++++++ .../o1js_bindings/lib/pickles_bindings.mli | 132 +++ src/lib/o1js_bindings/lib/snarky_bindings.ml | 608 ++++++++++++ src/lib/o1js_bindings/lib/snarky_bindings.mli | 279 ++++++ src/lib/o1js_bindings/lib/util.ml | 45 + src/lib/o1js_bindings/o1js_constants.ml | 198 ++++ src/lib/o1js_bindings/o1js_types.ml | 13 + 19 files changed, 3251 insertions(+) create mode 100644 src/lib/o1js_bindings/artifacts/.gitignore create mode 100644 src/lib/o1js_bindings/dune create mode 100644 src/lib/o1js_bindings/jsoo_exports/dune create mode 100644 src/lib/o1js_bindings/jsoo_exports/o1js_node.ml create mode 100644 src/lib/o1js_bindings/jsoo_exports/o1js_web.ml create mode 100644 src/lib/o1js_bindings/jsoo_exports/overrides.js create mode 100644 src/lib/o1js_bindings/lib/consistency_test.ml create mode 100644 src/lib/o1js_bindings/lib/dune create mode 100644 src/lib/o1js_bindings/lib/local_ledger.ml create mode 100644 src/lib/o1js_bindings/lib/o1js_bindings_lib.ml create mode 100644 src/lib/o1js_bindings/lib/o1js_bindings_lib.mli create mode 100644 src/lib/o1js_bindings/lib/pickles_bindings.ml create mode 100644 src/lib/o1js_bindings/lib/pickles_bindings.mli create mode 100644 src/lib/o1js_bindings/lib/snarky_bindings.ml create mode 100644 src/lib/o1js_bindings/lib/snarky_bindings.mli create mode 100644 src/lib/o1js_bindings/lib/util.ml create mode 100644 src/lib/o1js_bindings/o1js_constants.ml create mode 100644 src/lib/o1js_bindings/o1js_types.ml diff --git a/src/dune-project b/src/dune-project index 6e1bd8d118d7..88cf311dce94 100644 --- a/src/dune-project +++ b/src/dune-project @@ -125,6 +125,7 @@ (package (name node_error_service)) (package (name node_status_service)) (package (name non_zero_curve_point)) +(package (name o1js_bindings)) (package (name o1trace)) (package (name o1trace_webkit_event)) (package (name one_or_two)) diff --git a/src/lib/o1js_bindings/artifacts/.gitignore b/src/lib/o1js_bindings/artifacts/.gitignore new file mode 100644 index 000000000000..c96a04f008ee --- /dev/null +++ b/src/lib/o1js_bindings/artifacts/.gitignore @@ -0,0 +1,2 @@ +* +!.gitignore \ No newline at end of file diff --git a/src/lib/o1js_bindings/dune b/src/lib/o1js_bindings/dune new file mode 100644 index 000000000000..e12cffdbab40 --- /dev/null +++ b/src/lib/o1js_bindings/dune @@ -0,0 +1,44 @@ +(executable + (instrumentation + (backend bisect_ppx)) + (libraries fields_derivers.zkapps mina_base yojson) + (link_flags (-linkall)) + (modes native) + (modules o1js_types) + (name o1js_types) + (package o1js_bindings) + (preprocess + (pps ppx_version)) + (public_name o1js-types)) + +(rule + (targets jsLayout.json) + (action + (with-stdout-to + jsLayout.json + (run %{exe:./o1js_types.exe})))) + +(executable + (instrumentation + (backend 
bisect_ppx)) + (libraries + base + base58_check + core_kernel + hash_prefixes + mina_base + mina_signature_kind + pickles + protocol_version + random_oracle + random_oracle.permutation.ocaml + sponge + yojson) + (link_flags (-linkall)) + (name o1js_constants) + (modes native) + (modules o1js_constants) + (package o1js_bindings) + (preprocess + (pps ppx_version)) + (public_name o1js-constants)) diff --git a/src/lib/o1js_bindings/jsoo_exports/dune b/src/lib/o1js_bindings/jsoo_exports/dune new file mode 100644 index 000000000000..40ca466f2bf1 --- /dev/null +++ b/src/lib/o1js_bindings/jsoo_exports/dune @@ -0,0 +1,107 @@ +(env + (_ + (js_of_ocaml + (compilation_mode whole_program)))) + +(rule + (enabled_if + (= %{env:PREBUILT_KIMCHI_BINDINGS_JS_NODE_JS=n} n)) + (targets node_js_plonk_wasm.js node_js_plonk_wasm_bg.wasm) + (deps + (:d1 ../../crypto/kimchi_bindings/js/node_js/plonk_wasm.js) + (:d2 ../../crypto/kimchi_bindings/js/node_js/plonk_wasm_bg.wasm)) + (mode + (promote + (into ../artifacts))) + (action + (progn + (run cp %{d1} node_js_plonk_wasm.js) + (run cp %{d2} node_js_plonk_wasm_bg.wasm)))) + +(rule + (enabled_if + (= %{env:PREBUILT_KIMCHI_BINDINGS_JS_WEB=n} n)) + (targets web_plonk_wasm.js web_plonk_wasm_bg.wasm) + (deps + (:d1 ../../crypto/kimchi_bindings/js/web/plonk_wasm.js) + (:d2 ../../crypto/kimchi_bindings/js/web/plonk_wasm_bg.wasm)) + (mode + (promote + (into ../artifacts))) + (action + (progn + (run cp %{d1} web_plonk_wasm.js) + (run cp %{d2} web_plonk_wasm_bg.wasm)))) + +(rule + (enabled_if + (<> %{env:PREBUILT_KIMCHI_BINDINGS_JS_NODE_JS=n} n)) + (targets node_js_plonk_wasm.js node_js_plonk_wasm_bg.wasm) + (mode + (promote + (into ../artifacts))) + (action + (progn + (run + cp + %{env:PREBUILT_KIMCHI_BINDINGS_JS_NODE_JS=n}/plonk_wasm.js + node_js_plonk_wasm.js) + (run + cp + %{env:PREBUILT_KIMCHI_BINDINGS_JS_NODE_JS=n}/plonk_wasm_bg.wasm + node_js_plonk_wasm_bg.wasm)))) + +(rule + (enabled_if + (<> %{env:PREBUILT_KIMCHI_BINDINGS_JS_WEB=n} n)) + (targets web_plonk_wasm.js web_plonk_wasm_bg.wasm) + (mode + (promote + (into ../artifacts))) + (action + (progn + (run + cp + %{env:PREBUILT_KIMCHI_BINDINGS_JS_WEB=n}/plonk_wasm.js + web_plonk_wasm.js) + (run + cp + %{env:PREBUILT_KIMCHI_BINDINGS_JS_WEB=n}/plonk_wasm_bg.wasm + web_plonk_wasm_bg.wasm)))) + +(executable + (name o1js_node) + (modules o1js_node) + (modes js) + (promote + (into ../artifacts)) + (link_flags :standard -noautolink -g) + (js_of_ocaml + (flags :standard +toplevel.js +dynlink.js --source-map) + (link_flags :standard --source-map) + (javascript_files overrides.js)) + (libraries o1js_bindings.lib bindings_js.node_backend) + (link_deps node_js_plonk_wasm.js node_js_plonk_wasm_bg.wasm) + (instrumentation + (backend bisect_ppx)) + (forbidden_libraries async core re2 ctypes) + (preprocess + (pps ppx_version js_of_ocaml-ppx))) + +(executable + (name o1js_web) + (modules o1js_web) + (modes js) + (promote + (into ../artifacts)) + (link_flags :standard -noautolink) + (js_of_ocaml + (flags :standard +toplevel.js +dynlink.js) + (javascript_files overrides.js)) + (libraries o1js_bindings.lib bindings_js.web_backend) + (link_deps web_plonk_wasm.js web_plonk_wasm_bg.wasm) + (instrumentation + (backend bisect_ppx)) + (forbidden_libraries async core re2 ctypes) + (preprocess + (pps ppx_version js_of_ocaml-ppx))) diff --git a/src/lib/o1js_bindings/jsoo_exports/o1js_node.ml b/src/lib/o1js_bindings/jsoo_exports/o1js_node.ml new file mode 100644 index 000000000000..c52a2cfa06de --- /dev/null +++ 
b/src/lib/o1js_bindings/jsoo_exports/o1js_node.ml @@ -0,0 +1,3 @@ +let () = O1js_bindings_lib.export_global () + +let () = O1js_bindings_lib.export () diff --git a/src/lib/o1js_bindings/jsoo_exports/o1js_web.ml b/src/lib/o1js_bindings/jsoo_exports/o1js_web.ml new file mode 100644 index 000000000000..31639688ecd5 --- /dev/null +++ b/src/lib/o1js_bindings/jsoo_exports/o1js_web.ml @@ -0,0 +1 @@ +let () = O1js_bindings_lib.export_global () diff --git a/src/lib/o1js_bindings/jsoo_exports/overrides.js b/src/lib/o1js_bindings/jsoo_exports/overrides.js new file mode 100644 index 000000000000..95cc07baacff --- /dev/null +++ b/src/lib/o1js_bindings/jsoo_exports/overrides.js @@ -0,0 +1,75 @@ +/* global caml_named_value, caml_global_data, caml_string_of_jsstring + */ + +//Provides: caml_wrap_exception const (const) +//Requires: caml_global_data,caml_string_of_jsstring,caml_named_value +//Requires: caml_return_exn_constant +function caml_wrap_exception(e) { + if (e instanceof Array) return e; + if (e instanceof globalThis.Error && caml_named_value('jsError')) + return [0, caml_named_value('jsError'), e]; + //fallback: wrapped in Failure + return [0, caml_global_data.Failure, caml_string_of_jsstring(String(e))]; +} + +//Provides: caml_raise_with_string (const, const) +function caml_raise_with_string(tag, msg) { + throw globalThis.Error(msg.c); +} + +//Provides: custom_reraise_exn +function custom_reraise_exn(exn, fallbackMessage) { + // this handles the common case of a JS Error reraised by OCaml + // in that case, the error will first be wrapped in OCaml with "caml_wrap_exception" + // (defined in js_of_ocaml-compiler / jslib.js) + // which results in [0, caml_named_value("jsError"), err] + var err = exn[2]; + if (err instanceof globalThis.Error) { + throw err; + } else { + throw Error(fallbackMessage); + } +} + +/** + * This overrides the handler for uncaught exceptions in js_of_ocaml, + * fixing the flaw that by default, no actual `Error`s are thrown, + * but other objects (arrays) which are missing an error trace. + * This override should make it much easier to find the source of an error. 
+ */ +//Provides: caml_fatal_uncaught_exception +function caml_fatal_uncaught_exception(err) { + // first, we search for an actual error inside `err`, + // since this is the best thing to throw + function throw_errors(err) { + if (err instanceof Error) throw err; + else if (Array.isArray(err)) { + err.forEach(throw_errors); + } + } + throw_errors(err); + // if this didn't throw an error, let's log whatever we got + console.dir(err, { depth: 20 }); + // now, try to collect all strings in the error and throw that + function collect_strings(err, acc) { + var str = undefined; + if (typeof err === 'string') { + str = err; + } else if (err && err.constructor && err.constructor.name === 'MlBytes') { + str = err.c; + } else if (Array.isArray(err)) { + err.forEach(function (e) { + collect_strings(e, acc); + }); + } + if (!str) return acc.string; + if (acc.string === undefined) acc.string = str; + else acc.string = acc.string + '\n' + str; + return acc.string; + } + var str = collect_strings(err, {}); + if (str !== undefined) throw globalThis.Error(str); + // otherwise, just throw an unhelpful error + console.dir(err, { depth: 10 }); + throw globalThis.Error('Unknown error thrown from OCaml'); +} diff --git a/src/lib/o1js_bindings/lib/consistency_test.ml b/src/lib/o1js_bindings/lib/consistency_test.ml new file mode 100644 index 000000000000..fafa14b0974b --- /dev/null +++ b/src/lib/o1js_bindings/lib/consistency_test.ml @@ -0,0 +1,444 @@ +open Core_kernel +module Js = Js_of_ocaml.Js +module Impl = Pickles.Impls.Step +module Other_impl = Pickles.Impls.Wrap +module Field = Impl.Field +module Account_update = Mina_base.Account_update +module Zkapp_command = Mina_base.Zkapp_command +(*module Signed_command = Mina_base.Signed_command*) + +(* Test - functions that have a ts implementation, exposed for ts-ml consistency tests *) + +module Encoding = struct + (* arbitrary base58_check encoding *) + let binary_string_to_base58_check bin_string (version_byte : int) : + Js.js_string Js.t = + let module T = struct + let version_byte = Char.of_int_exn version_byte + + let description = "any" + end in + let module B58 = Base58_check.Make (T) in + bin_string |> B58.encode |> Js.string + + let binary_string_of_base58_check (base58 : Js.js_string Js.t) + (version_byte : int) = + let module T = struct + let version_byte = Char.of_int_exn version_byte + + let description = "any" + end in + let module B58 = Base58_check.Make (T) in + base58 |> Js.to_string |> B58.decode_exn + + (* base58 encoding of some transaction types *) + let public_key_to_base58 (pk : Signature_lib.Public_key.Compressed.t) : + Js.js_string Js.t = + pk |> Signature_lib.Public_key.Compressed.to_base58_check |> Js.string + + let public_key_of_base58 (pk_base58 : Js.js_string Js.t) : + Signature_lib.Public_key.Compressed.t = + pk_base58 |> Js.to_string + |> Signature_lib.Public_key.Compressed.of_base58_check_exn + + let private_key_to_base58 (sk : Other_impl.field) : Js.js_string Js.t = + sk |> Signature_lib.Private_key.to_base58_check |> Js.string + + let private_key_of_base58 (sk_base58 : Js.js_string Js.t) : Other_impl.field = + sk_base58 |> Js.to_string |> Signature_lib.Private_key.of_base58_check_exn + + let token_id_to_base58 (field : Impl.field) : Js.js_string Js.t = + field |> Mina_base.Account_id.Digest.of_field + |> Mina_base.Account_id.Digest.to_string |> Js.string + + let token_id_of_base58 (field : Js.js_string Js.t) : Impl.field = + Mina_base.Account_id.Digest.to_field_unsafe + @@ Mina_base.Account_id.Digest.of_string @@ Js.to_string 
field + + let memo_to_base58 (memo : Js.js_string Js.t) : Js.js_string Js.t = + Js.string @@ Mina_base.Signed_command_memo.to_base58_check + @@ Mina_base.Signed_command_memo.create_from_string_exn @@ Js.to_string memo + + let memo_hash_base58 (memo_base58 : Js.js_string Js.t) : Impl.field = + memo_base58 |> Js.to_string + |> Mina_base.Signed_command_memo.of_base58_check_exn + |> Mina_base.Signed_command_memo.hash +end + +module Token_id = struct + let derive pk token = + let account_id = + Mina_base.Account_id.create pk (Mina_base.Token_id.of_field token) + in + Mina_base.Account_id.derive_token_id ~owner:account_id + |> Mina_base.Token_id.to_field_unsafe + + let derive_checked pk token = + let account_id = + Mina_base.Account_id.Checked.create pk + (Mina_base.Token_id.Checked.of_field token) + in + Mina_base.Account_id.Checked.derive_token_id ~owner:account_id + |> Mina_base.Account_id.Digest.Checked.to_field_unsafe +end + +(* deriver *) +let account_update_of_json, _account_update_to_json = + let deriver = + lazy + ( Account_update.Graphql_repr.deriver + @@ Fields_derivers_zkapps.Derivers.o () ) + in + let account_update_of_json (account_update : Js.js_string Js.t) : + Account_update.Stable.Latest.t = + Fields_derivers_zkapps.of_json (Lazy.force deriver) + (account_update |> Js.to_string |> Yojson.Safe.from_string) + |> Account_update.of_graphql_repr + in + let account_update_to_json (account_update : Account_update.Stable.Latest.t) : + Js.js_string Js.t = + Fields_derivers_zkapps.to_json (Lazy.force deriver) + (Account_update.to_graphql_repr account_update ~call_depth:0) + |> Yojson.Safe.to_string |> Js.string + in + (account_update_of_json, account_update_to_json) + +let body_of_json = + let body_deriver = + lazy + ( Mina_base.Account_update.Body.Graphql_repr.deriver + @@ Fields_derivers_zkapps.o () ) + in + let body_of_json json = + json + |> Fields_derivers_zkapps.of_json (Lazy.force body_deriver) + |> Account_update.Body.of_graphql_repr + in + body_of_json + +let get_network_id_of_js_string (network : Js.js_string Js.t) = + match Js.to_string network with + | "mainnet" -> + Mina_signature_kind.Mainnet + | "testnet" | "devnet" -> + Mina_signature_kind.Testnet + | other -> + Mina_signature_kind.(Other_network other) + +module Poseidon = struct + let hash_to_group (xs : Impl.field array) = + let input = Random_oracle.hash xs in + Snark_params.Group_map.to_group input +end + +module Signature = struct + let sign_field_element (x : Impl.field) (key : Other_impl.field) + (network_id : Js.js_string Js.t) = + Signature_lib.Schnorr.Chunked.sign + ~signature_kind:(get_network_id_of_js_string network_id) + key + (Random_oracle.Input.Chunked.field x) + |> Mina_base.Signature.to_base58_check |> Js.string + + let dummy_signature () = + Mina_base.Signature.(dummy |> to_base58_check) |> Js.string +end + +module To_fields = struct + (* helper function to check whether the fields we produce from JS are correct *) + let fields_of_json (typ : ('var, 'value) Impl.Internal_Basic.Typ.typ) of_json + (json : Js.js_string Js.t) : Impl.field array = + let json = json |> Js.to_string |> Yojson.Safe.from_string in + let value = of_json json in + let (Typ typ) = typ in + let fields, _ = typ.value_to_fields value in + fields + + let account_update = + fields_of_json (Mina_base.Account_update.Body.typ ()) body_of_json +end + +let proof_cache_db = Proof_cache_tag.For_tests.create_db () + +module Hash_from_json = struct + let account_update (p : Js.js_string Js.t) (network_id : Js.js_string Js.t) = + p |> 
account_update_of_json + |> Account_update.digest + ~signature_kind:(get_network_id_of_js_string network_id) + + let transaction_commitments (tx_json : Js.js_string Js.t) + (network_id : Js.js_string Js.t) = + let signature_kind = get_network_id_of_js_string network_id in + let tx = + Zkapp_command.write_all_proofs_to_disk ~signature_kind ~proof_cache_db + @@ Zkapp_command.of_json @@ Yojson.Safe.from_string + @@ Js.to_string tx_json + in + let get_account_updates_hash xs = + let hash_account_update (p : Account_update.t) = + Zkapp_command.Digest.Account_update.create ~signature_kind p + in + Zkapp_command.Call_forest.accumulate_hashes ~hash_account_update xs + in + let commitment = + let account_updates_hash = + Zkapp_command.Call_forest.hash + (get_account_updates_hash tx.account_updates) + in + Zkapp_command.Transaction_commitment.create ~account_updates_hash + in + let fee_payer = Account_update.of_fee_payer tx.fee_payer in + let fee_payer_hash = + Zkapp_command.Digest.Account_update.create ~signature_kind fee_payer + in + let full_commitment = + Zkapp_command.Transaction_commitment.create_complete commitment + ~memo_hash:(Mina_base.Signed_command_memo.hash tx.memo) + ~fee_payer_hash + in + object%js + val commitment = commitment + + val fullCommitment = full_commitment + + val feePayerHash = (fee_payer_hash :> Impl.field) + end + + let zkapp_public_input (tx_json : Js.js_string Js.t) + (account_update_index : int) = + let signature_kind = Mina_signature_kind_type.Testnet in + let tx = + Zkapp_command.write_all_proofs_to_disk ~signature_kind ~proof_cache_db + @@ Zkapp_command.of_json @@ Yojson.Safe.from_string + @@ Js.to_string tx_json + in + let account_update = List.nth_exn tx.account_updates account_update_index in + object%js + val accountUpdate = + (account_update.elt.account_update_digest :> Impl.field) + + val calls = + (Zkapp_command.Call_forest.hash account_update.elt.calls :> Impl.field) + end +end + +module Hash_input = struct + type random_oracle_input = Impl.field Random_oracle_input.Chunked.t + + let pack_input (input : random_oracle_input) : Impl.field array = + Random_oracle.pack_input input + + (* hash inputs for various account_update subtypes *) + let timing_input (json : Js.js_string Js.t) : random_oracle_input = + let deriver = Account_update.Update.Timing_info.deriver in + let json = json |> Js.to_string |> Yojson.Safe.from_string in + let value = Fields_derivers_zkapps.(of_json (deriver @@ o ()) json) in + let input = Account_update.Update.Timing_info.to_input value in + input + + let permissions_input (json : Js.js_string Js.t) : random_oracle_input = + let deriver = Mina_base.Permissions.deriver in + let json = json |> Js.to_string |> Yojson.Safe.from_string in + let value = Fields_derivers_zkapps.(of_json (deriver @@ o ()) json) in + let input = Mina_base.Permissions.to_input value in + input + + let update_input (json : Js.js_string Js.t) : random_oracle_input = + let deriver = Account_update.Update.deriver in + let json = json |> Js.to_string |> Yojson.Safe.from_string in + let value = Fields_derivers_zkapps.(of_json (deriver @@ o ()) json) in + let input = Account_update.Update.to_input value in + input + + let account_precondition_input (json : Js.js_string Js.t) : + random_oracle_input = + let deriver = Mina_base.Zkapp_precondition.Account.deriver in + let json = json |> Js.to_string |> Yojson.Safe.from_string in + let value = Fields_derivers_zkapps.(of_json (deriver @@ o ()) json) in + let input = Mina_base.Zkapp_precondition.Account.to_input value in + 
input + + let network_precondition_input (json : Js.js_string Js.t) : + random_oracle_input = + let deriver = Mina_base.Zkapp_precondition.Protocol_state.deriver in + let json = json |> Js.to_string |> Yojson.Safe.from_string in + let value = Fields_derivers_zkapps.(of_json (deriver @@ o ()) json) in + let input = Mina_base.Zkapp_precondition.Protocol_state.to_input value in + input + + let body_input (json : Js.js_string Js.t) : random_oracle_input = + let json = json |> Js.to_string |> Yojson.Safe.from_string in + let value = body_of_json json in + let input = Account_update.Body.to_input value in + input +end + +module Transaction_hash = struct + module Signed_command = Mina_base.Signed_command + module Signed_command_payload = Mina_base.Signed_command_payload + + let ok_exn result = + let open Ppx_deriving_yojson_runtime.Result in + match result with Ok c -> c | Error e -> failwith ("not ok: " ^ e) + + let keypair () = Signature_lib.Keypair.create () + + let hash_payment (command : Js.js_string Js.t) = + let command : Signed_command.t = + command |> Js.to_string |> Yojson.Safe.from_string + |> Signed_command.of_yojson |> ok_exn + in + Mina_transaction.Transaction_hash.( + command |> hash_signed_command |> to_base58_check |> Js.string) + + let hash_zkapp_command (command : Js.js_string Js.t) = + let command : Zkapp_command.Stable.Latest.t = + command |> Js.to_string |> Yojson.Safe.from_string + |> Zkapp_command.of_json + in + Mina_transaction.Transaction_hash.( + command |> hash_zkapp_command |> to_base58_check |> Js.string) + + let hash_payment_v1 (command : Js.js_string Js.t) = + let command : Signed_command.Stable.V1.t = + command |> Js.to_string |> Yojson.Safe.from_string + |> Signed_command.Stable.V1.of_yojson |> ok_exn + in + let b58 = Signed_command.to_base58_check_v1 command in + Mina_transaction.Transaction_hash.(b58 |> digest_string |> to_base58_check) + |> Js.string + + let serialize_common (command : Js.js_string Js.t) = + let command : Signed_command_payload.Common.t = + command |> Js.to_string |> Yojson.Safe.from_string + |> Signed_command_payload.Common.of_yojson |> ok_exn + in + Binable.to_bigstring + (module Signed_command_payload.Common.Stable.Latest) + command + + let serialize_payment (command : Js.js_string Js.t) = + let command : Signed_command.t = + command |> Js.to_string |> Yojson.Safe.from_string + |> Signed_command.of_yojson |> ok_exn + in + Binable.to_bigstring (module Signed_command.Stable.Latest) command + + let serialize_payment_v1 (command : Js.js_string Js.t) = + let command : Signed_command.Stable.V1.t = + command |> Js.to_string |> Yojson.Safe.from_string + |> Signed_command.Stable.V1.of_yojson |> ok_exn + in + Signed_command.to_base58_check_v1 command |> Js.string + + let example_payment () = + let kp = keypair () in + let payload : Signed_command_payload.t = + { Signed_command_payload.dummy with + common = + { Signed_command_payload.dummy.common with + fee_payer_pk = Signature_lib.Public_key.compress kp.public_key + } + } + in + let signature_kind = Mina_signature_kind.t_DEPRECATED in + let payment = Signed_command.sign ~signature_kind kp payload in + (payment :> Signed_command.t) + |> Signed_command.to_yojson |> Yojson.Safe.to_string |> Js.string +end + +let test = + object%js + val encoding = + let open Encoding in + object%js + val toBase58 = binary_string_to_base58_check + + val ofBase58 = binary_string_of_base58_check + + method publicKeyToBase58 = public_key_to_base58 + + method publicKeyOfBase58 = public_key_of_base58 + + method 
privateKeyToBase58 = private_key_to_base58 + + method privateKeyOfBase58 = private_key_of_base58 + + method tokenIdToBase58 = token_id_to_base58 + + method tokenIdOfBase58 = token_id_of_base58 + + method memoToBase58 = memo_to_base58 + + method memoHashBase58 = memo_hash_base58 + end + + val tokenId = + object%js + method derive = Token_id.derive + + method deriveChecked = Token_id.derive_checked + end + + val poseidon = + object%js + val hashToGroup = Poseidon.hash_to_group + end + + val signature = + object%js + method signFieldElement = Signature.sign_field_element + + val dummySignature = Signature.dummy_signature + end + + val fieldsFromJson = + object%js + method accountUpdate = To_fields.account_update + end + + val hashFromJson = + object%js + method accountUpdate = Hash_from_json.account_update + + method transactionCommitments = Hash_from_json.transaction_commitments + + method zkappPublicInput = Hash_from_json.zkapp_public_input + end + + val hashInputFromJson = + let open Hash_input in + object%js + val packInput = pack_input + + val timing = timing_input + + val permissions = permissions_input + + val accountPrecondition = account_precondition_input + + val networkPrecondition = network_precondition_input + + val update = update_input + + val body = body_input + end + + val transactionHash = + let open Transaction_hash in + object%js + method hashPayment = hash_payment + + method hashPaymentV1 = hash_payment_v1 + + method serializeCommon = serialize_common + + method serializePayment = serialize_payment + + method serializePaymentV1 = serialize_payment_v1 + + method hashZkAppCommand = hash_zkapp_command + + val examplePayment = example_payment + end + end diff --git a/src/lib/o1js_bindings/lib/dune b/src/lib/o1js_bindings/lib/dune new file mode 100644 index 000000000000..291afaf23f00 --- /dev/null +++ b/src/lib/o1js_bindings/lib/dune @@ -0,0 +1,72 @@ +(library + (public_name o1js_bindings.lib) + (name o1js_bindings_lib) + (libraries + ;; opam libraries ;; + core_kernel + base + base.caml + integers + sexplib0 + yojson + ppx_deriving_yojson.runtime + ;; local libraries ;; + mina_wire_types + mina_base + mina_base.import + snarky.backendless + h_list + pickles + pickles.backend + pickles_base + pickles.limb_vector + pickles_types + key_cache + kimchi_backend + kimchi_pasta + kimchi_pasta.basic + kimchi_pasta.constraint_system + kimchi_backend_common + kimchi_bindings + kimchi_types + pasta_bindings + base58_check + block_time + currency + data_hash_lib + hash_prefixes + fields_derivers + fields_derivers.zkapps + genesis_constants + mina_numbers + mina_signature_kind + mina_transaction + mina_transaction_logic + random_oracle + random_oracle_input + sgn + signature_lib + snark_keys_header + snark_params + sponge + tuple_lib + unsigned_extended + with_hash + ;; js-specific libraries ;; + js_of_ocaml + bindings_js + integers_stubs_js + zarith_stubs_js + ;; js-specific overrides ;; + cache_dir.fake + digestif.ocaml + hash_prefix_create.js + logger.fake + mina_metrics.none + promise.js + promise.js_helpers + run_in_thread.fake) + (instrumentation + (backend bisect_ppx)) + (preprocess + (pps ppx_custom_printf ppx_version js_of_ocaml-ppx))) diff --git a/src/lib/o1js_bindings/lib/local_ledger.ml b/src/lib/o1js_bindings/lib/local_ledger.ml new file mode 100644 index 000000000000..0853534511a5 --- /dev/null +++ b/src/lib/o1js_bindings/lib/local_ledger.ml @@ -0,0 +1,288 @@ +open Core_kernel +module Js = Js_of_ocaml.Js +module Impl = Pickles.Impls.Step +module Field = Impl.Field + +(* 
Ledger - local mina transaction logic for prototyping and testing zkapps *) + +type public_key = Signature_lib.Public_key.Compressed.t + +module Account_update = Mina_base.Account_update +module Zkapp_command = Mina_base.Zkapp_command + +let ledger_class : < .. > Js.t = + Js.Unsafe.eval_string {js|(function(v) { this.value = v; return this })|js} + +module Ledger : Mina_base.Ledger_intf.S = struct + module Account = Mina_base.Account + module Account_id = Mina_base.Account_id + module Ledger_hash = Mina_base.Ledger_hash + module Token_id = Mina_base.Token_id + + type t_ = + { next_location : int + ; accounts : Account.t Int.Map.t + ; locations : int Account_id.Map.t + } + + type t = t_ ref + + type location = int + + let get (t : t) (loc : location) : Account.t option = Map.find !t.accounts loc + + let location_of_account (t : t) (a : Account_id.t) : location option = + Map.find !t.locations a + + let set (t : t) (loc : location) (a : Account.t) : unit = + t := { !t with accounts = Map.set !t.accounts ~key:loc ~data:a } + + let next_location (t : t) : int = + let loc = !t.next_location in + t := { !t with next_location = loc + 1 } ; + loc + + let get_or_create (t : t) (id : Account_id.t) : + (Mina_base.Ledger_intf.account_state * Account.t * location) Or_error.t = + let loc = location_of_account t id in + let res = + match loc with + | None -> + let loc = next_location t in + let a = Account.create id Currency.Balance.zero in + t := { !t with locations = Map.set !t.locations ~key:id ~data:loc } ; + set t loc a ; + (`Added, a, loc) + | Some loc -> + (`Existed, Option.value_exn (get t loc), loc) + in + Ok res + + let[@warning "-32"] get_or_create_account (t : t) (id : Account_id.t) + (a : Account.t) : + (Mina_base.Ledger_intf.account_state * location) Or_error.t = + match location_of_account t id with + | Some loc -> + let a' = Option.value_exn (get t loc) in + if Account.equal a a' then Ok (`Existed, loc) + else + Or_error.errorf + !"account %{sexp: Account_id.t} already present with different \ + contents" + id + | None -> + let loc = next_location t in + t := { !t with locations = Map.set !t.locations ~key:id ~data:loc } ; + set t loc a ; + Ok (`Added, loc) + + let create_new_account (t : t) (id : Account_id.t) (a : Account.t) : + unit Or_error.t = + match location_of_account t id with + | Some _ -> + Or_error.errorf !"account %{sexp: Account_id.t} already present" id + | None -> + let loc = next_location t in + t := { !t with locations = Map.set !t.locations ~key:id ~data:loc } ; + set t loc a ; + Ok () + + let[@warning "-32"] remove_accounts_exn (t : t) (ids : Account_id.t list) : + unit = + let locs = List.filter_map ids ~f:(fun id -> Map.find !t.locations id) in + t := + { !t with + locations = List.fold ids ~init:!t.locations ~f:Map.remove + ; accounts = List.fold locs ~init:!t.accounts ~f:Map.remove + } + + (* TODO *) + let merkle_root (_ : t) : Ledger_hash.t = Field.Constant.zero + + let empty ~depth:_ () : t = + ref + { next_location = 0 + ; accounts = Int.Map.empty + ; locations = Account_id.Map.empty + } + + let with_ledger (type a) ~depth ~(f : t -> a) : a = f (empty ~depth ()) + + let create_masked (t : t) : t = ref !t + + let apply_mask (t : t) ~(masked : t) = t := !masked +end + +module Transaction_logic = Mina_transaction_logic.Make (Ledger) + +type ledger_class = < value : Ledger.t Js.prop > + +let ledger_constr : (Ledger.t -> ledger_class Js.t) Js.constr = + Obj.magic ledger_class + +let create_new_account_exn (t : Ledger.t) account_id account = + 
Ledger.create_new_account t account_id account |> Or_error.ok_exn + +let default_token_id = + Mina_base.Token_id.default |> Mina_base.Token_id.to_field_unsafe + +let account_id (pk : public_key) token = + Mina_base.Account_id.create pk (Mina_base.Token_id.of_field token) + +module To_js = struct + let option (transform : 'a -> 'b) (x : 'a option) = + Js.Optdef.option (Option.map x ~f:transform) +end + +let check_account_update_signatures zkapp_command = + let ({ fee_payer; account_updates; memo } : Zkapp_command.t) = + zkapp_command + in + let tx_commitment = Zkapp_command.commitment zkapp_command in + let signature_kind = Mina_signature_kind.t_DEPRECATED in + let full_tx_commitment = + Zkapp_command.Transaction_commitment.create_complete tx_commitment + ~memo_hash:(Mina_base.Signed_command_memo.hash memo) + ~fee_payer_hash: + (Zkapp_command.Digest.Account_update.create ~signature_kind + (Account_update.of_fee_payer fee_payer) ) + in + let key_to_string = Signature_lib.Public_key.Compressed.to_base58_check in + let check_signature who s pk msg = + match Signature_lib.Public_key.decompress pk with + | None -> + failwith + (sprintf "Check signature: Invalid key on %s: %s" who + (key_to_string pk) ) + | Some pk_ -> + if + not + (Signature_lib.Schnorr.Chunked.verify + ~signature_kind:Mina_signature_kind.t_DEPRECATED s + (Kimchi_pasta.Pasta.Pallas.of_affine pk_) + (Random_oracle_input.Chunked.field msg) ) + then + failwith + (sprintf "Check signature: Invalid signature on %s for key %s" who + (key_to_string pk) ) + else () + in + + check_signature "fee payer" fee_payer.authorization fee_payer.body.public_key + full_tx_commitment ; + List.iteri (Zkapp_command.Call_forest.to_account_updates account_updates) + ~f:(fun i p -> + let commitment = + if p.body.use_full_commitment then full_tx_commitment else tx_commitment + in + match p.authorization with + | Signature s -> + check_signature + (sprintf "account_update %d" i) + s p.body.public_key commitment + | Proof _ | None_given -> + () ) + +let add_account_exn (l : Ledger.t) pk (balance : string) = + let account_id = account_id pk default_token_id in + let bal_u64 = Unsigned.UInt64.of_string balance in + let balance = Currency.Balance.of_uint64 bal_u64 in + let a : Mina_base.Account.t = Mina_base.Account.create account_id balance in + create_new_account_exn l account_id a + +let create () : ledger_class Js.t = + let l = Ledger.empty ~depth:20 () in + new%js ledger_constr l + +let account_to_json = + let deriver = + lazy (Mina_base.Account.deriver @@ Fields_derivers_zkapps.o ()) + in + let to_json (account : Mina_base.Account.t) : Js.Unsafe.any = + Mina_base.Account.to_poly account + |> Fields_derivers_zkapps.to_json (Lazy.force deriver) + |> Yojson.Safe.to_string |> Js.string |> Util.json_parse + in + to_json + +let get_account l (pk : public_key) (token : Impl.field) : + Js.Unsafe.any Js.optdef = + let loc = Ledger.location_of_account l##.value (account_id pk token) in + let account = Option.bind loc ~f:(Ledger.get l##.value) in + To_js.option account_to_json account + +let add_account l (pk : public_key) (balance : Js.js_string Js.t) = + add_account_exn l##.value pk (Js.to_string balance) + +let protocol_state_of_json = + let deriver = + lazy + ( Mina_base.Zkapp_precondition.Protocol_state.View.deriver + @@ Fields_derivers_zkapps.o () ) + in + fun (json : Js.js_string Js.t) : + Mina_base.Zkapp_precondition.Protocol_state.View.t -> + json |> Js.to_string |> Yojson.Safe.from_string + |> Fields_derivers_zkapps.of_json (Lazy.force deriver) + +let 
proof_cache_db = Proof_cache_tag.create_identity_db () + +let apply_zkapp_command_transaction l (txn : Zkapp_command.Stable.Latest.t) + (account_creation_fee : string) + (network_state : Mina_base.Zkapp_precondition.Protocol_state.View.t) = + let signature_kind = Mina_signature_kind_type.Testnet in + let txn = + Zkapp_command.write_all_proofs_to_disk ~signature_kind ~proof_cache_db txn + in + check_account_update_signatures txn ; + let ledger = l##.value in + let application_result = + Transaction_logic.apply_zkapp_command_unchecked ~signature_kind + ~global_slot:network_state.global_slot_since_genesis + ~state_view:network_state + ~constraint_constants: + { Genesis_constants.Compiled.constraint_constants with + account_creation_fee = Currency.Fee.of_string account_creation_fee + } + ledger txn + in + let applied, _ = + match application_result with + | Ok res -> + res + | Error err -> + Util.raise_error (Error.to_string_hum err) + in + match applied.command.status with + | Applied -> + () + | Failed failures -> + Util.raise_error + ( Mina_base.Transaction_status.Failure.Collection.to_yojson failures + |> Yojson.Safe.to_string ) + +let apply_json_transaction l (tx_json : Js.js_string Js.t) + (account_creation_fee : Js.js_string Js.t) (network_json : Js.js_string Js.t) + = + let txn = + Zkapp_command.of_json @@ Yojson.Safe.from_string @@ Js.to_string tx_json + in + let network_state = protocol_state_of_json network_json in + apply_zkapp_command_transaction l txn + (Js.to_string account_creation_fee) + network_state + +let method_ class_ (name : string) (f : _ Js.t -> _) = + let prototype = Js.Unsafe.get class_ (Js.string "prototype") in + Js.Unsafe.set prototype (Js.string name) (Js.wrap_meth_callback f) + +let () = + let static_method name f = + Js.Unsafe.set ledger_class (Js.string name) (Js.wrap_callback f) + in + let method_ name (f : ledger_class Js.t -> _) = method_ ledger_class name f in + static_method "create" create ; + + method_ "getAccount" get_account ; + method_ "addAccount" add_account ; + method_ "applyJsonTransaction" apply_json_transaction diff --git a/src/lib/o1js_bindings/lib/o1js_bindings_lib.ml b/src/lib/o1js_bindings/lib/o1js_bindings_lib.ml new file mode 100644 index 000000000000..f01e71c5f3fa --- /dev/null +++ b/src/lib/o1js_bindings/lib/o1js_bindings_lib.ml @@ -0,0 +1,20 @@ +module Js = Js_of_ocaml.Js + +let export () = + Js.export "Snarky" Snarky_bindings.snarky ; + Js.export "Ledger" Local_ledger.ledger_class ; + Js.export "Pickles" Pickles_bindings.pickles ; + Js.export "Test" Consistency_test.test + +let export_global () = + let snarky_obj = + Js.Unsafe.( + let i = inject in + obj + [| ("Snarky", i Snarky_bindings.snarky) + ; ("Ledger", i Local_ledger.ledger_class) + ; ("Pickles", i Pickles_bindings.pickles) + ; ("Test", i Consistency_test.test) + |]) + in + Js.Unsafe.(set global (Js.string "__snarky") snarky_obj) diff --git a/src/lib/o1js_bindings/lib/o1js_bindings_lib.mli b/src/lib/o1js_bindings/lib/o1js_bindings_lib.mli new file mode 100644 index 000000000000..b8fa6e896e59 --- /dev/null +++ b/src/lib/o1js_bindings/lib/o1js_bindings_lib.mli @@ -0,0 +1,3 @@ +val export : unit -> unit + +val export_global : unit -> unit diff --git a/src/lib/o1js_bindings/lib/pickles_bindings.ml b/src/lib/o1js_bindings/lib/pickles_bindings.ml new file mode 100644 index 000000000000..c3eaadf72794 --- /dev/null +++ b/src/lib/o1js_bindings/lib/pickles_bindings.ml @@ -0,0 +1,916 @@ +open Core_kernel +module Js = Js_of_ocaml.Js +module Impl = Pickles.Impls.Step +module Field = 
Impl.Field +module Boolean = Impl.Boolean +module Typ = Impl.Typ +module Backend = Pickles.Backend + +module Public_input = struct + type t = Field.t array + + module Constant = struct + type t = Field.Constant.t array + end +end + +type 'a statement = 'a array * 'a array + +module Statement = struct + type t = Field.t statement + + module Constant = struct + type t = Field.Constant.t statement + end +end + +let public_input_typ (i : int) = Typ.array ~length:i Field.typ + +let statement_typ (input_size : int) (output_size : int) = + Typ.(array ~length:input_size Field.typ * array ~length:output_size Field.typ) + +type 'proof js_prover = + Public_input.Constant.t + -> (Public_input.Constant.t * 'proof) Promise_js_helpers.js_promise + +let dummy_constraints = + let module Inner_curve = Kimchi_pasta.Pasta.Pallas in + let module Step_main_inputs = Pickles.Step_main_inputs in + let inner_curve_typ : (Field.t * Field.t, Inner_curve.t) Typ.t = + Typ.transport Step_main_inputs.Inner_curve.typ + ~there:Inner_curve.to_affine_exn ~back:Inner_curve.of_affine + in + fun () -> + let x = + Impl.exists Field.typ ~compute:(fun () -> Field.Constant.of_int 3) + in + let g = Impl.exists inner_curve_typ ~compute:(fun _ -> Inner_curve.one) in + ignore + ( Pickles.Scalar_challenge.to_field_checked' + (module Impl) + ~num_bits:16 + (Kimchi_backend_common.Scalar_challenge.create x) + : Field.t * Field.t * Field.t ) ; + ignore + ( Step_main_inputs.Ops.scale_fast g ~num_bits:5 (Shifted_value x) + : Step_main_inputs.Inner_curve.t ) ; + ignore + ( Pickles.Step_verifier.Scalar_challenge.endo g ~num_bits:4 + (Kimchi_backend_common.Scalar_challenge.create x) + : Field.t * Field.t ) + +(* what we use in places where we don't care about the generic type parameter *) +type proof = Pickles_types.Nat.N0.n Pickles.Proof.t + +let unsafe_coerce_proof (proof : proof) : 'm Pickles.Proof.t = Obj.magic proof + +type pickles_rule_js_return = + < publicOutput : Public_input.t Js.prop + ; previousStatements : Statement.t array Js.prop + ; previousProofs : proof array Js.prop + ; shouldVerify : Boolean.var array Js.prop > + Js.t + +type pickles_rule_js = + < identifier : Js.js_string Js.t Js.prop + ; main : + (Public_input.t -> pickles_rule_js_return Promise_js_helpers.js_promise) + Js.prop + ; featureFlags : bool option Pickles_types.Plonk_types.Features.t Js.prop + ; proofsToVerify : + < isSelf : bool Js.t Js.prop ; tag : Js.Unsafe.any Js.t Js.prop > Js.t + array + Js.prop > + Js.t + +let map_feature_flags_option + (feature_flags_ : bool option Pickles_types.Plonk_types.Features.t) = + Pickles_types.Plonk_types.Features.map feature_flags_ ~f:(function + | Some true -> + Pickles_types.Opt.Flag.Yes + | Some false -> + Pickles_types.Opt.Flag.No + | None -> + Pickles_types.Opt.Flag.Maybe ) + +module Choices = struct + open Pickles_types + open Hlist + + module Tag = struct + type ('var, 'value, 'width) t = + | Tag : + ('var, 'value, 'width, 'height) Pickles.Tag.t + -> ('var, 'value, 'width) t + end + + module Prevs = struct + type ('var, 'value, 'width, 'height) t = + | Prevs : + ( self:('var, 'value, 'width) Tag.t + -> ('prev_var, 'prev_values, 'widths, 'heights) H4.T(Pickles.Tag).t + ) + -> ('var, 'value, 'width, 'height) t + + let of_rule (rule : pickles_rule_js) = + let js_prevs = rule##.proofsToVerify in + let rec get_tags (Prevs prevs) index = + if index < 0 then Prevs prevs + else + let js_tag = Array.get js_prevs index in + (* We introduce new opaque types to make sure that the type in the tag + doesn't escape into the environment 
or have other ill effects. + *) + let module Types = struct + type var + + type value + + type width + + type height + end in + let open Types in + let to_tag ~self tag : (var, value, width, height) Pickles.Tag.t = + let (Tag.Tag self) = self in + (* The magic here isn't ideal, but it's safe enough if we immediately + hide it behind [Types]. + *) + if Js.to_bool tag##.isSelf then Obj.magic self + else Obj.magic tag##.tag + in + let tag = to_tag js_tag in + let prevs ~self : _ H4.T(Pickles.Tag).t = tag ~self :: prevs ~self in + get_tags (Prevs prevs) (index - 1) + in + get_tags (Prevs (fun ~self:_ -> [])) (Array.length js_prevs - 1) + end + + module Inductive_rule = struct + type ( 'var + , 'value + , 'width + , 'height + , 'arg_var + , 'arg_value + , 'ret_var + , 'ret_value + , 'auxiliary_var + , 'auxiliary_value ) + t = + | Rule : + ( self:('var, 'value, 'width) Tag.t + -> ( 'prev_vars + , 'prev_values + , 'widths + , 'heights + , 'arg_var + , 'arg_value + , 'ret_var + , 'ret_value + , 'auxiliary_var + , 'auxiliary_value ) + Pickles.Inductive_rule.Promise.t ) + -> ( 'var + , 'value + , 'width + , 'height + , 'arg_var + , 'arg_value + , 'ret_var + , 'ret_value + , 'auxiliary_var + , 'auxiliary_value ) + t + + let rec should_verifys : + type prev_vars prev_values widths heights. + int + -> (prev_vars, prev_values, widths, heights) H4.T(Pickles.Tag).t + -> Boolean.var array + -> prev_vars H1.T(E01(Pickles.Inductive_rule.B)).t = + fun index tags should_verifys_js -> + match tags with + | [] -> + [] + | _ :: tags -> + let js_bool = Array.get should_verifys_js index in + let should_verifys = + should_verifys (index + 1) tags should_verifys_js + in + js_bool :: should_verifys + + let should_verifys tags should_verifys_js = + should_verifys 0 tags should_verifys_js + + let get_typ ~public_input_size ~public_output_size + (type a1 a2 a3 a4 width height) (tag : (a1, a2, a3, a4) Pickles.Tag.t) + (self : + ( Public_input.t * Public_input.t + , Public_input.Constant.t * Public_input.Constant.t + , width + , height ) + Pickles.Tag.t ) = + match Type_equal.Id.same_witness tag.id self.id with + | None -> + Pickles.Types_map.public_input tag + | Some T -> + statement_typ public_input_size public_output_size + + let rec prev_statements : + type prev_vars prev_values widths heights width height. 
+ public_input_size:int + -> public_output_size:int + -> self: + ( Public_input.t * Public_input.t + , Public_input.Constant.t * Public_input.Constant.t + , width + , height ) + Pickles.Tag.t + -> int + -> (prev_vars, prev_values, widths, heights) H4.T(Pickles.Tag).t + -> Statement.t array + -> prev_vars H1.T(Id).t = + fun ~public_input_size ~public_output_size ~self i tags statements -> + match tags with + | [] -> + [] + | tag :: tags -> + let (Typ typ) = + get_typ ~public_input_size ~public_output_size tag self + in + let input, output = Array.get statements i in + let fields = Array.concat [ input; output ] in + let aux = typ.constraint_system_auxiliary () in + let statement = typ.var_of_fields (fields, aux) in + statement + :: prev_statements ~public_input_size ~public_output_size ~self + (i + 1) tags statements + + let prev_statements ~public_input_size ~public_output_size ~self tags + statements = + prev_statements ~public_input_size ~public_output_size ~self 0 tags + statements + + let create ~public_input_size ~public_output_size (rule : pickles_rule_js) : + ( _ + , _ + , _ + , _ + , Public_input.t + , Public_input.Constant.t + , Public_input.t + , Public_input.Constant.t + , unit + , unit ) + t = + let (Prevs prevs) = Prevs.of_rule rule in + + (* this is called after `picklesRuleFromFunction()` and finishes the circuit *) + let finish_circuit prevs (Tag.Tag self) + (js_result : pickles_rule_js_return) : + _ Pickles.Inductive_rule.main_return = + (* convert js rule output to pickles rule output *) + let public_output = js_result##.publicOutput in + let previous_proofs_should_verify = + should_verifys prevs js_result##.shouldVerify + in + let previous_public_inputs = + prev_statements ~public_input_size ~public_output_size ~self prevs + js_result##.previousStatements + in + let previous_proof_statements = + let rec go : + type prev_vars prev_values widths heights. 
+ int + -> prev_vars H1.T(Id).t + -> prev_vars H1.T(E01(Pickles.Inductive_rule.B)).t + -> (prev_vars, prev_values, widths, heights) H4.T(Pickles.Tag).t + -> ( prev_vars + , widths ) + H2.T(Pickles.Inductive_rule.Previous_proof_statement).t = + fun i public_inputs should_verifys tags -> + match (public_inputs, should_verifys, tags) with + | [], [], [] -> + [] + | ( public_input :: public_inputs + , proof_must_verify :: should_verifys + , _tag :: tags ) -> + let proof = + Impl.exists (Impl.Typ.prover_value ()) ~compute:(fun () -> + Array.get js_result##.previousProofs i + |> unsafe_coerce_proof ) + in + { public_input; proof; proof_must_verify } + :: go (i + 1) public_inputs should_verifys tags + in + go 0 previous_public_inputs previous_proofs_should_verify prevs + in + { previous_proof_statements; public_output; auxiliary_output = () } + in + + let rule ~(self : (Statement.t, Statement.Constant.t, _) Tag.t) : + _ Pickles.Inductive_rule.Promise.t = + let prevs = prevs ~self in + + let main ({ public_input } : _ Pickles.Inductive_rule.main_input) = + (* add dummy constraints *) + dummy_constraints () ; + (* circuit from js *) + rule##.main public_input + |> Promise_js_helpers.of_js + |> Promise.map ~f:(finish_circuit prevs self) + in + { identifier = Js.to_string rule##.identifier + ; feature_flags = + Pickles_types.Plonk_types.Features.map rule##.featureFlags + ~f:(function + | Some true -> + true + | _ -> + false ) + ; prevs + ; main + } + in + Rule rule + end + + type ( 'var + , 'value + , 'width + , 'height + , 'arg_var + , 'arg_value + , 'ret_var + , 'ret_value + , 'auxiliary_var + , 'auxiliary_value ) + t = + | Choices : + ( self:('var, 'value, 'width) Tag.t + -> ( _ + , 'prev_vars + , 'prev_values + , 'widths + , 'heights + , 'arg_var + , 'arg_value + , 'ret_var + , 'ret_value + , 'auxiliary_var + , 'auxiliary_value ) + H4_6_with_length.T(Pickles.Inductive_rule.Promise).t ) + -> ( 'var + , 'value + , 'width + , 'height + , 'arg_var + , 'arg_value + , 'ret_var + , 'ret_value + , 'auxiliary_var + , 'auxiliary_value ) + t + + (* Convert each rule given in js_rules as JS object into their corresponding + OCaml type counterparty *) + let of_js ~public_input_size ~public_output_size js_rules = + let rec get_rules (Choices rules) index : + ( _ + , _ + , _ + , _ + , Public_input.t + , Public_input.Constant.t + , Public_input.t + , Public_input.Constant.t + , unit + , unit ) + t = + if index < 0 then Choices rules + else + let (Rule rule) = + Inductive_rule.create ~public_input_size ~public_output_size + (Array.get js_rules index) + in + let rules ~self : _ H4_6_with_length.T(Pickles.Inductive_rule.Promise).t + = + rule ~self :: rules ~self + in + get_rules (Choices rules) (index - 1) + in + get_rules (Choices (fun ~self:_ -> [])) (Array.length js_rules - 1) +end + +module Cache = struct + module Sync : Key_cache.Sync = struct + open Key_cache + include T (Or_error) + + module Disk_storable = struct + include Disk_storable (Or_error) + + let of_binable = Trivial.Disk_storable.of_binable + + let simple to_string read write = { to_string; read; write } + end + + let read spec { Disk_storable.to_string; read; write = _ } key = + Or_error.find_map_ok spec ~f:(fun s -> + let res, cache_hit = + match s with + | Spec.On_disk { should_write; _ } -> + let path = to_string key in + ( read ~path key + , if should_write then `Locally_generated else `Cache_hit ) + | S3 _ -> + (Or_error.errorf "Downloading from S3 is disabled", `Cache_hit) + in + Or_error.map res ~f:(fun res -> (res, cache_hit)) ) + + let 
write spec { Disk_storable.to_string; read = _; write } key value = + let errs = + List.filter_map spec ~f:(fun s -> + let res = + match s with + | Spec.On_disk { should_write; _ } -> + if should_write then write key value (to_string key) + else Or_error.return () + | S3 _ -> + Or_error.return () + in + match res with Error e -> Some e | Ok () -> None ) + in + match errs with [] -> Ok () | errs -> Error (Error.of_list errs) + end + + let () = Key_cache.set_sync_implementation (module Sync) + + open Pickles.Cache + + type any_key = + | Step_pk of Step.Key.Proving.t + | Step_vk of Step.Key.Verification.t + | Wrap_pk of Wrap.Key.Proving.t + | Wrap_vk of Wrap.Key.Verification.t + + type any_value = + | Step_pk of Backend.Tick.Keypair.t + | Step_vk of Kimchi_bindings.Protocol.VerifierIndex.Fp.t + | Wrap_pk of Backend.Tock.Keypair.t + | Wrap_vk of Pickles.Verification_key.t + + let step_pk = function Step_pk v -> Ok v | _ -> Or_error.errorf "step_pk" + + let step_vk = function Step_vk v -> Ok v | _ -> Or_error.errorf "step_vk" + + let wrap_pk = function Wrap_pk v -> Ok v | _ -> Or_error.errorf "wrap_pk" + + let wrap_vk = function Wrap_vk v -> Ok v | _ -> Or_error.errorf "wrap_vk" + + type js_storable = + { read : any_key -> Js.js_string Js.t -> (any_value, unit) result + ; write : any_key -> any_value -> Js.js_string Js.t -> (unit, unit) result + ; can_write : bool + } + + let or_error f = function Ok v -> f v | _ -> Or_error.errorf "failed" + + let map_error = function Ok v -> Ok v | _ -> Or_error.errorf "failed" + + let step_storable { read; write; _ } : Step.storable = + let read key ~path = + read (Step_pk key) (Js.string path) |> or_error step_pk + in + let write key value path = + write (Step_pk key) (Step_pk value) (Js.string path) |> map_error + in + Sync.Disk_storable.simple Step.Key.Proving.to_string read write + + let step_vk_storable { read; write; _ } : Step.vk_storable = + let read key ~path = + read (Step_vk key) (Js.string path) |> or_error step_vk + in + let write key value path = + write (Step_vk key) (Step_vk value) (Js.string path) |> map_error + in + Sync.Disk_storable.simple Step.Key.Verification.to_string read write + + let wrap_storable { read; write; _ } : Wrap.storable = + let read key ~path = + read (Wrap_pk key) (Js.string path) |> or_error wrap_pk + in + let write key value path = + write (Wrap_pk key) (Wrap_pk value) (Js.string path) |> map_error + in + Sync.Disk_storable.simple Wrap.Key.Proving.to_string read write + + let wrap_vk_storable { read; write; _ } : Wrap.vk_storable = + let read key ~path = + read (Wrap_vk key) (Js.string path) |> or_error wrap_vk + in + let write key value path = + write (Wrap_vk key) (Wrap_vk value) (Js.string path) |> map_error + in + Sync.Disk_storable.simple Wrap.Key.Verification.to_string read write + (* TODO get this code to understand equivalence of versions of Pickles.Verification_key.t *) + |> Obj.magic + + let storables s : Pickles.Storables.t = + { step_storable = step_storable s + ; step_vk_storable = step_vk_storable s + ; wrap_storable = wrap_storable s + ; wrap_vk_storable = wrap_vk_storable s + } + + let cache_dir { can_write; _ } : Key_cache.Spec.t list = + let d : Key_cache.Spec.t = + On_disk { directory = ""; should_write = can_write } + in + [ d ] +end + +module Public_inputs_with_proofs = + Pickles_types.Hlist.H2.T (Pickles.Statement_with_proof) + +let nat_modules_list : (module Pickles_types.Nat.Intf) list = + let open Pickles_types.Nat in + [ (module N0) + ; (module N1) + ; (module N2) + ; (module N3) + ; (module 
N4) + ; (module N5) + ; (module N6) + ; (module N7) + ; (module N8) + ; (module N9) + ; (module N10) + ; (module N11) + ; (module N12) + ; (module N13) + ; (module N14) + ; (module N15) + ; (module N16) + ; (module N17) + ; (module N18) + ; (module N19) + ; (module N20) + ; (module N21) + ; (module N22) + ; (module N23) + ; (module N24) + ; (module N25) + ; (module N26) + ; (module N27) + ; (module N28) + ; (module N29) + ; (module N30) + ] + +let nat_add_modules_list : (module Pickles_types.Nat.Add.Intf) list = + let open Pickles_types.Nat in + [ (module N0) + ; (module N1) + ; (module N2) + ; (module N3) + ; (module N4) + ; (module N5) + ; (module N6) + ; (module N7) + ; (module N8) + ; (module N9) + ; (module N10) + ; (module N11) + ; (module N12) + ; (module N13) + ; (module N14) + ; (module N15) + ; (module N16) + ; (module N17) + ; (module N18) + ; (module N19) + ; (module N20) + ; (module N21) + ; (module N22) + ; (module N23) + ; (module N24) + ; (module N25) + ; (module N26) + ; (module N27) + ; (module N28) + ; (module N29) + ; (module N30) + ] + +let nat_module (i : int) : (module Pickles_types.Nat.Intf) = + List.nth_exn nat_modules_list i + +let nat_add_module (i : int) : (module Pickles_types.Nat.Add.Intf) = + List.nth_exn nat_add_modules_list i + +let name = "smart-contract" + +let pickles_compile (choices : pickles_rule_js array) + (config : + < publicInputSize : int Js.prop + ; publicOutputSize : int Js.prop + ; storable : Cache.js_storable Js.optdef_prop + ; overrideWrapDomain : int Js.optdef_prop + ; numChunks : int Js.optdef_prop + ; lazyMode : bool Js.optdef_prop > + Js.t ) = + (* translate number of branches and recursively verified proofs from JS *) + let branches = Array.length choices in + let max_proofs = + let choices = choices |> Array.to_list in + List.map choices ~f:(fun c -> c##.proofsToVerify |> Array.length) + |> List.max_elt ~compare |> Option.value ~default:0 + in + let (module Branches) = nat_module branches in + let (module Max_proofs_verified) = nat_add_module max_proofs in + + (* translate method circuits from JS *) + let public_input_size = config##.publicInputSize in + let public_output_size = config##.publicOutputSize in + let override_wrap_domain = + Js.Optdef.to_option config##.overrideWrapDomain + |> Option.map ~f:Pickles_base.Proofs_verified.of_int_exn + in + let num_chunks = Js.Optdef.get config##.numChunks (fun () -> 1) in + let lazy_mode = Js.Optdef.get config##.lazyMode (fun () -> false) in + let (Choices choices) = + Choices.of_js ~public_input_size ~public_output_size choices + in + let choices ~self = choices ~self:(Choices.Tag.Tag self) in + + (* parse caching configuration *) + let storables = + Js.Optdef.to_option config##.storable |> Option.map ~f:Cache.storables + in + let cache = + Js.Optdef.to_option config##.storable |> Option.map ~f:Cache.cache_dir + in + + (* call into Pickles *) + let tag, _cache, p, provers = + Pickles.compile_promise ?cache ?storables ?override_wrap_domain + ~public_input: + (Input_and_output + ( public_input_typ public_input_size + , public_input_typ public_output_size ) ) + ~auxiliary_typ:Typ.unit + ~max_proofs_verified:(module Max_proofs_verified) + ~name ~num_chunks ~lazy_mode ~choices () + in + + (* translate returned prover and verify functions to JS *) + let module Proof = (val p) in + let to_js_prover prover : Proof.t js_prover = + let prove (public_input : Public_input.Constant.t) = + prover public_input + |> Promise.map ~f:(fun (output, _, proof) -> (output, proof)) + |> Promise_js_helpers.to_js + 
in + prove + in + let rec to_js_provers : + type a b c. + ( a + , b + , c + , Public_input.Constant.t + , (Public_input.Constant.t * unit * Proof.t) Promise.t ) + Pickles.Provers.t + -> Proof.t js_prover list = function + | [] -> + [] + | p :: ps -> + to_js_prover p :: to_js_provers ps + in + let provers : Proof.t js_prover array = + provers |> to_js_provers |> Array.of_list + in + let verify (statement : Statement.Constant.t) (proof : _ Pickles.Proof.t) = + Proof.verify_promise [ (statement, proof) ] + |> Promise.map ~f:(fun x -> Js.bool (Or_error.is_ok x)) + |> Promise_js_helpers.to_js + in + let get_vk () = + let vk = Pickles.Side_loaded.Verification_key.of_compiled_promise tag in + Promise.map vk ~f:(fun vk -> + let data = Pickles.Side_loaded.Verification_key.to_base64 vk in + let hash = Mina_base.Zkapp_account.digest_vk vk in + (data |> Js.string, hash) ) + |> Promise_js_helpers.to_js + in + object%js + val provers = Obj.magic provers + + val verify = Obj.magic verify + + val tag = Obj.magic tag + + val getVerificationKey = get_vk + end + +module Proof0 = Pickles.Proof.Make (Pickles_types.Nat.N0) +module Proof1 = Pickles.Proof.Make (Pickles_types.Nat.N1) +module Proof2 = Pickles.Proof.Make (Pickles_types.Nat.N2) + +type some_proof = Proof0 of Proof0.t | Proof1 of Proof1.t | Proof2 of Proof2.t + +let proof_to_base64 = function + | Proof0 proof -> + Proof0.to_base64 proof |> Js.string + | Proof1 proof -> + Proof1.to_base64 proof |> Js.string + | Proof2 proof -> + Proof2.to_base64 proof |> Js.string + +let proof_of_base64 str i : some_proof = + let str = Js.to_string str in + match i with + | 0 -> + Proof0 (Proof0.of_base64 str |> Result.ok_or_failwith) + | 1 -> + Proof1 (Proof1.of_base64 str |> Result.ok_or_failwith) + | 2 -> + Proof2 (Proof2.of_base64 str |> Result.ok_or_failwith) + | _ -> + failwith "invalid proof index" + +let verify (statement : Statement.Constant.t) (proof : proof) + (vk : Js.js_string Js.t) = + let i, o = statement in + let typ = statement_typ (Array.length i) (Array.length o) in + let proof = Pickles.Side_loaded.Proof.of_proof proof in + let vk = + match Pickles.Side_loaded.Verification_key.of_base64 (Js.to_string vk) with + | Ok vk_ -> + vk_ + | Error err -> + failwithf "Could not decode base64 verification key: %s" + (Error.to_string_hum err) () + in + Pickles.Side_loaded.verify_promise ~typ [ (vk, statement, proof) ] + |> Promise.map ~f:(fun x -> Js.bool (Or_error.is_ok x)) + |> Promise_js_helpers.to_js + +let load_srs_fp () = Backend.Tick.Keypair.load_urs () + +let load_srs_fq () = Backend.Tock.Keypair.load_urs () + +let dummy_proof (max_proofs_verified : int) (domain_log2 : int) : some_proof = + match max_proofs_verified with + | 0 -> + let n = Pickles_types.Nat.N0.n in + Proof0 (Pickles.Proof.dummy n n ~domain_log2) + | 1 -> + let n = Pickles_types.Nat.N1.n in + Proof1 (Pickles.Proof.dummy n n ~domain_log2) + | 2 -> + let n = Pickles_types.Nat.N2.n in + Proof2 (Pickles.Proof.dummy n n ~domain_log2) + | _ -> + failwith "invalid" + +let dummy_verification_key () = + let vk = Pickles.Side_loaded.Verification_key.dummy in + let data = Pickles.Side_loaded.Verification_key.to_base64 vk in + let hash = Mina_base.Zkapp_account.digest_vk vk in + (data |> Js.string, hash) + +let encode_verification_key (vk : Pickles.Verification_key.t) = + Pickles.Verification_key.to_yojson vk |> Yojson.Safe.to_string |> Js.string + +let decode_verification_key (bytes : Js.js_string Js.t) = + let vk_or_error = + Pickles.Verification_key.of_yojson @@ Yojson.Safe.from_string + @@ 
Js.to_string bytes + in + let open Ppx_deriving_yojson_runtime.Result in + match vk_or_error with + | Ok vk -> + vk + | Error err -> + failwithf "Could not decode verification key: %s" err () + +module Util = struct + let to_ml_string s = Js.to_string s + + let from_ml_string s = Js.string s +end + +let side_loaded_create (name : Js.js_string Js.t) (max_proofs_verified : int) + (public_input_length : int) (public_output_length : int) + (feature_flags_js : bool option Pickles_types.Plonk_types.Features.t) = + let name = Js.to_string name in + let feature_flags = map_feature_flags_option feature_flags_js in + let typ = statement_typ public_input_length public_output_length in + match max_proofs_verified with + | 0 -> + Obj.magic + @@ Pickles.Side_loaded.create ~name + ~max_proofs_verified:(module Pickles_types.Nat.N0) + ~feature_flags ~typ + | 1 -> + Obj.magic + @@ Pickles.Side_loaded.create ~name + ~max_proofs_verified:(module Pickles_types.Nat.N1) + ~feature_flags ~typ + | 2 -> + Obj.magic + @@ Pickles.Side_loaded.create ~name + ~max_proofs_verified:(module Pickles_types.Nat.N2) + ~feature_flags ~typ + | _ -> + failwith "side_loaded_create is unhappy; you should pass 0, 1, or 2" + +let vk_to_circuit vk = + let vk () = + match + Pickles.Side_loaded.Verification_key.of_base64 (Js.to_string (vk ())) + with + | Ok vk_ -> + vk_ + | Error err -> + failwithf "Could not decode base64 verification key: %s" + (Error.to_string_hum err) () + in + Impl.exists Pickles.Side_loaded.Verification_key.typ ~compute:(fun () -> + vk () ) + +let vk_digest vk = + Pickles.Side_loaded.Verification_key.Checked.to_input vk + |> Random_oracle.Checked.pack_input + +let in_circuit tag checked_vk = Pickles.Side_loaded.in_circuit tag checked_vk + +let in_prover tag (vk : Js.js_string Js.t) = + let vk = + match Pickles.Side_loaded.Verification_key.of_base64 (Js.to_string vk) with + | Ok vk_ -> + vk_ + | Error err -> + failwithf "Could not decode base64 verification key: %s" + (Error.to_string_hum err) () + in + Pickles.Side_loaded.in_prover tag vk + +let pickles = + object%js + val compile = pickles_compile + + val verify = verify + + val loadSrsFp = load_srs_fp + + val loadSrsFq = load_srs_fq + + val dummyProof = dummy_proof + + val dummyVerificationKey = dummy_verification_key + + val proofToBase64 = proof_to_base64 + + val proofOfBase64 = proof_of_base64 + + val proofToBase64Transaction = + fun (proof : proof) -> + proof |> Pickles.Side_loaded.Proof.of_proof + |> Pickles.Side_loaded.Proof.to_base64 |> Js.string + + val encodeVerificationKey = encode_verification_key + + val decodeVerificationKey = decode_verification_key + + val util = + object%js + val toMlString = Util.to_ml_string + + val fromMlString = Util.from_ml_string + end + + val sideLoaded = + object%js + val create = side_loaded_create + + val inCircuit = + (* We get weak variables here, but they're synthetic. Don't try this + at home. + *) + Obj.magic in_circuit + + val inProver = + (* We get weak variables here, but they're synthetic. Don't try this + at home. 
+ *) + Obj.magic in_prover + + val vkToCircuit = vk_to_circuit + + val vkDigest = vk_digest + end + end diff --git a/src/lib/o1js_bindings/lib/pickles_bindings.mli b/src/lib/o1js_bindings/lib/pickles_bindings.mli new file mode 100644 index 000000000000..1a606bc7c23c --- /dev/null +++ b/src/lib/o1js_bindings/lib/pickles_bindings.mli @@ -0,0 +1,132 @@ +module Js = Js_of_ocaml.Js +module Impl = Pickles.Impls.Step +module Field = Impl.Field +module Boolean = Impl.Boolean + +module Public_input : sig + type t = Field.t array + + module Constant : sig + type t = Field.Constant.t array + end +end + +type 'a statement = 'a array * 'a array + +module Statement : sig + type t = Field.t statement + + module Constant : sig + type t = Field.Constant.t statement + end +end + +type proof = Pickles_types.Nat.N0.n Pickles.Proof.t + +type pickles_rule_js = + < identifier : Js.js_string Js.t Js.prop + ; main : + ( Public_input.t + -> < publicOutput : Public_input.t Js.prop + ; previousStatements : Statement.t array Js.prop + ; previousProofs : proof array Js.prop + ; shouldVerify : Boolean.var array Js.prop > + Js.t + Promise_js_helpers.js_promise ) + Js.prop + ; featureFlags : bool option Pickles_types.Plonk_types.Features.t Js.prop + ; proofsToVerify : + < isSelf : bool Js.t Js.prop ; tag : Js.Unsafe.any Js.t Js.prop > Js.t + array + Js.prop > + Js.t + +module Cache : sig + type js_storable +end + +module Proof0 : sig + type t = Pickles_types.Nat.N0.n Pickles.Proof.t +end + +module Proof1 : sig + type t = Pickles_types.Nat.N1.n Pickles.Proof.t +end + +module Proof2 : sig + type t = Pickles_types.Nat.N2.n Pickles.Proof.t +end + +type some_proof = Proof0 of Proof0.t | Proof1 of Proof1.t | Proof2 of Proof2.t + +val pickles : + < compile : + ( pickles_rule_js array + -> < publicInputSize : int Js.prop + ; publicOutputSize : int Js.prop + ; storable : Cache.js_storable Js.optdef_prop + ; overrideWrapDomain : int Js.optdef_prop + ; numChunks : int Js.optdef_prop + ; lazyMode : bool Js.optdef_prop > + Js.t + -> < getVerificationKey : + ( unit + -> (Js.js_string Js.t * Impl.field) Promise_js_helpers.js_promise + ) + Js.readonly_prop + ; provers : 'a Js.readonly_prop + ; tag : 'b Js.readonly_prop + ; verify : 'c Js.readonly_prop > + Js.t ) + Js.readonly_prop + ; verify : + ( Statement.Constant.t + -> proof + -> Js.js_string Js.t + -> bool Js.t Promise_js_helpers.js_promise ) + Js.readonly_prop + ; loadSrsFp : (unit -> Kimchi_bindings.Protocol.SRS.Fp.t) Js.readonly_prop + ; loadSrsFq : (unit -> Kimchi_bindings.Protocol.SRS.Fq.t) Js.readonly_prop + ; dummyProof : (int -> int -> some_proof) Js.readonly_prop + ; dummyVerificationKey : + (unit -> Js.js_string Js.t * Impl.field) Js.readonly_prop + ; encodeVerificationKey : + (Pickles.Verification_key.t -> Js.js_string Js.t) Js.readonly_prop + ; decodeVerificationKey : + (Js.js_string Js.t -> Pickles.Verification_key.t) Js.readonly_prop + ; proofOfBase64 : (Js.js_string Js.t -> int -> some_proof) Js.readonly_prop + ; proofToBase64 : (some_proof -> Js.js_string Js.t) Js.readonly_prop + ; proofToBase64Transaction : (proof -> Js.js_string Js.t) Js.readonly_prop + ; util : + < fromMlString : (string -> Js.js_string Js.t) Js.readonly_prop + ; toMlString : (Js.js_string Js.t -> string) Js.readonly_prop > + Js.t + Js.readonly_prop + ; sideLoaded : + < create : + ( Js.js_string Js.t + -> int + -> int + -> int + -> bool option Pickles_types.Plonk_types.Features.t + -> 'd ) + Js_of_ocaml.Js.readonly_prop + ; inCircuit : + ( _ Pickles.Tag.t + -> 
Mina_wire_types.Pickles.M.Side_loaded.Verification_key.V2.t + -> unit ) + Js_of_ocaml.Js.readonly_prop + ; inProver : + (_ Pickles.Tag.t -> Js.js_string Js.t -> unit) + Js_of_ocaml.Js.readonly_prop + ; vkDigest : + ( Pickles.Side_loaded.Verification_key.Checked.t + -> Pickles.Impls.Step.Internal_Basic.Field.Var.t array ) + Js_of_ocaml.Js.readonly_prop + ; vkToCircuit : + ( (unit -> Js.js_string Js.t) + -> Pickles.Side_loaded.Verification_key.Checked.t ) + Js_of_ocaml.Js.readonly_prop > + Js.t + Js_of_ocaml.Js.readonly_prop > + Js.t diff --git a/src/lib/o1js_bindings/lib/snarky_bindings.ml b/src/lib/o1js_bindings/lib/snarky_bindings.ml new file mode 100644 index 000000000000..34ddba5171b9 --- /dev/null +++ b/src/lib/o1js_bindings/lib/snarky_bindings.ml @@ -0,0 +1,608 @@ +open Core_kernel +module Js = Js_of_ocaml.Js +module Backend = Kimchi_backend.Pasta.Vesta_based_plonk +module Impl = Pickles.Impls.Step +module Field = Impl.Field +module Boolean = Impl.Boolean +module As_prover = Impl.As_prover +module Typ = Impl.Typ +module Run_state = Snarky_backendless.Run_state + +type field = Impl.field + +(* light-weight wrapper around snarky-ml core *) + +let empty_typ : (_, _, unit) Impl.Internal_Basic.Typ.typ' = + { var_to_fields = (fun fields -> (fields, ())) + ; var_of_fields = (fun (fields, _) -> fields) + ; value_to_fields = (fun fields -> (fields, ())) + ; value_of_fields = (fun (fields, _) -> fields) + ; size_in_field_elements = 0 + ; constraint_system_auxiliary = (fun _ -> ()) + ; check = (fun _ -> Impl.Internal_Basic.Checked.return ()) + } + +let typ (size_in_field_elements : int) : (Field.t array, field array) Typ.t = + Typ { empty_typ with size_in_field_elements } + +module Run = struct + let exists (size_in_fields : int) (compute : unit -> Field.Constant.t array) = + Impl.exists (typ size_in_fields) ~compute + + let exists_one (compute : unit -> Field.Constant.t) = + Impl.exists Field.typ ~compute + + let in_prover () = Impl.in_prover () + + let as_prover = Impl.as_prover + + let in_prover_block () = As_prover.in_prover_block () |> Js.bool + + let set_eval_constraints b = Snarky_backendless.Snark0.set_eval_constraints b + + let enter_constraint_system () = + let builder = + Impl.constraint_system_manual ~input_typ:Impl.Typ.unit + ~return_typ:Impl.Typ.unit + in + builder.run_circuit (fun () () -> ()) ; + builder.finish_computation + + let enter_generate_witness () = + let builder = + Impl.generate_witness_manual ~input_typ:Impl.Typ.unit + ~return_typ:Impl.Typ.unit () + in + builder.run_circuit (fun () () -> ()) ; + let finish () = builder.finish_computation () |> fst in + finish + + let enter_as_prover size = Impl.as_prover_manual size |> Staged.unstage + + module State = struct + let alloc_var state = Backend.Run_state.alloc_var state () + + let store_field_elt state x = Backend.Run_state.store_field_elt state x + + let as_prover state = Backend.Run_state.as_prover state + + let set_as_prover state b = Backend.Run_state.set_as_prover state b + + let has_witness state = Backend.Run_state.has_witness state + + let get_variable_value state i = + Backend.Run_state.get_variable_value state i + end +end + +module Constraint_system = struct + let rows cs = Backend.R1CS_constraint_system.get_rows_len cs + + let digest cs = + Backend.R1CS_constraint_system.digest cs |> Md5.to_hex |> Js.string + + let to_json cs = + Backend.R1CS_constraint_system.to_json cs |> Js.string |> Util.json_parse +end + +module Field' = struct + (** evaluates a CVar by unfolding the AST and reading Vars from a list of 
public input + aux values *) + let read_var (x : Field.t) = As_prover.read_var x + + (** x === y without handling of constants *) + let assert_equal x y = Impl.assert_ (Impl.Constraint.equal x y) + + (** x*y === z without handling of constants *) + let assert_mul x y z = Impl.assert_ (Impl.Constraint.r1cs x y z) + + (** x*x === y without handling of constants *) + let assert_square x y = Impl.assert_ (Impl.Constraint.square x y) + + (** x*x === x without handling of constants *) + let assert_boolean x = Impl.assert_ (Impl.Constraint.boolean x) + + (** check x < y and x <= y. + this is used in all comparisons, including with assert *) + let compare (bit_length : int) x y = + let ({ less; less_or_equal } : Field.comparison_result) = + Field.compare ~bit_length x y + in + (less, less_or_equal) + + (** returns x truncated to the lowest [16 * length_div_16] bits + => can be used to assert that x fits in [16 * length_div_16] bits. + + more efficient than [to_bits] because it uses the [EC_endoscalar] gate; + does 16 bits per row (vs 1 bits per row that you can do with generic gates). + *) + let truncate_to_bits16 (length_div_16 : int) x = + let _a, _b, x0 = + Pickles.Scalar_challenge.to_field_checked' ~num_bits:(length_div_16 * 16) + (module Impl) + { inner = x } + in + x0 +end + +let add_gate (label : string) gate = + Impl.with_label label (fun () -> Impl.assert_ gate) + +module Gates = struct + let zero in1 in2 out = + add_gate "zero" + (Raw { kind = Zero; values = [| in1; in2; out |]; coeffs = [||] }) + + let generic sl l sr r so o sm sc = + add_gate "generic" + (Basic { l = (sl, l); r = (sr, r); o = (so, o); m = sm; c = sc }) + + let poseidon state = add_gate "poseidon" (Poseidon { state }) + + let ec_add p1 p2 p3 inf same_x slope inf_z x21_inv = + add_gate "ec_add" + (EC_add_complete { p1; p2; p3; inf; same_x; slope; inf_z; x21_inv }) ; + (* TODO: do we need this? 
*) + p3 + + let ec_scale state = add_gate "ec_scale" (EC_scale { state }) + + let ec_endoscale state xs ys n_acc = + add_gate "ec_endoscale" (EC_endoscale { state; xs; ys; n_acc }) + + let ec_endoscalar state = add_gate "ec_endoscalar" (EC_endoscalar { state }) + + let lookup (w0, w1, w2, w3, w4, w5, w6) = + add_gate "lookup" (Lookup { w0; w1; w2; w3; w4; w5; w6 }) + + let range_check0 v0 (v0p0, v0p1, v0p2, v0p3, v0p4, v0p5) + (v0c0, v0c1, v0c2, v0c3, v0c4, v0c5, v0c6, v0c7) compact = + add_gate "range_check0" + (RangeCheck0 + { (* Current row *) v0 + ; v0p0 + ; v0p1 + ; v0p2 + ; v0p3 + ; v0p4 + ; v0p5 + ; v0c0 + ; v0c1 + ; v0c2 + ; v0c3 + ; v0c4 + ; v0c5 + ; v0c6 + ; v0c7 + ; (* Coefficients *) + compact + } ) + + let range_check1 v2 v12 + ( v2c0 + , v2p0 + , v2p1 + , v2p2 + , v2p3 + , v2c1 + , v2c2 + , v2c3 + , v2c4 + , v2c5 + , v2c6 + , v2c7 + , v2c8 ) + ( v2c9 + , v2c10 + , v2c11 + , v0p0 + , v0p1 + , v1p0 + , v1p1 + , v2c12 + , v2c13 + , v2c14 + , v2c15 + , v2c16 + , v2c17 + , v2c18 + , v2c19 ) = + add_gate "range_check1" + (RangeCheck1 + { (* Current row *) v2 + ; v12 + ; v2c0 + ; v2p0 + ; v2p1 + ; v2p2 + ; v2p3 + ; v2c1 + ; v2c2 + ; v2c3 + ; v2c4 + ; v2c5 + ; v2c6 + ; v2c7 + ; v2c8 + ; (* Next row *) v2c9 + ; v2c10 + ; v2c11 + ; v0p0 + ; v0p1 + ; v1p0 + ; v1p1 + ; v2c12 + ; v2c13 + ; v2c14 + ; v2c15 + ; v2c16 + ; v2c17 + ; v2c18 + ; v2c19 + } ) + + let xor in1 in2 out in1_0 in1_1 in1_2 in1_3 in2_0 in2_1 in2_2 in2_3 out_0 + out_1 out_2 out_3 = + add_gate "xor" + (Xor + { in1 + ; in2 + ; out + ; in1_0 + ; in1_1 + ; in1_2 + ; in1_3 + ; in2_0 + ; in2_1 + ; in2_2 + ; in2_3 + ; out_0 + ; out_1 + ; out_2 + ; out_3 + } ) + + let foreign_field_add (left_input_lo, left_input_mi, left_input_hi) + (right_input_lo, right_input_mi, right_input_hi) field_overflow carry + (foreign_field_modulus0, foreign_field_modulus1, foreign_field_modulus2) + sign = + add_gate "foreign_field_add" + (ForeignFieldAdd + { left_input_lo + ; left_input_mi + ; left_input_hi + ; right_input_lo + ; right_input_mi + ; right_input_hi + ; field_overflow + ; carry + ; foreign_field_modulus0 + ; foreign_field_modulus1 + ; foreign_field_modulus2 + ; sign + } ) + + let foreign_field_mul (left_input0, left_input1, left_input2) + (right_input0, right_input1, right_input2) (remainder01, remainder2) + (quotient0, quotient1, quotient2) quotient_hi_bound + (product1_lo, product1_hi_0, product1_hi_1) carry0 + ( carry1_0 + , carry1_12 + , carry1_24 + , carry1_36 + , carry1_48 + , carry1_60 + , carry1_72 ) (carry1_84, carry1_86, carry1_88, carry1_90) + foreign_field_modulus2 + ( neg_foreign_field_modulus0 + , neg_foreign_field_modulus1 + , neg_foreign_field_modulus2 ) = + add_gate "foreign_field_mul" + (ForeignFieldMul + { left_input0 + ; left_input1 + ; left_input2 + ; right_input0 + ; right_input1 + ; right_input2 + ; remainder01 + ; remainder2 + ; quotient0 + ; quotient1 + ; quotient2 + ; quotient_hi_bound + ; product1_lo + ; product1_hi_0 + ; product1_hi_1 + ; carry0 + ; carry1_0 + ; carry1_12 + ; carry1_24 + ; carry1_36 + ; carry1_48 + ; carry1_60 + ; carry1_72 + ; carry1_84 + ; carry1_86 + ; carry1_88 + ; carry1_90 + ; foreign_field_modulus2 + ; neg_foreign_field_modulus0 + ; neg_foreign_field_modulus1 + ; neg_foreign_field_modulus2 + } ) + + let rotate word rotated excess + (bound_limb0, bound_limb1, bound_limb2, bound_limb3) + ( bound_crumb0 + , bound_crumb1 + , bound_crumb2 + , bound_crumb3 + , bound_crumb4 + , bound_crumb5 + , bound_crumb6 + , bound_crumb7 ) two_to_rot = + add_gate "rot64" + (Rot64 + { (* Current row *) word 
+ ; rotated + ; excess + ; bound_limb0 + ; bound_limb1 + ; bound_limb2 + ; bound_limb3 + ; bound_crumb0 + ; bound_crumb1 + ; bound_crumb2 + ; bound_crumb3 + ; bound_crumb4 + ; bound_crumb5 + ; bound_crumb6 + ; bound_crumb7 (* Coefficients *) + ; two_to_rot (* Rotation scalar 2^rot *) + } ) + + let add_fixed_lookup_table id data = + add_gate "add_fixed_lookup_table" (AddFixedLookupTable { id; data }) + + let add_runtime_table_config id first_column = + add_gate "add_runtime_table_config" (AddRuntimeTableCfg { id; first_column }) + + let raw kind values coeffs = add_gate "raw" (Raw { kind; values; coeffs }) +end + +module Group = struct + let scale_fast_unpack (base : Field.t * Field.t) + (scalar : Field.t Pickles_types.Shifted_value.Type1.t) num_bits : + (Field.t * Field.t) * Boolean.var array = + Pickles.Step_main_inputs.Ops.scale_fast_unpack base scalar ~num_bits +end + +module Circuit = struct + module Main = struct + let of_js (main : Field.t array -> unit) = + let main' public_input () = main public_input in + main' + end + + let compile main public_input_size lazy_mode = + let input_typ = typ public_input_size in + let return_typ = Impl.Typ.unit in + let cs = Impl.constraint_system ~input_typ ~return_typ (Main.of_js main) in + Impl.Keypair.generate ~lazy_mode ~prev_challenges:0 cs + + let prove main public_input_size public_input keypair = + let pk = Impl.Keypair.pk keypair in + let input_typ = typ public_input_size in + let return_typ = Impl.Typ.unit in + Impl.generate_witness_conv ~input_typ ~return_typ + ~f:(fun { Impl.Proof_inputs.auxiliary_inputs; public_inputs } () -> + Backend.Proof.create pk ~auxiliary:auxiliary_inputs + ~primary:public_inputs ) + (Main.of_js main) public_input + + let verify public_input proof vk = + let public_input_vec = Backend.Field.Vector.create () in + Array.iter public_input ~f:(fun x -> + Backend.Field.Vector.emplace_back public_input_vec x ) ; + Backend.Proof.verify proof vk public_input_vec |> Js.bool + + module Keypair = struct + let get_vk t = Impl.Keypair.vk t + + external prover_to_json : + Kimchi_bindings.Protocol.Index.Fp.t -> Js.js_string Js.t + = "prover_to_json" + + let get_cs_json t = + (Impl.Keypair.pk t).index |> prover_to_json |> Util.json_parse + end +end + +module Poseidon = struct + let update (state : Field.t Random_oracle.State.t) (input : Field.t array) : + Field.t Random_oracle.State.t = + Random_oracle.Checked.update ~state input + + let hash_to_group (xs : Field.t array) = + let input = Random_oracle.Checked.hash xs in + Snark_params.Group_map.Checked.to_group input + + (* sponge *) + + let to_unchecked (x : Field.t) = + match x with Constant y -> y | y -> As_prover.read_var y + + module Poseidon_sponge_checked = + Sponge.Make_sponge (Pickles.Step_main_inputs.Sponge.Permutation) + module Poseidon_sponge = + Sponge.Make_sponge (Sponge.Poseidon (Pickles.Tick_field_sponge.Inputs)) + + let sponge_params = Kimchi_pasta_basic.poseidon_params_fp + + let sponge_params_checked = Sponge.Params.map sponge_params ~f:Field.constant + + type sponge = + | Checked of Poseidon_sponge_checked.t + | Unchecked of Poseidon_sponge.t + + (* returns a "sponge" that stays opaque to JS *) + let sponge_create (is_checked : bool Js.t) : sponge = + if Js.to_bool is_checked then + Checked (Poseidon_sponge_checked.create ?init:None sponge_params_checked) + else Unchecked (Poseidon_sponge.create ?init:None sponge_params) + + let sponge_absorb (sponge : sponge) (field : Field.t) : unit = + match sponge with + | Checked s -> + Poseidon_sponge_checked.absorb s 
field + | Unchecked s -> + Poseidon_sponge.absorb s @@ to_unchecked field + + let sponge_squeeze (sponge : sponge) : Field.t = + match sponge with + | Checked s -> + Poseidon_sponge_checked.squeeze s + | Unchecked s -> + Poseidon_sponge.squeeze s |> Impl.Field.constant +end + +let snarky = + object%js + val run = + let open Run in + object%js + method exists = exists + + method existsOne = exists_one + + val inProver = in_prover + + method asProver = as_prover + + val inProverBlock = in_prover_block + + val setEvalConstraints = set_eval_constraints + + val enterConstraintSystem = enter_constraint_system + + val enterGenerateWitness = enter_generate_witness + + val enterAsProver = enter_as_prover + + val state = + object%js + val allocVar = State.alloc_var + + val storeFieldElt = State.store_field_elt + + val asProver = State.as_prover + + val setAsProver = State.set_as_prover + + val hasWitness = State.has_witness + + val getVariableValue = State.get_variable_value + end + end + + val constraintSystem = + object%js + method rows = Constraint_system.rows + + method digest = Constraint_system.digest + + method toJson = Constraint_system.to_json + end + + val field = + let open Field' in + object%js + method readVar = read_var + + method assertEqual = assert_equal + + method assertMul = assert_mul + + method assertSquare = assert_square + + method assertBoolean = assert_boolean + + method compare = compare + + method truncateToBits16 = truncate_to_bits16 + end + + val gates = + object%js + method zero = Gates.zero + + method generic = Gates.generic + + method poseidon = Gates.poseidon + + method ecAdd = Gates.ec_add + + method ecScale = Gates.ec_scale + + method ecEndoscale = Gates.ec_endoscale + + method ecEndoscalar = Gates.ec_endoscalar + + method lookup = Gates.lookup + + method rangeCheck0 = Gates.range_check0 + + method rangeCheck1 = Gates.range_check1 + + method xor = Gates.xor + + method foreignFieldAdd = Gates.foreign_field_add + + method foreignFieldMul = Gates.foreign_field_mul + + method rotate = Gates.rotate + + method addFixedLookupTable = Gates.add_fixed_lookup_table + + method addRuntimeTableConfig = Gates.add_runtime_table_config + + method raw = Gates.raw + end + + val group = + object%js + val scaleFastUnpack = Group.scale_fast_unpack + end + + val circuit = + object%js + method compile = Circuit.compile + + method prove = Circuit.prove + + method verify = Circuit.verify + + val keypair = + object%js + method getVerificationKey = Circuit.Keypair.get_vk + + method getConstraintSystemJSON = Circuit.Keypair.get_cs_json + end + end + + val poseidon = + object%js + method update = Poseidon.update + + method hashToGroup = Poseidon.hash_to_group + + val sponge = + object%js + method create = Poseidon.sponge_create + + method absorb = Poseidon.sponge_absorb + + method squeeze = Poseidon.sponge_squeeze + end + end + end diff --git a/src/lib/o1js_bindings/lib/snarky_bindings.mli b/src/lib/o1js_bindings/lib/snarky_bindings.mli new file mode 100644 index 000000000000..4d73926a7bf2 --- /dev/null +++ b/src/lib/o1js_bindings/lib/snarky_bindings.mli @@ -0,0 +1,279 @@ +module Js = Js_of_ocaml.Js +module Backend = Kimchi_backend.Pasta.Vesta_based_plonk +module Impl = Pickles.Impls.Step +module Field = Impl.Field +module Boolean = Impl.Boolean +module Run_state = Snarky_backendless.Run_state + +type field = Impl.field + +module Poseidon : sig + type sponge +end + +val snarky : + < run : + < exists : (int -> (unit -> field array) -> Field.t array) Js.meth + ; existsOne : ((unit -> field) -> 
Field.t) Js.meth + ; inProver : (unit -> bool) Js.readonly_prop + ; asProver : ((unit -> unit) -> unit) Js.meth + ; inProverBlock : (unit -> bool Js.t) Js.readonly_prop + ; setEvalConstraints : (bool -> unit) Js.readonly_prop + ; enterConstraintSystem : + (unit -> unit -> Backend.R1CS_constraint_system.t) Js.readonly_prop + ; enterGenerateWitness : + (unit -> unit -> Impl.Proof_inputs.t) Js.readonly_prop + ; enterAsProver : + (int -> field array option -> Field.t array) Js.readonly_prop + ; state : + < allocVar : + (Backend.Run_state.t -> field Snarky_backendless.Cvar.t) + Js.readonly_prop + ; storeFieldElt : + (Backend.Run_state.t -> field -> field Snarky_backendless.Cvar.t) + Js.readonly_prop + ; asProver : (Backend.Run_state.t -> bool) Js.readonly_prop + ; setAsProver : (Backend.Run_state.t -> bool -> unit) Js.readonly_prop + ; hasWitness : (Backend.Run_state.t -> bool) Js.readonly_prop + ; getVariableValue : + (Backend.Run_state.t -> int -> field) Js.readonly_prop > + Js.t + Js.readonly_prop > + Js.t + Js.readonly_prop + ; constraintSystem : + < rows : (Backend.R1CS_constraint_system.t -> int) Js.meth + ; digest : (Backend.R1CS_constraint_system.t -> Js.js_string Js.t) Js.meth + ; toJson : (Backend.R1CS_constraint_system.t -> 'a) Js.meth > + Js.t + Js.readonly_prop + ; field : + < assertEqual : (Field.t -> Field.t -> unit) Js.meth + ; assertMul : (Field.t -> Field.t -> Field.t -> unit) Js.meth + ; assertSquare : (Field.t -> Field.t -> unit) Js.meth + ; assertBoolean : (Field.t -> unit) Js.meth + ; compare : + (int -> Field.t -> Field.t -> Boolean.var * Boolean.var) Js.meth + ; readVar : (Field.t -> field) Js.meth + ; truncateToBits16 : + ( int + -> field Snarky_backendless.Cvar.t + -> field Snarky_backendless.Cvar.t ) + Js.meth > + Js.t + Js.readonly_prop + ; gates : + < zero : (Field.t -> Field.t -> Field.t -> unit) Js.meth + ; generic : + ( field + -> Field.t + -> field + -> Field.t + -> field + -> Field.t + -> field + -> field + -> unit ) + Js.meth + ; poseidon : (Field.t array array -> unit) Js.meth + ; ecAdd : + ( Field.t * Field.t + -> Field.t * Field.t + -> Field.t * Field.t + -> Field.t + -> Field.t + -> Field.t + -> Field.t + -> Field.t + -> Field.t * Field.t ) + Js.meth + ; ecScale : + (Field.t Kimchi_backend_common.Scale_round.t array -> unit) Js.meth + ; ecEndoscale : + ( Field.t Kimchi_backend_common.Endoscale_round.t array + -> Field.t + -> Field.t + -> Field.t + -> unit ) + Js.meth + ; ecEndoscalar : + (Field.t Kimchi_backend_common.Endoscale_scalar_round.t array -> unit) + Js.meth + ; lookup : + ( Field.t * Field.t * Field.t * Field.t * Field.t * Field.t * Field.t + -> unit ) + Js.meth + ; rangeCheck0 : + ( Field.t + -> Field.t * Field.t * Field.t * Field.t * Field.t * Field.t + -> Field.t + * Field.t + * Field.t + * Field.t + * Field.t + * Field.t + * Field.t + * Field.t + -> field + -> unit ) + Js.meth + ; rangeCheck1 : + ( Field.t + -> Field.t + -> Field.t + * Field.t + * Field.t + * Field.t + * Field.t + * Field.t + * Field.t + * Field.t + * Field.t + * Field.t + * Field.t + * Field.t + * Field.t + -> Field.t + * Field.t + * Field.t + * Field.t + * Field.t + * Field.t + * Field.t + * Field.t + * Field.t + * Field.t + * Field.t + * Field.t + * Field.t + * Field.t + * Field.t + -> unit ) + Js.meth + ; xor : + ( Field.t + -> Field.t + -> Field.t + -> Field.t + -> Field.t + -> Field.t + -> Field.t + -> Field.t + -> Field.t + -> Field.t + -> Field.t + -> Field.t + -> Field.t + -> Field.t + -> Field.t + -> unit ) + Js.meth + ; foreignFieldAdd : + ( Field.t * Field.t * 
Field.t + -> Field.t * Field.t * Field.t + -> Field.t + -> Field.t + -> field * field * field + -> field + -> unit ) + Js.meth + ; foreignFieldMul : + ( Field.t * Field.t * Field.t + -> Field.t * Field.t * Field.t + -> Field.t * Field.t + -> Field.t * Field.t * Field.t + -> Field.t + -> Field.t * Field.t * Field.t + -> Field.t + -> Field.t + * Field.t + * Field.t + * Field.t + * Field.t + * Field.t + * Field.t + -> Field.t * Field.t * Field.t * Field.t + -> field + -> field * field * field + -> unit ) + Js.meth + ; rotate : + ( Field.t + -> Field.t + -> Field.t + -> Field.t * Field.t * Field.t * Field.t + -> Field.t + * Field.t + * Field.t + * Field.t + * Field.t + * Field.t + * Field.t + * Field.t + -> field + -> unit ) + Js.meth + ; addFixedLookupTable : (int32 -> field array array -> unit) Js.meth + ; addRuntimeTableConfig : (int32 -> field array -> unit) Js.meth + ; raw : + (Kimchi_types.gate_type -> Field.t array -> field array -> unit) + Js.meth > + Js.t + Js.readonly_prop + ; group : + < scaleFastUnpack : + ( Field.t * Field.t + -> Field.t Pickles_types.Shifted_value.Type1.t + -> int + -> (Field.t * Field.t) * Boolean.var array ) + Js.readonly_prop > + Js.t + Js.readonly_prop + ; poseidon : + < update : + ( Field.t Random_oracle.State.t + -> Field.t array + -> Field.t Random_oracle.State.t ) + Js.meth + ; hashToGroup : + ( Field.t array + -> field Snarky_backendless.Cvar.t * field Snarky_backendless.Cvar.t + ) + Js.meth + ; sponge : + < absorb : (Poseidon.sponge -> Field.t -> unit) Js.meth + ; create : (bool Js.t -> Poseidon.sponge) Js.meth + ; squeeze : (Poseidon.sponge -> Field.t) Js.meth > + Js.t + Js.readonly_prop > + Js.t + Js.readonly_prop + ; circuit : + < compile : + ((Field.t array -> unit) -> int -> bool -> Impl.Keypair.t) Js.meth + ; keypair : + < getConstraintSystemJSON : (Impl.Keypair.t -> 'a) Js.meth + ; getVerificationKey : + (Impl.Keypair.t -> Impl.Verification_key.t) Js.meth > + Js.t + Js.readonly_prop + ; prove : + ( (Field.t array -> unit) + -> int + -> field array + -> Impl.Keypair.t + -> Backend.Proof.with_public_evals ) + Js.meth + ; verify : + ( field array + -> Backend.Proof.with_public_evals + -> ( field + , Kimchi_bindings.Protocol.SRS.Fp.t + , Pasta_bindings.Fq.t Kimchi_types.or_infinity + Kimchi_types.poly_comm ) + Kimchi_types.VerifierIndex.verifier_index + -> bool Js.t ) + Js.meth > + Js.t + Js.readonly_prop > + Js.t diff --git a/src/lib/o1js_bindings/lib/util.ml b/src/lib/o1js_bindings/lib/util.ml new file mode 100644 index 000000000000..f76bc6e2c7ac --- /dev/null +++ b/src/lib/o1js_bindings/lib/util.ml @@ -0,0 +1,45 @@ +open Core_kernel +module Js = Js_of_ocaml.Js + +external get_ts_bindings : unit -> Js.Unsafe.any Js.Optdef.t = "getTsBindings" + +(* the ?. operator from JS *) +let ( |. ) (value : _ Js.Optdef.t) (key : string) = + Js.( + if phys_equal value undefined then undefined + else Unsafe.get value (string key)) + +module Js_environment = struct + type t = Node | Web | Unknown + + let value = + let env = get_ts_bindings () |. 
"jsEnvironment" in + Js.( + if phys_equal env (def (string "node")) then Node + else if phys_equal env (def (string "web")) then Web + else Unknown) +end + +let _console_log_string s = Js_of_ocaml.Firebug.console##log (Js.string s) + +let _console_log s = Js_of_ocaml.Firebug.console##log s + +let _console_dir s : unit = + let f = + Js.Unsafe.eval_string {js|(function(s) { console.dir(s, {depth: 5}); })|js} + in + Js.Unsafe.(fun_call f [| inject s |]) + +let _console_trace s : unit = + let f = Js.Unsafe.eval_string {js|(function(s) { console.trace(s); })|js} in + Js.Unsafe.(fun_call f [| inject s |]) + +let raise_error s = + Js.Js_error.(raise_ @@ of_error (new%js Js.error_constr (Js.string s))) + +external raise_exn_js : exn -> Js.js_string Js.t -> 'a = "custom_reraise_exn" + +let raise_exn exn = raise_exn_js exn (Js.string (Exn.to_string exn)) + +let json_parse (str : Js.js_string Js.t) = + Js.Unsafe.(fun_call global ##. JSON##.parse [| inject str |]) diff --git a/src/lib/o1js_bindings/o1js_constants.ml b/src/lib/o1js_bindings/o1js_constants.ml new file mode 100644 index 000000000000..a48212b259fa --- /dev/null +++ b/src/lib/o1js_bindings/o1js_constants.ml @@ -0,0 +1,198 @@ +(** + this file is used to generate the content of bindings/crypto/constants.ts + these constants are therefore available to o1js and mina-signer + -) without causing a runtime dependency on ocaml code + -) without having to be regenerated at startup + *) + +open Core_kernel +module Field = Pickles.Impls.Step.Field.Constant + +let string s = `String s + +let field f = `String (Field.to_string f) + +let array element array = `List (array |> Array.map ~f:element |> Array.to_list) + +let ledger_merkle_tree_depth = 35 + +let prefixes = + let open Hash_prefixes in + `Assoc + [ ("event", `String (zkapp_event :> string)) + ; ("events", `String (zkapp_events :> string)) + ; ("sequenceEvents", `String (zkapp_actions :> string)) + ; ("zkappBodyMainnet", `String (zkapp_body_mainnet :> string)) + ; ("zkappBodyTestnet", `String (zkapp_body_testnet :> string)) + ; ("accountUpdateCons", `String (account_update_cons :> string)) + ; ("accountUpdateNode", `String (account_update_node :> string)) + ; ("account", `String (account :> string)) + ; ("zkappAccount", `String (zkapp_account :> string)) + ; ("zkappMemo", `String (zkapp_memo :> string)) + ; ("signatureMainnet", `String (signature_mainnet :> string)) + ; ("signatureTestnet", `String (signature_testnet :> string)) + ; ("zkappUri", `String (zkapp_uri :> string)) + ; ("deriveTokenId", `String (derive_token_id :> string)) + ; ("sideLoadedVK", `String (side_loaded_vk :> string)) + ; ( "merkleTree" + , `List + (List.init ledger_merkle_tree_depth ~f:(fun idx -> + `String (merkle_tree idx :> string) ) ) ) + ] + +type hash_prefix_kind = Kimchi | Legacy + +let prefix_hash_entry (kind : hash_prefix_kind) (s : string) = + let s, fields = + match kind with + | Kimchi -> + (s, Random_oracle.(State.to_array (salt s))) + | Legacy -> + (s, Random_oracle.Legacy.(State.to_array (salt s))) + in + ((s :> string), array field fields) + +let prefix_hashes = + let open Hash_prefixes in + `Assoc + (List.map ~f:(prefix_hash_entry Kimchi) + ( [ (receipt_chain_user_command :> string) + ; (receipt_chain_zkapp :> string) + ; (coinbase :> string) + ; (pending_coinbases :> string) + ; (coinbase_stack_data :> string) + ; (coinbase_stack_state_hash :> string) + ; (coinbase_stack :> string) + ; (checkpoint_list :> string) + ; (merge_snark :> string) + ; (base_snark :> string) + ; (protocol_state :> string) + ; 
(protocol_state_body :> string) + ; (vrf_message :> string) + ; (signature_mainnet :> string) + ; (signature_testnet :> string) + ; (vrf_output :> string) + ; (vrf_evaluation :> string) + ; (epoch_seed :> string) + ; (transition_system_snark :> string) + ; (account :> string) + ; (zkapp_account :> string) + ; (side_loaded_vk :> string) + ; (zkapp_payload :> string) + ; (zkapp_body_mainnet :> string) + ; (zkapp_body_testnet :> string) + ; (zkapp_precondition :> string) + ; (zkapp_precondition_account :> string) + ; (zkapp_precondition_protocol_state :> string) + ; (account_update_account_precondition :> string) + ; (account_update_cons :> string) + ; (account_update_node :> string) + ; (account_update_stack_frame :> string) + ; (account_update_stack_frame_cons :> string) + ; (zkapp_uri :> string) + ; (zkapp_event :> string) + ; (zkapp_events :> string) + ; (zkapp_actions :> string) + ; (zkapp_memo :> string) + ; (zkapp_test :> string) + ; (derive_token_id :> string) + ; "CodaReceiptEmpty" + ; "MinaZkappEventsEmpty" + ; "MinaZkappActionsEmpty" + ; "MinaZkappActionStateEmptyElt" + ; "CoinbaseStack" + ; "PendingCoinbaseMerkleTree" + ] + @ List.init ledger_merkle_tree_depth ~f:(fun idx -> + (merkle_tree idx :> string) ) ) ) + +let prefix_hashes_legacy = + let open Hash_prefixes in + `Assoc + (List.map ~f:(prefix_hash_entry Legacy) + [ (receipt_chain_user_command :> string) + ; (signature_mainnet :> string) + ; (signature_testnet :> string) + ] ) + +let version_bytes = + let open Base58_check.Version_bytes in + let open Core_kernel in + `Assoc + [ ("tokenIdKey", `Int (Char.to_int token_id_key)) + ; ("receiptChainHash", `Int (Char.to_int receipt_chain_hash)) + ; ("ledgerHash", `Int (Char.to_int ledger_hash)) + ; ("epochSeed", `Int (Char.to_int epoch_seed)) + ; ("stateHash", `Int (Char.to_int state_hash)) + ; ("publicKey", `Int (Char.to_int non_zero_curve_point_compressed)) + ; ("userCommandMemo", `Int (Char.to_int user_command_memo)) + ; ("privateKey", `Int (Char.to_int private_key)) + ; ("signature", `Int (Char.to_int signature)) + ; ("transactionHash", `Int (Char.to_int transaction_hash)) + ; ("signedCommandV1", `Int (Char.to_int signed_command_v1)) + ] + +let protocol_versions = + let open Protocol_version in + `Assoc [ ("txnVersion", `Int (transaction current)) ] + +let poseidon_params_kimchi = + `Assoc + [ ("mds", array (array string) Sponge.Params.pasta_p_kimchi.mds) + ; ( "roundConstants" + , array (array string) Sponge.Params.pasta_p_kimchi.round_constants ) + ; ("fullRounds", `Int Pickles.Tick_field_sponge.Inputs.rounds_full) + ; ("partialRounds", `Int Pickles.Tick_field_sponge.Inputs.rounds_partial) + ; ( "hasInitialRoundConstant" + , `Bool Pickles.Tick_field_sponge.Inputs.initial_ark ) + ; ("stateSize", `Int Random_oracle.state_size) + ; ("rate", `Int Random_oracle.rate) + ; ("power", `Int Pickles.Tick_field_sponge.Inputs.alpha) + ] + +let poseidon_params_legacy = + `Assoc + [ ("mds", array (array string) Sponge.Params.pasta_p_legacy.mds) + ; ( "roundConstants" + , array (array string) Sponge.Params.pasta_p_legacy.round_constants ) + ; ("fullRounds", `Int Random_oracle.Legacy.Inputs.rounds_full) + ; ("partialRounds", `Int Random_oracle.Legacy.Inputs.rounds_partial) + ; ("hasInitialRoundConstant", `Bool Random_oracle.Legacy.Inputs.initial_ark) + ; ("stateSize", `Int Random_oracle.Legacy.state_size) + ; ("rate", `Int Random_oracle.Legacy.rate) + ; ("power", `Int Random_oracle.Legacy.Inputs.alpha) + ] + +let dummy_verification_key_hash () = + Pickles.Side_loaded.Verification_key.dummy + 
|> Mina_base.Zkapp_account.digest_vk + |> Pickles.Impls.Step.Field.Constant.to_string + +let mocks = + `Assoc + [ ("dummyVerificationKeyHash", string (dummy_verification_key_hash ())) ] + +let constants = + [ ("prefixes", prefixes) + ; ("prefixHashes", prefix_hashes) + ; ("prefixHashesLegacy", prefix_hashes_legacy) + ; ("versionBytes", version_bytes) + ; ("protocolVersions", protocol_versions) + ; ("poseidonParamsKimchiFp", poseidon_params_kimchi) + ; ("poseidonParamsLegacyFp", poseidon_params_legacy) + ; ("mocks", mocks) + ] + +let () = + let to_js (key, value) = + "let " ^ key ^ " = " ^ Yojson.Safe.pretty_to_string value ^ ";\n" + in + let content = + "// @gen this file is generated from `bindings/ocaml/o1js_constants.ml` - \ + don't edit it directly\n" ^ "export { " + ^ (List.map ~f:fst constants |> String.concat ~sep:", ") + ^ " }\n\n" + ^ (List.map ~f:to_js constants |> String.concat ~sep:"") + in + + print_endline content diff --git a/src/lib/o1js_bindings/o1js_types.ml b/src/lib/o1js_bindings/o1js_types.ml new file mode 100644 index 000000000000..6064334cc5e7 --- /dev/null +++ b/src/lib/o1js_bindings/o1js_types.ml @@ -0,0 +1,13 @@ +open Mina_base + +let () = + let layout = Fields_derivers_zkapps.js_layout in + let js_layout = + `Assoc + [ ("ZkappCommand", layout Zkapp_command.deriver) + ; ("AccountUpdate", layout Account_update.Graphql_repr.deriver) + ; ("Account", layout Account.deriver) + ] + in + + print_endline (js_layout |> Yojson.Safe.pretty_to_string)
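
For orientation, the two generator programs above (`o1js_constants.ml` and `o1js_types.ml`) only print their output to stdout: `o1js_types.ml` emits a single JSON object with the keys `ZkappCommand`, `AccountUpdate`, and `Account`, and `o1js_constants.ml` emits a small JavaScript module assembled by its `to_js` helper. A rough, illustrative sketch of that generated module's shape (all values elided, since they depend on the hash-prefix and sponge parameters) is:

```js
// Sketch (illustrative only) of the module printed by o1js_constants.ml, values elided:

// @gen this file is generated from `bindings/ocaml/o1js_constants.ml` - don't edit it directly
export { prefixes, prefixHashes, prefixHashesLegacy, versionBytes, protocolVersions, poseidonParamsKimchiFp, poseidonParamsLegacyFp, mocks }

let prefixes = { /* hash-prefix strings, one field per entry in the `prefixes` assoc */ };
let prefixHashes = { /* salted sponge states, arrays of field-element strings */ };
/* ...one `let <key> = <pretty-printed JSON>;` binding per remaining entry in `constants`... */
```

Both artifacts can therefore be generated ahead of time and consumed from JavaScript without a runtime dependency on the OCaml code, as noted in the header comment of `o1js_constants.ml`.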