diff --git a/justfile b/justfile index e1dfb62fb..003468289 100644 --- a/justfile +++ b/justfile @@ -263,6 +263,7 @@ stop: killall -9 virtualization-layers db-backend node .electron-wrapped || true killall -9 electron || true killall -9 backend-manager || true + killall -9 ct-rr-worker || true reset-config: rm --force ~/.config/codetracer/.config.yaml && \ diff --git a/src/common/common_lang.nim b/src/common/common_lang.nim index a92b88aaf..fd799e465 100644 --- a/src/common/common_lang.nim +++ b/src/common/common_lang.nim @@ -30,6 +30,7 @@ IS_DB_BASED[LangSmall] = true IS_DB_BASED[LangRustWasm] = true IS_DB_BASED[LangCppWasm] = true IS_DB_BASED[LangPythonDb] = true +IS_DB_BASED[LangPascal] = false proc isDbBased*(lang: Lang): bool = ## return true if `lang` uses the db backend diff --git a/src/common/common_types/language_features/value.nim b/src/common/common_types/language_features/value.nim index 0e0f5545f..0ccc8de63 100644 --- a/src/common/common_types/language_features/value.nim +++ b/src/common/common_types/language_features/value.nim @@ -72,6 +72,7 @@ type rrTicks*: int countBudget*: int minCountLimit*: int + lang*: Lang CtLoadLocalsResponseBody* = ref object locals*: seq[Variable] diff --git a/src/common/config.nim b/src/common/config.nim index 6783d15f5..186af9f91 100644 --- a/src/common/config.nim +++ b/src/common/config.nim @@ -11,6 +11,7 @@ type path*: string ctPaths*: string debugInfoToolPath*: string + ctRRWorkerExe*: string FlowConfigObjWrapper* = object enabled*: bool @@ -63,7 +64,8 @@ type enabled: false, path: "", ctPaths: "", - debugInfoToolPath: "" + debugInfoToolPath: "", + ctRRWorkerExe: "", ).}: RRBackendConfig skipInstall: bool diff --git a/src/config/default_config.yaml b/src/config/default_config.yaml index 7633804f0..9a04bdadd 100644 --- a/src/config/default_config.yaml +++ b/src/config/default_config.yaml @@ -116,6 +116,7 @@ rrBackend: path: "" ctPaths: "" debugInfoToolPath: "" + ctRRWorkerExe: "" skipInstall: true diff --git 
a/src/db-backend/Cargo.lock b/src/db-backend/Cargo.lock index fdc653e32..7fdf60347 100644 --- a/src/db-backend/Cargo.lock +++ b/src/db-backend/Cargo.lock @@ -102,9 +102,9 @@ checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6" [[package]] name = "bitflags" -version = "2.9.4" +version = "2.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2261d10cca569e4643e526d8dc2e62e433cc8aba21ab764233731f8d369bf394" +checksum = "812e12b5285cc515a9c72a5c1d3b6d46a19dac5acfef5265968c166106e31dd3" [[package]] name = "blake2b_simd" @@ -125,9 +125,9 @@ checksum = "46c5e41b57b8bba42a04676d81cb89e9ee8e859a1a66f80a5a72e1cb76b34d43" [[package]] name = "capnp" -version = "0.21.5" +version = "0.21.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f62fcad97587224e2a1bd12ec1c7c0e95b93cefd285763a174cf1b34048c6437" +checksum = "4e92edec8974fcd7ece90bb021db782abe14a61c10c817f197f700fef7430eb8" dependencies = [ "embedded-io", ] @@ -152,9 +152,9 @@ dependencies = [ [[package]] name = "cc" -version = "1.2.40" +version = "1.2.42" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e1d05d92f4b1fd76aad469d46cdd858ca761576082cd37df81416691e50199fb" +checksum = "81bbf3b3619004ad9bd139f62a9ab5cfe467f307455a0d307b0cf58bf070feaa" dependencies = [ "find-msvc-tools", "jobserver", @@ -164,9 +164,9 @@ dependencies = [ [[package]] name = "cfg-if" -version = "1.0.3" +version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2fd1289c04a9ea8cb22300a459a72a385d7c73d3259e2ed7dcb2af674838cfa9" +checksum = "9330f8b2ff13f34540b44e946ef35111825727b38d33286ef986142615121801" [[package]] name = "chrono" @@ -183,9 +183,9 @@ dependencies = [ [[package]] name = "clap" -version = "4.5.48" +version = "4.5.50" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e2134bb3ea021b78629caa971416385309e0131b351b25e01dc16fb54e1b5fae" +checksum = 
"0c2cfd7bf8a6017ddaa4e32ffe7403d547790db06bd171c1c53926faab501623" dependencies = [ "clap_builder", "clap_derive", @@ -193,9 +193,9 @@ dependencies = [ [[package]] name = "clap_builder" -version = "4.5.48" +version = "4.5.50" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c2ba64afa3c0a6df7fa517765e31314e983f51dda798ffba27b988194fb65dc9" +checksum = "0a4c05b9e80c5ccd3a7ef080ad7b6ba7d6fc00a985b8b157197075677c82c7a0" dependencies = [ "anstream", "anstyle", @@ -205,21 +205,21 @@ dependencies = [ [[package]] name = "clap_derive" -version = "4.5.47" +version = "4.5.49" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bbfd7eae0b0f1a6e63d4b13c9c478de77c2eb546fba158ad50b4203dc24b9f9c" +checksum = "2a0b5487afeab2deb2ff4e03a807ad1a03ac532ff5a2cee5d86884440c7f7671" dependencies = [ "heck", "proc-macro2", "quote", - "syn", + "syn 2.0.108", ] [[package]] name = "clap_lex" -version = "0.7.5" +version = "0.7.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b94f61472cee1439c0b966b47e3aca9ae07e45d070759512cd390ea2bebc6675" +checksum = "a1d728cc89cf3aee9ff92b05e62b19ee65a02b5702cff7d5a377e32c6ae29d8d" [[package]] name = "colorchoice" @@ -270,6 +270,7 @@ dependencies = [ "indexmap 1.9.3", "js-sys", "log", + "ntest", "num-bigint", "num-derive", "num-traits", @@ -319,9 +320,9 @@ checksum = "edd0f118536f44f5ccd48bcb8b111bdc3de888b58c74639dfb034a357d0f206d" [[package]] name = "env_filter" -version = "0.1.3" +version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "186e05a59d4c50738528153b83b0b0194d3a29507dfec16eccd4b342903397d0" +checksum = "1bf3c259d255ca70051b30e2e95b5446cdb8949ac4cd22c0d7fd634d89f568e2" dependencies = [ "log", "regex", @@ -371,9 +372,9 @@ dependencies = [ [[package]] name = "find-msvc-tools" -version = "0.1.3" +version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"0399f9d26e5191ce32c498bebd31e7a3ceabc2745f0ac54af3f335126c3f24b3" +checksum = "52051878f80a721bb68ebfbc930e07b65ba72f2da88968ea5c06fd6ca3d3a127" [[package]] name = "fscommon" @@ -440,7 +441,7 @@ checksum = "162ee34ebcb7c64a8abebc059ce0fee27c2262618d7b60ed8faf72fef13c3650" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 2.0.108", ] [[package]] @@ -487,19 +488,19 @@ checksum = "8fc3cb4d91f53b50155bdcfd23f6a4c39ae1969c2ae85982b135750cccaf5fce" dependencies = [ "cfg-if", "libc", - "wasi 0.9.0+wasi-snapshot-preview1", + "wasi", ] [[package]] name = "getrandom" -version = "0.3.3" +version = "0.3.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "26145e563e54f2cadc477553f1ec5ee650b00862f0a58bcd12cbdc5f0ea2d2f4" +checksum = "899def5c37c4fd7b2664648c28120ecec138e4d395b459e5ca34f9cce2dd77fd" dependencies = [ "cfg-if", "libc", "r-efi", - "wasi 0.14.7+wasi-0.2.4", + "wasip2", ] [[package]] @@ -556,9 +557,9 @@ dependencies = [ [[package]] name = "indexmap" -version = "2.11.4" +version = "2.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4b0f83760fb341a774ed326568e19f5a863af4a952def8c39f9ab92fd95b88e5" +checksum = "6717a8d2a5a929a1a2eb43a12812498ed141a0bcfb7e8f7844fbdbe4303bba9f" dependencies = [ "equivalent", "hashbrown 0.16.0", @@ -566,9 +567,9 @@ dependencies = [ [[package]] name = "is_terminal_polyfill" -version = "1.70.1" +version = "1.70.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7943c866cc5cd64cbc25b2e01621d07fa8eb2a1a23160ee81ce38704e97b8ecf" +checksum = "a6cb138bb79a146c1bd460005623e142ef0181e3d0219cb493e02f7d08a35695" [[package]] name = "itoa" @@ -597,7 +598,7 @@ checksum = "03343451ff899767262ec32146f6d559dd759fdadf42ff0e227c7c48f72594b4" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 2.0.108", ] [[package]] @@ -606,7 +607,7 @@ version = "0.1.34" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"9afb3de4395d6b3e67a780b6de64b51c978ecf11cb9a462c66be7d4ca9039d33" dependencies = [ - "getrandom 0.3.3", + "getrandom 0.3.4", "libc", ] @@ -628,9 +629,9 @@ checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe" [[package]] name = "libc" -version = "0.2.176" +version = "0.2.177" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "58f929b4d672ea937a23a1ab494143d968337a5f47e56d0815df1e0890ddf174" +checksum = "2874a2af47a2325c2001a6e6fad9b16a53b802102b528163885171cf92b15976" [[package]] name = "libredox" @@ -655,6 +656,39 @@ version = "2.7.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f52b00d39961fc5b2736ea853c9cc86238e165017a493d1d5c8eac6bdc4cc273" +[[package]] +name = "ntest" +version = "0.9.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fb183f0a1da7a937f672e5ee7b7edb727bf52b8a52d531374ba8ebb9345c0330" +dependencies = [ + "ntest_test_cases", + "ntest_timeout", +] + +[[package]] +name = "ntest_test_cases" +version = "0.9.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "16d0d3f2a488592e5368ebbe996e7f1d44aa13156efad201f5b4d84e150eaa93" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "ntest_timeout" +version = "0.9.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fcc7c92f190c97f79b4a332f5e81dcf68c8420af2045c936c9be0bc9de6f63b5" +dependencies = [ + "proc-macro-crate", + "proc-macro2", + "quote", + "syn 1.0.109", +] + [[package]] name = "num-bigint" version = "0.4.6" @@ -673,7 +707,7 @@ checksum = "ed3955f1a9c7c0c15e092f9c887db08b1fc683305fdf6eb6684f22555355e202" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 2.0.108", ] [[package]] @@ -702,9 +736,9 @@ checksum = "42f5e15c9953c5e4ccceeb2e7382a716482c34515315f7b03532b8b4e8393d2d" [[package]] name = "once_cell_polyfill" -version = "1.70.1" +version = "1.70.2" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "a4895175b425cb1f87721b59f0f286c2092bd4af812243672510e1ac53e2e0ad" +checksum = "384b8ab6d37215f3c5301a95a4accb5d64aa607f1fcb26a11b5303878451b4fe" [[package]] name = "pin-project-lite" @@ -739,11 +773,20 @@ dependencies = [ "portable-atomic", ] +[[package]] +name = "proc-macro-crate" +version = "3.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "219cb19e96be00ab2e37d6e299658a0cfa83e52429179969b0f0121b4ac46983" +dependencies = [ + "toml_edit", +] + [[package]] name = "proc-macro2" -version = "1.0.101" +version = "1.0.103" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "89ae43fd86e4158d6db51ad8e2b80f313af9cc74f5c0e03ccb87de09998732de" +checksum = "5ee95bc4ef87b8d5ba32e8b7714ccc834865276eab0aed5c9958d00ec45f49e8" dependencies = [ "unicode-ident", ] @@ -801,9 +844,9 @@ dependencies = [ [[package]] name = "regex" -version = "1.11.3" +version = "1.12.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8b5288124840bee7b386bc413c487869b360b2b4ec421ea56425128692f2a82c" +checksum = "843bc0191f75f3e22651ae5f1e72939ab2f72a4bc30fa80a066bd66edefc24d4" dependencies = [ "aho-corasick", "memchr", @@ -813,9 +856,9 @@ dependencies = [ [[package]] name = "regex-automata" -version = "0.4.11" +version = "0.4.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "833eb9ce86d40ef33cb1306d8accf7bc8ec2bfea4355cbdebb3df68b40925cad" +checksum = "5276caf25ac86c8d810222b3dbb938e512c55c6831a10f3e6ed1c93b84041f1c" dependencies = [ "aho-corasick", "memchr", @@ -824,9 +867,9 @@ dependencies = [ [[package]] name = "regex-syntax" -version = "0.8.6" +version = "0.8.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "caf4aa5b0f434c91fe5c7f1ecb6a5ece2130b02ad2a590589dda5146df959001" +checksum = "7a2d987857b319362043e95f5353c0535c1f58eec5336fdfcf626430af7def58" [[package]] name = 
"runtime_tracing" @@ -914,7 +957,7 @@ dependencies = [ "proc-macro2", "quote", "serde_derive_internals", - "syn", + "syn 2.0.108", ] [[package]] @@ -955,7 +998,7 @@ checksum = "d540f220d3187173da220f885ab66608367b6574e925011a9353e4badda91d79" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 2.0.108", ] [[package]] @@ -966,7 +1009,7 @@ checksum = "18d26a20a969b9e3fdf2fc2d9f21eda6c40e2de84c9408bb5d3b05d499aae711" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 2.0.108", ] [[package]] @@ -975,7 +1018,7 @@ version = "1.0.145" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "402a6f66d8c709116cf22f558eab210f5a50187f702eb4d7e5ef38d9a7f1c79c" dependencies = [ - "indexmap 2.11.4", + "indexmap 2.12.0", "itoa", "memchr", "ryu", @@ -991,7 +1034,7 @@ checksum = "175ee3e80ae9982737ca543e96133087cbd9a485eecc3bc4de9c1a37b47ea59c" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 2.0.108", ] [[package]] @@ -1020,9 +1063,20 @@ checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f" [[package]] name = "syn" -version = "2.0.106" +version = "1.0.109" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ede7c438028d4436d71104916910f5bb611972c5cfd7f89b8300a8186e6fada6" +checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] +name = "syn" +version = "2.0.108" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "da58917d35242480a05c2897064da0a80589a2a0476c9a3f2fdc83b53502e917" dependencies = [ "proc-macro2", "quote", @@ -1046,7 +1100,37 @@ checksum = "4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 2.0.108", +] + +[[package]] +name = "toml_datetime" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"f2cdb639ebbc97961c51720f858597f7f24c4fc295327923af55b74c3c724533" +dependencies = [ + "serde_core", +] + +[[package]] +name = "toml_edit" +version = "0.23.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6485ef6d0d9b5d0ec17244ff7eb05310113c3f316f2d14200d4de56b3cb98f8d" +dependencies = [ + "indexmap 2.12.0", + "toml_datetime", + "toml_parser", + "winnow", +] + +[[package]] +name = "toml_parser" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c0cbe268d35bdb4bb5a56a2de88d0ad0eb70af5384a99d648cd4b3d04039800e" +dependencies = [ + "winnow", ] [[package]] @@ -1124,9 +1208,9 @@ checksum = "9ea3136b675547379c4bd395ca6b938e5ad3c3d20fad76e7fe85f9e0d011419c" [[package]] name = "unicode-ident" -version = "1.0.19" +version = "1.0.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f63a545481291138910575129486daeaf8ac54aee4387fe7906919f7830c7d9d" +checksum = "462eeb75aeb73aea900253ce739c8e18a67423fadf006037cd3ff27e82748a06" [[package]] name = "utf8parse" @@ -1149,15 +1233,6 @@ version = "0.9.0+wasi-snapshot-preview1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cccddf32554fecc6acb585f82a32a72e28b48f8c4c1883ddfeeeaa96f7d8e519" -[[package]] -name = "wasi" -version = "0.14.7+wasi-0.2.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "883478de20367e224c0090af9cf5f9fa85bed63a95c1abf3afc5c083ebc06e8c" -dependencies = [ - "wasip2", -] - [[package]] name = "wasip2" version = "1.0.1+wasi-0.2.4" @@ -1190,7 +1265,7 @@ dependencies = [ "log", "proc-macro2", "quote", - "syn", + "syn 2.0.108", "wasm-bindgen-shared", ] @@ -1212,7 +1287,7 @@ checksum = "9f07d2f20d4da7b26400c9f4a0511e6e0345b040694e8a75bd41d578fa4421d7" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 2.0.108", "wasm-bindgen-backend", "wasm-bindgen-shared", ] @@ -1290,7 +1365,7 @@ checksum = 
"053e2e040ab57b9dc951b72c264860db7eb3b0200ba345b4e4c3b14f67855ddf" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 2.0.108", ] [[package]] @@ -1301,7 +1376,7 @@ checksum = "3f316c4a2570ba26bbec722032c4099d8c8bc095efccdc15688708623367e358" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 2.0.108", ] [[package]] @@ -1402,6 +1477,15 @@ version = "0.53.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d6bbff5f0aada427a1e5a6da5f1f98158182f26556f345ac9e04d36d0ebed650" +[[package]] +name = "winnow" +version = "0.7.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "21a0236b59786fed61e2a80582dd500fe61f18b5dca67a4a067d0bc9039339cf" +dependencies = [ + "memchr", +] + [[package]] name = "wit-bindgen" version = "0.46.0" diff --git a/src/db-backend/Cargo.toml b/src/db-backend/Cargo.toml index 2bb14e08f..b8faa8da6 100644 --- a/src/db-backend/Cargo.toml +++ b/src/db-backend/Cargo.toml @@ -51,6 +51,8 @@ vfs = "0.12" expanduser = { version = "1.2.2", optional = true} +ntest = "0.9.3" + [lib] name = "db_backend" path = "src/lib.rs" diff --git a/src/db-backend/lldbinit.py b/src/db-backend/lldbinit.py new file mode 100644 index 000000000..75d214146 --- /dev/null +++ b/src/db-backend/lldbinit.py @@ -0,0 +1,144 @@ +# GENERATED BY `RR`: rr lldbinit +# supposedly we assume MIT License as RR +# TODO: maybe autogenerate in replay + +# This is a Python script. Save it to a file and run it from LLDB using +# script exec(open('').read()) +# or similar. 
+ +def hex_unescape(string): + str_len = len(string) + if str_len % 2: # check for unexpected string length + return "" + result = bytearray() + try: + pos = 0 + while pos < str_len: + hex_char = string[pos:pos+2] + result.append(int(hex_char, 16)) + pos += 2 + except: # check for unexpected string value + return "" + return result.decode('utf-8') + +def hex_escape(string): + result = "" + for curr_char in string.encode('utf-8'): + if isinstance(curr_char, str): + curr_char = ord(curr_char) + result += format(curr_char, '02x') + return result + + +import lldb +import re +import shlex + +def run_command_and_get_output(debugger, command): + result = lldb.SBCommandReturnObject() + debugger.GetCommandInterpreter().HandleCommand(command, result) + assert result.Succeeded() + return result.GetOutput() + +def command_impl(debugger, command, exe_ctx, result, cmd_name, auto_args): + interpreter = debugger.GetCommandInterpreter() + args = shlex.split(command) + # Ensure lldb tells rr its current thread + curr_thread = exe_ctx.thread + cmd_prefix = ("process plugin packet send qRRCmd:%s:%d"% + (cmd_name, -1 if curr_thread is None else curr_thread.GetThreadID())) + arg_strs = [] + for auto_arg in auto_args: + arg_strs.append(":" + hex_escape(run_command_and_get_output(debugger, auto_arg))) + for arg in args: + arg_strs.append(":" + hex_escape(arg)) + rv = run_command_and_get_output(debugger, cmd_prefix + ''.join(arg_strs)); + rv_match = re.search('response: (.*)$', rv, re.MULTILINE); + if not rv_match: + result.SetError(None, "Invalid response: %s" % rv) + return + response = hex_unescape(rv_match.group(1)) + result.Print(response.strip()) + +def rr_command_elapsed_time(debugger, command, exe_ctx, result, internal_dict): + """Print elapsed time (in seconds) since the start of the trace, in the 'record' timeline.""" + cmd_name = 'elapsed-time' + auto_args = [] + command_impl(debugger, command, exe_ctx, result, cmd_name, auto_args) + +lldb.debugger.HandleCommand('command script 
add -f rr_command_elapsed_time elapsed-time') + +def rr_command_when(debugger, command, exe_ctx, result, internal_dict): + """Print the number of the last completely replayed rr event.""" + cmd_name = 'when' + auto_args = [] + command_impl(debugger, command, exe_ctx, result, cmd_name, auto_args) + +lldb.debugger.HandleCommand('command script add -f rr_command_when when') + +def rr_command_when_ticks(debugger, command, exe_ctx, result, internal_dict): + """Print the current rr tick count for the current thread.""" + cmd_name = 'when-ticks' + auto_args = [] + command_impl(debugger, command, exe_ctx, result, cmd_name, auto_args) + +lldb.debugger.HandleCommand('command script add -f rr_command_when_ticks when-ticks') + +def rr_command_when_tid(debugger, command, exe_ctx, result, internal_dict): + """Print the real tid for the current thread.""" + cmd_name = 'when-tid' + auto_args = [] + command_impl(debugger, command, exe_ctx, result, cmd_name, auto_args) + +lldb.debugger.HandleCommand('command script add -f rr_command_when_tid when-tid') + +def rr_command_seek_ticks(debugger, command, exe_ctx, result, internal_dict): + """Print the current rr tick count for the current thread.""" + cmd_name = 'seek-ticks' + auto_args = [] + command_impl(debugger, command, exe_ctx, result, cmd_name, auto_args) + +lldb.debugger.HandleCommand('command script add -f rr_command_seek_ticks seek-ticks') + +def rr_command_reverse_finish(debugger, command, exe_ctx, result, internal_dict): + """Print the current rr tick count for the current thread.""" + cmd_name = 'reverse-finish' + auto_args = [] + command_impl(debugger, command, exe_ctx, result, cmd_name, auto_args) + +lldb.debugger.HandleCommand('command script add -f rr_command_reverse_finish reverse-finish') + +def rr_command_rr_history_push(debugger, command, exe_ctx, result, internal_dict): + """Push an entry into the rr history.""" + cmd_name = 'rr-history-push' + auto_args = [] + command_impl(debugger, command, exe_ctx, result, 
cmd_name, auto_args) + +lldb.debugger.HandleCommand('command script add -f rr_command_rr_history_push rr-history-push') + +def rr_command_back(debugger, command, exe_ctx, result, internal_dict): + """Go back one entry in the rr history.""" + cmd_name = 'back' + auto_args = [] + command_impl(debugger, command, exe_ctx, result, cmd_name, auto_args) + +lldb.debugger.HandleCommand('command script add -f rr_command_back back') + +def rr_command_forward(debugger, command, exe_ctx, result, internal_dict): + """Go forward one entry in the rr history.""" + cmd_name = 'forward' + auto_args = [] + command_impl(debugger, command, exe_ctx, result, cmd_name, auto_args) + +lldb.debugger.HandleCommand('command script add -f rr_command_forward forward') + +def rr_command_checkpoint(debugger, command, exe_ctx, result, internal_dict): + """create a checkpoint representing a point in the execution +use the 'restart' command to return to the checkpoint""" + cmd_name = 'checkpoint' + auto_args = ['rr-where'] + command_impl(debugger, command, exe_ctx, result, cmd_name, auto_args) + +lldb.debugger.HandleCommand('command script add -f rr_command_checkpoint checkpoint') + +lldb.debugger.HandleCommand('set set prompt "(rr) "') diff --git a/src/db-backend/src/bin/virtualization-layers.rs b/src/db-backend/src/bin/virtualization-layers.rs index 5813d8515..bfb51db13 100644 --- a/src/db-backend/src/bin/virtualization-layers.rs +++ b/src/db-backend/src/bin/virtualization-layers.rs @@ -21,6 +21,8 @@ extern crate db_backend; use db_backend::core::Core; use db_backend::db::Db; use db_backend::handler::Handler; +use db_backend::rr_dispatcher::CtRRArgs; +use db_backend::task::TraceKind; // use db_backend::receiver::Receiver; // use db_backend::response::Response; @@ -78,7 +80,7 @@ fn main() -> Result<(), Box> { let db = Db::new(&PathBuf::from("")); // receiver.setup_for_virtualization_layers(&cli.socket_path, cli.caller_process_pid)?; - let mut _handler = Handler::construct(Box::new(db), true); + let 
mut _handler = Handler::construct(TraceKind::DB, CtRRArgs::default(), Box::new(db), true); // receiver.receive_loop(&mut handler)?; diff --git a/src/db-backend/src/dap.rs b/src/db-backend/src/dap.rs index fb26e1049..454b2d93d 100644 --- a/src/db-backend/src/dap.rs +++ b/src/db-backend/src/dap.rs @@ -37,6 +37,8 @@ pub struct LaunchRequestArguments { pub trace_file: Option, #[serde(rename = "rawDiffIndex", skip_serializing_if = "Option::is_none")] pub raw_diff_index: Option, + #[serde(rename = "ctRRWorkerExe", skip_serializing_if = "Option::is_none")] + pub ct_rr_worker_exe: Option, #[serde(skip_serializing_if = "Option::is_none")] pub pid: Option, #[serde(skip_serializing_if = "Option::is_none")] @@ -369,7 +371,7 @@ pub fn to_json(message: &DapMessage) -> DapResult { pub fn read_dap_message_from_reader(reader: &mut R) -> DapResult { use log::info; - info!("from_reader"); + info!("read_dap_message_from_reader"); let mut header = String::new(); reader.read_line(&mut header).map_err(|e| { use log::error; @@ -377,6 +379,7 @@ pub fn read_dap_message_from_reader(reader: &mut R) -> DapR error!("Read Line: {:?}", e); serde_json::Error::custom(e.to_string()) })?; + info!("line read"); if !header.to_ascii_lowercase().starts_with("content-length:") { // println!("no content-length!"); return Err(serde_json::Error::custom("Missing Content-Length header").into()); @@ -398,7 +401,7 @@ pub fn read_dap_message_from_reader(reader: &mut R) -> DapR .read_exact(&mut buf) .map_err(|e| serde_json::Error::custom(e.to_string()))?; let json_text = std::str::from_utf8(&buf).map_err(|e| serde_json::Error::custom(e.to_string()))?; - info!("DAP raw <- {json_text}"); + // info!("DAP raw <- {json_text}"); from_json(json_text) } diff --git a/src/db-backend/src/dap_server.rs b/src/db-backend/src/dap_server.rs index b93e8252a..6a8111bf6 100644 --- a/src/db-backend/src/dap_server.rs +++ b/src/db-backend/src/dap_server.rs @@ -1,13 +1,14 @@ use crate::dap::{self, Capabilities, DapMessage, Event, 
ProtocolMessage, Response}; -use crate::dap_types::{self, Breakpoint, SetBreakpointsArguments, SetBreakpointsResponseBody, Source}; +use crate::dap_types; use crate::db::Db; use crate::handler::Handler; use crate::paths::CODETRACER_PATHS; +use crate::rr_dispatcher::CtRRArgs; use crate::task::{ - gen_task_id, Action, CallSearchArg, CalltraceLoadArgs, CollapseCallsArgs, CtLoadFlowArguments, + Action, CallSearchArg, CalltraceLoadArgs, CollapseCallsArgs, CtLoadFlowArguments, CtLoadLocalsArguments, FunctionLocation, LoadHistoryArg, LocalStepJump, Location, ProgramEvent, RunTracepointsArg, - SourceCallJumpTarget, SourceLocation, StepArg, Task, TaskKind, TracepointId, UpdateTableArgs, + SourceCallJumpTarget, SourceLocation, StepArg, TraceKind, TracepointId, UpdateTableArgs, }; use crate::trace_processor::{load_trace_data, load_trace_metadata, TraceProcessor}; @@ -17,9 +18,9 @@ use crate::transport::DapTransport; #[cfg(feature = "browser-transport")] use crate::transport::{DapResult, WorkerTransport}; -use log::info; +use log::{info, warn}; use serde_json::json; -use std::collections::{HashMap, HashSet}; +// use std::collections::{HashMap, HashSet}; use std::error::Error; use std::fmt; @@ -86,8 +87,13 @@ pub fn run(socket_path: &PathBuf) -> Result<(), Box> { handle_client(&mut reader, &mut writer) } -fn launch(trace_folder: &Path, trace_file: &Path, raw_diff_index: Option, seq: i64) -> Result> { - // TODO: log this when logging logic is properly abstracted +fn launch( + trace_folder: &Path, + trace_file: &Path, + raw_diff_index: Option, + ct_rr_worker_exe: &Path, + seq: i64, +) -> Result> { info!("run launch() for {:?}", trace_folder); let trace_file_format = if trace_file.extension() == Some(std::ffi::OsStr::new("json")) { runtime_tracing::TraceEventsFileFormat::Json @@ -106,13 +112,28 @@ fn launch(trace_folder: &Path, trace_file: &Path, raw_diff_index: Option let mut proc = TraceProcessor::new(&mut db); proc.postprocess(&trace)?; - let mut handler = 
Handler::new(Box::new(db)); + let mut handler = Handler::new(TraceKind::DB, CtRRArgs::default(), Box::new(db)); handler.dap_client.seq = seq; handler.raw_diff_index = raw_diff_index; handler.run_to_entry(dap::Request::default())?; Ok(handler) } else { - Err("problem with reading metadata or path trace files".into()) + warn!("problem with reading metadata or path trace files: try rr?"); + let path = trace_folder.join("rr").join("latest-trace"); + if path.exists() { + let db = Db::new(&PathBuf::from("")); + let ct_rr_args = CtRRArgs { + worker_exe: PathBuf::from(ct_rr_worker_exe), + rr_trace_folder: path, + }; + let mut handler = Handler::new(TraceKind::RR, ct_rr_args, Box::new(db)); + handler.dap_client.seq = seq; + handler.raw_diff_index = raw_diff_index; + handler.run_to_entry(dap::Request::default())?; + Ok(handler) + } else { + Err("problem with reading metadata or path trace files".into()) + } } } @@ -179,6 +200,7 @@ fn handle_request( "stackTrace" => handler.stack_trace(req.clone(), req.load_args::()?)?, "variables" => handler.variables(req.clone(), req.load_args::()?)?, "restart" => handler.run_to_entry(req.clone())?, + "setBreakpoints" => handler.set_breakpoints(req.clone(), req.load_args::()?)?, "ct/load-locals" => handler.load_locals(req.clone(), req.load_args::()?)?, "ct/update-table" => handler.update_table(req.clone(), req.load_args::()?)?, "ct/event-load" => handler.event_load(req.clone())?, @@ -229,12 +251,12 @@ fn handle_request( pub struct Ctx { pub seq: i64, - pub breakpoints: HashMap>, pub handler: Option, pub received_launch: bool, pub launch_trace_folder: PathBuf, pub launch_trace_file: PathBuf, pub launch_raw_diff_index: Option, + pub ct_rr_worker_exe: PathBuf, pub received_configuration_done: bool, } @@ -242,12 +264,12 @@ impl Default for Ctx { fn default() -> Self { Self { seq: 1i64, - breakpoints: HashMap::new(), handler: None, received_launch: false, launch_trace_folder: PathBuf::from(""), launch_trace_file: PathBuf::from(""), 
launch_raw_diff_index: None, + ct_rr_worker_exe: PathBuf::from(""), received_configuration_done: false, } } @@ -259,6 +281,10 @@ pub fn handle_message( ctx: &mut Ctx, ) -> Result<(), Box> { info!("Handling message: {:?}", msg); + if let DapMessage::Request(req) = msg { + info!(" request {}", req.command); + } + match msg { DapMessage::Request(req) if req.command == "initialize" => { let capabilities = Capabilities { @@ -295,92 +321,6 @@ pub fn handle_message( ctx.seq += 1; transport.send(&event)?; } - DapMessage::Request(req) if req.command == "setBreakpoints" => { - let mut results = Vec::new(); - let args = req.load_args::()?; - if let Some(path) = args.source.path.clone() { - let lines: Vec = if let Some(bps) = args.breakpoints { - bps.into_iter().map(|b| b.line).collect() - } else { - args.lines.unwrap_or_default() - }; - let entry = ctx.breakpoints.entry(path.clone()).or_default(); - if let Some(h) = ctx.handler.as_mut() { - h.clear_breakpoints(); - for line in lines { - entry.insert(line); - let _ = h.add_breakpoint( - SourceLocation { - path: path.clone(), - line: line as usize, - }, - Task { - kind: TaskKind::AddBreak, - id: gen_task_id(TaskKind::AddBreak), - }, - ); - results.push(Breakpoint { - id: None, - verified: true, - message: None, - source: Some(Source { - name: args.source.name.clone(), - path: Some(path.clone()), - source_reference: args.source.source_reference, - presentation_hint: None, - origin: None, - sources: None, - adapter_data: None, - checksums: None, - }), - line: Some(line), - column: None, - end_line: None, - end_column: None, - instruction_reference: None, - offset: None, - reason: None, - }); - } - } - } else { - let lines = args - .breakpoints - .unwrap_or_default() - .into_iter() - .map(|b| b.line) - .collect::>(); - for line in lines { - results.push(Breakpoint { - id: None, - verified: false, - message: Some("missing source path".to_string()), - source: None, - line: Some(line), - column: None, - end_line: None, - end_column: 
None, - instruction_reference: None, - offset: None, - reason: None, - }); - } - } - let body = SetBreakpointsResponseBody { breakpoints: results }; - let resp = DapMessage::Response(Response { - base: ProtocolMessage { - seq: ctx.seq, - type_: "response".to_string(), - }, - request_seq: req.base.seq, - success: true, - command: "setBreakpoints".to_string(), - message: None, - body: serde_json::to_value(body)?, - }); - ctx.seq += 1; - transport.send(&resp)?; - } DapMessage::Request(req) if req.command == "launch" => { ctx.received_launch = true; let args = req.load_args::()?; @@ -396,9 +336,15 @@ pub fn handle_message( //info!("stored launch trace folder: {0:?}", ctx.launch_trace_folder) ctx.launch_raw_diff_index = args.raw_diff_index.clone(); + ctx.ct_rr_worker_exe = args.ct_rr_worker_exe.unwrap_or(PathBuf::from("")); if ctx.received_configuration_done { - ctx.handler = Some(launch(&ctx.launch_trace_folder, &ctx.launch_trace_file, ctx.launch_raw_diff_index.clone(), ctx.seq)?); + ctx.handler = Some(launch( + &ctx.launch_trace_folder, + &ctx.launch_trace_file, + ctx.launch_raw_diff_index.clone(), + &ctx.ct_rr_worker_exe, + ctx.seq)?); if let Some(h) = ctx.handler.as_mut() { write_dap_messages(transport, h, &mut ctx.seq)?; } @@ -447,7 +393,12 @@ pub fn handle_message( // ctx.received_launch // ); if ctx.received_launch { - ctx.handler = Some(launch(&ctx.launch_trace_folder, &ctx.launch_trace_file, ctx.launch_raw_diff_index.clone(), ctx.seq)?); + ctx.handler = Some(launch( + &ctx.launch_trace_folder, + &ctx.launch_trace_file, + ctx.launch_raw_diff_index.clone(), + &ctx.ct_rr_worker_exe, + ctx.seq)?); if let Some(h) = ctx.handler.as_mut() { write_dap_messages(transport, h, &mut ctx.seq)?; } @@ -495,10 +446,21 @@ fn handle_client( let mut ctx = Ctx::default(); - while let Ok(msg) = dap::read_dap_message_from_reader(reader) { - let _ = handle_message(&msg, transport, &mut ctx); + loop { + match dap::read_dap_message_from_reader(reader) { + Ok(msg) => { + let res = 
handle_message(&msg, transport, &mut ctx); + if let Err(e) = res { + error!("handle_message error: {e:?}"); + } + }, + Err(e) => { + error!("error from read_dap_message_from_reader: {e:?}"); + break; + } + } } - error!("maybe error from reader"); + Ok(()) } diff --git a/src/db-backend/src/db.rs b/src/db-backend/src/db.rs index 95bc15cbf..b3b70792f 100644 --- a/src/db-backend/src/db.rs +++ b/src/db-backend/src/db.rs @@ -1,23 +1,26 @@ use num_bigint::BigInt; use serde::{Deserialize, Serialize}; use std::collections::HashMap; +use std::error::Error; use std::path::PathBuf; use std::vec::Vec; -use crate::distinct_vec::DistinctVec; -use crate::expr_loader::ExprLoader; -use crate::lang::Lang; -use crate::task::{Call, CallArg, Location, RRTicks}; -use crate::value::{Type, Value}; use log::{error, info, warn}; use runtime_tracing::{ CallKey, EventLogKind, FullValueRecord, FunctionId, FunctionRecord, Line, PathId, Place, StepId, TypeId, TypeKind, TypeRecord, TypeSpecificInfo, ValueRecord, VariableId, NO_KEY, }; +use crate::distinct_vec::DistinctVec; +use crate::expr_loader::ExprLoader; +use crate::lang::Lang; +use crate::replay::Replay; +use crate::task::{Action, Breakpoint, Call, CallArg, CoreTrace, Events, Location, ProgramEvent, RRTicks, NO_INDEX, NO_PATH, NO_POSITION, CtLoadLocalsArguments, VariableWithRecord}; +use crate::value::{Type, Value, ValueRecordWithType}; + const NEXT_INTERNAL_STEP_OVERS_LIMIT: usize = 1_000; -#[derive(Debug)] +#[derive(Debug, Clone)] pub struct Db { pub workdir: PathBuf, pub functions: DistinctVec, @@ -258,6 +261,10 @@ impl Db { } fn to_ct_type(&self, type_id: &TypeId) -> Type { + if self.types.len() == 0 { // probably rr trace case + warn!("to_ct_type: for now returning just a placeholder type: assuming rr trace!"); + return Type::new(TypeKind::None, ""); + } let type_record = &self.types[*type_id]; match self.types[*type_id].kind { TypeKind::Struct => { @@ -769,3 +776,366 @@ pub enum EndOfProgram { // pub step_id: StepId, // pub place: 
Place, // } + +// type LineTraceMap = HashMap>; + +#[derive(Debug)] +pub struct DbReplay { + pub db: Box, + pub step_id: StepId, + pub call_key: CallKey, + pub breakpoint_list: Vec>, + breakpoint_next_id: usize, +} + +impl DbReplay { + pub fn new(db: Box) -> DbReplay { + let mut breakpoint_list: Vec> = Default::default(); + breakpoint_list.resize_with(db.paths.len(), HashMap::new); + DbReplay { db, step_id: StepId(0), call_key: CallKey(0), breakpoint_list, breakpoint_next_id: 0 } + } + + pub fn register_type(&mut self, typ: TypeRecord) -> TypeId { + // for no checking for typ.name logic: eventually in ensure_type? + self.db.types.push(typ); + TypeId(self.db.types.len() - 1) + } + + pub fn to_value_record(&mut self, v: ValueRecordWithType) -> ValueRecord { + match v { + ValueRecordWithType::Int { i, typ } => { + let type_id = self.register_type(typ); + ValueRecord::Int { i, type_id } + }, + ValueRecordWithType::Float { f, typ } => { + let type_id = self.register_type(typ); + ValueRecord::Float { f, type_id } + } + ValueRecordWithType::Bool { b, typ } => { + let type_id = self.register_type(typ); + ValueRecord::Bool { b, type_id } + } + ValueRecordWithType::String { text, typ } => { + let type_id = self.register_type(typ); + ValueRecord::String { text, type_id } + } + _ => todo!() + } + } + + pub fn to_value_record_with_type(&mut self, v: &ValueRecord) -> ValueRecordWithType { + match v { + ValueRecord::Int { i, type_id } => { + ValueRecordWithType::Int { i: *i, typ: self.db.types[*type_id].clone() } + }, + ValueRecord::Float { f, type_id } => { + ValueRecordWithType::Float { f: *f, typ: self.db.types[*type_id].clone() } + } + ValueRecord::Bool { b, type_id } => { + ValueRecordWithType::Bool { b: *b, typ: self.db.types[*type_id].clone() } + } + ValueRecord::String { text, type_id } => { + ValueRecordWithType::String { text: text.to_string(), typ: self.db.types[*type_id].clone() } + } + _ => todo!() + } + } + + pub fn step_id_jump(&mut self, step_id: StepId) { + if 
step_id.0 != NO_INDEX { + self.step_id = step_id; + } + } + + fn to_program_event(&self, event_record: &DbRecordEvent, index: usize) -> ProgramEvent { + let step_id_int = event_record.step_id.0; + let (path, line) = if step_id_int != NO_INDEX { + let step_record = &self.db.steps[event_record.step_id]; + ( + self.db + .workdir + .join(self.db.load_path_from_id(&step_record.path_id)) + .display() + .to_string(), + step_record.line.0, + ) + } else { + (NO_PATH.to_string(), NO_POSITION) + }; + + ProgramEvent { + kind: event_record.kind, + content: event_record.content.clone(), + bytes: event_record.content.len(), + rr_event_id: index, + direct_location_rr_ticks: step_id_int, + metadata: event_record.metadata.to_string(), + stdout: true, + event_index: index, + tracepoint_result_index: NO_INDEX, + high_level_path: path, + high_level_line: line, + base64_encoded: false, + max_rr_ticks: self + .db + .steps + .last() + .unwrap_or(&DbStep { + step_id: StepId(0), + path_id: PathId(0), + line: Line(0), + call_key: CallKey(0), + global_call_key: CallKey(0), + }) + .step_id + .0, + } + } + + fn single_step_line(&self, step_index: usize, forward: bool) -> usize { + // taking note of db.lines limits: returning a valid step id always + if forward { + if step_index < self.db.steps.len() - 1 { + step_index + 1 + } else { + step_index + } + } else if step_index > 0 { + step_index - 1 + } else { + // auto-returning the same 0 if stepping backwards from 0 + step_index + } + } + + fn step_in(&mut self, forward: bool) -> Result> { + self.step_id = StepId(self.single_step_line(self.step_id.0 as usize, forward) as i64); + Ok(true) + } + + fn step_out(&mut self, forward: bool) -> Result> { + (self.step_id, _) = self.db.step_out_step_id_relative_to(self.step_id, forward); + Ok(true) + } + + fn next(&mut self, forward: bool) -> Result> { + let step_to_different_line = true; // which is better/should be let the user configure it? 
+ (self.step_id, _) = self + .db + .next_step_id_relative_to(self.step_id, forward, step_to_different_line); + Ok(true) + } + + // returns if it has hit any breakpoints + fn step_continue(&mut self, forward: bool) -> Result> { + for step in self.db.step_from(self.step_id, forward) { + if !self.breakpoint_list.is_empty() { + if let Some(enabled) = self.breakpoint_list[step.path_id.0] + .get(&step.line.into()) + .map(|bp| bp.enabled) + { + if enabled { + self.step_id_jump(step.step_id); + // true: has hit a breakpoint + return Ok(true); + } + } + } else { + break; + } + } + + // If the continue step doesn't find a valid breakpoint. + if forward { + self.step_id_jump( + self.db + .steps + .last() + .expect("unexpected 0 steps in trace for step_continue") + .step_id, + ); + } else { + self.step_id_jump( + self.db + .steps + .first() + .expect("unexpected 0 steps in trace for step_continue") + .step_id, + ) + } + // false: hasn't hit a breakpoint + Ok(false) + } + + fn load_path_id(&self, path: &str) -> Option { + self.db.path_map.get(path).copied() + } +} + +impl Replay for DbReplay { + fn load_location(&mut self, expr_loader: &mut ExprLoader) -> Result> { + info!("load_location: db replay"); + let call_key = self.db.call_key_for_step(self.step_id); + self.call_key = call_key; + Ok(self.db.load_location(self.step_id, call_key, expr_loader)) + } + + fn run_to_entry(&mut self) -> Result<(), Box> { + self.step_id_jump(StepId(0)); + Ok(()) + } + + fn load_events(&mut self) -> Result> { + let mut events: Vec = vec![]; + let mut first_events: Vec = vec![]; + let mut contents: String = "".to_string(); + + for (i, event_record) in self.db.events.iter().enumerate() { + let mut event = self.to_program_event(event_record, i); + event.content = event_record.content.to_string(); + events.push(event.clone()); + if i < 20 { + first_events.push(event); + contents.push_str(&format!("{}\\n\n", event_record.content)); + } + } + + Ok(Events { + events, + first_events, + contents, + }) + } 
+ + // for Ok cases: + // continue returns if it has hit any breakpoints, others return always true + fn step(&mut self, action: Action, forward: bool) -> Result> { + match action { + Action::StepIn => self.step_in(forward), + Action::StepOut => self.step_out(forward), + Action::Next => self.next(forward), + Action::Continue => self.step_continue(forward), + _ => todo!(), + } + } + + fn load_locals(&mut self, _arg: CtLoadLocalsArguments) -> Result, Box> { + let variables_for_step = self.db.variables[self.step_id].clone(); + let full_value_locals: Vec = variables_for_step + .iter() + .map(|v| VariableWithRecord { + expression: self.db.variable_name(v.variable_id).to_string(), + value: self.to_value_record_with_type(&v.value), + // &self.db.to_ct_value(&v.value), + }) + .collect(); + + // TODO: fix random order here as well: ensure order(or in final locals?) + let variable_cells_for_step = self.db.variable_cells[self.step_id].clone(); + let value_tracking_locals: Vec = variable_cells_for_step + .iter() + .map(|(variable_id, place)| { + let name = self.db.variable_name(*variable_id); + info!("log local {variable_id:?} {name} place: {place:?}"); + let value = self.db.load_value_for_place(*place, self.step_id); + VariableWithRecord { + expression: self.db.variable_name(*variable_id).to_string(), + value: self.to_value_record_with_type(&value), + } + }) + .collect(); + // based on https://stackoverflow.com/a/56490417/438099 + let mut locals: Vec = full_value_locals.into_iter().chain(value_tracking_locals).collect(); + + locals.sort_by(|left, right| Ord::cmp(&left.expression, &right.expression)); + // for now just removing duplicated variables/expressions: even if storing different values + locals.dedup_by(|a, b| a.expression == b.expression); + + Ok(locals) + } + + fn load_value(&mut self, expression: &str, _lang: Lang) -> Result> { + // TODO: a more optimal way: cache a hashmap? or change structure? 
+ // or again start directly loading available values matching all expressions in the same time?: + // taking a set of expressions: probably best(maybe add an additional load_values) + for variable in &self.db.variables[self.step_id] { + if self.db.variable_names[variable.variable_id] == expression { + return Ok(self.to_value_record_with_type(&variable.value.clone())) + } + } + return Err(format!("variable {expression} not found on this step").into()) + } + + fn load_return_value(&mut self, _lang: Lang) -> Result> { + // assumes self.load_location() has been ran, and that we have the current call key + Ok(self.to_value_record_with_type(&self.db.calls[self.call_key].return_value.clone())) + } + + fn load_step_events(&mut self, step_id: StepId, exact: bool) -> Vec { + self.db.load_step_events(step_id, exact) + } + + fn jump_to(&mut self, step_id: StepId) -> Result> { + self.step_id = step_id; + Ok(true) + } + + fn add_breakpoint(&mut self, path: &str, line: i64) -> Result> { + let path_id_res: Result> = self + .load_path_id(path) + .ok_or(format!("can't add a breakpoint: can't find path `{}`` in trace", path).into()); + let path_id = path_id_res?; + let inner_map = &mut self.breakpoint_list[path_id.0]; + let breakpoint = Breakpoint { enabled: true, id: self.breakpoint_next_id as i64 }; + self.breakpoint_next_id += 1; + inner_map.insert(line as usize, breakpoint.clone()); + Ok(breakpoint) + } + + fn delete_breakpoint(&mut self, _breakpoint: &Breakpoint) -> Result> { + // let path_id_res: Result> = self + // .load_path_id(&loc.path) + // .ok_or(format!("can't add a breakpoint: can't find path `{}`` in trace", loc.path).into()); + // let path_id = path_id_res?; + // let inner_map = &mut self.breakpoint_list[path_id.0]; + // inner_map.remove(&loc.line); + todo!() + } + + fn delete_breakpoints(&mut self) -> Result> { + self.breakpoint_list.clear(); + self.breakpoint_list.resize_with(self.db.paths.len(), HashMap::new); + Ok(true) + } + + fn toggle_breakpoint(&mut self, 
breakpoint: &Breakpoint) -> Result> { + // let path_id_res: Result> = self + // .load_path_id(&loc.path) + // .ok_or(format!("can't add a breakpoint: can't find path `{}`` in trace", loc.path).into()); + // let path_id = path_id_res?; + // if let Some(breakpoint) = self.breakpoint_list[path_id.0].get_mut(&loc.line) { + let mut toggled_breakpoint = breakpoint.clone(); + toggled_breakpoint.enabled = !toggled_breakpoint.enabled; + Ok(toggled_breakpoint) + } + + fn jump_to_call(&mut self, location: &Location) -> Result> { + let step = self.db.steps[StepId(location.rr_ticks.0)]; + let call_key = step.call_key; + let first_call_step_id = self.db.calls[call_key].step_id; + self.jump_to(first_call_step_id)?; + let mut expr_loader = ExprLoader::new(CoreTrace::default()); + self.load_location(&mut expr_loader) + } + + fn event_jump(&mut self, event: &ProgramEvent) -> Result> { + let step_id = StepId(event.direct_location_rr_ticks); // currently using this field + // for compat with rr/gdb core support + self.jump_to(step_id)?; + Ok(true) + } + + fn current_step_id(&mut self) -> StepId { + self.step_id + } +} diff --git a/src/db-backend/src/diff.rs b/src/db-backend/src/diff.rs index dc5959aa8..56c2ca290 100644 --- a/src/db-backend/src/diff.rs +++ b/src/db-backend/src/diff.rs @@ -8,7 +8,8 @@ use num_derive::FromPrimitive; use runtime_tracing::FunctionId; use log::info; -use crate::db::Db; +use crate::db::{Db,DbReplay}; +use crate::task::{TraceKind, FlowUpdate}; use crate::trace_processor::{load_trace_data, load_trace_metadata, TraceProcessor}; use crate::flow_preloader::FlowPreloader; @@ -146,7 +147,11 @@ pub fn index_diff(diff: Diff, trace_folder: &Path) -> Result<(), Box> info!("diff_lines {diff_lines:?}"); let mut flow_preloader = FlowPreloader::new(); - let flow_update = flow_preloader.load_diff_flow(diff_lines, &db); + let mut replay = DbReplay::new(Box::new(db.clone())); + let flow_update = match flow_preloader.load_diff_flow(diff_lines, &db, TraceKind::DB, &mut replay) 
{ + Ok(flow_update_direct) => flow_update_direct, + Err(_e) => FlowUpdate::error("load diff flow error: {e:?}"), + }; let raw = serde_json::to_string(&flow_update)?; std::fs::write(trace_folder.join("diff_index.json"), raw)?; diff --git a/src/db-backend/src/expr_loader.rs b/src/db-backend/src/expr_loader.rs index 62a0f080f..af36a5530 100644 --- a/src/db-backend/src/expr_loader.rs +++ b/src/db-backend/src/expr_loader.rs @@ -1,5 +1,4 @@ use crate::{ - db::DbStep, lang::Lang, task::{ Branch, BranchId, BranchState, CoreTrace, Location, LoopShape, LoopShapeId, Position, NO_BRANCH_ID, NO_POSITION, @@ -295,13 +294,15 @@ impl ExprLoader { // extract function names and positions } else if NODE_NAMES[&lang].functions.contains(&node.kind().to_string()) { if let Some(name) = self.get_method_name(node, path, row) { - self.processed_files - .get_mut(path) - .unwrap() - .functions - .entry(start) - .or_default() - .push((name.to_string(), start, end)); + for i in start.0 .. end.0 { + self.processed_files + .get_mut(path) + .unwrap() + .functions + .entry(Position(i)) + .or_default() + .push((name.to_string(), start, end)); + }; self.loop_index = 1; } } else if NODE_NAMES[&lang].loops.contains(&node.kind().to_string()) && start != end { @@ -401,10 +402,9 @@ impl ExprLoader { Ok(()) } - pub fn load_branch_for_step(&self, step: &DbStep, path: &PathBuf) -> HashMap { - let position = Position(step.line.0); + pub fn load_branch_for_position(&self, position: Position, path: &PathBuf) -> HashMap { let mut results: HashMap = HashMap::default(); - if self.processed_files[path].position_branches.contains_key(&position) { + if self.processed_files.contains_key(path) && self.processed_files[path].position_branches.contains_key(&position) { let mut branch = self.processed_files[path].position_branches[&position].clone(); branch.status = BranchState::Taken; results.insert(branch.header_line.0 as usize, branch.status); @@ -434,14 +434,14 @@ impl ExprLoader { results } - pub fn 
get_loop_shape(&self, step: &DbStep, path: &PathBuf) -> Option { + pub fn get_loop_shape(&self, line: Position, path: &PathBuf) -> Option { info!("path {}", path.display()); info!( "get_loop_shape {} {:?}", - step.line.0, self.processed_files[path].position_loops + line.0, self.processed_files.get(path)?.position_loops ); - if let Some(loop_shape_id) = self.processed_files[path].position_loops.get(&Position(step.line.0)) { - return Some(self.processed_files[path].loop_shapes[loop_shape_id.0 as usize].clone()); + if let Some(loop_shape_id) = self.processed_files.get(path)?.position_loops.get(&line) { + return Some(self.processed_files.get(path)?.loop_shapes[loop_shape_id.0 as usize].clone()); } None } @@ -508,6 +508,8 @@ impl ExprLoader { } pub fn get_first_last_fn_lines(&self, location: &Location, line: &Line) -> (i64, i64) { + info!("functions {:?}", self.processed_files); + info!("get_first_last_fn_lines {:?}:{}", location.path, line.0); let (_, mut start, mut end): (String, Position, Position) = (String::default(), Position(NO_POSITION), Position(NO_POSITION)); let path_buf = &PathBuf::from(&location.path); @@ -539,10 +541,10 @@ impl ExprLoader { updated_location } - pub fn get_expr_list(&self, line: Line, location: &Location) -> Option> { + pub fn get_expr_list(&self, line: Position, location: &Location) -> Option> { self.processed_files .get(&PathBuf::from(&location.path)) - .and_then(|file| file.variables.get(&Position(line.0)).cloned()) + .and_then(|file| file.variables.get(&line).cloned()) } // pub fn load_loops(&mut self, ) diff --git a/src/db-backend/src/flow_preloader.rs b/src/db-backend/src/flow_preloader.rs index b36bcda5b..3967dacaa 100644 --- a/src/db-backend/src/flow_preloader.rs +++ b/src/db-backend/src/flow_preloader.rs @@ -1,15 +1,21 @@ +use std::collections::{HashMap, HashSet}; +use std::path::{Path, PathBuf}; +use std::error::Error; + +use log::{info, warn, error}; +use runtime_tracing::{CallKey, Line, StepId, TypeKind, TypeRecord, 
TypeSpecificInfo}; + use crate::{ - db::{Db, DbRecordEvent, DbStep}, + db::{Db, DbRecordEvent}, expr_loader::ExprLoader, task::{ - BranchesTaken, CoreTrace, FlowEvent, FlowStep, FlowUpdate, FlowUpdateState, FlowUpdateStateKind, - FlowMode, FlowViewUpdate, Iteration, Location, Loop, LoopId, LoopIterationSteps, Position, RRTicks, StepCount, + Action, BranchesTaken, CoreTrace, FlowEvent, FlowStep, FlowUpdate, FlowUpdateState, FlowUpdateStateKind, + FlowMode, FlowViewUpdate, Iteration, Location, Loop, LoopId, LoopIterationSteps, Position, RRTicks, StepCount, TraceKind, }, + lang::{lang_from_context, Lang}, + replay::Replay, + value::{to_ct_value, Value, ValueRecordWithType}, }; -use log::{info, warn}; -use runtime_tracing::{CallKey, FullValueRecord, Line, StepId}; -use std::collections::{HashMap, HashSet}; -use std::path::PathBuf; #[derive(Debug)] pub struct FlowPreloader { @@ -24,14 +30,14 @@ impl FlowPreloader { } } - pub fn load(&mut self, location: Location, line: Line, mode: FlowMode, db: &Db) -> FlowUpdate { + pub fn load(&mut self, location: Location, mode: FlowMode, kind: TraceKind, replay: &mut dyn Replay) -> FlowUpdate { info!("flow: load: {:?}", location); let path_buf = PathBuf::from(&location.path); match self.expr_loader.load_file(&path_buf) { Ok(_) => { info!("Expression loader complete!"); - let mut call_flow_preloader: CallFlowPreloader = CallFlowPreloader::new(self, location, HashSet::new(), HashSet::new(), mode); - call_flow_preloader.load_flow(line, db) + let mut call_flow_preloader: CallFlowPreloader = CallFlowPreloader::new(self, location.clone(), HashSet::new(), HashSet::new(), mode, kind); + call_flow_preloader.load_flow(location, replay) } Err(e) => { warn!("can't process file {}: error {}", location.path, e); @@ -40,7 +46,7 @@ impl FlowPreloader { } } - pub fn load_diff_flow(&mut self, diff_lines: HashSet<(PathBuf, i64)>, db: &Db) -> FlowUpdate { + pub fn load_diff_flow(&mut self, diff_lines: HashSet<(PathBuf, i64)>, db: &Db, trace_kind: 
TraceKind, replay: &mut dyn Replay) -> Result> { info!("load_diff_flow"); for diff_line in &diff_lines { match self.expr_loader.load_file(&diff_line.0) { @@ -49,12 +55,23 @@ impl FlowPreloader { } Err(e) => { warn!("can't process file {}: error {}", diff_line.0.display(), e); - return FlowUpdate::error(&format!("can't process file {}", diff_line.0.display())); + return Err(format!("can't process file {}", diff_line.0.display()).into()); // FlowUpdate::error(&format!("can't process file {}", diff_line.0.display())); } } } let mut diff_call_keys = HashSet::new(); + // put breakpoints on all of them + for diff_line in &diff_lines { + let _ = replay.add_breakpoint(&diff_line.0.display().to_string(), diff_line.1)?; + } + // TODO: breakpoints on function entries or function names as well + // so => we can count how many stops? + // + // just continue for now in next diff flow step; and if we go through the function/function entry line or + // breakpoint; count a next call for it; + // maybe this will just work because they're registered as loop first line + for step in db.step_from(runtime_tracing::StepId(0), true) { if diff_lines.contains(&(PathBuf::from(db.paths[step.path_id].clone()), step.line.0)) { diff_call_keys.insert(step.call_key.0); @@ -66,15 +83,16 @@ impl FlowPreloader { } } - let mut call_flow_preloader = CallFlowPreloader::new(self, Location::default(), diff_lines, diff_call_keys, FlowMode::Diff); - call_flow_preloader.load_flow(Line(1), db) + let mut call_flow_preloader = CallFlowPreloader::new(self, Location::default(), diff_lines, diff_call_keys, FlowMode::Diff, trace_kind); + let location = Location { line: 1, ..Location::default() }; + Ok(call_flow_preloader.load_flow(location, replay)) } // fn load_file(&mut self, path: &str) { // self.expr_loader.load_file(&PathBuf::from(path.to_string())).unwrap(); // } - pub fn get_var_list(&self, line: Line, location: &Location) -> Option> { + pub fn get_var_list(&self, line: Position, location: &Location) -> 
Option> { self.expr_loader.get_expr_list(line, location) } @@ -93,19 +111,29 @@ pub struct CallFlowPreloader<'a> { diff_lines: HashSet<(PathBuf, i64)>, diff_call_keys: HashSet, // TODO: if we add Eq, Hash it seems we can do CallKey mode: FlowMode, + trace_kind: TraceKind, + lang: Lang, } impl<'a> CallFlowPreloader<'a> { - pub fn new(flow_preloader: &'a FlowPreloader, location: Location, diff_lines: HashSet<(PathBuf, i64)>, diff_call_keys: HashSet, mode: FlowMode) -> Self { + pub fn new( + flow_preloader: &'a FlowPreloader, + location: Location, + diff_lines: HashSet<(PathBuf, i64)>, + diff_call_keys: HashSet, + mode: FlowMode, + trace_kind: TraceKind) -> Self { CallFlowPreloader { flow_preloader, - location, + location: location.clone(), active_loops: vec![], last_step_id: StepId(-1), last_expr_order: vec![], diff_lines, diff_call_keys, mode, + trace_kind, + lang: lang_from_context(&Path::new(&location.path), trace_kind), } } @@ -123,38 +151,60 @@ impl<'a> CallFlowPreloader<'a> { // last // #[allow(clippy::unwrap_used)] - pub fn load_flow(&mut self, line: Line, db: &Db) -> FlowUpdate { + pub fn load_flow(&mut self, location: Location, replay: &mut dyn Replay) -> FlowUpdate { + // Update location on flow load if self.mode == FlowMode::Call { + // let step_id = StepId(location.rr_ticks.0); + // let call_key = self.db.steps[step_id].call_key; + // let function_id = self.db.calls[call_key].function_id; + // let function_first = self.db.functions[function_id].line; + // info!("load {arg:?}"); + self.location = self .flow_preloader .expr_loader - .find_function_location(&self.location, &line); + .find_function_location(&location, &Line(location.line)); } // info!("location flow {:?}", self.location); - let mut flow_update = FlowUpdate::new(); - let flow_view_update = self.load_view_update(db); - - flow_update.location = self.location.clone(); - flow_update.view_updates.push(flow_view_update); - flow_update.status = FlowUpdateState { - kind: 
FlowUpdateStateKind::FlowFinished, - steps: 0, - }; - flow_update + match self.load_view_update(replay) { + Ok(flow_view_update) => { + let mut flow_update = FlowUpdate::new(); + flow_update.location = self.location.clone(); + flow_update.view_updates.push(flow_view_update); + flow_update.status = FlowUpdateState { + kind: FlowUpdateStateKind::FlowFinished, + steps: 0, + }; + flow_update + } + Err(e) => { + FlowUpdate::error(&format!("{:?}", e)) + } + } } - fn add_return_value(&mut self, mut flow_view_update: FlowViewUpdate, db: &Db, call_key: CallKey) -> FlowViewUpdate { + fn add_return_value(&mut self, mut flow_view_update: FlowViewUpdate, replay: &mut dyn Replay) -> FlowViewUpdate { + // assumes that replay is stopped on the place where return value is available + + let return_string = "return".to_string(); + // The if condition ensures, that the Options on which .unwrap() is called // are never None, so it is safe to unwrap them. - let return_string = "return".to_string(); if !flow_view_update.steps.is_empty() { + + let return_value_record = replay.load_return_value(self.lang).unwrap_or(ValueRecordWithType::Error { + msg: "".to_string(), + typ: TypeRecord { kind: TypeKind::Error, lang_type: "".to_string(), specific_info: TypeSpecificInfo::None }, + }); + let return_value = to_ct_value(&return_value_record); + #[allow(clippy::unwrap_used)] flow_view_update.steps.last_mut().unwrap().before_values.insert( return_string.clone(), - db.to_ct_value(&db.calls[call_key].return_value.clone()), + return_value.clone(), ); #[allow(clippy::unwrap_used)] @@ -168,7 +218,7 @@ impl<'a> CallFlowPreloader<'a> { #[allow(clippy::unwrap_used)] flow_view_update.steps.first_mut().unwrap().before_values.insert( return_string.clone(), - db.to_ct_value(&db.calls[call_key].return_value.clone()), + return_value.clone(), ); #[allow(clippy::unwrap_used)] @@ -182,91 +232,153 @@ impl<'a> CallFlowPreloader<'a> { flow_view_update } - fn next_diff_flow_step(&self, from_step_id: StepId, 
including_from: bool, db: &Db) -> (StepId, bool) { - if from_step_id.0 >= db.steps.len() as i64 { - (from_step_id, false) - } else { - // TODO: next diff step - let mut next_step_id = if !including_from { from_step_id + 1 } else { from_step_id }; - loop { - if next_step_id.0 >= db.steps.len() as i64 { // must be + 1! then we assume we should stay and report not progressing - return (from_step_id, false); - } - let next_step = db.steps[next_step_id]; - info!("check {:?}", (PathBuf::from(db.paths[next_step.path_id].clone()), next_step.line.0)); - if self.diff_call_keys.contains(&next_step.call_key.0) { - // &(PathBuf::from(db.paths[next_step.path_id].clone()), next_step.line.0)) { - return (next_step_id, true); - } else { - next_step_id = next_step_id + 1; - continue; - } - } - } + fn next_diff_flow_step(&self, _from_step_id: StepId, _including_from: bool, _replay: &mut dyn Replay) -> (StepId, bool) { + // TODO: maybe combination of replay.next, diff_call_keys check, different for cases?s + // + // if from_step_id.0 >= db.steps.len() as i64 { + // (from_step_id, false) + // } else { + // // TODO: next diff step + // let mut next_step_id = if !including_from { from_step_id + 1 } else { from_step_id }; + // loop { + // if next_step_id.0 >= db.steps.len() as i64 { // must be + 1! 
then we assume we should stay and report not progressing + // return (from_step_id, false); + // } + // let next_step = db.steps[next_step_id]; + // info!("check {:?}", (PathBuf::from(db.paths[next_step.path_id].clone()), next_step.line.0)); + // if self.diff_call_keys.contains(&next_step.call_key.0) { + // // &(PathBuf::from(db.paths[next_step.path_id].clone()), next_step.line.0)) { + // return (next_step_id, true); + // } else { + // next_step_id = next_step_id + 1; + // continue; + // } + // } + // } + todo!() } - fn find_first_step(&self, from_step_id: StepId, db: &Db) -> (StepId, bool) { - match self.mode { + fn move_to_first_step(&self, from_step_id: StepId, replay: &mut dyn Replay) -> Result<(StepId, bool), Box> { + let (mut step_id, mut progressing) = match self.mode { FlowMode::Call => (from_step_id, true), - FlowMode::Diff => self.next_diff_flow_step(StepId(0), true, db), + FlowMode::Diff => self.next_diff_flow_step(StepId(0), true, replay), + }; + if self.trace_kind == TraceKind::DB { + replay.jump_to(step_id)?; + } else { + let location = replay.jump_to_call(&self.location)?; + step_id = StepId(location.rr_ticks.0); + progressing = true; } + Ok((step_id, progressing)) } - fn find_next_step(&self, from_step_id: StepId, db: &Db) -> (StepId, bool) { - let step_to_different_line = true; // for flow for now makes sense to try to always reach a new line + fn move_to_next_step(&mut self, from_step_id: StepId, replay: &mut dyn Replay) -> (StepId, bool) { match self.mode { - FlowMode::Call => db.next_step_id_relative_to(from_step_id, true, step_to_different_line), - FlowMode::Diff => self.next_diff_flow_step(from_step_id, false, db), + FlowMode::Call => { + // let step_to_different_line = true; // for flow for now makes sense to try to always reach a new line + // db.next_step_id_relative_to(from_step_id, true, step_to_different_line), + replay.step(Action::Next, true).unwrap(); // TODO: handle error + let mut expr_loader = ExprLoader::new(CoreTrace::default()); 
+ let location = replay.load_location(&mut expr_loader).unwrap(); // TODO: handle error + let new_step_id = StepId(location.rr_ticks.0); + let progressing = new_step_id != from_step_id; + (new_step_id, progressing) + }, + FlowMode::Diff => self.next_diff_flow_step(from_step_id, false, replay), } } - fn load_view_update(&mut self, db: &Db) -> FlowViewUpdate { - let start_step_id = StepId(self.location.rr_ticks.0); - let call_key: CallKey = db.steps[start_step_id].call_key; + fn call_key_from(&self, location: &Location) -> Result> { + Ok(CallKey(location.key.parse::()?)) // for now still assume it's an integer + } + + fn load_view_update(&mut self, replay: &mut dyn Replay) -> Result> { + // let start_step_id = StepId(self.location.rr_ticks.0); + // db.calls[call_key].step_id; // let mut path_buf = &PathBuf::from(&self.location.path); - let mut iter_step_id = db.calls[call_key].step_id; + let mut iter_step_id = StepId(self.location.rr_ticks.0); let mut flow_view_update = FlowViewUpdate::new(self.location.clone()); let mut step_count = 0; let mut first = true; + let tracked_call_key_result = self.call_key_from(&self.location); + let tracked_call_key; // = CallKey(0); + match tracked_call_key_result { + Ok(call_key) => { + tracked_call_key = call_key; + }, + Err(e) => { + error!("call key parse error: {e:?}"); + return Err(e); + } + } + info!("loop"); loop { + // let (step_id, progressing) = if first { + // first = false; + // self.find_first_step(iter_step_id, replay) + // } else { + // self.find_next_step(iter_step_id, replay) + // }; let (step_id, progressing) = if first { first = false; - self.find_first_step(iter_step_id, db) + self.move_to_first_step(iter_step_id, replay)? 
} else { - self.find_next_step(iter_step_id, db) + self.move_to_next_step(iter_step_id, replay) }; + iter_step_id = step_id; - let step = db.steps[step_id]; - if self.mode == FlowMode::Diff { - let mut expr_loader = ExprLoader::new(CoreTrace::default()); - self.location = db.load_location(step_id, step.call_key, &mut expr_loader); - } - if self.mode == FlowMode::Call && call_key != step.call_key || !progressing { - flow_view_update = self.add_return_value(flow_view_update, db, call_key); + let mut expr_loader = ExprLoader::new(CoreTrace::default()); + self.location = replay.load_location(&mut expr_loader)?; + let new_call_key = match self.call_key_from(&self.location) { + Ok(call_key) => { + call_key + } + Err(e) => { + error!("error when parsing call key: stopping flow preload: {e:?}"); + break; + } + }; + + if self.mode == FlowMode::Call && tracked_call_key != new_call_key || !progressing { + replay.step(Action::StepIn, false)?; // hopefully go back to the end of our original function + let return_location = replay.load_location(&mut expr_loader)?; + let mut load_return_value = false; + // maybe this can be improved with a limited loop/jump to return/exit of call in the future + if let Ok(return_call_key) = self.call_key_from(&return_location) { + if return_call_key == tracked_call_key { + flow_view_update = self.add_return_value(flow_view_update, replay); + load_return_value = true; + } + } + if !load_return_value { + warn!("we can't load return value"); + } info!("break flow"); break; } - let events = self.load_step_flow_events(db, step_id); + let events = self.load_step_flow_events(replay, step_id); // for now not sending last step id for line visit // but this flow step object *can* contain info about several actual steps // e.g. 
events from some of the next steps on the same line visit // one can analyze the step id of the next step, or we can add this info to the object + let line = self.location.line; flow_view_update.steps.push(FlowStep::new( - step.line.0, + line, step_count, - step.step_id, + replay.current_step_id(), Iteration(0), LoopId(0), events, )); - flow_view_update.relevant_step_count.push(step.line.0 as usize); - flow_view_update.add_step_count(step.line.0, step_count); + flow_view_update.relevant_step_count.push(line as usize); + flow_view_update.add_step_count(line, step_count); info!("process loops"); let path_buf = &PathBuf::from(&self.location.path); - flow_view_update = self.process_loops(flow_view_update.clone(), step, path_buf, step_count); - flow_view_update = self.log_expressions(flow_view_update.clone(), step, db, step_id); + flow_view_update = self.process_loops(flow_view_update.clone(), Position(self.location.line), replay.current_step_id(), path_buf, step_count); + flow_view_update = self.log_expressions(flow_view_update.clone(), Position(self.location.line), replay, step_id); step_count += 1; } let path_buf = &PathBuf::from(&self.location.path); @@ -278,19 +390,20 @@ impl<'a> CallFlowPreloader<'a> { .expr_loader .final_branch_load(path_buf, &flow_view_update.branches_taken[0][0].table), ); - flow_view_update + Ok(flow_view_update) } #[allow(clippy::unwrap_used)] fn process_loops( &mut self, mut flow_view_update: FlowViewUpdate, - step: DbStep, + line: Position, + step_id: StepId, path_buf: &PathBuf, step_count: i64, ) -> FlowViewUpdate { - if let Some(loop_shape) = self.flow_preloader.expr_loader.get_loop_shape(&step, path_buf) { - if loop_shape.first.0 == step.line.0 && !self.active_loops.contains(&loop_shape.first) { + if let Some(loop_shape) = self.flow_preloader.expr_loader.get_loop_shape(line, path_buf) { + if loop_shape.first.0 == line.0 && !self.active_loops.contains(&loop_shape.first) { flow_view_update.loops.push(Loop { base: 
LoopId(loop_shape.loop_id.0), base_iteration: Iteration(0), @@ -300,14 +413,14 @@ impl<'a> CallFlowPreloader<'a> { registered_line: loop_shape.first, iteration: Iteration(0), step_counts: vec![StepCount(step_count)], - rr_ticks_for_iterations: vec![RRTicks(step.step_id.0)], + rr_ticks_for_iterations: vec![RRTicks(step_id.0)], }); self.active_loops.push(loop_shape.first); flow_view_update .loop_iteration_steps .push(vec![LoopIterationSteps::default()]); flow_view_update.branches_taken.push(vec![BranchesTaken::default()]); - } else if (flow_view_update.loops.last().unwrap().first.0) == step.line.0 { + } else if (flow_view_update.loops.last().unwrap().first.0) == line.0 { flow_view_update.loops.last_mut().unwrap().iteration.inc(); flow_view_update .loop_iteration_steps @@ -324,12 +437,12 @@ impl<'a> CallFlowPreloader<'a> { .last_mut() .unwrap() .rr_ticks_for_iterations - .push(RRTicks(step.step_id.0)); + .push(RRTicks(step_id.0)); } } - if flow_view_update.loops.last().unwrap().first.0 <= step.line.0 - && flow_view_update.loops.last().unwrap().last.0 >= step.line.0 + if flow_view_update.loops.last().unwrap().first.0 <= line.0 + && flow_view_update.loops.last().unwrap().last.0 >= line.0 { flow_view_update.steps.last_mut().unwrap().iteration = Iteration(flow_view_update.loops.last().unwrap().iteration.0); @@ -346,17 +459,17 @@ impl<'a> CallFlowPreloader<'a> { .last_mut() .unwrap() .table - .insert(step.line.0 as usize, step_count as usize); + .insert(line.0 as usize, step_count as usize); flow_view_update.add_branches( flow_view_update.loops.clone().last_mut().unwrap().base.0, - self.flow_preloader.expr_loader.load_branch_for_step(&step, path_buf), + self.flow_preloader.expr_loader.load_branch_for_position(line, path_buf), ); } } else { flow_view_update.loop_iteration_steps[0][0] .table - .insert(step.line.0 as usize, step_count as usize); - flow_view_update.add_branches(0, self.flow_preloader.expr_loader.load_branch_for_step(&step, path_buf)); + .insert(line.0 as 
usize, step_count as usize); + flow_view_update.add_branches(0, self.flow_preloader.expr_loader.load_branch_for_position(line, path_buf)); } flow_view_update } @@ -370,12 +483,12 @@ impl<'a> CallFlowPreloader<'a> { } } - fn load_step_flow_events(&self, db: &Db, step_id: StepId) -> Vec { + fn load_step_flow_events(&self, replay: &mut dyn Replay, step_id: StepId) -> Vec { // load not only exactly this step, but for the whole step line "visit": // include events for next steps for this visit, because we don't process those steps in flow // otherwise, but we do something like a `next` let exact = false; - let step_events = db.load_step_events(step_id, exact); + let step_events = replay.load_step_events(step_id, exact); let flow_events = step_events.iter().map(|event| self.to_flow_event(event)).collect(); // info!("flow events: {flow_events:?}"); #[allow(clippy::let_and_return)] // useful to have the variable for debugging/logging @@ -386,40 +499,43 @@ impl<'a> CallFlowPreloader<'a> { fn log_expressions( &mut self, mut flow_view_update: FlowViewUpdate, - step: DbStep, - db: &Db, + line: Position, + replay: &mut dyn Replay, step_id: StepId, ) -> FlowViewUpdate { let mut expr_order: Vec = vec![]; - let mut variable_map: HashMap = HashMap::default(); - - for value_record in &db.variables[step_id] { - variable_map.insert( - db.variable_names[value_record.variable_id].clone(), - value_record.clone(), - ); - } - - for (variable_id, place) in &db.variable_cells[step_id] { - let value_record = db.load_value_for_place(*place, step_id); - let full_value_record = FullValueRecord { - variable_id: *variable_id, - value: value_record, - }; - let name = db.variable_name(*variable_id); - variable_map.insert(name.clone(), full_value_record); - } - - if let Some(var_list) = self.flow_preloader.get_var_list(step.line, &self.location) { + let mut variable_map: HashMap = HashMap::default(); + + // for value_record in &db.variables[step_id] { + // variable_map.insert( + // 
db.variable_names[value_record.variable_id].clone(), + // value_record.clone(), + // ); + // } + + // for (variable_id, place) in &db.variable_cells[step_id] { + // let value_record = db.load_value_for_place(*place, step_id); + // let full_value_record = FullValueRecord { + // variable_id: *variable_id, + // value: value_record, + // }; + // let name = db.variable_name(*variable_id); + // variable_map.insert(name.clone(), full_value_record); + // } + + if let Some(var_list) = self.flow_preloader.get_var_list(line, &self.location) { info!("var_list {:?}", var_list.clone()); for value_name in &var_list { - if variable_map.contains_key(value_name) { + if let Ok(value) = replay.load_value(value_name, self.lang) { + // if variable_map.contains_key(value_name) { + let ct_value = to_ct_value(&value); flow_view_update .steps .last_mut() .unwrap() .before_values - .insert(value_name.clone(), db.to_ct_value(&variable_map[value_name].value)); + .insert(value_name.clone(), ct_value.clone()); + variable_map.insert(value_name.clone(), ct_value); } expr_order.push(value_name.clone()); } @@ -434,7 +550,7 @@ impl<'a> CallFlowPreloader<'a> { if variable_map.contains_key(variable) { flow_view_update.steps[index] .after_values - .insert(variable.clone(), db.to_ct_value(&variable_map[variable].value)); + .insert(variable.clone(), variable_map[variable].clone()); } } } diff --git a/src/db-backend/src/handler.rs b/src/db-backend/src/handler.rs index 884755915..567e4c91b 100644 --- a/src/db-backend/src/handler.rs +++ b/src/db-backend/src/handler.rs @@ -4,31 +4,34 @@ use std::error::Error; use log::{error, info, warn}; use regex::Regex; -use serde::{Deserialize, Serialize}; +use serde::Serialize; use runtime_tracing::{CallKey, EventLogKind, Line, PathId, StepId, VariableId, NO_KEY}; use crate::calltrace::Calltrace; use crate::dap::{self, DapClient, DapMessage}; -use crate::db::{Db, DbCall, DbRecordEvent, DbStep}; +use crate::db::{Db, DbCall, DbRecordEvent, DbReplay, DbStep}; use 
crate::event_db::{EventDb, SingleTableId}; use crate::expr_loader::ExprLoader; use crate::flow_preloader::FlowPreloader; use crate::program_search_tool::ProgramSearchTool; +use crate::replay::Replay; +use crate::rr_dispatcher::{CtRRArgs, RRDispatcher}; // use crate::response::{}; use crate::dap_types; // use crate::dap_types::Source; use crate::step_lines_loader::StepLinesLoader; use crate::task; use crate::task::{ - Action, Call, CallArgsUpdateResults, CallLine, CallSearchArg, CalltraceLoadArgs, CalltraceNonExpandedKind, + Action, Breakpoint, Call, CallArgsUpdateResults, CallLine, CallSearchArg, CalltraceLoadArgs, CalltraceNonExpandedKind, CollapseCallsArgs, CoreTrace, DbEventKind, FrameInfo, FunctionLocation, FlowMode, HistoryResult, HistoryUpdate, Instruction, CtLoadFlowArguments, FlowUpdate, Instructions, LoadHistoryArg, LoadStepLinesArg, LoadStepLinesUpdate, LocalStepJump, Location, MoveState, Notification, NotificationKind, ProgramEvent, RRGDBStopSignal, RRTicks, RegisterEventsArg, RunTracepointsArg, - SourceCallJumpTarget, SourceLocation, StepArg, Stop, StopType, Task, TraceUpdate, TracepointId, TracepointResults, + SourceCallJumpTarget, SourceLocation, StepArg, Stop, StopType, Task, TraceKind, TraceUpdate, TracepointId, TracepointResults, UpdateTableArgs, Variable, NO_INDEX, NO_PATH, NO_POSITION, NO_STEP_ID, }; use crate::tracepoint_interpreter::TracepointInterpreter; +use crate::value::to_ct_value; const TRACEPOINT_RESULTS_LIMIT_BEFORE_UPDATE: usize = 5; @@ -50,17 +53,14 @@ pub struct Handler { pub resulting_dap_messages: Vec, pub raw_diff_index: Option, pub previous_step_id: StepId, + pub breakpoints: HashMap<(String, i64), Vec>, - pub breakpoint_list: Vec>, + pub trace_kind: TraceKind, + pub replay: Box, + pub ct_rr_args: CtRRArgs, + pub load_flow_index: usize, } -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct BreakpointRecord { - pub is_active: bool, -} - -type LineTraceMap = HashMap>; - // two choices: // return results and potentially // 
generate multiple events as a generator @@ -80,25 +80,28 @@ type LineTraceMap = HashMap>; // sender. impl Handler { - pub fn new(db: Box) -> Handler { - Self::construct(db, false) + pub fn new(trace_kind: TraceKind, ct_rr_args: CtRRArgs, db: Box) -> Handler { + Self::construct(trace_kind, ct_rr_args, db, false) } - pub fn construct(db: Box, indirect_send: bool) -> Handler { + pub fn construct(trace_kind: TraceKind, ct_rr_args: CtRRArgs, db: Box, indirect_send: bool) -> Handler { let calltrace = Calltrace::new(&db); let trace = CoreTrace::default(); let mut expr_loader = ExprLoader::new(trace.clone()); - let mut breakpoint_list: Vec> = Default::default(); - breakpoint_list.resize_with(db.paths.len(), HashMap::new); let step_lines_loader = StepLinesLoader::new(&db, &mut expr_loader); + let replay: Box = if trace_kind == TraceKind::DB { + Box::new(DbReplay::new(db.clone())) + } else { + Box::new(RRDispatcher::new("stable", 0, ct_rr_args.clone())) + }; // let sender = sender::Sender::new(); Handler { - db, + trace_kind, + db: db.clone(), step_id: StepId(0), last_call_key: CallKey(0), indirect_send, // sender, - breakpoint_list, event_db: EventDb::new(), flow_preloader: FlowPreloader::new(), expr_loader, @@ -107,6 +110,10 @@ impl Handler { step_lines_loader, dap_client: DapClient::default(), previous_step_id: StepId(0), + breakpoints: HashMap::new(), + replay, + ct_rr_args, + load_flow_index: 0, resulting_dap_messages: vec![], raw_diff_index: None, } @@ -204,29 +211,28 @@ impl Handler { self.send_dap(&response) } - fn complete_move(&mut self, is_main: bool) -> Result<(), Box> { - let call_key = self.db.call_key_for_step(self.step_id); - let reset_flow = is_main || call_key != self.last_call_key; - self.last_call_key = call_key; - info!("complete move: step_id: {:?}", self.step_id); - let move_state = MoveState { - status: "".to_string(), - location: self.db.load_location(self.step_id, call_key, &mut self.expr_loader), - c_location: Location::default(), - main: is_main, 
- reset_flow, - stop_signal: RRGDBStopSignal::OtherStopSignal, - frame_info: FrameInfo::default(), - }; - - // info!("move_state {:?}", move_state); + // will be sent after completion of query + fn prepare_stopped_event(&mut self, is_main: bool) -> Result<(), Box> { let reason = if is_main { "entry" } else { "step" }; info!("generate stopped event"); let raw_event = self.dap_client.stopped_event(reason)?; info!("raw stopped event: {:?}", raw_event); self.send_dap(&raw_event)?; - let raw_complete_move_event = self.dap_client.complete_move_event(&move_state)?; + Ok(()) + } + + fn prepare_complete_move_event(&mut self, move_state: &MoveState) -> Result<(), Box> { + let raw_complete_move_event = self.dap_client.complete_move_event(move_state)?; self.send_dap(&raw_complete_move_event)?; + Ok(()) + } + + fn prepare_output_events(&mut self) -> Result<(), Box> { + if self.trace_kind == TraceKind::RR { + warn!("prepare_output_events not implemented for rr"); + return Ok(()); // TODO + } + if self.step_id.0 > self.previous_step_id.0 { let mut raw_output_events: Vec = vec![]; for event in self.db.events.iter() { @@ -253,9 +259,14 @@ impl Handler { self.send_dap(raw_output_event)?; } } - self.previous_step_id = self.step_id; + Ok(()) + } - // self.send_notification(NotificationKind::Success, "Complete move!", true)?; + fn prepare_eventual_error_event(&mut self) -> Result<(), Box> { + if self.trace_kind == TraceKind::RR { + warn!("prepare_eventual_error_event not implemented for rr"); + return Ok(()); // TODO + } let exact = false; // or for now try as flow // true just for this exact step let step_events = self.db.load_step_events(self.step_id, exact); @@ -272,43 +283,63 @@ impl Handler { Ok(()) } + fn complete_move(&mut self, is_main: bool) -> Result<(), Box> { + info!("complete_move"); + + // self.db.load_location(self.step_id, call_key, &mut self.expr_loader), + let location = self.replay.load_location(&mut self.expr_loader)?; + // let call_key = location.call_key; // 
self.db.call_key_for_step(self.step_id); + // TODO: change if we need to support non-int keys + let call_key = CallKey(location.key.parse::()?); + let reset_flow = is_main || call_key != self.last_call_key; + self.last_call_key = call_key; + info!(" location: {location:?}"); + + let move_state = MoveState { + status: "".to_string(), + location, + c_location: Location::default(), + main: is_main, + reset_flow, + stop_signal: RRGDBStopSignal::OtherStopSignal, + frame_info: FrameInfo::default(), + }; + + self.prepare_stopped_event(is_main)?; + self.prepare_complete_move_event(&move_state)?; + self.prepare_output_events()?; + + self.previous_step_id = self.step_id; + + // self.send_notification(NotificationKind::Success, "Complete move!", true)?; + + self.prepare_eventual_error_event()?; + + Ok(()) + } + pub fn run_to_entry(&mut self, _req: dap::Request) -> Result<(), Box> { - self.step_id_jump(StepId(0)); + self.replay.run_to_entry()?; + self.step_id = StepId(0); // TODO: use only db replay step_id or another workaround? self.complete_move(true)?; Ok(()) } - pub fn load_locals(&mut self, req: dap::Request, _args: task::CtLoadLocalsArguments) -> Result<(), Box> { - let full_value_locals: Vec = self.db.variables[self.step_id] - .iter() - .map(|v| Variable { - expression: self.db.variable_name(v.variable_id).to_string(), - value: self.db.to_ct_value(&v.value), - }) - .collect(); - - // TODO: fix random order here as well: ensure order(or in final locals?) 
- let value_tracking_locals: Vec = self.db.variable_cells[self.step_id] - .iter() - .map(|(variable_id, place)| { - let name = self.db.variable_name(*variable_id); - info!("log local {variable_id:?} {name} place: {place:?}"); - let value = self.db.load_value_for_place(*place, self.step_id); - Variable { - expression: self.db.variable_name(*variable_id).to_string(), - value: self.db.to_ct_value(&value), - } - }) - .collect(); - // based on https://stackoverflow.com/a/56490417/438099 - let mut locals: Vec = full_value_locals.into_iter().chain(value_tracking_locals).collect(); - - locals.sort_by(|left, right| Ord::cmp(&left.expression, &right.expression)); - // for now just removing duplicated variables/expressions: even if storing different values - locals.dedup_by(|a, b| a.expression == b.expression); + pub fn load_locals(&mut self, req: dap::Request, args: task::CtLoadLocalsArguments) -> Result<(), Box> { + // if self.trace_kind == TraceKind::RR { + // let locals: Vec = vec![]; + // warn!("load_locals not implemented for rr yet"); + let locals_with_records = self.replay.load_locals(args)?; + let locals = locals_with_records.iter().map(|l| Variable { + expression: l.expression.clone(), + value: to_ct_value(&l.value) + }).collect(); + self.respond_dap(req, task::CtLoadLocalsResponseBody { locals })?; + Ok(()) + // } - self.respond_dap(req, task::CtLoadLocalsResponseBody { locals })?; - Ok(()) + // self.respond_dap(req, task::CtLoadLocalsResponseBody { locals })?; + // Ok(()) } // pub fn load_callstack(&mut self, task: Task) -> Result<(), Box> { @@ -388,6 +419,11 @@ impl Handler { _req: dap::Request, args: CalltraceLoadArgs, ) -> Result<(), Box> { + if self.trace_kind == TraceKind::RR { + warn!("load_calltrace_section not implemented for rr"); + return Ok(()); + } + let start_call_line_index = args.start_call_line_index; let call_lines = self.load_local_calltrace(args)?; let total_count = self.calc_total_calls(); @@ -406,13 +442,18 @@ impl Handler { } pub fn 
load_flow(&mut self, _req: dap::Request, arg: CtLoadFlowArguments) -> Result<(), Box> { + let mut flow_replay: Box = if self.trace_kind == TraceKind::DB { + Box::new(DbReplay::new(self.db.clone())) + } else { + Box::new(RRDispatcher::new("flow", self.load_flow_index, self.ct_rr_args.clone())) + }; + self.load_flow_index += 1; + + // TODO: eventually cleanup or manage in a more optimal way flow replays: caching + // if possible for example + let flow_update = if arg.flow_mode == FlowMode::Call { - let step_id = StepId(arg.location.rr_ticks.0); - let call_key = self.db.steps[step_id].call_key; - let function_id = self.db.calls[call_key].function_id; - let function_first = self.db.functions[function_id].line; - info!("load {arg:?}"); - self.flow_preloader.load(arg.location, function_first, arg.flow_mode, &self.db) + self.flow_preloader.load(arg.location, arg.flow_mode, self.trace_kind, &mut *flow_replay) } else { if let Some(raw_flow) = &self.raw_diff_index { serde_json::from_str::(&raw_flow)? @@ -478,7 +519,9 @@ impl Handler { } pub fn step_in(&mut self, forward: bool) -> Result<(), Box> { - self.step_id = StepId(self.single_step_line(self.step_id.0 as usize, forward) as i64); + self.replay.step(Action::StepIn, forward)?; + self.step_id = self.replay.current_step_id(); + Ok(()) } @@ -509,57 +552,23 @@ impl Handler { } pub fn next(&mut self, forward: bool) -> Result<(), Box> { - let step_to_different_line = true; // which is better/should be let the user configure it? 
- (self.step_id, _) = self - .db - .next_step_id_relative_to(self.step_id, forward, step_to_different_line); + self.replay.step(Action::Next, forward)?; + self.step_id = self.replay.current_step_id(); Ok(()) } pub fn step_out(&mut self, forward: bool) -> Result<(), Box> { - (self.step_id, _) = self.db.step_out_step_id_relative_to(self.step_id, forward); + self.replay.step(Action::StepOut, forward)?; + self.step_id = self.replay.current_step_id(); Ok(()) } #[allow(clippy::expect_used)] pub fn step_continue(&mut self, forward: bool) -> Result<(), Box> { - for step in self.db.step_from(self.step_id, forward) { - if !self.breakpoint_list.is_empty() { - if let Some(is_active) = self.breakpoint_list[step.path_id.0] - .get(&step.line.into()) - .map(|bp| bp.is_active) - { - if is_active { - self.step_id_jump(step.step_id); - self.complete_move(false)?; - return Ok(()); - } - } - } else { - break; - } + if !self.replay.step(Action::Continue, forward)? { + self.send_notification(NotificationKind::Info, "No breakpoints were hit!", false)?; } - - // If the continue step doesn't find a valid breakpoint. 
- if forward { - self.step_id_jump( - self.db - .steps - .last() - .expect("unexpected 0 steps in trace for step_continue") - .step_id, - ); - } else { - self.step_id_jump( - self.db - .steps - .first() - .expect("unexpected 0 steps in trace for step_continue") - .step_id, - ) - } - self.complete_move(false)?; - self.send_notification(NotificationKind::Info, "No breakpoints were hit!", false)?; + self.step_id = self.replay.current_step_id(); Ok(()) } @@ -576,21 +585,23 @@ impl Handler { Action::Continue => self.step_continue(!arg.reverse)?, _ => error!("action {:?} not implemented", arg.action), } - if arg.complete && arg.action != Action::Continue { + if arg.complete { // && arg.action != Action::Continue { self.complete_move(false)?; } - if original_step_id == self.step_id { - let location = if self.step_id == StepId(0) { "beginning" } else { "end" }; - self.send_notification( - NotificationKind::Warning, - &format!("Limit of record at the {location} already reached!"), - false, - )?; - } else if self.step_id == StepId(0) { - self.send_notification(NotificationKind::Info, "Beginning of record reached", false)?; - } else if self.step_id.0 as usize == self.db.steps.len() - 1 { - self.send_notification(NotificationKind::Info, "End of record reached", false)?; + if self.trace_kind == TraceKind::DB { + if original_step_id == self.step_id { + let location = if self.step_id == StepId(0) { "beginning" } else { "end" }; + self.send_notification( + NotificationKind::Warning, + &format!("Limit of record at the {location} already reached!"), + false, + )?; + } else if self.step_id == StepId(0) { + self.send_notification(NotificationKind::Info, "Beginning of record reached", false)?; + } else if self.step_id.0 as usize == self.db.steps.len() - 1 { + self.send_notification(NotificationKind::Info, "End of record reached", false)?; + } } // } else if arg.action == Action::Next { // let new_step = self.db.steps[self.step_id]; @@ -610,19 +621,11 @@ impl Handler { } pub fn 
event_load(&mut self, _req: dap::Request) -> Result<(), Box> { - let mut events: Vec = vec![]; - let mut first_events: Vec = vec![]; - let mut contents: String = "".to_string(); + let events_data = self.replay.load_events()?; - for (i, event_record) in self.db.events.iter().enumerate() { - let mut event = self.to_program_event(event_record, i); - event.content = event_record.content.to_string(); - events.push(event.clone()); - if i < 20 { - first_events.push(event); - contents.push_str(&format!("{}\\n\n", event_record.content)); - } - } + let events = events_data.events; + let first_events = events_data.first_events; + let contents = events_data.contents; self.event_db.register_events(DbEventKind::Record, &events, vec![-1]); self.event_db.refresh_global(); @@ -637,9 +640,8 @@ impl Handler { } pub fn event_jump(&mut self, _req: dap::Request, event: ProgramEvent) -> Result<(), Box> { - let step_id = StepId(event.direct_location_rr_ticks); // currently using this field - // for compat with rr/gdb core support - self.step_id_jump(step_id); + let _ = self.replay.event_jump(&event)?; + self.step_id = self.replay.current_step_id(); self.complete_move(false)?; Ok(()) @@ -648,7 +650,8 @@ impl Handler { pub fn calltrace_jump(&mut self, _req: dap::Request, location: Location) -> Result<(), Box> { let step_id = StepId(location.rr_ticks.0); // using this field // for compat with rr/gdb core support - self.step_id_jump(step_id); + self.replay.jump_to(step_id)?; + self.step_id = self.replay.current_step_id(); self.complete_move(false)?; Ok(()) @@ -745,7 +748,8 @@ impl Handler { } pub fn history_jump(&mut self, _req: dap::Request, loc: Location) -> Result<(), Box> { - self.step_id_jump(StepId(loc.rr_ticks.0)); + self.replay.jump_to(StepId(loc.rr_ticks.0))?; + self.step_id = self.replay.current_step_id(); self.complete_move(false)?; Ok(()) } @@ -799,21 +803,39 @@ impl Handler { _req: dap::Request, source_location: SourceLocation, ) -> Result<(), Box> { - if let Some(step_id) = 
self.get_closest_step_id(&source_location) { - self.step_id_jump(step_id); - self.complete_move(false)?; - Ok(()) + if self.trace_kind == TraceKind::DB { + if let Some(step_id) = self.get_closest_step_id(&source_location) { + self.replay.jump_to(step_id)?; + self.step_id = self.replay.current_step_id(); + self.complete_move(false)?; + Ok(()) + } else { + let err: String = format!("unknown location: {}", &source_location); + Err(err.into()) + } } else { - let err: String = format!("unknown location: {}", &source_location); - Err(err.into()) + let b = self.replay.add_breakpoint(&source_location.path, source_location.line as i64)?; + match self.replay.step(Action::Continue, true) { + Ok(_) => { + self.replay.delete_breakpoint(&b)?; // make sure we do it before potential `?` fail in next functions + let _location = self.replay.load_location(&mut self.expr_loader)?; + self.step_id = self.replay.current_step_id(); + self.complete_move(false)?; + Ok(()) + } + Err(e) => { + self.replay.delete_breakpoint(&b)?; + Err(e) + } + } } } - fn step_id_jump(&mut self, step_id: StepId) { - if step_id.0 != NO_INDEX { - self.step_id = step_id; - } - } + // fn step_id_jump(&mut self, step_id: StepId) { + // if step_id.0 != NO_INDEX { + // self.step_id = step_id; + // } + // } fn get_call_target(&self, loc: &SourceCallJumpTarget) -> Option { let mut line: Line = Line(loc.line as i64); @@ -849,11 +871,13 @@ impl Handler { line: call_target.line, path: call_target.path.clone(), }) { - self.step_id_jump(line_step_id); + self.replay.jump_to(line_step_id)?; + self.step_id = self.replay.current_step_id(); } if let Some(call_step_id) = self.get_call_target(&call_target) { - self.step_id_jump(call_step_id); + self.replay.jump_to(call_step_id)?; + self.step_id = self.replay.current_step_id(); self.complete_move(false)?; Ok(()) } else { @@ -868,39 +892,98 @@ impl Handler { } } - pub fn add_breakpoint(&mut self, loc: SourceLocation, _task: Task) -> Result<(), Box> { - let path_id_res: Result> = 
self - .load_path_id(&loc.path) - .ok_or(format!("can't add a breakpoint: can't find path `{}`` in trace", loc.path).into()); - let path_id = path_id_res?; - let inner_map = &mut self.breakpoint_list[path_id.0]; - inner_map.insert(loc.line, BreakpointRecord { is_active: true }); + pub fn set_breakpoints(&mut self, request: dap::Request, args: dap_types::SetBreakpointsArguments) -> Result<(), Box> { + let mut results = Vec::new(); + // for now simples to redo them every time: TODO possible optimizations + self.clear_breakpoints()?; + if let Some(path) = args.source.path.clone() { + let lines: Vec = if let Some(bps) = args.breakpoints { + bps.into_iter().map(|b| b.line).collect() + } else { + args.lines.unwrap_or_default() + }; + + for line in lines { + let _ = self.add_breakpoint( + SourceLocation { + path: path.clone(), + line: line as usize, + }, + ); + results.push(dap_types::Breakpoint { + id: None, + verified: true, + message: None, + source: Some(dap_types::Source { + name: args.source.name.clone(), + path: Some(path.clone()), + source_reference: args.source.source_reference, + presentation_hint: None, + origin: None, + sources: None, + adapter_data: None, + checksums: None, + }), + line: Some(line), + column: None, + end_line: None, + end_column: None, + instruction_reference: None, + offset: None, + reason: None, + }); + } + } else { + let lines = args + .breakpoints + .unwrap_or_default() + .into_iter() + .map(|b| b.line) + .collect::>(); + for line in lines { + results.push(dap_types::Breakpoint { + id: None, + verified: false, + message: Some("missing source path".to_string()), + source: None, + line: Some(line), + column: None, + end_line: None, + end_column: None, + instruction_reference: None, + offset: None, + reason: None, + }); + } + } + self.respond_dap(request, dap_types::SetBreakpointsResponseBody { breakpoints: results })?; Ok(()) } - pub fn delete_breakpoint(&mut self, loc: SourceLocation, _task: Task) -> Result<(), Box> { - let path_id_res: 
Result> = self - .load_path_id(&loc.path) - .ok_or(format!("can't add a breakpoint: can't find path `{}`` in trace", loc.path).into()); - let path_id = path_id_res?; - let inner_map = &mut self.breakpoint_list[path_id.0]; - inner_map.remove(&loc.line); + pub fn add_breakpoint(&mut self, loc: SourceLocation) -> Result<(), Box> { + let breakpoint = self.replay.add_breakpoint(&loc.path, loc.line as i64)?; + let entry = self.breakpoints.entry((loc.path.clone(), loc.line as i64)).or_default(); + entry.push(breakpoint); + Ok(()) + } + + pub fn delete_breakpoints_for_location(&mut self, loc: SourceLocation, _task: Task) -> Result<(), Box> { + if self.breakpoints.contains_key(&(loc.path.clone(), loc.line as i64)) { + for breakpoint in &self.breakpoints[&(loc.path.clone(), loc.line as i64)] { + self.replay.delete_breakpoint(breakpoint)?; + } + } Ok(()) } - pub fn clear_breakpoints(&mut self) { - self.breakpoint_list.clear(); - self.breakpoint_list.resize_with(self.db.paths.len(), HashMap::new); + pub fn clear_breakpoints(&mut self) -> Result<(), Box> { + let _ = self.replay.delete_breakpoints()?; + self.breakpoints.clear(); + Ok(()) } - pub fn toggle_breakpoint(&mut self, loc: SourceLocation, _task: Task) -> Result<(), Box> { - let path_id_res: Result> = self - .load_path_id(&loc.path) - .ok_or(format!("can't add a breakpoint: can't find path `{}`` in trace", loc.path).into()); - let path_id = path_id_res?; - if let Some(breakpoint) = self.breakpoint_list[path_id.0].get_mut(&loc.line) { - breakpoint.is_active = !breakpoint.is_active; - } + pub fn toggle_breakpoint(&mut self, _loc: SourceLocation, _task: Task) -> Result<(), Box> { + // TODO: use path,line to id map: self.replay.toggle_breakpoint()?; Ok(()) } @@ -1077,7 +1160,8 @@ impl Handler { } pub fn trace_jump(&mut self, _req: dap::Request, event: ProgramEvent) -> Result<(), Box> { - self.step_id_jump(StepId(event.direct_location_rr_ticks)); + self.replay.jump_to(StepId(event.direct_location_rr_ticks))?; + self.step_id = 
self.replay.current_step_id(); self.complete_move(false)?; Ok(()) } @@ -1147,7 +1231,8 @@ impl Handler { } pub fn local_step_jump(&mut self, _req: dap::Request, arg: LocalStepJump) -> Result<(), Box> { - self.step_id_jump(StepId(arg.rr_ticks)); + self.replay.jump_to(StepId(arg.rr_ticks))?; + self.step_id = self.replay.current_step_id(); self.complete_move(false)?; Ok(()) } @@ -1550,7 +1635,7 @@ mod tests { #[test] fn test_struct_handling() { let db = setup_db(); - let handler: Handler = Handler::new(Box::new(db)); + let handler: Handler = Handler::new(TraceKind::DB, CtRRArgs::default(), Box::new(db)); let value = handler.db.to_ct_value(&ValueRecord::Struct { field_values: vec![], type_id: TypeId(1), @@ -1563,18 +1648,18 @@ mod tests { let db = setup_db(); // Act: Create a new Handler instance - let handler: Handler = Handler::new(Box::new(db)); + let handler: Handler = Handler::new(TraceKind::DB, CtRRArgs::default(), Box::new(db)); // Assert: Check that the Handler instance is correctly initialized assert_eq!(handler.step_id, StepId(0)); - assert!(!handler.breakpoint_list.is_empty()); + assert!(!handler.breakpoints.is_empty()); } // Test single tracepoint #[test] fn test_run_single_tracepoint() -> Result<(), Box> { let db = setup_db(); - let mut handler: Handler = Handler::new(Box::new(db)); + let mut handler: Handler = Handler::new(TraceKind::DB, CtRRArgs::default(), Box::new(db)); handler.event_load(dap::Request::default())?; handler.run_tracepoints(dap::Request::default(), make_tracepoints_args(1, 0))?; assert_eq!(handler.event_db.single_tables.len(), 2); @@ -1585,7 +1670,7 @@ mod tests { #[test] fn test_multiple_tracepoints() -> Result<(), Box> { let db = setup_db(); - let mut handler: Handler = Handler::new(Box::new(db)); + let mut handler: Handler = Handler::new(TraceKind::DB, CtRRArgs::default(), Box::new(db)); handler.event_load(dap::Request::default())?; // TODO // this way we are resetting them after reforms @@ -1620,7 +1705,7 @@ mod tests { fn 
test_multile_tracepoints_with_multiline_logs() -> Result<(), Box> { let size: usize = 10000; let db: Db = setup_db(); - let mut handler: Handler = Handler::new(Box::new(db)); + let mut handler: Handler = Handler::new(TraceKind::DB, CtRRArgs::default(), Box::new(db)); handler.event_load(dap::Request::default())?; handler.run_tracepoints( dap::Request::default(), @@ -1647,7 +1732,7 @@ mod tests { fn test_tracepoint_in_loop() -> Result<(), Box> { let size = 10000; let db: Db = setup_db_loop(size); - let mut handler: Handler = Handler::new(Box::new(db)); + let mut handler: Handler = Handler::new(TraceKind::DB, CtRRArgs::default(), Box::new(db)); handler.event_load(dap::Request::default())?; handler.run_tracepoints(dap::Request::default(), make_tracepoints_args(2, 0))?; assert_eq!(handler.event_db.single_tables[1].events.len(), size); @@ -1661,7 +1746,7 @@ mod tests { // Number of tracepoints and steps let count: usize = 10000; let db: Db = setup_db_with_step_count(count); - let mut handler: Handler = Handler::new(Box::new(db)); + let mut handler: Handler = Handler::new(TraceKind::DB, CtRRArgs::default(), Box::new(db)); handler.event_load(dap::Request::default())?; handler.run_tracepoints(dap::Request::default(), make_tracepoints_with_count(count))?; @@ -1674,7 +1759,7 @@ mod tests { let db = setup_db(); // Act: Create a new Handler instance - let mut handler: Handler = Handler::new(Box::new(db)); + let mut handler: Handler = Handler::new(TraceKind::DB, CtRRArgs::default(), Box::new(db)); let request = dap::Request::default(); handler.step(request, make_step_in())?; assert_eq!(handler.step_id, StepId(1_i64)); @@ -1684,7 +1769,7 @@ mod tests { #[test] fn test_source_jumps() -> Result<(), Box> { let db = setup_db(); - let mut handler: Handler = Handler::new(Box::new(db)); + let mut handler: Handler = Handler::new(TraceKind::DB, CtRRArgs::default(), Box::new(db)); let path = "/test/workdir"; let source_location: SourceLocation = SourceLocation { path: path.to_string(), @@ 
-1715,7 +1800,7 @@ mod tests { #[test] fn test_local_calltrace() -> Result<(), Box> { let db = setup_db_with_calls(); - let mut handler: Handler = Handler::new(Box::new(db)); + let mut handler: Handler = Handler::new(TraceKind::DB, CtRRArgs::default(), Box::new(db)); let calltrace_load_args = CalltraceLoadArgs { location: handler @@ -1757,7 +1842,7 @@ mod tests { let path = &PathBuf::from(raw_path); // (&PathBuf::from("/home/alexander92/codetracer-desktop/src/db-backend/example-trace/") let db = load_db_for_trace(path); - let mut handler: Handler = Handler::new(Box::new(db)); + let mut handler: Handler = Handler::new(TraceKind::DB, CtRRArgs::default(), Box::new(db)); // step-in from 1 to end(maybe also a parameter?) // on each step check validity, load locals, load callstack @@ -1814,6 +1899,7 @@ mod tests { } let trace_metadata_file = path.join("trace_metadata.json"); let trace = load_trace_data(&trace_file, trace_file_format).expect("expected that it can load the trace file"); + info!("trace {:?}", trace); let trace_metadata = load_trace_metadata(&trace_metadata_file).expect("expected that it can load the trace metadata file"); let mut db = Db::new(&trace_metadata.workdir); diff --git a/src/db-backend/src/lang.rs b/src/db-backend/src/lang.rs index e627f4e83..ae5b53da6 100644 --- a/src/db-backend/src/lang.rs +++ b/src/db-backend/src/lang.rs @@ -1,6 +1,11 @@ +use std::path::Path; +use std::ffi::OsStr; + use num_derive::FromPrimitive; use serde_repr::*; +use crate::task::TraceKind; + #[derive( Debug, Default, @@ -36,3 +41,15 @@ pub enum Lang { PythonDb, Unknown, } + +pub fn lang_from_context(path: &Path, trace_kind: TraceKind) -> Lang { + let extension = path.extension().unwrap_or(OsStr::new("")).to_str().unwrap_or(""); + // for now important mostly for system langs/rr support + // but still good to add all supported langs: TODO + match extension { + "rs" => if trace_kind == TraceKind::DB { Lang::RustWasm } else { Lang::Rust }, + "c" => Lang::C, + "cpp" => 
Lang::Cpp, + _ => Lang::Unknown, + } +} diff --git a/src/db-backend/src/lib.rs b/src/db-backend/src/lib.rs index f01815fba..d099740f1 100644 --- a/src/db-backend/src/lib.rs +++ b/src/db-backend/src/lib.rs @@ -30,8 +30,8 @@ pub mod dap_error; pub mod dap_server; pub mod dap_types; pub mod db; -pub mod distinct_vec; pub mod diff; +pub mod distinct_vec; pub mod event_db; pub mod expr_loader; pub mod flow_preloader; @@ -39,6 +39,9 @@ pub mod handler; pub mod lang; pub mod paths; pub mod program_search_tool; +pub mod query; +pub mod replay; +pub mod rr_dispatcher; pub mod step_lines_loader; pub mod task; pub mod trace_processor; diff --git a/src/db-backend/src/main.rs b/src/db-backend/src/main.rs index 553cee62a..4a10c2d1a 100644 --- a/src/db-backend/src/main.rs +++ b/src/db-backend/src/main.rs @@ -38,6 +38,9 @@ mod handler; mod lang; mod paths; mod program_search_tool; +mod query; +mod replay; +mod rr_dispatcher; mod step_lines_loader; mod task; mod trace_processor; @@ -124,7 +127,12 @@ fn main() -> Result<(), Box> { match cli.cmd { Commands::DapServer { socket_path, stdio } => { if stdio { - let _ = db_backend::dap_server::run_stdio(); + // thread::spawn(move || { + let res = db_backend::dap_server::run_stdio(); + if let Err(e) = res { + error!("dap server run error: {e:?}"); + } + // }) } else { let socket_path = if let Some(p) = socket_path { p @@ -132,10 +140,12 @@ fn main() -> Result<(), Box> { let pid = std::process::id() as usize; db_backend::dap_server::socket_path_for(pid) }; - - info!("dap_server::run {:?}", socket_path); - - let _ = db_backend::dap_server::run(&socket_path); + // thread::spawn(move || { + let res = db_backend::dap_server::run(&socket_path); + if let Err(e) = res { + error!("dap server run error: {e:?}"); + } + // }) }; } Commands::IndexDiff { diff --git a/src/db-backend/src/paths.rs b/src/db-backend/src/paths.rs index f4ff39fa6..65aed677c 100644 --- a/src/db-backend/src/paths.rs +++ b/src/db-backend/src/paths.rs @@ -1,5 +1,9 @@ +// copied 
from Stan's paths.rs in src/db-backend in the public codetracer repo +// added run_dir/ct_rr_worker_socket_path (and copied to db-backend too) + use std::env; -use std::path::PathBuf; +use std::error::Error; +use std::path::{Path, PathBuf}; use std::sync::{LazyLock, Mutex}; pub struct Paths { @@ -24,3 +28,24 @@ impl Default for Paths { } pub static CODETRACER_PATHS: LazyLock> = LazyLock::new(|| Mutex::new(Paths::default())); + +pub fn run_dir_for(tmp_path: &Path, run_id: usize) -> Result> { + let run_dir = tmp_path.join(format!("run-{run_id}")); + std::fs::create_dir_all(&run_dir)?; + Ok(run_dir) +} + +pub fn ct_rr_worker_socket_path(from: &str, worker_name: &str, worker_index_for_kind: usize, run_id: usize) -> Result> { + let tmp_path: PathBuf = { CODETRACER_PATHS.lock()?.tmp_path.clone() }; + let run_dir = run_dir_for(&tmp_path, run_id)?; + // eventually: TODO: unique index or better cleanup + // if worker with the same name started/restarted multiple times + // by the same backend instance + Ok(run_dir.join(format!("ct_rr_worker_{worker_name}_{worker_index_for_kind}_from_{from}.sock"))) + + // TODO: decide if we need to check/eventually remove or the unique run folder/paths are enough: + // + // if std::fs::metadata(&receiving_socket_path).is_ok() { + // let _ = std::fs::remove_file(&receiving_socket_path); // try to remove if existing: ignore error + // } +} diff --git a/src/db-backend/src/query.rs b/src/db-backend/src/query.rs new file mode 100644 index 000000000..ddebc4d45 --- /dev/null +++ b/src/db-backend/src/query.rs @@ -0,0 +1,22 @@ +use serde::{Deserialize, Serialize}; + +use crate::task::{Action, Breakpoint, CtLoadLocalsArguments, Location, ProgramEvent}; +use crate::lang::Lang; + +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(tag = "kind")] +pub enum CtRRQuery { + RunToEntry, + LoadLocation, + Step { action: Action, forward: bool }, + LoadLocals { arg: CtLoadLocalsArguments }, + LoadReturnValue { lang: Lang }, + LoadValue { expression: 
String, lang: Lang }, + AddBreakpoint { path: String, line: i64 }, + DeleteBreakpoint { breakpoint: Breakpoint }, + DeleteBreakpoints, + ToggleBreakpoint { breakpoint: Breakpoint }, + JumpToCall { location: Location }, + LoadAllEvents, + EventJump { program_event: ProgramEvent }, +} diff --git a/src/db-backend/src/replay.rs b/src/db-backend/src/replay.rs new file mode 100644 index 000000000..2fbebba71 --- /dev/null +++ b/src/db-backend/src/replay.rs @@ -0,0 +1,31 @@ +use runtime_tracing::StepId; +use std::error::Error; + +use crate::db::DbRecordEvent; +use crate::expr_loader::ExprLoader; +use crate::lang::Lang; +use crate::task::{Action, Breakpoint, Events, Location, CtLoadLocalsArguments, ProgramEvent, VariableWithRecord}; +use crate::value::ValueRecordWithType; + +pub trait Replay: std::fmt::Debug { + fn load_location(&mut self, expr_loader: &mut ExprLoader) -> Result>; + fn run_to_entry(&mut self) -> Result<(), Box>; + fn load_events(&mut self) -> Result>; + fn step(&mut self, action: Action, forward: bool) -> Result>; + fn load_locals(&mut self, arg: CtLoadLocalsArguments) -> Result, Box>; + fn load_value(&mut self, expression: &str, lang: Lang) -> Result>; + + // assuming currently the replay is stopped in the right `call`(frame) for both trace kinds; + // and if rr: possibly near the return value + fn load_return_value(&mut self, lang: Lang) -> Result>; + + fn load_step_events(&mut self, step_id: StepId, exact: bool) -> Vec; + fn jump_to(&mut self, step_id: StepId) -> Result>; + fn add_breakpoint(&mut self, path: &str, line: i64) -> Result>; + fn delete_breakpoint(&mut self, breakpoint: &Breakpoint) -> Result>; + fn delete_breakpoints(&mut self) -> Result>; + fn toggle_breakpoint(&mut self, breakpoint: &Breakpoint) -> Result>; + fn jump_to_call(&mut self, location: &Location) -> Result>; + fn event_jump(&mut self, event: &ProgramEvent) -> Result>; + fn current_step_id(&mut self) -> StepId; +} diff --git a/src/db-backend/src/rr_dispatcher.rs 
b/src/db-backend/src/rr_dispatcher.rs new file mode 100644 index 000000000..ce0625d6a --- /dev/null +++ b/src/db-backend/src/rr_dispatcher.rs @@ -0,0 +1,317 @@ +use std::error::Error; +use std::io::Write; +use std::io::{BufRead, BufReader}; +use std::os::unix::net::UnixStream; +use std::path::{Path, PathBuf}; +use std::process::{Child, Command, Stdio}; +use std::thread; +use std::time::Duration; +use std::ffi::OsStr; + +use log::{info, error}; +use runtime_tracing::StepId; + +use crate::db::DbRecordEvent; +use crate::expr_loader::ExprLoader; +use crate::lang::Lang; +use crate::paths::ct_rr_worker_socket_path; +use crate::query::CtRRQuery; +use crate::replay::Replay; +use crate::task::{Action, Breakpoint, Events, Location, CtLoadLocalsArguments, ProgramEvent, VariableWithRecord}; +use crate::value::ValueRecordWithType; + +#[derive(Debug)] +pub struct RRDispatcher { + pub stable: CtRRWorker, + pub ct_rr_worker_exe: PathBuf, + pub rr_trace_folder: PathBuf, + pub name: String, + pub index: usize, +} + +#[derive(Debug)] +pub struct CtRRWorker { + pub name: String, + pub index: usize, + pub active: bool, + pub ct_rr_worker_exe: PathBuf, + pub rr_trace_folder: PathBuf, + process: Option, + stream: Option, +} + +#[derive(Default, Debug, Clone)] +pub struct CtRRArgs { + pub worker_exe: PathBuf, + pub rr_trace_folder: PathBuf, +} + +impl CtRRWorker { + pub fn new(name: &str, index: usize, ct_rr_worker_exe: &Path, rr_trace_folder: &Path) -> CtRRWorker { + CtRRWorker { + name: name.to_string(), + index, + active: false, + ct_rr_worker_exe: PathBuf::from(ct_rr_worker_exe), + rr_trace_folder: PathBuf::from(rr_trace_folder), + process: None, + stream: None, + } + } + + pub fn start(&mut self) -> Result<(), Box> { + let is_appimage = self.ct_rr_worker_exe.extension() == Some(OsStr::new("AppImage")); + info!( + "start: {}{} replay --name {} --index {} {}", + if !is_appimage { "" } else { "appimage-run " }, + self.ct_rr_worker_exe.display(), + self.name, + self.index, + 
self.rr_trace_folder.display() + ); + + let ct_worker = if !is_appimage { + Command::new(&self.ct_rr_worker_exe) + .arg("replay") + .arg("--name") + .arg(&self.name) + .arg("--index") + .arg(self.index.to_string()) + .arg(&self.rr_trace_folder) + .stdin(Stdio::piped()) + .stdout(Stdio::piped()) + .spawn()? + + } else { + Command::new("appimage-run") + .arg(&self.ct_rr_worker_exe) + .arg("replay") + .arg("--name") + .arg(&self.name) + .arg("--index") + .arg(self.index.to_string()) + .arg(&self.rr_trace_folder) + .stdin(Stdio::piped()) + .stdout(Stdio::piped()) + .spawn()? + }; + + self.process = Some(ct_worker); + self.setup_worker_sockets()?; + self.active = true; + Ok(()) + } + + fn setup_worker_sockets(&mut self) -> Result<(), Box> { + // assuming that the ct rr worker creates the sockets! + // code copied and adapted from `connect_socket_with_backend_and_loop` in ct-rr-worker + // which is itself copied/adapted/written from/based on https://emmanuelbosquet.com/2022/whatsaunixsocket/ + + let run_id = std::process::id() as usize; + + let socket_path = ct_rr_worker_socket_path("", &self.name, self.index, run_id)?; + info!("try to connect to worker with socket in {}", socket_path.display()); + loop { + if let Ok(stream) = UnixStream::connect(&socket_path) { + self.stream = Some(stream); + break; + } + thread::sleep(Duration::from_millis(1)); + // TODO: handle different kinds of errors + + // TODO: after some retries, assume a problem and return an error? 
+ } + + Ok(()) + } + + // for now: don't return a typed value here, only Ok(raw value) or an error + pub fn run_query(&mut self, query: CtRRQuery) -> Result> { + let raw_json = serde_json::to_string(&query)?; + + info!("send to worker {raw_json}\n"); + self.stream + .as_mut() + .expect("valid sending stream") + .write_all(&format!("{raw_json}\n").into_bytes())?; + // `clippy::unused_io_amount` catched we need write_all, not write + + let mut res = "".to_string(); + info!("wait to read"); + + let mut reader = BufReader::new(self.stream.as_mut().expect("valid receiving stream")); + reader.read_line(&mut res)?; // TODO: more robust reading/read all + + res = String::from(res.trim()); // trim newlines/whitespace! + + info!("res: `{res}`"); + + if !res.starts_with("error:") { + Ok(res) + } else { + Err(format!("run_query ct rr worker error: {}", res).into()) + } + } +} + +impl RRDispatcher { + pub fn new(name: &str, index: usize, ct_rr_args: CtRRArgs) -> RRDispatcher { + RRDispatcher { + name: name.to_string(), + index, + stable: CtRRWorker::new(name, index, &ct_rr_args.worker_exe, &ct_rr_args.rr_trace_folder), + ct_rr_worker_exe: ct_rr_args.worker_exe.clone(), + rr_trace_folder: ct_rr_args.rr_trace_folder.clone(), + } + } + + pub fn ensure_active_stable(&mut self) -> Result<(), Box> { + // start stable process if not active, store fields, setup ipc? store in stable + if !self.stable.active { + self.stable.start()?; + } + // check again: + if !self.stable.active { + return Err("stable started, but still not active without an obvious error".into()); + } + + Ok(()) + } + + fn load_location_directly(&mut self) -> Result> { + Ok(serde_json::from_str::( + &self.stable.run_query(CtRRQuery::LoadLocation)?, + )?) 
+ } +} + +impl Replay for RRDispatcher { + fn load_location(&mut self, _expr_loader: &mut ExprLoader) -> Result> { + self.ensure_active_stable()?; + self.load_location_directly() + } + + fn run_to_entry(&mut self) -> Result<(), Box> { + self.ensure_active_stable()?; + let _ok = self.stable.run_query(CtRRQuery::RunToEntry)?; + Ok(()) + } + + fn load_events(&mut self) -> Result> { + self.ensure_active_stable()?; + let events = serde_json::from_str::(&self.stable.run_query(CtRRQuery::LoadAllEvents)?)?; + Ok(events) + // Ok(Events { + // events: vec![], + // first_events: vec![], + // contents: "".to_string(), + // }) + } + + fn step(&mut self, action: Action, forward: bool) -> Result> { + self.ensure_active_stable()?; + let res = serde_json::from_str::(&self.stable.run_query(CtRRQuery::Step { action, forward })?)?; + Ok(res) + } + + fn load_locals(&mut self, arg: CtLoadLocalsArguments) -> Result, Box> { + self.ensure_active_stable()?; + let res = serde_json::from_str::>(&self.stable.run_query(CtRRQuery::LoadLocals { arg })?)?; + Ok(res) + } + + fn load_value(&mut self, expression: &str, lang: Lang) -> Result> { + self.ensure_active_stable()?; + let res = serde_json::from_str::(&self.stable.run_query(CtRRQuery::LoadValue { + expression: expression.to_string(), + lang, })?)?; + Ok(res) + } + + fn load_return_value(&mut self, lang: Lang) -> Result> { + self.ensure_active_stable()?; + let res = serde_json::from_str::(&self.stable.run_query(CtRRQuery::LoadReturnValue { lang })?)?; + Ok(res) + } + + fn load_step_events(&mut self, _step_id: StepId, _exact: bool) -> Vec { + // TODO: maybe cache events directly in replay for now, and use the same logic for them as in Db? + // or directly embed Db? or separate events in a separate EventList? 
+ vec![] + } + + fn jump_to(&mut self, _step_id: StepId) -> Result> { + // TODO + error!("TODO rr jump_to: for now run to entry"); + self.run_to_entry()?; + Ok(true) + // todo!() + } + + fn add_breakpoint(&mut self, path: &str, line: i64) -> Result> { + self.ensure_active_stable()?; + let breakpoint = serde_json::from_str::( + &self.stable.run_query( + CtRRQuery::AddBreakpoint { + path: path.to_string(), + line, + })? + )?; + Ok(breakpoint) + } + + fn delete_breakpoint(&mut self, breakpoint: &Breakpoint) -> Result> { + self.ensure_active_stable()?; + Ok(serde_json::from_str::( + &self.stable.run_query( + CtRRQuery::DeleteBreakpoint { + breakpoint: breakpoint.clone(), + } + )? + )?) + } + + fn delete_breakpoints(&mut self) -> Result> { + self.ensure_active_stable()?; + Ok(serde_json::from_str::( + &self.stable.run_query( + CtRRQuery::DeleteBreakpoints + )? + )?) + } + + fn toggle_breakpoint(&mut self, breakpoint: &Breakpoint) -> Result> { + self.ensure_active_stable()?; + Ok(serde_json::from_str::( + &self.stable.run_query( + CtRRQuery::ToggleBreakpoint { breakpoint: breakpoint.clone() }, + )? + )?) + } + + fn jump_to_call(&mut self, location: &Location) -> Result> { + self.ensure_active_stable()?; + Ok(serde_json::from_str::( + &self.stable.run_query( + CtRRQuery::JumpToCall { location: location.clone() } + )? + )?) + } + + fn event_jump(&mut self, event: &ProgramEvent) -> Result> { + self.ensure_active_stable()?; + Ok(serde_json::from_str::( + &self.stable.run_query( + CtRRQuery::EventJump { program_event: event.clone() } + )? + )?) + } + + fn current_step_id(&mut self) -> StepId { + // cache location or step_id and return + // OR always load from worker + // TODO: return result or do something else or cache ? 
+ let location = self.load_location_directly().expect("access to step_id"); + StepId(location.rr_ticks.0) + } +} diff --git a/src/db-backend/src/step_lines_loader.rs b/src/db-backend/src/step_lines_loader.rs index f02df3da0..ed292f736 100644 --- a/src/db-backend/src/step_lines_loader.rs +++ b/src/db-backend/src/step_lines_loader.rs @@ -5,11 +5,11 @@ use std::path::PathBuf; use log::info; use runtime_tracing::{CallKey, StepId}; -use crate::db::{Db, DbStep}; +use crate::db::{Db, DbStep, DbReplay}; use crate::distinct_vec::DistinctVec; use crate::expr_loader::ExprLoader; use crate::flow_preloader::FlowPreloader; -use crate::task::{FlowMode, LineStep, LineStepKind, LineStepValue, Location}; +use crate::task::{FlowMode, LineStep, LineStepKind, LineStepValue, Location, TraceKind}; #[derive(Debug, Clone)] pub struct StepLinesLoader { @@ -84,9 +84,10 @@ impl StepLinesLoader { let call_key = db.call_key_for_step(step_id); if !self.flow_loaded.contains(&call_key.0) { let location = self.global_line_steps[step_id].location.clone(); - let function_id = db.calls[call_key].function_id; - let function_first = db.functions[function_id].line; - let flow_update = flow_preloader.load(location, function_first, FlowMode::Call, db); + // let function_id = db.calls[call_key].function_id; + // let function_first = db.functions[function_id].line; + let mut replay = DbReplay::new(Box::new(db.clone())); + let flow_update = flow_preloader.load(location, FlowMode::Call, TraceKind::DB, &mut replay); if !flow_update.error && !flow_update.view_updates.is_empty() { let flow_view_update = &flow_update.view_updates[0]; for flow_step in flow_view_update.steps.iter() { diff --git a/src/db-backend/src/task.rs b/src/db-backend/src/task.rs index 568eaba6d..22820f3ea 100644 --- a/src/db-backend/src/task.rs +++ b/src/db-backend/src/task.rs @@ -11,7 +11,7 @@ use serde::{Deserialize, Serialize}; use serde_repr::*; use crate::lang::*; -use crate::value::{Type, Value}; +use crate::value::{Type, Value, 
ValueRecordWithType}; use schemars::JsonSchema; // IMPORTANT: must keep in sync with `EventLogKind` definition in common_types.nim! @@ -24,6 +24,7 @@ pub struct CtLoadLocalsArguments { pub rr_ticks: i64, pub count_budget: i64, pub min_count_limit: i64, + pub lang: Lang, } /// response for `ct/load-locals` @@ -124,6 +125,20 @@ pub struct Variable { pub value: Value, } + +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all(serialize = "camelCase", deserialize = "camelCase"))] +pub struct VariableWithRecord { + pub expression: String, + pub value: ValueRecordWithType, +} + +// pub struct ValueRecordAndType { +// value: ValueRecord, +// typ: Type, +// } + + #[derive(Debug, Default, Clone, Serialize, Deserialize)] #[serde(rename_all(serialize = "camelCase", deserialize = "camelCase"))] pub struct Instruction { @@ -658,7 +673,7 @@ impl FlowUpdate { } } -#[derive(Debug, Default, Clone, Serialize, Deserialize)] +#[derive(Debug, Default, Clone, Serialize, Deserialize, JsonSchema)] #[serde(rename_all(serialize = "camelCase", deserialize = "camelCase"))] pub struct ProgramEvent { pub kind: EventLogKind, @@ -1640,6 +1655,28 @@ pub struct TracepointResults { pub first_update: bool, } +#[derive(Debug, Default, Clone, Serialize, Deserialize, JsonSchema)] +#[serde(rename_all(serialize = "camelCase", deserialize = "camelCase"))] +pub struct Breakpoint { + pub id: i64, + pub enabled: bool, +} + +#[derive(Debug, Clone, Copy, PartialEq)] +pub enum TraceKind { + DB, + RR, +} + +#[derive(Debug, Default, Clone, Serialize, Deserialize, JsonSchema)] +#[serde(rename_all(serialize = "camelCase", deserialize = "camelCase"))] +pub struct Events { + pub events: Vec, + pub first_events: Vec, + pub contents: String, +} + + pub static mut TASK_ID_MAP: &mut [usize] = &mut [0; 100]; pub static mut EVENT_ID_MAP: &mut [usize] = &mut [0; 100]; diff --git a/src/db-backend/src/trace_processor.rs b/src/db-backend/src/trace_processor.rs index cb94ec24f..4421d5b3e 100644 --- 
a/src/db-backend/src/trace_processor.rs +++ b/src/db-backend/src/trace_processor.rs @@ -450,7 +450,7 @@ pub fn load_trace_data( pub fn load_trace_metadata(trace_metadata_file: &Path) -> Result> { // copied and adapted from https://stackoverflow.com/a/70926549/438099 let path = expanduser(trace_metadata_file.display().to_string())?; - let raw_bytes = fs::read(&path).unwrap_or_else(|_| panic!("metadata file {path:?} read error")); + let raw_bytes = fs::read(&path)?; //.unwrap_or_else(|_| panic!("metadata file {path:?} read error")); let raw = str::from_utf8(&raw_bytes)?; let trace_metadata: TraceMetadata = serde_json::from_str(raw)?; diff --git a/src/db-backend/src/value.rs b/src/db-backend/src/value.rs index 09e96642a..84762b042 100644 --- a/src/db-backend/src/value.rs +++ b/src/db-backend/src/value.rs @@ -1,6 +1,8 @@ -use runtime_tracing::TypeKind; +use runtime_tracing::{TypeKind, TypeRecord, Place, TypeSpecificInfo}; use schemars::JsonSchema; use serde::{Deserialize, Serialize}; +use num_bigint::BigInt; +// TODO? 
from types if needed use runtime_tracing::base64; #[derive(Debug, Default, Clone, Serialize, Deserialize, PartialEq, JsonSchema)] #[serde(rename_all = "camelCase")] @@ -127,3 +129,202 @@ impl Value { // #[default] // Other // } + + +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(tag = "kind")] +pub enum ValueRecordWithType { + Int { + i: i64, + typ: TypeRecord, + }, + Float { + f: f64, + typ: TypeRecord, + }, + Bool { + b: bool, + typ: TypeRecord, + }, + String { + text: String, + typ: TypeRecord, + }, + Sequence { + elements: Vec, + is_slice: bool, + typ: TypeRecord, + }, + Tuple { + elements: Vec, + typ: TypeRecord, + }, + Struct { + field_values: Vec, + typ: TypeRecord, // (if TypeId: must point to), must be Type with STRUCT kind and TypeSpecificInfo::Struct + }, + Variant { + discriminator: String, // TODO: eventually a more specific kind of value/type + contents: Box, // usually a Struct or a Tuple + typ: TypeRecord, + }, + // TODO: eventually add more pointer-like variants + // or more fields (address?) 
+ Reference { + dereferenced: Box, + address: u64, + mutable: bool, + typ: TypeRecord, + }, + Raw { + r: String, + typ: TypeRecord, + }, + Error { + msg: String, + typ: TypeRecord, + }, + None { + typ: TypeRecord, + }, + Cell { + place: Place, + }, + BigInt { + // TODO #[serde(with = "base64")] + b: Vec, // Base64 encoded bytes of a big-endian unsigned integer + negative: bool, + typ: TypeRecord, + }, +} + +pub fn to_ct_value(v: &ValueRecordWithType) -> Value { + match v { + ValueRecordWithType::Int { i, typ } => { + let mut res = Value::new(TypeKind::Int, to_ct_type(typ)); + res.i = i.to_string(); + res + } + ValueRecordWithType::Float { f, typ } => { + let mut res = Value::new(TypeKind::Float, to_ct_type(typ)); + res.f = f.to_string(); + res + } + ValueRecordWithType::String { text, typ } => { + let mut res = Value::new(TypeKind::String, to_ct_type(typ)); + res.text = text.clone(); + res + } + ValueRecordWithType::Bool { b, typ } => { + let mut res = Value::new(TypeKind::Bool, to_ct_type(typ)); + res.b = *b; + res + } + ValueRecordWithType::Sequence { + elements, + typ, + is_slice, + } => { + // TODO: is_slice should be in the type kind: SLICE? 
+ let ct_typ = if !is_slice { + to_ct_type(typ) + } else { + Type::new(TypeKind::Slice, &typ.lang_type) + }; + let mut res = Value::new(TypeKind::Seq, ct_typ); + res.elements = elements.iter().map(|e| to_ct_value(e)).collect(); + res + } + ValueRecordWithType::Struct { field_values, typ } => { + let mut res = Value::new(TypeKind::Struct, to_ct_type(typ)); + res.elements = field_values.iter().map(|value| to_ct_value(value)).collect(); + res + } + ValueRecordWithType::Tuple { elements, typ } => { + let mut res = Value::new(TypeKind::Tuple, to_ct_type(typ)); + res.elements = elements.iter().map(|value| to_ct_value(value)).collect(); + res.typ.labels = elements + .iter() + .enumerate() + .map(|(index, _)| format!("{index}")) + .collect(); + res.typ.member_types = res.elements.iter().map(|value| value.typ.clone()).collect(); + res + } + ValueRecordWithType::Variant { + discriminator: _, + contents: _, + typ: _, + } => { + // variant-like enums not generated yet from noir tracer: + // we should support variants in general, but we'll think a bit first how + // to more cleanly/generally represent them in the codetracer code, as the current + // `Value` mapping doesn't seem great imo + // we can improve it, or we can add a new variant case (something more similar to the runtime_tracing repr?) 
+ todo!("a more suitable codetracer value/type for variants") + } + ValueRecordWithType::Reference { + dereferenced, + address, + mutable, + typ, + } => { + let mut res = Value::new(TypeKind::Pointer, to_ct_type(typ)); + let dereferenced_value = to_ct_value(dereferenced); + res.typ.element_type = Some(Box::new(dereferenced_value.typ.clone())); + res.address = (*address).to_string(); + res.ref_value = Some(Box::new(dereferenced_value)); + res.is_mutable = *mutable; + res + } + ValueRecordWithType::Raw { r, typ } => { + let mut res = Value::new(TypeKind::Raw, to_ct_type(typ)); + res.r = r.clone(); + res + } + ValueRecordWithType::Error { msg, typ } => { + let mut res = Value::new(TypeKind::Error, to_ct_type(typ)); + res.msg = msg.clone(); + res + } + ValueRecordWithType::None { typ } => Value::new(TypeKind::None, to_ct_type(typ)), + ValueRecordWithType::Cell { .. } => { + // supposed to map to place in value graph + // TODO + unimplemented!() + } + ValueRecordWithType::BigInt { b, negative, typ } => { + let sign = if *negative { + num_bigint::Sign::Minus + } else { + num_bigint::Sign::Plus + }; + + let num = BigInt::from_bytes_be(sign, b); + + let mut res = Value::new(TypeKind::Int, to_ct_type(typ)); + res.i = num.to_string(); + res + } + } +} + +pub fn to_ct_type(typ: &TypeRecord) -> Type { + match typ.kind { + TypeKind::Struct => { + let mut t = Type::new(typ.kind, &typ.lang_type); + t.labels = get_field_names(typ); + t + } + _ => Type::new(typ.kind, &typ.lang_type), + } + // TODO: struct -> instance with labels/eventually other types + // if type_record.kind != res.type +} + +fn get_field_names(typ: &TypeRecord) -> Vec { + match &typ.specific_info { + TypeSpecificInfo::Struct { fields } => fields.iter().map(|field| field.name.clone()).collect(), + _ => Vec::new(), + } +} diff --git a/src/db-backend/tests/dap_backend_server.rs b/src/db-backend/tests/dap_backend_server.rs index 0ed502e84..79f8c82a2 100644 --- a/src/db-backend/tests/dap_backend_server.rs +++ 
b/src/db-backend/tests/dap_backend_server.rs @@ -89,6 +89,7 @@ fn test_backend_dap_server() { request: None, typ: None, session_id: None, + ct_rr_worker_exe: None, }; let launch = client.launch(launch_args).expect("failed to build launch request"); diff --git a/src/db-backend/tests/dap_backend_stdio.rs b/src/db-backend/tests/dap_backend_stdio.rs index 9e43abf03..3ccf02655 100644 --- a/src/db-backend/tests/dap_backend_stdio.rs +++ b/src/db-backend/tests/dap_backend_stdio.rs @@ -44,6 +44,7 @@ fn test_backend_dap_server_stdio() { request: None, typ: None, session_id: None, + ct_rr_worker_exe: None, }; let launch = client.launch(launch_args).unwrap(); // dap::write_message(&mut writer, &launch).unwrap(); diff --git a/src/db-backend/tests/rr_test.rs b/src/db-backend/tests/rr_test.rs new file mode 100644 index 000000000..d6bc1cea1 --- /dev/null +++ b/src/db-backend/tests/rr_test.rs @@ -0,0 +1,221 @@ +use std::io::BufReader; +use std::path::PathBuf; +use std::process::{Command, Stdio}; + +use serde_json::json; +use ntest::timeout; + +use db_backend::transport::DapTransport; +use db_backend::dap::{self, DapClient, DapMessage, LaunchRequestArguments}; +// use db_backend::dap_types::StackTraceArguments; +use db_backend::task; +use db_backend::lang::Lang; + +#[test] +#[timeout(5_000)] // try to detect hanging, e.g. 
waiting for response that doesn't come +#[ignore] // ignored by default, as they depend on closed source ct-rr-worker/also not finished setup +fn test_rr() { + let bin = env!("CARGO_BIN_EXE_db-backend"); + let pid = std::process::id() as usize; + let trace_dir = PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("rr-trace"); + + let mut child = Command::new(bin) + .arg("dap-server") + .arg("--stdio") + .stdin(Stdio::piped()) + .stdout(Stdio::piped()) + .spawn() + .unwrap(); + + let mut writer = child.stdin.take().unwrap(); + let mut reader = BufReader::new(child.stdout.take().unwrap()); + + let mut client = DapClient::default(); + let init = client.request("initialize", json!({})); + writer + .send(&init) + .unwrap_or_else(|err| panic!("failed to send initialize request: {err}")); + + let launch_args = LaunchRequestArguments { + program: Some("rr_gdb".to_string()), + trace_folder: Some(trace_dir), + trace_file: None, + raw_diff_index: None, + pid: Some(pid as u64), + cwd: None, + no_debug: None, + restart: None, + name: None, + request: None, + typ: None, + session_id: None, + // TODO: env var, or taking from $PATH, or from local or test config? 
+ ct_rr_worker_exe: Some(PathBuf::from("/home/alexander92/codetracer-rr-backend/src/build-debug/bin/ct-rr-worker")), + }; + let launch = client.launch(launch_args).unwrap(); + writer + .send(&launch) + .unwrap_or_else(|err| panic!("failed to send launch request: {err}")); + + + let msg1 = dap::read_dap_message_from_reader(&mut reader).unwrap(); + match msg1 { + DapMessage::Response(r) => { + assert_eq!(r.command, "initialize"); + // assert!(r.body["supportsLoadedSourcesRequest"].as_bool().unwrap()); + assert!(r.body["supportsStepBack"].as_bool().unwrap()); + assert!(r.body["supportsConfigurationDoneRequest"].as_bool().unwrap()); + assert!(r.body["supportsDisassembleRequest"].as_bool().unwrap()); + assert!(r.body["supportsLogPoints"].as_bool().unwrap()); + assert!(r.body["supportsRestartRequest"].as_bool().unwrap()); + } + _ => panic!(), + } + let msg2 = dap::read_dap_message_from_reader(&mut reader).unwrap(); + match msg2 { + DapMessage::Event(e) => assert_eq!(e.event, "initialized"), + _ => panic!(), + } + let conf_done = client.request("configurationDone", json!({})); + writer + .send(&conf_done) + .unwrap_or_else(|err| panic!("failed to send configurationDone request: {err}")); + + let msg3 = dap::read_dap_message_from_reader(&mut reader).unwrap(); + match msg3 { + DapMessage::Response(r) => assert_eq!(r.command, "launch"), + _ => panic!(), + } + let msg4 = dap::read_dap_message_from_reader(&mut reader).unwrap(); + match msg4 { + DapMessage::Response(r) => assert_eq!(r.command, "configurationDone"), + _ => panic!(), + } + + let msg5 = dap::read_dap_message_from_reader(&mut reader).unwrap(); + match msg5 { + DapMessage::Event(e) => { + assert_eq!(e.event, "stopped"); + assert_eq!(e.body["reason"], "entry"); + } + _ => panic!("expected a stopped event, but got {:?}", msg5), + } + + let msg_complete_move = dap::read_dap_message_from_reader(&mut reader).unwrap(); + match msg_complete_move { + DapMessage::Event(e) => { + assert_eq!(e.event, "ct/complete-move"); + let 
move_state = serde_json::from_value::(e.body).expect("valid move state"); + let path = PathBuf::from(move_state.clone().location.path); + let filename = path.file_name().expect("filename"); + assert_eq!(filename.display().to_string(), "rr_gdb.rs"); + assert_eq!(move_state.location.line, 205); + assert_eq!(move_state.location.function_name.starts_with("rr_gdb::main"), true); + + } + _ => panic!("expected a complete move events, but got {:?}", msg_complete_move), + } + + // next to next line in `main` + let next_request = client.request("next", json!({})); + writer + .send(&next_request) + .unwrap_or_else(|err| panic!("failed to send next request: {err}")); + + // `stepIn` to `run` + let step_in_request = client.request("stepIn", json!({})); + writer + .send(&step_in_request) + .unwrap_or_else(|err| panic!("failed to send stepIn request: {err}")); + + // `next` to next line in `run`: to check a local + writer + .send(&next_request) + .unwrap_or_else(|err| panic!("failed to send next request: {err}")); + + for _ in 0 .. 
4 { + let _ = dap::read_dap_message_from_reader(&mut reader).unwrap(); + } + + // let last_location: task::Location; // = task::Location::default(); + + let msg_complete_move_before_local_check = dap::read_dap_message_from_reader(&mut reader).unwrap(); + match msg_complete_move_before_local_check { + DapMessage::Event(e) => { + assert_eq!(e.event, "ct/complete-move"); + let move_state = serde_json::from_value::(e.body).expect("valid move state"); + // last_location = move_state.location.clone(); + let path = PathBuf::from(move_state.clone().location.path); + let filename = path.file_name().expect("filename"); + assert_eq!(filename.display().to_string(), "rr_gdb.rs"); + assert_eq!(move_state.location.line, 70); + assert_eq!(move_state.location.function_name.starts_with("rr_gdb::run"), true); + } + _ => panic!("expected a complete move events, but got {:?}", msg_complete_move_before_local_check), + + } + let _next_response = dap::read_dap_message_from_reader(&mut reader).unwrap(); + + let _next_response = dap::read_dap_message_from_reader(&mut reader).unwrap(); + + let load_locals_request = client.request("ct/load-locals", serde_json::to_value(&task::CtLoadLocalsArguments { + count_budget: 3_000, + min_count_limit: 50, + rr_ticks: 0, + lang: Lang::Rust, + }).unwrap()); + writer + .send(&load_locals_request) + .unwrap_or_else(|err| panic!("failed to send ct/load-locals request: {err}")); + + let load_locals_response = dap::read_dap_message_from_reader(&mut reader).unwrap(); + if let DapMessage::Response(response) = load_locals_response { + assert_eq!(response.command, "ct/load-locals"); + println!("{:?}", response.body); + let variables = serde_json::from_value::(response.body) + .expect("valid local response") + .locals; + assert_eq!(variables[0].expression, "i"); + assert_eq!(variables[0].value.typ.lang_type, "i64"); //? 
+ assert_eq!(variables[0].value.i, "0"); + } + + // let threads_request = client.request("threads", json!({})); + // writer + // .send(&threads_request) + // .unwrap_or_else(|err| panic!("failed to send threads request: {err}")); + // let msg_threads = dap::from_reader(&mut reader).unwrap(); + // match msg_threads { + // DapMessage::Response(r) => { + // assert_eq!(r.command, "threads"); + // assert_eq!(r.body["threads"][0]["id"], 1); + // } + // _ => panic!( + // "expected a Response DapMessage after a threads request, but got {:?}", + // msg_threads + // ), + // } + + // let stack_trace_request = client.request( + // "stackTrace", + // serde_json::to_value(StackTraceArguments { + // thread_id: 1, + // format: None, + // levels: None, + // start_frame: None, + // }) + // .unwrap(), + // ); + // writer + // .send(&stack_trace_request) + // .unwrap_or_else(|err| panic!("failed to send stackTrace request: {err}")); + // let msg_stack_trace = dap::from_reader(&mut reader).unwrap(); + // match msg_stack_trace { + // DapMessage::Response(r) => assert_eq!(r.command, "stackTrace"), // TODO: test stackFrames / totalFrames ? + // _ => panic!(), + // } + + drop(writer); + drop(reader); + let _ = child.wait().unwrap(); +} diff --git a/src/db-backend/transport_spec.md b/src/db-backend/transport_spec.md new file mode 100644 index 000000000..7782cddb5 --- /dev/null +++ b/src/db-backend/transport_spec.md @@ -0,0 +1,58 @@ +# DB-Backend Transport Specification + +## 1. Introduction + +This document specifies the transport mechanism used by the `db-backend` component for communication with external clients, primarily debuggers or development environments. The transport layer facilitates the exchange of messages conforming to the Debug Adapter Protocol (DAP). + +## 2. Protocol Overview + +The `db-backend` utilizes the Debug Adapter Protocol (DAP) as its primary communication protocol. 
DAP defines a generic protocol for debuggers and development environments to communicate with debug adapters. This specification focuses on how these DAP messages are transmitted over a communication channel.
+
+## 3. Message Structure
+
+DAP messages are exchanged using a simple, length-prefixed JSON format. Each message consists of two parts:
+
+1. **Header**: A set of HTTP-like headers, terminated by a `\r\n\r\n` sequence. The most crucial header is `Content-Length`, which indicates the size of the following JSON payload in bytes.
+   ```/dev/null/example.txt#L1-2
+   Content-Length: 123
+   Content-Type: application/json
+   ```
+   The `Content-Type` header is optional but recommended. If present, its value must be `application/json`.
+
+2. **Content**: The actual DAP message, which is a JSON object encoded in UTF-8. The size of this content must exactly match the `Content-Length` specified in the header.
+
+Example of a complete message:
+```/dev/null/example.txt#L1-4
+Content-Length: 83
+Content-Type: application/json
+
+{"seq":1, "type":"request", "command":"initialize", "arguments":{"adapterID":"db"}}
+```
+
+## 4. Transport Layer
+
+The `db-backend` primarily uses **Standard I/O (stdin/stdout)** for its transport layer.
+
+* **Input (stdin)**: The `db-backend` reads incoming DAP messages from its standard input stream.
+* **Output (stdout)**: The `db-backend` writes outgoing DAP messages to its standard output stream.
+
+Each message (header + content) is transmitted as a contiguous block of bytes. No additional delimiters or framing are used between messages; the only separator is the `\r\n\r\n` sequence that sits between a message's header and its content.
+
+## 5. Error Handling
+
+### 5.1. Malformed Messages
+
+If the `db-backend` receives a message that does not conform to the specified header and content format (e.g., missing `Content-Length`, invalid `Content-Length`, or non-JSON content), it should:
+
+* Attempt to log the error internally (if a logging mechanism is available).
+* Discard the malformed message. +* Continue processing subsequent messages, if possible. +* It should **not** send an error response over the DAP channel for transport-layer parsing errors, as the message might be too corrupted to respond to meaningfully. + +### 5.2. Protocol Errors (DAP Level) + +Errors within the DAP message content (e.g., unknown command, invalid arguments for a command) should be handled according to the DAP specification, typically by sending a `response` message with the `success` field set to `false` and an appropriate `message` and `body.error` field. + +### 5.3. Transport Failures + +If the standard I/O streams are closed unexpectedly or encounter read/write errors, the `db-backend` should terminate gracefully, logging the nature of the transport failure. \ No newline at end of file diff --git a/src/frontend/index/traces.nim b/src/frontend/index/traces.nim index 8a5d470bb..a760d417c 100644 --- a/src/frontend/index/traces.nim +++ b/src/frontend/index/traces.nim @@ -140,7 +140,7 @@ proc prepareForLoadingTrace*(traceId: int, pid: int) {.async.} = let packet = wrapJsonForSending js{ "type": cstring"request", "command": cstring"ct/start-replay", - "arguments": [cstring(dbBackendExe), cstring"dap-server"] + "arguments": [cstring(dbBackendExe), cstring"dap-server"], } backendManagerSocket.write(packet) diff --git a/src/frontend/middleware.nim b/src/frontend/middleware.nim index 0fa4b23cf..3cf348b8c 100644 --- a/src/frontend/middleware.nim +++ b/src/frontend/middleware.nim @@ -16,6 +16,7 @@ when not defined(ctInExtension): data.dapApi.sendCtRequest(DapLaunch, js{ traceFolder: data.trace.outputFolder, rawDiffIndex: data.startOptions.rawDiffIndex, + ctRRWorkerExe: data.config.rrBackend.ctRRWorkerExe, }) proc newOperationHandler*(viewsApi: MediatorWithSubscribers, operation: NewOperation) = diff --git a/src/frontend/styles/components/data_tables.styl b/src/frontend/styles/components/data_tables.styl index 33379f4ce..87d1c9d62 100644 --- 
a/src/frontend/styles/components/data_tables.styl +++ b/src/frontend/styles/components/data_tables.styl @@ -63,7 +63,7 @@ display: none table - min-width: fit-content !important + // min-width: fit-content !important max-width: -webkit-fill-available display: block diff --git a/src/frontend/types.nim b/src/frontend/types.nim index 55f8a7dad..f77048490 100644 --- a/src/frontend/types.nim +++ b/src/frontend/types.nim @@ -1525,6 +1525,14 @@ type showMinimap*: bool traceSharing*: TraceSharingConfigObj skipInstall*: bool + rrBackend*: RRBackendConfig + + RRBackendConfig* = ref object + enabled*: bool + path*: cstring + ctPaths*: cstring + debugInfoToolPath*: cstring + ctRRWorkerExe*: cstring BreakpointSave* = ref object of js # Serialized breakpoint diff --git a/src/frontend/ui/state.nim b/src/frontend/ui/state.nim index a7c7b5af2..d41c59a36 100644 --- a/src/frontend/ui/state.nim +++ b/src/frontend/ui/state.nim @@ -71,6 +71,7 @@ method onMove(self: StateComponent) {.async.} = rrTicks: self.rrTicks, countBudget: countBudget, minCountLimit: minCountLimit, + lang: toLangFromFilename(self.location.path), ) self.api.emit(CtLoadLocals, arguments) self.redraw()