diff --git a/collector/compile-benchmarks/README.md b/collector/compile-benchmarks/README.md index 58c322ba4..0ad8e071a 100644 --- a/collector/compile-benchmarks/README.md +++ b/collector/compile-benchmarks/README.md @@ -61,8 +61,6 @@ They mostly consist of real-world crates. - **stm32f4-0.14.0**: A crate that has many thousands of blanket impl blocks. It uses cargo features to enable large portions of its structure and is built with `--features=stm32f410` to have faster benchmarking times. -- **syn-1.0.89**: A library for parsing Rust code. An important part of the Rust - ecosystem. - **syn-2.0.101**: A library for parsing Rust code. An important part of the Rust ecosystem. - **typenum-1.18.0**: A library that encodes integer computation within the trait system. Serves as diff --git a/collector/compile-benchmarks/REUSE.toml b/collector/compile-benchmarks/REUSE.toml index 24f33140b..1cf0f20cd 100644 --- a/collector/compile-benchmarks/REUSE.toml +++ b/collector/compile-benchmarks/REUSE.toml @@ -265,11 +265,6 @@ path = "syn/**" SPDX-FileCopyrightText = "syn contributors" SPDX-License-Identifier = "MIT OR Apache-2.0" -[[annotations]] -path = "syn-1.0.89/**" -SPDX-FileCopyrightText = "syn contributors" -SPDX-License-Identifier = "MIT OR Apache-2.0" - [[annotations]] path = "syn-2.0.101/**" SPDX-FileCopyrightText = "syn contributors" diff --git a/collector/compile-benchmarks/syn-1.0.89/.cargo_vcs_info.json b/collector/compile-benchmarks/syn-1.0.89/.cargo_vcs_info.json deleted file mode 100644 index 08e125562..000000000 --- a/collector/compile-benchmarks/syn-1.0.89/.cargo_vcs_info.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "git": { - "sha1": "8d4eb4c597389b023b779621d8879641621d44e5" - }, - "path_in_vcs": "" -} \ No newline at end of file diff --git a/collector/compile-benchmarks/syn-1.0.89/0-println.patch b/collector/compile-benchmarks/syn-1.0.89/0-println.patch deleted file mode 100644 index f1b2644fe..000000000 --- a/collector/compile-benchmarks/syn-1.0.89/0-println.patch +++ /dev/null @@ -1,12 +0,0 @@ -diff --git a/src/lifetime.rs b/src/lifetime.rs -index 5dc1753a..61f28dde 100644 ---- a/src/lifetime.rs -+++ b/src/lifetime.rs -@@ -37,6 +37,7 @@ impl Lifetime { - /// # } - /// ``` - pub fn new(symbol: &str, span: Span) -> Self { -+ println!("testing"); - if !symbol.starts_with('\'') { - panic!( - "lifetime name must start with apostrophe as in \"'a\", got {:?}", diff --git a/collector/compile-benchmarks/syn-1.0.89/Cargo.lock b/collector/compile-benchmarks/syn-1.0.89/Cargo.lock deleted file mode 100644 index c23062799..000000000 --- a/collector/compile-benchmarks/syn-1.0.89/Cargo.lock +++ /dev/null @@ -1,1284 +0,0 @@ -# This file is automatically @generated by Cargo. -# It is not intended for manual editing. 
-version = 3 - -[[package]] -name = "adler" -version = "1.0.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe" - -[[package]] -name = "aho-corasick" -version = "0.7.18" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e37cfd5e7657ada45f742d6e99ca5788580b5c529dc78faf11ece6dc702656f" -dependencies = [ - "memchr", -] - -[[package]] -name = "anyhow" -version = "1.0.56" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4361135be9122e0870de935d7c439aef945b9f9ddd4199a553b5270b49c82a27" - -[[package]] -name = "autocfg" -version = "1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa" - -[[package]] -name = "automod" -version = "1.0.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "27989085bc1962f5d8ad5c2ab819a603b5096590c38a03162443d8a8bc307d0c" -dependencies = [ - "proc-macro2", - "quote", - "syn 1.0.89 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "base64" -version = "0.13.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "904dfeac50f3cdaba28fc6f57fdcddb75f49ed61346676a78c4ffe55877802fd" - -[[package]] -name = "bitflags" -version = "1.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" - -[[package]] -name = "bumpalo" -version = "3.9.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a4a45a46ab1f2412e53d3a0ade76ffad2025804294569aae387231a0cd6e0899" - -[[package]] -name = "bytes" -version = "1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c4872d67bab6358e59559027aa3b9157c53d9358c51423c17554809a8858e0f8" - -[[package]] -name = "cc" -version = "1.0.73" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2fff2a6927b3bb87f9595d67196a70493f627687a71d87a0d692242c33f58c11" - -[[package]] -name = "cfg-if" -version = "1.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" - -[[package]] -name = "console" -version = "0.15.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a28b32d32ca44b70c3e4acd7db1babf555fa026e385fb95f18028f88848b3c31" -dependencies = [ - "encode_unicode", - "libc", - "once_cell", - "terminal_size", - "winapi", -] - -[[package]] -name = "core-foundation" -version = "0.9.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "194a7a9e6de53fa55116934067c844d9d749312f75c6f6d0980e8c252f8c2146" -dependencies = [ - "core-foundation-sys", - "libc", -] - -[[package]] -name = "core-foundation-sys" -version = "0.8.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5827cebf4670468b8772dd191856768aedcb1b0278a04f989f7766351917b9dc" - -[[package]] -name = "crc32fast" -version = "1.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b540bd8bc810d3885c6ea91e2018302f68baba2129ab3e88f32389ee9370880d" -dependencies = [ - "cfg-if", -] - -[[package]] -name = "crossbeam-channel" -version = "0.5.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5aaa7bd5fb665c6864b5f963dd9097905c54125909c7aa94c9e18507cdbe6c53" -dependencies = [ 
- "cfg-if", - "crossbeam-utils", -] - -[[package]] -name = "crossbeam-deque" -version = "0.8.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6455c0ca19f0d2fbf751b908d5c55c1f5cbc65e03c4225427254b46890bdde1e" -dependencies = [ - "cfg-if", - "crossbeam-epoch", - "crossbeam-utils", -] - -[[package]] -name = "crossbeam-epoch" -version = "0.9.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1145cf131a2c6ba0615079ab6a638f7e1973ac9c2634fcbeaaad6114246efe8c" -dependencies = [ - "autocfg", - "cfg-if", - "crossbeam-utils", - "lazy_static", - "memoffset", - "scopeguard", -] - -[[package]] -name = "crossbeam-utils" -version = "0.8.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0bf124c720b7686e3c2663cf54062ab0f68a88af2fb6a030e87e30bf721fcb38" -dependencies = [ - "cfg-if", - "lazy_static", -] - -[[package]] -name = "either" -version = "1.6.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e78d4f1cc4ae33bbfc157ed5d5a5ef3bc29227303d595861deb238fcec4e9457" - -[[package]] -name = "encode_unicode" -version = "0.3.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a357d28ed41a50f9c765dbfe56cbc04a64e53e5fc58ba79fbc34c10ef3df831f" - -[[package]] -name = "encoding_rs" -version = "0.8.30" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7896dc8abb250ffdda33912550faa54c88ec8b998dec0b2c55ab224921ce11df" -dependencies = [ - "cfg-if", -] - -[[package]] -name = "fastrand" -version = "1.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c3fcf0cee53519c866c09b5de1f6c56ff9d647101f81c1964fa632e148896cdf" -dependencies = [ - "instant", -] - -[[package]] -name = "filetime" -version = "0.2.15" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "975ccf83d8d9d0d84682850a38c8169027be83368805971cc4f238c2b245bc98" -dependencies = [ - "cfg-if", - "libc", - "redox_syscall", - "winapi", -] - -[[package]] -name = "flate2" -version = "1.0.22" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e6988e897c1c9c485f43b47a529cef42fde0547f9d8d41a7062518f1d8fc53f" -dependencies = [ - "cfg-if", - "crc32fast", - "libc", - "miniz_oxide", -] - -[[package]] -name = "fnv" -version = "1.0.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" - -[[package]] -name = "foreign-types" -version = "0.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f6f339eb8adc052cd2ca78910fda869aefa38d22d5cb648e6485e4d3fc06f3b1" -dependencies = [ - "foreign-types-shared", -] - -[[package]] -name = "foreign-types-shared" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "00b0228411908ca8685dba7fc2cdd70ec9990a6e753e89b6ac91a84c40fbaf4b" - -[[package]] -name = "form_urlencoded" -version = "1.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5fc25a87fa4fd2094bffb06925852034d90a17f0d1e05197d4956d3555752191" -dependencies = [ - "matches", - "percent-encoding", -] - -[[package]] -name = "futures-channel" -version = "0.3.21" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c3083ce4b914124575708913bca19bfe887522d6e2e6d0952943f5eac4a74010" -dependencies = [ - "futures-core", -] - -[[package]] -name = "futures-core" -version = "0.3.21" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "0c09fd04b7e4073ac7156a9539b57a484a8ea920f79c7c675d05d289ab6110d3" - -[[package]] -name = "futures-io" -version = "0.3.21" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fc4045962a5a5e935ee2fdedaa4e08284547402885ab326734432bed5d12966b" - -[[package]] -name = "futures-sink" -version = "0.3.21" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "21163e139fa306126e6eedaf49ecdb4588f939600f0b1e770f4205ee4b7fa868" - -[[package]] -name = "futures-task" -version = "0.3.21" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "57c66a976bf5909d801bbef33416c41372779507e7a6b3a5e25e4749c58f776a" - -[[package]] -name = "futures-util" -version = "0.3.21" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d8b7abd5d659d9b90c8cba917f6ec750a74e2dc23902ef9cd4cc8c8b22e6036a" -dependencies = [ - "futures-core", - "futures-io", - "futures-task", - "memchr", - "pin-project-lite", - "pin-utils", - "slab", -] - -[[package]] -name = "h2" -version = "0.3.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "62eeb471aa3e3c9197aa4bfeabfe02982f6dc96f750486c0bb0009ac58b26d2b" -dependencies = [ - "bytes", - "fnv", - "futures-core", - "futures-sink", - "futures-util", - "http", - "indexmap", - "slab", - "tokio", - "tokio-util", - "tracing", -] - -[[package]] -name = "hashbrown" -version = "0.11.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ab5ef0d4909ef3724cc8cce6ccc8572c5c817592e9285f5464f8e86f8bd3726e" - -[[package]] -name = "hermit-abi" -version = "0.1.19" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "62b467343b94ba476dcb2500d242dadbb39557df889310ac77c5d99100aaac33" -dependencies = [ - "libc", -] - -[[package]] -name = "http" -version = "0.2.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "31f4c6746584866f0feabcc69893c5b51beef3831656a968ed7ae254cdc4fd03" -dependencies = [ - "bytes", - "fnv", - "itoa", -] - -[[package]] -name = "http-body" -version = "0.4.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1ff4f84919677303da5f147645dbea6b1881f368d03ac84e1dc09031ebd7b2c6" -dependencies = [ - "bytes", - "http", - "pin-project-lite", -] - -[[package]] -name = "httparse" -version = "1.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9100414882e15fb7feccb4897e5f0ff0ff1ca7d1a86a23208ada4d7a18e6c6c4" - -[[package]] -name = "httpdate" -version = "1.0.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c4a1e36c821dbe04574f602848a19f742f4fb3c98d40449f11bcad18d6b17421" - -[[package]] -name = "hyper" -version = "0.14.18" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b26ae0a80afebe130861d90abf98e3814a4f28a4c6ffeb5ab8ebb2be311e0ef2" -dependencies = [ - "bytes", - "futures-channel", - "futures-core", - "futures-util", - "h2", - "http", - "http-body", - "httparse", - "httpdate", - "itoa", - "pin-project-lite", - "socket2", - "tokio", - "tower-service", - "tracing", - "want", -] - -[[package]] -name = "hyper-tls" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d6183ddfa99b85da61a140bea0efc93fdf56ceaa041b37d553518030827f9905" -dependencies = [ - "bytes", - "hyper", - "native-tls", - "tokio", - "tokio-native-tls", -] - -[[package]] -name = "idna" -version = 
"0.2.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "418a0a6fab821475f634efe3ccc45c013f742efe03d853e8d3355d5cb850ecf8" -dependencies = [ - "matches", - "unicode-bidi", - "unicode-normalization", -] - -[[package]] -name = "indexmap" -version = "1.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "282a6247722caba404c065016bbfa522806e51714c34f5dfc3e4a3a46fcb4223" -dependencies = [ - "autocfg", - "hashbrown", -] - -[[package]] -name = "insta" -version = "1.13.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "30a7e1911532a662f6b08b68f884080850f2fd9544963c3ab23a5af42bda1eac" -dependencies = [ - "console", - "once_cell", - "serde", - "serde_json", - "serde_yaml", - "similar", -] - -[[package]] -name = "instant" -version = "0.1.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7a5bbe824c507c5da5956355e86a746d82e0e1464f65d862cc5e71da70e94b2c" -dependencies = [ - "cfg-if", -] - -[[package]] -name = "ipnet" -version = "2.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "35e70ee094dc02fd9c13fdad4940090f22dbd6ac7c9e7094a46cf0232a50bc7c" - -[[package]] -name = "itoa" -version = "1.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1aab8fc367588b89dcee83ab0fd66b72b50b72fa1904d7095045ace2b0c81c35" - -[[package]] -name = "js-sys" -version = "0.3.56" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a38fc24e30fd564ce974c02bf1d337caddff65be6cc4735a1f7eab22a7440f04" -dependencies = [ - "wasm-bindgen", -] - -[[package]] -name = "lazy_static" -version = "1.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" - -[[package]] -name = "libc" -version = "0.2.121" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "efaa7b300f3b5fe8eb6bf21ce3895e1751d9665086af2d64b42f19701015ff4f" - -[[package]] -name = "linked-hash-map" -version = "0.5.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7fb9b38af92608140b86b693604b9ffcc5824240a484d1ecd4795bacb2fe88f3" - -[[package]] -name = "log" -version = "0.4.16" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6389c490849ff5bc16be905ae24bc913a9c8892e19b2341dbc175e14c341c2b8" -dependencies = [ - "cfg-if", -] - -[[package]] -name = "matches" -version = "0.1.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a3e378b66a060d48947b590737b30a1be76706c8dd7b8ba0f2fe3989c68a853f" - -[[package]] -name = "memchr" -version = "2.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "308cc39be01b73d0d18f82a0e7b2a3df85245f84af96fdddc5d202d27e47b86a" - -[[package]] -name = "memoffset" -version = "0.6.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5aa361d4faea93603064a027415f07bd8e1d5c88c9fbf68bf56a285428fd79ce" -dependencies = [ - "autocfg", -] - -[[package]] -name = "mime" -version = "0.3.16" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2a60c7ce501c71e03a9c9c0d35b861413ae925bd979cc7a4e30d060069aaac8d" - -[[package]] -name = "miniz_oxide" -version = "0.4.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a92518e98c078586bc6c934028adcca4c92a53d6a958196de835170a01d84e4b" -dependencies = [ - "adler", - "autocfg", -] - -[[package]] -name = 
"mio" -version = "0.8.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "52da4364ffb0e4fe33a9841a98a3f3014fb964045ce4f7a45a398243c8d6b0c9" -dependencies = [ - "libc", - "log", - "miow", - "ntapi", - "wasi", - "winapi", -] - -[[package]] -name = "miow" -version = "0.3.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b9f1c5b025cda876f66ef43a113f91ebc9f4ccef34843000e0adf6ebbab84e21" -dependencies = [ - "winapi", -] - -[[package]] -name = "native-tls" -version = "0.2.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "48ba9f7719b5a0f42f338907614285fb5fd70e53858141f69898a1fb7203b24d" -dependencies = [ - "lazy_static", - "libc", - "log", - "openssl", - "openssl-probe", - "openssl-sys", - "schannel", - "security-framework", - "security-framework-sys", - "tempfile", -] - -[[package]] -name = "ntapi" -version = "0.3.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c28774a7fd2fbb4f0babd8237ce554b73af68021b5f695a3cebd6c59bac0980f" -dependencies = [ - "winapi", -] - -[[package]] -name = "num_cpus" -version = "1.13.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "19e64526ebdee182341572e50e9ad03965aa510cd94427a4549448f285e957a1" -dependencies = [ - "hermit-abi", - "libc", -] - -[[package]] -name = "once_cell" -version = "1.10.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "87f3e037eac156d1775da914196f0f37741a274155e34a0b7e427c35d2a2ecb9" - -[[package]] -name = "openssl" -version = "0.10.38" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0c7ae222234c30df141154f159066c5093ff73b63204dcda7121eb082fc56a95" -dependencies = [ - "bitflags", - "cfg-if", - "foreign-types", - "libc", - "once_cell", - "openssl-sys", -] - -[[package]] -name = "openssl-probe" -version = "0.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf" - -[[package]] -name = "openssl-sys" -version = "0.9.72" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7e46109c383602735fa0a2e48dd2b7c892b048e1bf69e5c3b1d804b7d9c203cb" -dependencies = [ - "autocfg", - "cc", - "libc", - "pkg-config", - "vcpkg", -] - -[[package]] -name = "percent-encoding" -version = "2.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d4fd5641d01c8f18a23da7b6fe29298ff4b55afcccdf78973b24cf3175fee32e" - -[[package]] -name = "pin-project-lite" -version = "0.2.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e280fbe77cc62c91527259e9442153f4688736748d24660126286329742b4c6c" - -[[package]] -name = "pin-utils" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" - -[[package]] -name = "pkg-config" -version = "0.3.24" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "58893f751c9b0412871a09abd62ecd2a00298c6c83befa223ef98c52aef40cbe" - -[[package]] -name = "proc-macro2" -version = "1.0.36" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c7342d5883fbccae1cc37a2353b09c87c9b0f3afd73f5fb9bba687a1f733b029" -dependencies = [ - "unicode-xid", -] - -[[package]] -name = "quote" -version = "1.0.16" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"b4af2ec4714533fcdf07e886f17025ace8b997b9ce51204ee69b6da831c3da57" -dependencies = [ - "proc-macro2", -] - -[[package]] -name = "rayon" -version = "1.5.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c06aca804d41dbc8ba42dfd964f0d01334eceb64314b9ecf7c5fad5188a06d90" -dependencies = [ - "autocfg", - "crossbeam-deque", - "either", - "rayon-core", -] - -[[package]] -name = "rayon-core" -version = "1.9.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d78120e2c850279833f1dd3582f730c4ab53ed95aeaaaa862a2a5c71b1656d8e" -dependencies = [ - "crossbeam-channel", - "crossbeam-deque", - "crossbeam-utils", - "lazy_static", - "num_cpus", -] - -[[package]] -name = "redox_syscall" -version = "0.2.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8380fe0152551244f0747b1bf41737e0f8a74f97a14ccefd1148187271634f3c" -dependencies = [ - "bitflags", -] - -[[package]] -name = "ref-cast" -version = "1.0.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "300f2a835d808734ee295d45007adacb9ebb29dd3ae2424acfa17930cae541da" -dependencies = [ - "ref-cast-impl", -] - -[[package]] -name = "ref-cast-impl" -version = "1.0.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4c38e3aecd2b21cb3959637b883bb3714bc7e43f0268b9a29d3743ee3e55cdd2" -dependencies = [ - "proc-macro2", - "quote", - "syn 1.0.89 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "regex" -version = "1.5.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1a11647b6b25ff05a515cb92c365cec08801e83423a235b51e231e1808747286" -dependencies = [ - "aho-corasick", - "memchr", - "regex-syntax", -] - -[[package]] -name = "regex-syntax" -version = "0.6.25" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f497285884f3fcff424ffc933e56d7cbca511def0c9831a7f9b5f6153e3cc89b" - -[[package]] -name = "remove_dir_all" -version = "0.5.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3acd125665422973a33ac9d3dd2df85edad0f4ae9b00dafb1a05e43a9f5ef8e7" -dependencies = [ - "winapi", -] - -[[package]] -name = "reqwest" -version = "0.11.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "46a1f7aa4f35e5e8b4160449f51afc758f0ce6454315a9fa7d0d113e958c41eb" -dependencies = [ - "base64", - "bytes", - "encoding_rs", - "futures-core", - "futures-util", - "h2", - "http", - "http-body", - "hyper", - "hyper-tls", - "ipnet", - "js-sys", - "lazy_static", - "log", - "mime", - "native-tls", - "percent-encoding", - "pin-project-lite", - "serde", - "serde_json", - "serde_urlencoded", - "tokio", - "tokio-native-tls", - "url", - "wasm-bindgen", - "wasm-bindgen-futures", - "web-sys", - "winreg", -] - -[[package]] -name = "ryu" -version = "1.0.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "73b4b750c782965c211b42f022f59af1fbceabdd026623714f104152f1ec149f" - -[[package]] -name = "same-file" -version = "1.0.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502" -dependencies = [ - "winapi-util", -] - -[[package]] -name = "schannel" -version = "0.1.19" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8f05ba609c234e60bee0d547fe94a4c7e9da733d1c962cf6e59efa4cd9c8bc75" -dependencies = [ - "lazy_static", - "winapi", -] - -[[package]] -name = "scopeguard" 
-version = "1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd" - -[[package]] -name = "security-framework" -version = "2.6.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2dc14f172faf8a0194a3aded622712b0de276821addc574fa54fc0a1167e10dc" -dependencies = [ - "bitflags", - "core-foundation", - "core-foundation-sys", - "libc", - "security-framework-sys", -] - -[[package]] -name = "security-framework-sys" -version = "2.6.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0160a13a177a45bfb43ce71c01580998474f556ad854dcbca936dd2841a5c556" -dependencies = [ - "core-foundation-sys", - "libc", -] - -[[package]] -name = "serde" -version = "1.0.136" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ce31e24b01e1e524df96f1c2fdd054405f8d7376249a5110886fb4b658484789" -dependencies = [ - "serde_derive", -] - -[[package]] -name = "serde_derive" -version = "1.0.136" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "08597e7152fcd306f41838ed3e37be9eaeed2b61c42e2117266a554fab4662f9" -dependencies = [ - "proc-macro2", - "quote", - "syn 1.0.89 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "serde_json" -version = "1.0.79" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e8d9fa5c3b304765ce1fd9c4c8a3de2c8db365a5b91be52f186efc675681d95" -dependencies = [ - "itoa", - "ryu", - "serde", -] - -[[package]] -name = "serde_urlencoded" -version = "0.7.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d3491c14715ca2294c4d6a88f15e84739788c1d030eed8c110436aafdaa2f3fd" -dependencies = [ - "form_urlencoded", - "itoa", - "ryu", - "serde", -] - -[[package]] -name = "serde_yaml" -version = "0.8.23" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a4a521f2940385c165a24ee286aa8599633d162077a54bdcae2a6fd5a7bfa7a0" -dependencies = [ - "indexmap", - "ryu", - "serde", - "yaml-rust", -] - -[[package]] -name = "similar" -version = "2.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2e24979f63a11545f5f2c60141afe249d4f19f84581ea2138065e400941d83d3" - -[[package]] -name = "slab" -version = "0.4.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9def91fd1e018fe007022791f865d0ccc9b3a0d5001e01aabb8b40e46000afb5" - -[[package]] -name = "socket2" -version = "0.4.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "66d72b759436ae32898a2af0a14218dbf55efde3feeb170eb623637db85ee1e0" -dependencies = [ - "libc", - "winapi", -] - -[[package]] -name = "syn" -version = "1.0.89" -dependencies = [ - "anyhow", - "automod", - "flate2", - "insta", - "proc-macro2", - "quote", - "rayon", - "ref-cast", - "regex", - "reqwest", - "syn-test-suite", - "tar", - "termcolor", - "unicode-xid", - "walkdir", -] - -[[package]] -name = "syn" -version = "1.0.89" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ea297be220d52398dcc07ce15a209fce436d361735ac1db700cab3b6cdfb9f54" -dependencies = [ - "proc-macro2", - "quote", - "unicode-xid", -] - -[[package]] -name = "syn-test-suite" -version = "0.0.0+test" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a0d661992f60e67c8bdd9a7d6360d30d1301f5783abf7d59933844f656762eb5" - -[[package]] -name = "tar" -version = "0.4.38" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "4b55807c0344e1e6c04d7c965f5289c39a8d94ae23ed5c0b57aabac549f871c6" -dependencies = [ - "filetime", - "libc", - "xattr", -] - -[[package]] -name = "tempfile" -version = "3.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5cdb1ef4eaeeaddc8fbd371e5017057064af0911902ef36b39801f67cc6d79e4" -dependencies = [ - "cfg-if", - "fastrand", - "libc", - "redox_syscall", - "remove_dir_all", - "winapi", -] - -[[package]] -name = "termcolor" -version = "1.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bab24d30b911b2376f3a13cc2cd443142f0c81dda04c118693e35b3835757755" -dependencies = [ - "winapi-util", -] - -[[package]] -name = "terminal_size" -version = "0.1.17" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "633c1a546cee861a1a6d0dc69ebeca693bf4296661ba7852b9d21d159e0506df" -dependencies = [ - "libc", - "winapi", -] - -[[package]] -name = "tinyvec" -version = "1.5.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2c1c1d5a42b6245520c249549ec267180beaffcc0615401ac8e31853d4b6d8d2" -dependencies = [ - "tinyvec_macros", -] - -[[package]] -name = "tinyvec_macros" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cda74da7e1a664f795bb1f8a87ec406fb89a02522cf6e50620d016add6dbbf5c" - -[[package]] -name = "tokio" -version = "1.17.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2af73ac49756f3f7c01172e34a23e5d0216f6c32333757c2c61feb2bbff5a5ee" -dependencies = [ - "bytes", - "libc", - "memchr", - "mio", - "num_cpus", - "pin-project-lite", - "socket2", - "winapi", -] - -[[package]] -name = "tokio-native-tls" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f7d995660bd2b7f8c1568414c1126076c13fbb725c40112dc0120b78eb9b717b" -dependencies = [ - "native-tls", - "tokio", -] - -[[package]] -name = "tokio-util" -version = "0.6.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9e99e1983e5d376cd8eb4b66604d2e99e79f5bd988c3055891dcd8c9e2604cc0" -dependencies = [ - "bytes", - "futures-core", - "futures-sink", - "log", - "pin-project-lite", - "tokio", -] - -[[package]] -name = "tower-service" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "360dfd1d6d30e05fda32ace2c8c70e9c0a9da713275777f5a4dbb8a1893930c6" - -[[package]] -name = "tracing" -version = "0.1.32" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4a1bdf54a7c28a2bbf701e1d2233f6c77f473486b94bee4f9678da5a148dca7f" -dependencies = [ - "cfg-if", - "pin-project-lite", - "tracing-core", -] - -[[package]] -name = "tracing-core" -version = "0.1.23" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aa31669fa42c09c34d94d8165dd2012e8ff3c66aca50f3bb226b68f216f2706c" -dependencies = [ - "lazy_static", -] - -[[package]] -name = "try-lock" -version = "0.2.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "59547bce71d9c38b83d9c0e92b6066c4253371f15005def0c30d9657f50c7642" - -[[package]] -name = "unicode-bidi" -version = "0.3.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1a01404663e3db436ed2746d9fefef640d868edae3cceb81c3b8d5732fda678f" - -[[package]] -name = "unicode-normalization" -version = "0.1.19" -source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "d54590932941a9e9266f0832deed84ebe1bf2e4c9e4a3554d393d18f5e854bf9" -dependencies = [ - "tinyvec", -] - -[[package]] -name = "unicode-xid" -version = "0.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8ccb82d61f80a663efe1f787a51b16b5a51e3314d6ac365b08639f52387b33f3" - -[[package]] -name = "url" -version = "2.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a507c383b2d33b5fc35d1861e77e6b383d158b2da5e14fe51b83dfedf6fd578c" -dependencies = [ - "form_urlencoded", - "idna", - "matches", - "percent-encoding", -] - -[[package]] -name = "vcpkg" -version = "0.2.15" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426" - -[[package]] -name = "walkdir" -version = "2.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "808cf2735cd4b6866113f648b791c6adc5714537bc222d9347bb203386ffda56" -dependencies = [ - "same-file", - "winapi", - "winapi-util", -] - -[[package]] -name = "want" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1ce8a968cb1cd110d136ff8b819a556d6fb6d919363c61534f6860c7eb172ba0" -dependencies = [ - "log", - "try-lock", -] - -[[package]] -name = "wasi" -version = "0.11.0+wasi-snapshot-preview1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" - -[[package]] -name = "wasm-bindgen" -version = "0.2.79" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "25f1af7423d8588a3d840681122e72e6a24ddbcb3f0ec385cac0d12d24256c06" -dependencies = [ - "cfg-if", - "wasm-bindgen-macro", -] - -[[package]] -name = "wasm-bindgen-backend" -version = "0.2.79" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8b21c0df030f5a177f3cba22e9bc4322695ec43e7257d865302900290bcdedca" -dependencies = [ - "bumpalo", - "lazy_static", - "log", - "proc-macro2", - "quote", - "syn 1.0.89 (registry+https://github.com/rust-lang/crates.io-index)", - "wasm-bindgen-shared", -] - -[[package]] -name = "wasm-bindgen-futures" -version = "0.4.29" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2eb6ec270a31b1d3c7e266b999739109abce8b6c87e4b31fcfcd788b65267395" -dependencies = [ - "cfg-if", - "js-sys", - "wasm-bindgen", - "web-sys", -] - -[[package]] -name = "wasm-bindgen-macro" -version = "0.2.79" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2f4203d69e40a52ee523b2529a773d5ffc1dc0071801c87b3d270b471b80ed01" -dependencies = [ - "quote", - "wasm-bindgen-macro-support", -] - -[[package]] -name = "wasm-bindgen-macro-support" -version = "0.2.79" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bfa8a30d46208db204854cadbb5d4baf5fcf8071ba5bf48190c3e59937962ebc" -dependencies = [ - "proc-macro2", - "quote", - "syn 1.0.89 (registry+https://github.com/rust-lang/crates.io-index)", - "wasm-bindgen-backend", - "wasm-bindgen-shared", -] - -[[package]] -name = "wasm-bindgen-shared" -version = "0.2.79" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3d958d035c4438e28c70e4321a2911302f10135ce78a9c7834c0cab4123d06a2" - -[[package]] -name = "web-sys" -version = "0.3.56" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c060b319f29dd25724f09a2ba1418f142f539b2be99fbf4d2d5a8f7330afb8eb" -dependencies = [ - "js-sys", - 
"wasm-bindgen", -] - -[[package]] -name = "winapi" -version = "0.3.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" -dependencies = [ - "winapi-i686-pc-windows-gnu", - "winapi-x86_64-pc-windows-gnu", -] - -[[package]] -name = "winapi-i686-pc-windows-gnu" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" - -[[package]] -name = "winapi-util" -version = "0.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "70ec6ce85bb158151cae5e5c87f95a8e97d2c0c4b001223f33a334e3ce5de178" -dependencies = [ - "winapi", -] - -[[package]] -name = "winapi-x86_64-pc-windows-gnu" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" - -[[package]] -name = "winreg" -version = "0.10.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "80d0f4e272c85def139476380b12f9ac60926689dd2e01d4923222f40580869d" -dependencies = [ - "winapi", -] - -[[package]] -name = "xattr" -version = "0.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "244c3741f4240ef46274860397c7c74e50eb23624996930e484c16679633a54c" -dependencies = [ - "libc", -] - -[[package]] -name = "yaml-rust" -version = "0.4.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "56c1936c4cc7a1c9ab21a1ebb602eb942ba868cbd44a99cb7cdc5892335e1c85" -dependencies = [ - "linked-hash-map", -] diff --git a/collector/compile-benchmarks/syn-1.0.89/Cargo.toml b/collector/compile-benchmarks/syn-1.0.89/Cargo.toml deleted file mode 100644 index 6afef27e2..000000000 --- a/collector/compile-benchmarks/syn-1.0.89/Cargo.toml +++ /dev/null @@ -1,139 +0,0 @@ -# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO -# -# When uploading crates to the registry Cargo will automatically -# "normalize" Cargo.toml files for maximal compatibility -# with all versions of Cargo and also rewrite `path` dependencies -# to registry (e.g., crates.io) dependencies. -# -# If you are reading this file be aware that the original Cargo.toml -# will likely look very different (and much more reasonable). -# See Cargo.toml.orig for the original contents. 
- -[package] -edition = "2018" -rust-version = "1.31" -name = "syn" -version = "1.0.89" -authors = ["David Tolnay "] -include = [ - "/benches/**", - "/build.rs", - "/Cargo.toml", - "/LICENSE-APACHE", - "/LICENSE-MIT", - "/README.md", - "/src/**", - "/tests/**", -] -description = "Parser for Rust source code" -documentation = "https://docs.rs/syn" -readme = "README.md" -categories = ["development-tools::procedural-macro-helpers"] -license = "MIT OR Apache-2.0" -repository = "https://github.com/dtolnay/syn" - -[package.metadata.docs.rs] -all-features = true -targets = ["x86_64-unknown-linux-gnu"] -rustdoc-args = [ - "--cfg", - "doc_cfg", -] - -[package.metadata.playground] -features = [ - "full", - "visit", - "visit-mut", - "fold", - "extra-traits", -] - -[[bench]] -name = "rust" -harness = false -required-features = [ - "full", - "parsing", -] - -[[bench]] -name = "file" -required-features = [ - "full", - "parsing", -] - -[dependencies.proc-macro2] -version = "1.0.32" -default-features = false - -[dependencies.quote] -version = "1.0" -optional = true -default-features = false - -[dependencies.unicode-xid] -version = "0.2" - -[dev-dependencies.anyhow] -version = "1.0" - -[dev-dependencies.automod] -version = "1.0" - -[dev-dependencies.flate2] -version = "1.0" - -[dev-dependencies.insta] -version = "1.0" - -[dev-dependencies.rayon] -version = "1.0" - -[dev-dependencies.ref-cast] -version = "1.0" - -[dev-dependencies.regex] -version = "1.0" - -[dev-dependencies.reqwest] -version = "0.11" -features = ["blocking"] - -[dev-dependencies.syn-test-suite] -version = "0" - -[dev-dependencies.tar] -version = "0.4.16" - -[dev-dependencies.termcolor] -version = "1.0" - -[dev-dependencies.walkdir] -version = "2.1" - -[features] -clone-impls = [] -default = [ - "derive", - "parsing", - "printing", - "clone-impls", - "proc-macro", -] -derive = [] -extra-traits = [] -fold = [] -full = [] -parsing = [] -printing = ["quote"] -proc-macro = [ - "proc-macro2/proc-macro", - "quote/proc-macro", -] -test = ["syn-test-suite/all-features"] -visit = [] -visit-mut = [] - -[workspace] diff --git a/collector/compile-benchmarks/syn-1.0.89/Cargo.toml.orig b/collector/compile-benchmarks/syn-1.0.89/Cargo.toml.orig deleted file mode 100644 index 0482dd68e..000000000 --- a/collector/compile-benchmarks/syn-1.0.89/Cargo.toml.orig +++ /dev/null @@ -1,75 +0,0 @@ -[package] -name = "syn" -version = "1.0.89" # don't forget to update html_root_url and syn.json -authors = ["David Tolnay "] -license = "MIT OR Apache-2.0" -description = "Parser for Rust source code" -repository = "https://github.com/dtolnay/syn" -documentation = "https://docs.rs/syn" -categories = ["development-tools::procedural-macro-helpers"] -readme = "README.md" -include = [ - "/benches/**", - "/build.rs", - "/Cargo.toml", - "/LICENSE-APACHE", - "/LICENSE-MIT", - "/README.md", - "/src/**", - "/tests/**", -] -edition = "2018" -rust-version = "1.31" - -[features] -default = ["derive", "parsing", "printing", "clone-impls", "proc-macro"] -derive = [] -full = [] -parsing = [] -printing = ["quote"] -visit = [] -visit-mut = [] -fold = [] -clone-impls = [] -extra-traits = [] -proc-macro = ["proc-macro2/proc-macro", "quote/proc-macro"] -test = ["syn-test-suite/all-features"] - -[dependencies] -proc-macro2 = { version = "1.0.32", default-features = false } -quote = { version = "1.0", optional = true, default-features = false } -unicode-xid = "0.2" - -[dev-dependencies] -anyhow = "1.0" -automod = "1.0" -flate2 = "1.0" -insta = "1.0" -rayon = "1.0" -ref-cast = "1.0" -regex = 
"1.0" -reqwest = { version = "0.11", features = ["blocking"] } -syn-test-suite = { version = "0", path = "tests/features" } -tar = "0.4.16" -termcolor = "1.0" -walkdir = "2.1" - -[[bench]] -name = "rust" -harness = false -required-features = ["full", "parsing"] - -[[bench]] -name = "file" -required-features = ["full", "parsing"] - -[package.metadata.docs.rs] -all-features = true -targets = ["x86_64-unknown-linux-gnu"] -rustdoc-args = ["--cfg", "doc_cfg"] - -[package.metadata.playground] -features = ["full", "visit", "visit-mut", "fold", "extra-traits"] - -[workspace] -members = ["dev", "json", "tests/crates", "tests/features"] diff --git a/collector/compile-benchmarks/syn-1.0.89/LICENSE-APACHE b/collector/compile-benchmarks/syn-1.0.89/LICENSE-APACHE deleted file mode 100644 index 16fe87b06..000000000 --- a/collector/compile-benchmarks/syn-1.0.89/LICENSE-APACHE +++ /dev/null @@ -1,201 +0,0 @@ - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - -TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - -1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. 
For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - -2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - -3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - -4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. 
The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - -5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - -6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - -7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - -8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - -9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. 
- -END OF TERMS AND CONDITIONS - -APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - -Copyright [yyyy] [name of copyright owner] - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. diff --git a/collector/compile-benchmarks/syn-1.0.89/LICENSE-MIT b/collector/compile-benchmarks/syn-1.0.89/LICENSE-MIT deleted file mode 100644 index 31aa79387..000000000 --- a/collector/compile-benchmarks/syn-1.0.89/LICENSE-MIT +++ /dev/null @@ -1,23 +0,0 @@ -Permission is hereby granted, free of charge, to any -person obtaining a copy of this software and associated -documentation files (the "Software"), to deal in the -Software without restriction, including without -limitation the rights to use, copy, modify, merge, -publish, distribute, sublicense, and/or sell copies of -the Software, and to permit persons to whom the Software -is furnished to do so, subject to the following -conditions: - -The above copyright notice and this permission notice -shall be included in all copies or substantial portions -of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF -ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED -TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A -PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT -SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY -CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR -IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER -DEALINGS IN THE SOFTWARE. diff --git a/collector/compile-benchmarks/syn-1.0.89/README.md b/collector/compile-benchmarks/syn-1.0.89/README.md deleted file mode 100644 index 38005f5e5..000000000 --- a/collector/compile-benchmarks/syn-1.0.89/README.md +++ /dev/null @@ -1,285 +0,0 @@ -Parser for Rust source code -=========================== - -[github](https://github.com/dtolnay/syn) -[crates.io](https://crates.io/crates/syn) -[docs.rs](https://docs.rs/syn) -[build status](https://github.com/dtolnay/syn/actions?query=branch%3Amaster) - -Syn is a parsing library for parsing a stream of Rust tokens into a syntax tree -of Rust source code. - -Currently this library is geared toward use in Rust procedural macros, but -contains some APIs that may be useful more generally. - -- **Data structures** — Syn provides a complete syntax tree that can represent - any valid Rust source code. The syntax tree is rooted at [`syn::File`] which - represents a full source file, but there are other entry points that may be - useful to procedural macros including [`syn::Item`], [`syn::Expr`] and - [`syn::Type`]. 
- -- **Derives** — Of particular interest to derive macros is [`syn::DeriveInput`] - which is any of the three legal input items to a derive macro. An example - below shows using this type in a library that can derive implementations of a - user-defined trait. - -- **Parsing** — Parsing in Syn is built around [parser functions] with the - signature `fn(ParseStream) -> Result<T>`. Every syntax tree node defined by - Syn is individually parsable and may be used as a building block for custom - syntaxes, or you may dream up your own brand new syntax without involving any - of our syntax tree types. - -- **Location information** — Every token parsed by Syn is associated with a - `Span` that tracks line and column information back to the source of that - token. These spans allow a procedural macro to display detailed error messages - pointing to all the right places in the user's code. There is an example of - this below. - -- **Feature flags** — Functionality is aggressively feature gated so your - procedural macros enable only what they need, and do not pay in compile time - for all the rest. - -[`syn::File`]: https://docs.rs/syn/1.0/syn/struct.File.html -[`syn::Item`]: https://docs.rs/syn/1.0/syn/enum.Item.html -[`syn::Expr`]: https://docs.rs/syn/1.0/syn/enum.Expr.html -[`syn::Type`]: https://docs.rs/syn/1.0/syn/enum.Type.html -[`syn::DeriveInput`]: https://docs.rs/syn/1.0/syn/struct.DeriveInput.html -[parser functions]: https://docs.rs/syn/1.0/syn/parse/index.html - -*Version requirement: Syn supports rustc 1.31 and up.* - -[*Release notes*](https://github.com/dtolnay/syn/releases) - -
- -## Resources - -The best way to learn about procedural macros is by writing some. Consider -working through [this procedural macro workshop][workshop] to get familiar with -the different types of procedural macros. The workshop contains relevant links -into the Syn documentation as you work through each project. - -[workshop]: https://github.com/dtolnay/proc-macro-workshop - -
- -## Example of a derive macro - -The canonical derive macro using Syn looks like this. We write an ordinary Rust -function tagged with a `proc_macro_derive` attribute and the name of the trait -we are deriving. Any time that derive appears in the user's code, the Rust -compiler passes their data structure as tokens into our macro. We get to execute -arbitrary Rust code to figure out what to do with those tokens, then hand some -tokens back to the compiler to compile into the user's crate. - -[`TokenStream`]: https://doc.rust-lang.org/proc_macro/struct.TokenStream.html - -```toml -[dependencies] -syn = "1.0" -quote = "1.0" - -[lib] -proc-macro = true -``` - -```rust -use proc_macro::TokenStream; -use quote::quote; -use syn::{parse_macro_input, DeriveInput}; - -#[proc_macro_derive(MyMacro)] -pub fn my_macro(input: TokenStream) -> TokenStream { - // Parse the input tokens into a syntax tree - let input = parse_macro_input!(input as DeriveInput); - - // Build the output, possibly using quasi-quotation - let expanded = quote! { - // ... - }; - - // Hand the output tokens back to the compiler - TokenStream::from(expanded) -} -``` - -The [`heapsize`] example directory shows a complete working implementation of a -derive macro. It works on any Rust compiler 1.31+. The example derives a -`HeapSize` trait which computes an estimate of the amount of heap memory owned -by a value. - -[`heapsize`]: examples/heapsize - -```rust -pub trait HeapSize { - /// Total number of bytes of heap memory owned by `self`. - fn heap_size_of_children(&self) -> usize; -} -``` - -The derive macro allows users to write `#[derive(HeapSize)]` on data structures -in their program. - -```rust -#[derive(HeapSize)] -struct Demo<'a, T: ?Sized> { - a: Box<T>, - b: u8, - c: &'a str, - d: String, -} -``` - -
-
-## Spans and error reporting
-
-The token-based procedural macro API provides great control over where the
-compiler's error messages are displayed in user code. Consider the error the
-user sees if one of their field types does not implement `HeapSize`.
-
-```rust
-#[derive(HeapSize)]
-struct Broken {
-    ok: String,
-    bad: std::thread::Thread,
-}
-```
-
-By tracking span information all the way through the expansion of a procedural
-macro as shown in the `heapsize` example, token-based macros in Syn are able to
-trigger errors that directly pinpoint the source of the problem.
-
-```console
-error[E0277]: the trait bound `std::thread::Thread: HeapSize` is not satisfied
- --> src/main.rs:7:5
-  |
-7 |     bad: std::thread::Thread,
-  |     ^^^^^^^^^^^^^^^^^^^^^^^^ the trait `HeapSize` is not implemented for `std::thread::Thread`
-```
-
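As an aside, a minimal sketch of how a macro can emit such a spanned error itself, using `syn::Error` and `quote_spanned!` from the `quote` crate; the union-rejection check here is a hypothetical illustration, not the heapsize example's actual logic:

```rust
use proc_macro2::TokenStream;
use quote::quote_spanned;
use syn::spanned::Spanned;
use syn::{Data, DeriveInput, Error};

fn expand(input: DeriveInput) -> TokenStream {
    match &input.data {
        // Reject unions with an error pointing at the `union` keyword in the
        // user's source rather than at the macro invocation site.
        Data::Union(data) => Error::new(data.union_token.span(), "unions are not supported")
            .to_compile_error(),
        // Spanning the generated code to the input makes downstream trait
        // errors point back at the user's item.
        _ => quote_spanned!(input.span()=> /* generated impl goes here */),
    }
}
```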
- -## Parsing a custom syntax - -The [`lazy-static`] example directory shows the implementation of a -`functionlike!(...)` procedural macro in which the input tokens are parsed using -Syn's parsing API. - -[`lazy-static`]: examples/lazy-static - -The example reimplements the popular `lazy_static` crate from crates.io as a -procedural macro. - -```rust -lazy_static! { - static ref USERNAME: Regex = Regex::new("^[a-z0-9_-]{3,16}$").unwrap(); -} -``` - -The implementation shows how to trigger custom warnings and error messages on -the macro input. - -```console -warning: come on, pick a more creative name - --> src/main.rs:10:16 - | -10 | static ref FOO: String = "lazy_static".to_owned(); - | ^^^ -``` - -
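A condensed sketch of the kind of `Parse` implementation that example relies on (not the exact code from `examples/lazy-static`): the input `static ref NAME: Ty = expr;` maps naturally onto a struct whose fields are parsed in order.

```rust
use syn::parse::{Parse, ParseStream};
use syn::{Expr, Ident, Result, Token, Type, Visibility};

// One `static ref NAME: Ty = expr;` declaration from the macro input.
struct LazyStatic {
    visibility: Visibility,
    name: Ident,
    ty: Type,
    init: Expr,
}

impl Parse for LazyStatic {
    fn parse(input: ParseStream) -> Result<Self> {
        let visibility: Visibility = input.parse()?;
        input.parse::<Token![static]>()?;
        input.parse::<Token![ref]>()?;
        let name: Ident = input.parse()?;
        input.parse::<Token![:]>()?;
        let ty: Type = input.parse()?;
        input.parse::<Token![=]>()?;
        let init: Expr = input.parse()?;
        input.parse::<Token![;]>()?;
        Ok(LazyStatic { visibility, name, ty, init })
    }
}
```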
-
-## Testing
-
-When testing macros, we often care not just that the macro can be used
-successfully but also that when the macro is provided with invalid input it
-produces maximally helpful error messages. Consider using the [`trybuild`] crate
-to write tests for errors that are emitted by your macro or errors detected by
-the Rust compiler in the expanded code following misuse of the macro. Such tests
-help avoid regressions from later refactors that mistakenly make an error no
-longer trigger or be less helpful than it used to be.
-
-[`trybuild`]: https://github.com/dtolnay/trybuild
-
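A typical trybuild harness is only a few lines; a sketch, with hypothetical file paths:

```rust
// tests/ui.rs: each file under tests/ui/ is expected to fail to compile,
// and its compiler output is checked against a sibling *.stderr file.
#[test]
fn ui() {
    let t = trybuild::TestCases::new();
    t.compile_fail("tests/ui/*.rs");
}
```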
-
-## Debugging
-
-When developing a procedural macro it can be helpful to look at what the
-generated code looks like. Use `cargo rustc -- -Zunstable-options
---pretty=expanded` or the [`cargo expand`] subcommand.
-
-[`cargo expand`]: https://github.com/dtolnay/cargo-expand
-
-To show the expanded code for some crate that uses your procedural macro, run
-`cargo expand` from that crate. To show the expanded code for one of your own
-test cases, run `cargo expand --test the_test_case` where the last argument is
-the name of the test file without the `.rs` extension.
-
-This write-up by Brandon W Maister discusses debugging in more detail:
-[Debugging Rust's new Custom Derive system][debugging].
-
-[debugging]: https://quodlibetor.github.io/posts/debugging-rusts-new-custom-derive-system/
-
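For instance, if the macro crate has a test file `tests/test_size.rs` (a hypothetical name), its expansion can be inspected with:

```console
$ cargo expand --test test_size
```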
-
-## Optional features
-
-Syn puts a lot of functionality behind optional features in order to optimize
-compile time for the most common use cases. The following features are
-available.
-
-- **`derive`** *(enabled by default)* — Data structures for representing the
-  possible input to a derive macro, including structs and enums and unions.
-- **`full`** — Data structures for representing the syntax tree of all valid
-  Rust source code, including items and expressions.
-- **`parsing`** *(enabled by default)* — Ability to parse input tokens into a
-  syntax tree node of a chosen type.
-- **`printing`** *(enabled by default)* — Ability to print a syntax tree node as
-  tokens of Rust source code.
-- **`visit`** — Trait for traversing a syntax tree.
-- **`visit-mut`** — Trait for traversing and mutating in place a syntax tree.
-- **`fold`** — Trait for transforming an owned syntax tree.
-- **`clone-impls`** *(enabled by default)* — Clone impls for all syntax tree
-  types.
-- **`extra-traits`** — Debug, Eq, PartialEq, Hash impls for all syntax tree
-  types.
-- **`proc-macro`** *(enabled by default)* — Runtime dependency on the dynamic
-  library libproc_macro from rustc toolchain.
-
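For example, a macro crate that needs to parse expressions or whole items would enable `full` in its manifest; a sketch of the two common manifest styles, not taken from this benchmark's configuration:

```toml
[dependencies]
# Default features plus `full` for parsing function bodies and expressions.
syn = { version = "1.0", features = ["full"] }

# Or opt out of the defaults and enable only what the macro needs:
# syn = { version = "1.0", default-features = false, features = ["derive", "parsing", "printing", "proc-macro"] }
```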
-
-## Proc macro shim
-
-Syn operates on the token representation provided by the [proc-macro2] crate
-from crates.io rather than using the compiler's built in proc-macro crate
-directly. This enables code using Syn to execute outside of the context of a
-procedural macro, such as in unit tests or build.rs, and we avoid needing
-incompatible ecosystems for proc macros vs non-macro use cases.
-
-In general all of your code should be written against proc-macro2 rather than
-proc-macro. The one exception is in the signatures of procedural macro entry
-points, which are required by the language to use `proc_macro::TokenStream`.
-
-The proc-macro2 crate will automatically detect and use the compiler's data
-structures when a procedural macro is active.
-
-[proc-macro2]: https://docs.rs/proc-macro2/1.0/proc_macro2/
-
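One common way to follow that advice is to keep the macro's logic in a function that works entirely on proc-macro2 types and convert only at the entry point; a sketch, where the `expand` helper is hypothetical:

```rust
use proc_macro::TokenStream;
use quote::quote;
use syn::{parse_macro_input, DeriveInput};

// All real work happens on proc-macro2 types, so this function can be unit
// tested without a compiler-provided proc macro context.
fn expand(input: DeriveInput) -> proc_macro2::TokenStream {
    let name = &input.ident;
    quote! {
        impl #name {
            // ...
        }
    }
}

#[proc_macro_derive(MyMacro)]
pub fn my_macro(input: TokenStream) -> TokenStream {
    let input = parse_macro_input!(input as DeriveInput);
    // Convert back to the compiler's TokenStream only at the boundary.
    expand(input).into()
}
```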
-
-#### License
-
-Licensed under either of Apache License, Version 2.0 or MIT license at your option.
-
- - -Unless you explicitly state otherwise, any contribution intentionally submitted -for inclusion in this crate by you, as defined in the Apache-2.0 license, shall -be dual licensed as above, without any additional terms or conditions. - diff --git a/collector/compile-benchmarks/syn-1.0.89/benches/file.rs b/collector/compile-benchmarks/syn-1.0.89/benches/file.rs deleted file mode 100644 index 86204df2d..000000000 --- a/collector/compile-benchmarks/syn-1.0.89/benches/file.rs +++ /dev/null @@ -1,31 +0,0 @@ -// $ cargo bench --features full --bench file - -#![feature(rustc_private, test)] -#![recursion_limit = "1024"] -#![allow(clippy::missing_panics_doc, clippy::must_use_candidate)] - -extern crate test; - -#[macro_use] -#[path = "../tests/macros/mod.rs"] -mod macros; - -#[path = "../tests/common/mod.rs"] -mod common; -#[path = "../tests/repo/mod.rs"] -pub mod repo; - -use proc_macro2::TokenStream; -use std::fs; -use std::str::FromStr; -use test::Bencher; - -const FILE: &str = "tests/rust/library/core/src/str/mod.rs"; - -#[bench] -fn parse_file(b: &mut Bencher) { - repo::clone_rust(); - let content = fs::read_to_string(FILE).unwrap(); - let tokens = TokenStream::from_str(&content).unwrap(); - b.iter(|| syn::parse2::(tokens.clone())); -} diff --git a/collector/compile-benchmarks/syn-1.0.89/benches/rust.rs b/collector/compile-benchmarks/syn-1.0.89/benches/rust.rs deleted file mode 100644 index 1b1143d20..000000000 --- a/collector/compile-benchmarks/syn-1.0.89/benches/rust.rs +++ /dev/null @@ -1,159 +0,0 @@ -// $ cargo bench --features full --bench rust -// -// Syn only, useful for profiling: -// $ RUSTFLAGS='--cfg syn_only' cargo build --release --features full --bench rust - -#![cfg_attr(not(syn_only), feature(rustc_private))] -#![recursion_limit = "1024"] -#![allow(clippy::cast_lossless, clippy::unnecessary_wraps)] - -#[macro_use] -#[path = "../tests/macros/mod.rs"] -mod macros; - -#[path = "../tests/common/mod.rs"] -mod common; -#[path = "../tests/repo/mod.rs"] -mod repo; - -use std::fs; -use std::time::{Duration, Instant}; - -#[cfg(not(syn_only))] -mod tokenstream_parse { - use proc_macro2::TokenStream; - use std::str::FromStr; - - pub fn bench(content: &str) -> Result<(), ()> { - TokenStream::from_str(content).map(drop).map_err(drop) - } -} - -mod syn_parse { - pub fn bench(content: &str) -> Result<(), ()> { - syn::parse_file(content).map(drop).map_err(drop) - } -} - -#[cfg(not(syn_only))] -mod librustc_parse { - extern crate rustc_data_structures; - extern crate rustc_errors; - extern crate rustc_parse; - extern crate rustc_session; - extern crate rustc_span; - - use rustc_data_structures::sync::Lrc; - use rustc_errors::{emitter::Emitter, Diagnostic, Handler}; - use rustc_session::parse::ParseSess; - use rustc_span::source_map::{FilePathMapping, SourceMap}; - use rustc_span::{edition::Edition, FileName}; - - pub fn bench(content: &str) -> Result<(), ()> { - struct SilentEmitter; - - impl Emitter for SilentEmitter { - fn emit_diagnostic(&mut self, _diag: &Diagnostic) {} - fn source_map(&self) -> Option<&Lrc> { - None - } - } - - rustc_span::create_session_if_not_set_then(Edition::Edition2018, |_| { - let cm = Lrc::new(SourceMap::new(FilePathMapping::empty())); - let emitter = Box::new(SilentEmitter); - let handler = Handler::with_emitter(false, None, emitter); - let sess = ParseSess::with_span_handler(handler, cm); - if let Err(diagnostic) = rustc_parse::parse_crate_from_source_str( - FileName::Custom("bench".to_owned()), - content.to_owned(), - &sess, - ) { - diagnostic.cancel(); - 
return Err(()); - }; - Ok(()) - }) - } -} - -#[cfg(not(syn_only))] -mod read_from_disk { - pub fn bench(content: &str) -> Result<(), ()> { - let _ = content; - Ok(()) - } -} - -fn exec(mut codepath: impl FnMut(&str) -> Result<(), ()>) -> Duration { - let begin = Instant::now(); - let mut success = 0; - let mut total = 0; - - walkdir::WalkDir::new("tests/rust/src") - .into_iter() - .filter_entry(repo::base_dir_filter) - .for_each(|entry| { - let entry = entry.unwrap(); - let path = entry.path(); - if path.is_dir() { - return; - } - let content = fs::read_to_string(path).unwrap(); - let ok = codepath(&content).is_ok(); - success += ok as usize; - total += 1; - if !ok { - eprintln!("FAIL {}", path.display()); - } - }); - - assert_eq!(success, total); - begin.elapsed() -} - -fn main() { - repo::clone_rust(); - - macro_rules! testcases { - ($($(#[$cfg:meta])* $name:ident,)*) => { - [ - $( - $(#[$cfg])* - (stringify!($name), $name::bench as fn(&str) -> Result<(), ()>), - )* - ] - }; - } - - #[cfg(not(syn_only))] - { - let mut lines = 0; - let mut files = 0; - exec(|content| { - lines += content.lines().count(); - files += 1; - Ok(()) - }); - eprintln!("\n{} lines in {} files", lines, files); - } - - for (name, f) in testcases!( - #[cfg(not(syn_only))] - read_from_disk, - #[cfg(not(syn_only))] - tokenstream_parse, - syn_parse, - #[cfg(not(syn_only))] - librustc_parse, - ) { - eprint!("{:20}", format!("{}:", name)); - let elapsed = exec(f); - eprintln!( - "elapsed={}.{:03}s", - elapsed.as_secs(), - elapsed.subsec_millis(), - ); - } - eprintln!(); -} diff --git a/collector/compile-benchmarks/syn-1.0.89/build.rs b/collector/compile-benchmarks/syn-1.0.89/build.rs deleted file mode 100644 index c705fc50f..000000000 --- a/collector/compile-benchmarks/syn-1.0.89/build.rs +++ /dev/null @@ -1,47 +0,0 @@ -use std::env; -use std::process::Command; -use std::str; - -// The rustc-cfg strings below are *not* public API. Please let us know by -// opening a GitHub issue if your build environment requires some way to enable -// these cfgs other than by executing our build script. 
-fn main() { - let compiler = match rustc_version() { - Some(compiler) => compiler, - None => return, - }; - - if compiler.minor < 36 { - println!("cargo:rustc-cfg=syn_omit_await_from_token_macro"); - } - - if compiler.minor < 39 { - println!("cargo:rustc-cfg=syn_no_const_vec_new"); - } - - if compiler.minor < 56 { - println!("cargo:rustc-cfg=syn_no_negative_literal_parse"); - } - - if !compiler.nightly { - println!("cargo:rustc-cfg=syn_disable_nightly_tests"); - } -} - -struct Compiler { - minor: u32, - nightly: bool, -} - -fn rustc_version() -> Option { - let rustc = env::var_os("RUSTC")?; - let output = Command::new(rustc).arg("--version").output().ok()?; - let version = str::from_utf8(&output.stdout).ok()?; - let mut pieces = version.split('.'); - if pieces.next() != Some("rustc 1") { - return None; - } - let minor = pieces.next()?.parse().ok()?; - let nightly = version.contains("nightly") || version.ends_with("-dev"); - Some(Compiler { minor, nightly }) -} diff --git a/collector/compile-benchmarks/syn-1.0.89/perf-config.json b/collector/compile-benchmarks/syn-1.0.89/perf-config.json deleted file mode 100644 index f5204c638..000000000 --- a/collector/compile-benchmarks/syn-1.0.89/perf-config.json +++ /dev/null @@ -1,4 +0,0 @@ -{ - "category": "primary", - "artifact": "library" -} diff --git a/collector/compile-benchmarks/syn-1.0.89/src/attr.rs b/collector/compile-benchmarks/syn-1.0.89/src/attr.rs deleted file mode 100644 index bace94f43..000000000 --- a/collector/compile-benchmarks/syn-1.0.89/src/attr.rs +++ /dev/null @@ -1,662 +0,0 @@ -use super::*; -use crate::punctuated::Punctuated; -use proc_macro2::TokenStream; -use std::iter; -use std::slice; - -#[cfg(feature = "parsing")] -use crate::parse::{Parse, ParseBuffer, ParseStream, Parser, Result}; -#[cfg(feature = "parsing")] -use crate::punctuated::Pair; - -ast_struct! { - /// An attribute like `#[repr(transparent)]`. - /// - /// *This type is available only if Syn is built with the `"derive"` or `"full"` - /// feature.* - /// - ///
- /// - /// # Syntax - /// - /// Rust has six types of attributes. - /// - /// - Outer attributes like `#[repr(transparent)]`. These appear outside or - /// in front of the item they describe. - /// - Inner attributes like `#![feature(proc_macro)]`. These appear inside - /// of the item they describe, usually a module. - /// - Outer doc comments like `/// # Example`. - /// - Inner doc comments like `//! Please file an issue`. - /// - Outer block comments `/** # Example */`. - /// - Inner block comments `/*! Please file an issue */`. - /// - /// The `style` field of type `AttrStyle` distinguishes whether an attribute - /// is outer or inner. Doc comments and block comments are promoted to - /// attributes, as this is how they are processed by the compiler and by - /// `macro_rules!` macros. - /// - /// The `path` field gives the possibly colon-delimited path against which - /// the attribute is resolved. It is equal to `"doc"` for desugared doc - /// comments. The `tokens` field contains the rest of the attribute body as - /// tokens. - /// - /// ```text - /// #[derive(Copy)] #[crate::precondition x < 5] - /// ^^^^^^~~~~~~ ^^^^^^^^^^^^^^^^^^^ ~~~~~ - /// path tokens path tokens - /// ``` - /// - ///
- /// - /// # Parsing from tokens to Attribute - /// - /// This type does not implement the [`Parse`] trait and thus cannot be - /// parsed directly by [`ParseStream::parse`]. Instead use - /// [`ParseStream::call`] with one of the two parser functions - /// [`Attribute::parse_outer`] or [`Attribute::parse_inner`] depending on - /// which you intend to parse. - /// - /// [`Parse`]: parse::Parse - /// [`ParseStream::parse`]: parse::ParseBuffer::parse - /// [`ParseStream::call`]: parse::ParseBuffer::call - /// - /// ``` - /// use syn::{Attribute, Ident, Result, Token}; - /// use syn::parse::{Parse, ParseStream}; - /// - /// // Parses a unit struct with attributes. - /// // - /// // #[path = "s.tmpl"] - /// // struct S; - /// struct UnitStruct { - /// attrs: Vec, - /// struct_token: Token![struct], - /// name: Ident, - /// semi_token: Token![;], - /// } - /// - /// impl Parse for UnitStruct { - /// fn parse(input: ParseStream) -> Result { - /// Ok(UnitStruct { - /// attrs: input.call(Attribute::parse_outer)?, - /// struct_token: input.parse()?, - /// name: input.parse()?, - /// semi_token: input.parse()?, - /// }) - /// } - /// } - /// ``` - /// - ///


- /// - /// # Parsing from Attribute to structured arguments - /// - /// The grammar of attributes in Rust is very flexible, which makes the - /// syntax tree not that useful on its own. In particular, arguments of the - /// attribute are held in an arbitrary `tokens: TokenStream`. Macros are - /// expected to check the `path` of the attribute, decide whether they - /// recognize it, and then parse the remaining tokens according to whatever - /// grammar they wish to require for that kind of attribute. - /// - /// If the attribute you are parsing is expected to conform to the - /// conventional structured form of attribute, use [`parse_meta()`] to - /// obtain that structured representation. If the attribute follows some - /// other grammar of its own, use [`parse_args()`] to parse that into the - /// expected data structure. - /// - /// [`parse_meta()`]: Attribute::parse_meta - /// [`parse_args()`]: Attribute::parse_args - /// - ///


- /// - /// # Doc comments - /// - /// The compiler transforms doc comments, such as `/// comment` and `/*! - /// comment */`, into attributes before macros are expanded. Each comment is - /// expanded into an attribute of the form `#[doc = r"comment"]`. - /// - /// As an example, the following `mod` items are expanded identically: - /// - /// ``` - /// # use syn::{ItemMod, parse_quote}; - /// let doc: ItemMod = parse_quote! { - /// /// Single line doc comments - /// /// We write so many! - /// /** - /// * Multi-line comments... - /// * May span many lines - /// */ - /// mod example { - /// //! Of course, they can be inner too - /// /*! And fit in a single line */ - /// } - /// }; - /// let attr: ItemMod = parse_quote! { - /// #[doc = r" Single line doc comments"] - /// #[doc = r" We write so many!"] - /// #[doc = r" - /// * Multi-line comments... - /// * May span many lines - /// "] - /// mod example { - /// #![doc = r" Of course, they can be inner too"] - /// #![doc = r" And fit in a single line "] - /// } - /// }; - /// assert_eq!(doc, attr); - /// ``` - #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] - pub struct Attribute { - pub pound_token: Token![#], - pub style: AttrStyle, - pub bracket_token: token::Bracket, - pub path: Path, - pub tokens: TokenStream, - } -} - -impl Attribute { - /// Parses the content of the attribute, consisting of the path and tokens, - /// as a [`Meta`] if possible. - /// - /// *This function is available only if Syn is built with the `"parsing"` - /// feature.* - #[cfg(feature = "parsing")] - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] - pub fn parse_meta(&self) -> Result { - fn clone_ident_segment(segment: &PathSegment) -> PathSegment { - PathSegment { - ident: segment.ident.clone(), - arguments: PathArguments::None, - } - } - - let path = Path { - leading_colon: self - .path - .leading_colon - .as_ref() - .map(|colon| Token![::](colon.spans)), - segments: self - .path - .segments - .pairs() - .map(|pair| match pair { - Pair::Punctuated(seg, punct) => { - Pair::Punctuated(clone_ident_segment(seg), Token![::](punct.spans)) - } - Pair::End(seg) => Pair::End(clone_ident_segment(seg)), - }) - .collect(), - }; - - let parser = |input: ParseStream| parsing::parse_meta_after_path(path, input); - parse::Parser::parse2(parser, self.tokens.clone()) - } - - /// Parse the arguments to the attribute as a syntax tree. - /// - /// This is similar to `syn::parse2::(attr.tokens)` except that: - /// - /// - the surrounding delimiters are *not* included in the input to the - /// parser; and - /// - the error message has a more useful span when `tokens` is empty. - /// - /// ```text - /// #[my_attr(value < 5)] - /// ^^^^^^^^^ what gets parsed - /// ``` - /// - /// *This function is available only if Syn is built with the `"parsing"` - /// feature.* - #[cfg(feature = "parsing")] - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] - pub fn parse_args(&self) -> Result { - self.parse_args_with(T::parse) - } - - /// Parse the arguments to the attribute using the given parser. - /// - /// *This function is available only if Syn is built with the `"parsing"` - /// feature.* - #[cfg(feature = "parsing")] - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] - pub fn parse_args_with(&self, parser: F) -> Result { - let parser = |input: ParseStream| { - let args = enter_args(self, input)?; - parse::parse_stream(parser, &args) - }; - parser.parse2(self.tokens.clone()) - } - - /// Parses zero or more outer attributes from the stream. 
- /// - /// *This function is available only if Syn is built with the `"parsing"` - /// feature.* - #[cfg(feature = "parsing")] - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] - pub fn parse_outer(input: ParseStream) -> Result> { - let mut attrs = Vec::new(); - while input.peek(Token![#]) { - attrs.push(input.call(parsing::single_parse_outer)?); - } - Ok(attrs) - } - - /// Parses zero or more inner attributes from the stream. - /// - /// *This function is available only if Syn is built with the `"parsing"` - /// feature.* - #[cfg(feature = "parsing")] - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] - pub fn parse_inner(input: ParseStream) -> Result> { - let mut attrs = Vec::new(); - parsing::parse_inner(input, &mut attrs)?; - Ok(attrs) - } -} - -#[cfg(feature = "parsing")] -fn expected_parentheses(attr: &Attribute) -> String { - let style = match attr.style { - AttrStyle::Outer => "#", - AttrStyle::Inner(_) => "#!", - }; - - let mut path = String::new(); - for segment in &attr.path.segments { - if !path.is_empty() || attr.path.leading_colon.is_some() { - path += "::"; - } - path += &segment.ident.to_string(); - } - - format!("{}[{}(...)]", style, path) -} - -#[cfg(feature = "parsing")] -fn enter_args<'a>(attr: &Attribute, input: ParseStream<'a>) -> Result> { - if input.is_empty() { - let expected = expected_parentheses(attr); - let msg = format!("expected attribute arguments in parentheses: {}", expected); - return Err(crate::error::new2( - attr.pound_token.span, - attr.bracket_token.span, - msg, - )); - } else if input.peek(Token![=]) { - let expected = expected_parentheses(attr); - let msg = format!("expected parentheses: {}", expected); - return Err(input.error(msg)); - }; - - let content; - if input.peek(token::Paren) { - parenthesized!(content in input); - } else if input.peek(token::Bracket) { - bracketed!(content in input); - } else if input.peek(token::Brace) { - braced!(content in input); - } else { - return Err(input.error("unexpected token in attribute arguments")); - } - - if input.is_empty() { - Ok(content) - } else { - Err(input.error("unexpected token in attribute arguments")) - } -} - -ast_enum! { - /// Distinguishes between attributes that decorate an item and attributes - /// that are contained within an item. - /// - /// *This type is available only if Syn is built with the `"derive"` or `"full"` - /// feature.* - /// - /// # Outer attributes - /// - /// - `#[repr(transparent)]` - /// - `/// # Example` - /// - `/** Please file an issue */` - /// - /// # Inner attributes - /// - /// - `#![feature(proc_macro)]` - /// - `//! # Example` - /// - `/*! Please file an issue */` - #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] - pub enum AttrStyle { - Outer, - Inner(Token![!]), - } -} - -ast_enum_of_structs! { - /// Content of a compile-time structured attribute. - /// - /// *This type is available only if Syn is built with the `"derive"` or `"full"` - /// feature.* - /// - /// ## Path - /// - /// A meta path is like the `test` in `#[test]`. - /// - /// ## List - /// - /// A meta list is like the `derive(Copy)` in `#[derive(Copy)]`. - /// - /// ## NameValue - /// - /// A name-value meta is like the `path = "..."` in `#[path = - /// "sys/windows.rs"]`. - /// - /// # Syntax tree enum - /// - /// This type is a [syntax tree enum]. 
- /// - /// [syntax tree enum]: Expr#syntax-tree-enums - #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] - pub enum Meta { - Path(Path), - - /// A structured list within an attribute, like `derive(Copy, Clone)`. - List(MetaList), - - /// A name-value pair within an attribute, like `feature = "nightly"`. - NameValue(MetaNameValue), - } -} - -ast_struct! { - /// A structured list within an attribute, like `derive(Copy, Clone)`. - /// - /// *This type is available only if Syn is built with the `"derive"` or - /// `"full"` feature.* - #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] - pub struct MetaList { - pub path: Path, - pub paren_token: token::Paren, - pub nested: Punctuated, - } -} - -ast_struct! { - /// A name-value pair within an attribute, like `feature = "nightly"`. - /// - /// *This type is available only if Syn is built with the `"derive"` or - /// `"full"` feature.* - #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] - pub struct MetaNameValue { - pub path: Path, - pub eq_token: Token![=], - pub lit: Lit, - } -} - -impl Meta { - /// Returns the identifier that begins this structured meta item. - /// - /// For example this would return the `test` in `#[test]`, the `derive` in - /// `#[derive(Copy)]`, and the `path` in `#[path = "sys/windows.rs"]`. - pub fn path(&self) -> &Path { - match self { - Meta::Path(path) => path, - Meta::List(meta) => &meta.path, - Meta::NameValue(meta) => &meta.path, - } - } -} - -ast_enum_of_structs! { - /// Element of a compile-time attribute list. - /// - /// *This type is available only if Syn is built with the `"derive"` or `"full"` - /// feature.* - #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] - pub enum NestedMeta { - /// A structured meta item, like the `Copy` in `#[derive(Copy)]` which - /// would be a nested `Meta::Path`. - Meta(Meta), - - /// A Rust literal, like the `"new_name"` in `#[rename("new_name")]`. - Lit(Lit), - } -} - -/// Conventional argument type associated with an invocation of an attribute -/// macro. -/// -/// For example if we are developing an attribute macro that is intended to be -/// invoked on function items as follows: -/// -/// ``` -/// # const IGNORE: &str = stringify! { -/// #[my_attribute(path = "/v1/refresh")] -/// # }; -/// pub fn refresh() { -/// /* ... */ -/// } -/// ``` -/// -/// The implementation of this macro would want to parse its attribute arguments -/// as type `AttributeArgs`. -/// -/// ``` -/// # extern crate proc_macro; -/// # -/// use proc_macro::TokenStream; -/// use syn::{parse_macro_input, AttributeArgs, ItemFn}; -/// -/// # const IGNORE: &str = stringify! { -/// #[proc_macro_attribute] -/// # }; -/// pub fn my_attribute(args: TokenStream, input: TokenStream) -> TokenStream { -/// let args = parse_macro_input!(args as AttributeArgs); -/// let input = parse_macro_input!(input as ItemFn); -/// -/// /* ... 
*/ -/// # "".parse().unwrap() -/// } -/// ``` -#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] -pub type AttributeArgs = Vec; - -pub trait FilterAttrs<'a> { - type Ret: Iterator; - - fn outer(self) -> Self::Ret; - fn inner(self) -> Self::Ret; -} - -impl<'a> FilterAttrs<'a> for &'a [Attribute] { - type Ret = iter::Filter, fn(&&Attribute) -> bool>; - - fn outer(self) -> Self::Ret { - fn is_outer(attr: &&Attribute) -> bool { - match attr.style { - AttrStyle::Outer => true, - AttrStyle::Inner(_) => false, - } - } - self.iter().filter(is_outer) - } - - fn inner(self) -> Self::Ret { - fn is_inner(attr: &&Attribute) -> bool { - match attr.style { - AttrStyle::Inner(_) => true, - AttrStyle::Outer => false, - } - } - self.iter().filter(is_inner) - } -} - -#[cfg(feature = "parsing")] -pub mod parsing { - use super::*; - use crate::ext::IdentExt; - use crate::parse::{Parse, ParseStream, Result}; - - pub fn parse_inner(input: ParseStream, attrs: &mut Vec) -> Result<()> { - while input.peek(Token![#]) && input.peek2(Token![!]) { - attrs.push(input.call(parsing::single_parse_inner)?); - } - Ok(()) - } - - pub fn single_parse_inner(input: ParseStream) -> Result { - let content; - Ok(Attribute { - pound_token: input.parse()?, - style: AttrStyle::Inner(input.parse()?), - bracket_token: bracketed!(content in input), - path: content.call(Path::parse_mod_style)?, - tokens: content.parse()?, - }) - } - - pub fn single_parse_outer(input: ParseStream) -> Result { - let content; - Ok(Attribute { - pound_token: input.parse()?, - style: AttrStyle::Outer, - bracket_token: bracketed!(content in input), - path: content.call(Path::parse_mod_style)?, - tokens: content.parse()?, - }) - } - - // Like Path::parse_mod_style but accepts keywords in the path. 
- fn parse_meta_path(input: ParseStream) -> Result { - Ok(Path { - leading_colon: input.parse()?, - segments: { - let mut segments = Punctuated::new(); - while input.peek(Ident::peek_any) { - let ident = Ident::parse_any(input)?; - segments.push_value(PathSegment::from(ident)); - if !input.peek(Token![::]) { - break; - } - let punct = input.parse()?; - segments.push_punct(punct); - } - if segments.is_empty() { - return Err(input.error("expected path")); - } else if segments.trailing_punct() { - return Err(input.error("expected path segment")); - } - segments - }, - }) - } - - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] - impl Parse for Meta { - fn parse(input: ParseStream) -> Result { - let path = input.call(parse_meta_path)?; - parse_meta_after_path(path, input) - } - } - - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] - impl Parse for MetaList { - fn parse(input: ParseStream) -> Result { - let path = input.call(parse_meta_path)?; - parse_meta_list_after_path(path, input) - } - } - - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] - impl Parse for MetaNameValue { - fn parse(input: ParseStream) -> Result { - let path = input.call(parse_meta_path)?; - parse_meta_name_value_after_path(path, input) - } - } - - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] - impl Parse for NestedMeta { - fn parse(input: ParseStream) -> Result { - if input.peek(Lit) && !(input.peek(LitBool) && input.peek2(Token![=])) { - input.parse().map(NestedMeta::Lit) - } else if input.peek(Ident::peek_any) - || input.peek(Token![::]) && input.peek3(Ident::peek_any) - { - input.parse().map(NestedMeta::Meta) - } else { - Err(input.error("expected identifier or literal")) - } - } - } - - pub fn parse_meta_after_path(path: Path, input: ParseStream) -> Result { - if input.peek(token::Paren) { - parse_meta_list_after_path(path, input).map(Meta::List) - } else if input.peek(Token![=]) { - parse_meta_name_value_after_path(path, input).map(Meta::NameValue) - } else { - Ok(Meta::Path(path)) - } - } - - fn parse_meta_list_after_path(path: Path, input: ParseStream) -> Result { - let content; - Ok(MetaList { - path, - paren_token: parenthesized!(content in input), - nested: content.parse_terminated(NestedMeta::parse)?, - }) - } - - fn parse_meta_name_value_after_path(path: Path, input: ParseStream) -> Result { - Ok(MetaNameValue { - path, - eq_token: input.parse()?, - lit: input.parse()?, - }) - } -} - -#[cfg(feature = "printing")] -mod printing { - use super::*; - use proc_macro2::TokenStream; - use quote::ToTokens; - - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] - impl ToTokens for Attribute { - fn to_tokens(&self, tokens: &mut TokenStream) { - self.pound_token.to_tokens(tokens); - if let AttrStyle::Inner(b) = &self.style { - b.to_tokens(tokens); - } - self.bracket_token.surround(tokens, |tokens| { - self.path.to_tokens(tokens); - self.tokens.to_tokens(tokens); - }); - } - } - - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] - impl ToTokens for MetaList { - fn to_tokens(&self, tokens: &mut TokenStream) { - self.path.to_tokens(tokens); - self.paren_token.surround(tokens, |tokens| { - self.nested.to_tokens(tokens); - }); - } - } - - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] - impl ToTokens for MetaNameValue { - fn to_tokens(&self, tokens: &mut TokenStream) { - self.path.to_tokens(tokens); - self.eq_token.to_tokens(tokens); - self.lit.to_tokens(tokens); - } - } -} diff --git a/collector/compile-benchmarks/syn-1.0.89/src/await.rs b/collector/compile-benchmarks/syn-1.0.89/src/await.rs 
deleted file mode 100644 index 038c6a5d1..000000000 --- a/collector/compile-benchmarks/syn-1.0.89/src/await.rs +++ /dev/null @@ -1,2 +0,0 @@ -// See include!("await.rs") in token.rs. -export_token_macro! {[await]} diff --git a/collector/compile-benchmarks/syn-1.0.89/src/bigint.rs b/collector/compile-benchmarks/syn-1.0.89/src/bigint.rs deleted file mode 100644 index 5397d6bee..000000000 --- a/collector/compile-benchmarks/syn-1.0.89/src/bigint.rs +++ /dev/null @@ -1,66 +0,0 @@ -use std::ops::{AddAssign, MulAssign}; - -// For implementing base10_digits() accessor on LitInt. -pub struct BigInt { - digits: Vec, -} - -impl BigInt { - pub fn new() -> Self { - BigInt { digits: Vec::new() } - } - - pub fn to_string(&self) -> String { - let mut repr = String::with_capacity(self.digits.len()); - - let mut has_nonzero = false; - for digit in self.digits.iter().rev() { - has_nonzero |= *digit != 0; - if has_nonzero { - repr.push((*digit + b'0') as char); - } - } - - if repr.is_empty() { - repr.push('0'); - } - - repr - } - - fn reserve_two_digits(&mut self) { - let len = self.digits.len(); - let desired = - len + !self.digits.ends_with(&[0, 0]) as usize + !self.digits.ends_with(&[0]) as usize; - self.digits.resize(desired, 0); - } -} - -impl AddAssign for BigInt { - // Assumes increment <16. - fn add_assign(&mut self, mut increment: u8) { - self.reserve_two_digits(); - - let mut i = 0; - while increment > 0 { - let sum = self.digits[i] + increment; - self.digits[i] = sum % 10; - increment = sum / 10; - i += 1; - } - } -} - -impl MulAssign for BigInt { - // Assumes base <=16. - fn mul_assign(&mut self, base: u8) { - self.reserve_two_digits(); - - let mut carry = 0; - for digit in &mut self.digits { - let prod = *digit * base + carry; - *digit = prod % 10; - carry = prod / 10; - } - } -} diff --git a/collector/compile-benchmarks/syn-1.0.89/src/buffer.rs b/collector/compile-benchmarks/syn-1.0.89/src/buffer.rs deleted file mode 100644 index 43e77e97f..000000000 --- a/collector/compile-benchmarks/syn-1.0.89/src/buffer.rs +++ /dev/null @@ -1,413 +0,0 @@ -//! A stably addressed token buffer supporting efficient traversal based on a -//! cheaply copyable cursor. -//! -//! *This module is available only if Syn is built with the `"parsing"` feature.* - -// This module is heavily commented as it contains most of the unsafe code in -// Syn, and caution should be used when editing it. The public-facing interface -// is 100% safe but the implementation is fragile internally. - -#[cfg(all( - not(all(target_arch = "wasm32", any(target_os = "unknown", target_os = "wasi"))), - feature = "proc-macro" -))] -use crate::proc_macro as pm; -use crate::Lifetime; -use proc_macro2::{Delimiter, Group, Ident, Literal, Punct, Spacing, Span, TokenStream, TokenTree}; -use std::marker::PhantomData; -use std::ptr; -use std::slice; - -/// Internal type which is used instead of `TokenTree` to represent a token tree -/// within a `TokenBuffer`. -enum Entry { - // Mimicking types from proc-macro. - Group(Group, TokenBuffer), - Ident(Ident), - Punct(Punct), - Literal(Literal), - // End entries contain a raw pointer to the entry from the containing - // token tree, or null if this is the outermost level. - End(*const Entry), -} - -/// A buffer that can be efficiently traversed multiple times, unlike -/// `TokenStream` which requires a deep copy in order to traverse more than -/// once. 
-/// -/// *This type is available only if Syn is built with the `"parsing"` feature.* -pub struct TokenBuffer { - // NOTE: Do not implement clone on this - there are raw pointers inside - // these entries which will be messed up. Moving the `TokenBuffer` itself is - // safe as the data pointed to won't be moved. - ptr: *const Entry, - len: usize, -} - -impl Drop for TokenBuffer { - fn drop(&mut self) { - unsafe { - let slice = slice::from_raw_parts_mut(self.ptr as *mut Entry, self.len); - let _ = Box::from_raw(slice); - } - } -} - -impl TokenBuffer { - // NOTE: Do not mutate the Vec returned from this function once it returns; - // the address of its backing memory must remain stable. - fn inner_new(stream: TokenStream, up: *const Entry) -> TokenBuffer { - // Build up the entries list, recording the locations of any Groups - // in the list to be processed later. - let mut entries = Vec::new(); - let mut groups = Vec::new(); - for tt in stream { - match tt { - TokenTree::Ident(sym) => { - entries.push(Entry::Ident(sym)); - } - TokenTree::Punct(op) => { - entries.push(Entry::Punct(op)); - } - TokenTree::Literal(l) => { - entries.push(Entry::Literal(l)); - } - TokenTree::Group(g) => { - // Record the index of the interesting entry, and store an - // `End(null)` there temporarily. - groups.push((entries.len(), g)); - entries.push(Entry::End(ptr::null())); - } - } - } - // Add an `End` entry to the end with a reference to the enclosing token - // stream which was passed in. - entries.push(Entry::End(up)); - - // NOTE: This is done to ensure that we don't accidentally modify the - // length of the backing buffer. The backing buffer must remain at a - // constant address after this point, as we are going to store a raw - // pointer into it. - let mut entries = entries.into_boxed_slice(); - for (idx, group) in groups { - // We know that this index refers to one of the temporary - // `End(null)` entries, and we know that the last entry is - // `End(up)`, so the next index is also valid. - let group_up = unsafe { entries.as_ptr().add(idx + 1) }; - - // The end entry stored at the end of this Entry::Group should - // point to the Entry which follows the Group in the list. - let inner = Self::inner_new(group.stream(), group_up); - entries[idx] = Entry::Group(group, inner); - } - - let len = entries.len(); - let ptr = Box::into_raw(entries); - TokenBuffer { - ptr: ptr as *const Entry, - len, - } - } - - /// Creates a `TokenBuffer` containing all the tokens from the input - /// `proc_macro::TokenStream`. - /// - /// *This method is available only if Syn is built with both the `"parsing"` and - /// `"proc-macro"` features.* - #[cfg(all( - not(all(target_arch = "wasm32", any(target_os = "unknown", target_os = "wasi"))), - feature = "proc-macro" - ))] - pub fn new(stream: pm::TokenStream) -> Self { - Self::new2(stream.into()) - } - - /// Creates a `TokenBuffer` containing all the tokens from the input - /// `proc_macro2::TokenStream`. - pub fn new2(stream: TokenStream) -> Self { - Self::inner_new(stream, ptr::null()) - } - - /// Creates a cursor referencing the first token in the buffer and able to - /// traverse until the end of the buffer. - pub fn begin(&self) -> Cursor { - unsafe { Cursor::create(self.ptr, self.ptr.add(self.len - 1)) } - } -} - -/// A cheaply copyable cursor into a `TokenBuffer`. -/// -/// This cursor holds a shared reference into the immutable data which is used -/// internally to represent a `TokenStream`, and can be efficiently manipulated -/// and copied around. 
-/// -/// An empty `Cursor` can be created directly, or one may create a `TokenBuffer` -/// object and get a cursor to its first token with `begin()`. -/// -/// Two cursors are equal if they have the same location in the same input -/// stream, and have the same scope. -/// -/// *This type is available only if Syn is built with the `"parsing"` feature.* -pub struct Cursor<'a> { - // The current entry which the `Cursor` is pointing at. - ptr: *const Entry, - // This is the only `Entry::End(..)` object which this cursor is allowed to - // point at. All other `End` objects are skipped over in `Cursor::create`. - scope: *const Entry, - // Cursor is covariant in 'a. This field ensures that our pointers are still - // valid. - marker: PhantomData<&'a Entry>, -} - -impl<'a> Cursor<'a> { - /// Creates a cursor referencing a static empty TokenStream. - pub fn empty() -> Self { - // It's safe in this situation for us to put an `Entry` object in global - // storage, despite it not actually being safe to send across threads - // (`Ident` is a reference into a thread-local table). This is because - // this entry never includes a `Ident` object. - // - // This wrapper struct allows us to break the rules and put a `Sync` - // object in global storage. - struct UnsafeSyncEntry(Entry); - unsafe impl Sync for UnsafeSyncEntry {} - static EMPTY_ENTRY: UnsafeSyncEntry = UnsafeSyncEntry(Entry::End(0 as *const Entry)); - - Cursor { - ptr: &EMPTY_ENTRY.0, - scope: &EMPTY_ENTRY.0, - marker: PhantomData, - } - } - - /// This create method intelligently exits non-explicitly-entered - /// `None`-delimited scopes when the cursor reaches the end of them, - /// allowing for them to be treated transparently. - unsafe fn create(mut ptr: *const Entry, scope: *const Entry) -> Self { - // NOTE: If we're looking at a `End(..)`, we want to advance the cursor - // past it, unless `ptr == scope`, which means that we're at the edge of - // our cursor's scope. We should only have `ptr != scope` at the exit - // from None-delimited groups entered with `ignore_none`. - while let Entry::End(exit) = *ptr { - if ptr == scope { - break; - } - ptr = exit; - } - - Cursor { - ptr, - scope, - marker: PhantomData, - } - } - - /// Get the current entry. - fn entry(self) -> &'a Entry { - unsafe { &*self.ptr } - } - - /// Bump the cursor to point at the next token after the current one. This - /// is undefined behavior if the cursor is currently looking at an - /// `Entry::End`. - unsafe fn bump(self) -> Cursor<'a> { - Cursor::create(self.ptr.offset(1), self.scope) - } - - /// While the cursor is looking at a `None`-delimited group, move it to look - /// at the first token inside instead. If the group is empty, this will move - /// the cursor past the `None`-delimited group. - /// - /// WARNING: This mutates its argument. - fn ignore_none(&mut self) { - while let Entry::Group(group, buf) = self.entry() { - if group.delimiter() == Delimiter::None { - // NOTE: We call `Cursor::create` here to make sure that - // situations where we should immediately exit the span after - // entering it are handled correctly. - unsafe { - *self = Cursor::create(buf.ptr, self.scope); - } - } else { - break; - } - } - } - - /// Checks whether the cursor is currently pointing at the end of its valid - /// scope. - pub fn eof(self) -> bool { - // We're at eof if we're at the end of our scope. 
- self.ptr == self.scope - } - - /// If the cursor is pointing at a `Group` with the given delimiter, returns - /// a cursor into that group and one pointing to the next `TokenTree`. - pub fn group(mut self, delim: Delimiter) -> Option<(Cursor<'a>, Span, Cursor<'a>)> { - // If we're not trying to enter a none-delimited group, we want to - // ignore them. We have to make sure to _not_ ignore them when we want - // to enter them, of course. For obvious reasons. - if delim != Delimiter::None { - self.ignore_none(); - } - - if let Entry::Group(group, buf) = self.entry() { - if group.delimiter() == delim { - return Some((buf.begin(), group.span(), unsafe { self.bump() })); - } - } - - None - } - - /// If the cursor is pointing at a `Ident`, returns it along with a cursor - /// pointing at the next `TokenTree`. - pub fn ident(mut self) -> Option<(Ident, Cursor<'a>)> { - self.ignore_none(); - match self.entry() { - Entry::Ident(ident) => Some((ident.clone(), unsafe { self.bump() })), - _ => None, - } - } - - /// If the cursor is pointing at a `Punct`, returns it along with a cursor - /// pointing at the next `TokenTree`. - pub fn punct(mut self) -> Option<(Punct, Cursor<'a>)> { - self.ignore_none(); - match self.entry() { - Entry::Punct(op) if op.as_char() != '\'' => Some((op.clone(), unsafe { self.bump() })), - _ => None, - } - } - - /// If the cursor is pointing at a `Literal`, return it along with a cursor - /// pointing at the next `TokenTree`. - pub fn literal(mut self) -> Option<(Literal, Cursor<'a>)> { - self.ignore_none(); - match self.entry() { - Entry::Literal(lit) => Some((lit.clone(), unsafe { self.bump() })), - _ => None, - } - } - - /// If the cursor is pointing at a `Lifetime`, returns it along with a - /// cursor pointing at the next `TokenTree`. - pub fn lifetime(mut self) -> Option<(Lifetime, Cursor<'a>)> { - self.ignore_none(); - match self.entry() { - Entry::Punct(op) if op.as_char() == '\'' && op.spacing() == Spacing::Joint => { - let next = unsafe { self.bump() }; - match next.ident() { - Some((ident, rest)) => { - let lifetime = Lifetime { - apostrophe: op.span(), - ident, - }; - Some((lifetime, rest)) - } - None => None, - } - } - _ => None, - } - } - - /// Copies all remaining tokens visible from this cursor into a - /// `TokenStream`. - pub fn token_stream(self) -> TokenStream { - let mut tts = Vec::new(); - let mut cursor = self; - while let Some((tt, rest)) = cursor.token_tree() { - tts.push(tt); - cursor = rest; - } - tts.into_iter().collect() - } - - /// If the cursor is pointing at a `TokenTree`, returns it along with a - /// cursor pointing at the next `TokenTree`. - /// - /// Returns `None` if the cursor has reached the end of its stream. - /// - /// This method does not treat `None`-delimited groups as transparent, and - /// will return a `Group(None, ..)` if the cursor is looking at one. - pub fn token_tree(self) -> Option<(TokenTree, Cursor<'a>)> { - let tree = match self.entry() { - Entry::Group(group, _) => group.clone().into(), - Entry::Literal(lit) => lit.clone().into(), - Entry::Ident(ident) => ident.clone().into(), - Entry::Punct(op) => op.clone().into(), - Entry::End(..) => return None, - }; - - Some((tree, unsafe { self.bump() })) - } - - /// Returns the `Span` of the current token, or `Span::call_site()` if this - /// cursor points to eof. - pub fn span(self) -> Span { - match self.entry() { - Entry::Group(group, _) => group.span(), - Entry::Literal(l) => l.span(), - Entry::Ident(t) => t.span(), - Entry::Punct(o) => o.span(), - Entry::End(..) 
=> Span::call_site(), - } - } - - /// Skip over the next token without cloning it. Returns `None` if this - /// cursor points to eof. - /// - /// This method treats `'lifetimes` as a single token. - pub(crate) fn skip(self) -> Option> { - match self.entry() { - Entry::End(..) => None, - - // Treat lifetimes as a single tt for the purposes of 'skip'. - Entry::Punct(op) if op.as_char() == '\'' && op.spacing() == Spacing::Joint => { - let next = unsafe { self.bump() }; - match next.entry() { - Entry::Ident(_) => Some(unsafe { next.bump() }), - _ => Some(next), - } - } - _ => Some(unsafe { self.bump() }), - } - } -} - -impl<'a> Copy for Cursor<'a> {} - -impl<'a> Clone for Cursor<'a> { - fn clone(&self) -> Self { - *self - } -} - -impl<'a> Eq for Cursor<'a> {} - -impl<'a> PartialEq for Cursor<'a> { - fn eq(&self, other: &Self) -> bool { - let Cursor { ptr, scope, marker } = self; - let _ = marker; - *ptr == other.ptr && *scope == other.scope - } -} - -pub(crate) fn same_scope(a: Cursor, b: Cursor) -> bool { - a.scope == b.scope -} - -pub(crate) fn open_span_of_group(cursor: Cursor) -> Span { - match cursor.entry() { - Entry::Group(group, _) => group.span_open(), - _ => cursor.span(), - } -} - -pub(crate) fn close_span_of_group(cursor: Cursor) -> Span { - match cursor.entry() { - Entry::Group(group, _) => group.span_close(), - _ => cursor.span(), - } -} diff --git a/collector/compile-benchmarks/syn-1.0.89/src/custom_keyword.rs b/collector/compile-benchmarks/syn-1.0.89/src/custom_keyword.rs deleted file mode 100644 index 69d787e54..000000000 --- a/collector/compile-benchmarks/syn-1.0.89/src/custom_keyword.rs +++ /dev/null @@ -1,253 +0,0 @@ -/// Define a type that supports parsing and printing a given identifier as if it -/// were a keyword. -/// -/// # Usage -/// -/// As a convention, it is recommended that this macro be invoked within a -/// module called `kw` or `keyword` and that the resulting parser be invoked -/// with a `kw::` or `keyword::` prefix. -/// -/// ``` -/// mod kw { -/// syn::custom_keyword!(whatever); -/// } -/// ``` -/// -/// The generated syntax tree node supports the following operations just like -/// any built-in keyword token. -/// -/// - [Peeking] — `input.peek(kw::whatever)` -/// -/// - [Parsing] — `input.parse::()?` -/// -/// - [Printing] — `quote!( ... #whatever_token ... )` -/// -/// - Construction from a [`Span`] — `let whatever_token = kw::whatever(sp)` -/// -/// - Field access to its span — `let sp = whatever_token.span` -/// -/// [Peeking]: crate::parse::ParseBuffer::peek -/// [Parsing]: crate::parse::ParseBuffer::parse -/// [Printing]: quote::ToTokens -/// [`Span`]: proc_macro2::Span -/// -/// # Example -/// -/// This example parses input that looks like `bool = true` or `str = "value"`. -/// The key must be either the identifier `bool` or the identifier `str`. If -/// `bool`, the value may be either `true` or `false`. If `str`, the value may -/// be any string literal. -/// -/// The symbols `bool` and `str` are not reserved keywords in Rust so these are -/// not considered keywords in the `syn::token` module. Like any other -/// identifier that is not a keyword, these can be declared as custom keywords -/// by crates that need to use them as such. 
-/// -/// ``` -/// use syn::{LitBool, LitStr, Result, Token}; -/// use syn::parse::{Parse, ParseStream}; -/// -/// mod kw { -/// syn::custom_keyword!(bool); -/// syn::custom_keyword!(str); -/// } -/// -/// enum Argument { -/// Bool { -/// bool_token: kw::bool, -/// eq_token: Token![=], -/// value: LitBool, -/// }, -/// Str { -/// str_token: kw::str, -/// eq_token: Token![=], -/// value: LitStr, -/// }, -/// } -/// -/// impl Parse for Argument { -/// fn parse(input: ParseStream) -> Result { -/// let lookahead = input.lookahead1(); -/// if lookahead.peek(kw::bool) { -/// Ok(Argument::Bool { -/// bool_token: input.parse::()?, -/// eq_token: input.parse()?, -/// value: input.parse()?, -/// }) -/// } else if lookahead.peek(kw::str) { -/// Ok(Argument::Str { -/// str_token: input.parse::()?, -/// eq_token: input.parse()?, -/// value: input.parse()?, -/// }) -/// } else { -/// Err(lookahead.error()) -/// } -/// } -/// } -/// ``` -#[macro_export] -macro_rules! custom_keyword { - ($ident:ident) => { - #[allow(non_camel_case_types)] - pub struct $ident { - pub span: $crate::__private::Span, - } - - #[doc(hidden)] - #[allow(dead_code, non_snake_case)] - pub fn $ident<__S: $crate::__private::IntoSpans<[$crate::__private::Span; 1]>>( - span: __S, - ) -> $ident { - $ident { - span: $crate::__private::IntoSpans::into_spans(span)[0], - } - } - - impl $crate::__private::Default for $ident { - fn default() -> Self { - $ident { - span: $crate::__private::Span::call_site(), - } - } - } - - $crate::impl_parse_for_custom_keyword!($ident); - $crate::impl_to_tokens_for_custom_keyword!($ident); - $crate::impl_clone_for_custom_keyword!($ident); - $crate::impl_extra_traits_for_custom_keyword!($ident); - }; -} - -// Not public API. -#[cfg(feature = "parsing")] -#[doc(hidden)] -#[macro_export] -macro_rules! impl_parse_for_custom_keyword { - ($ident:ident) => { - // For peek. - impl $crate::token::CustomToken for $ident { - fn peek(cursor: $crate::buffer::Cursor) -> $crate::__private::bool { - if let Some((ident, _rest)) = cursor.ident() { - ident == stringify!($ident) - } else { - false - } - } - - fn display() -> &'static $crate::__private::str { - concat!("`", stringify!($ident), "`") - } - } - - impl $crate::parse::Parse for $ident { - fn parse(input: $crate::parse::ParseStream) -> $crate::parse::Result<$ident> { - input.step(|cursor| { - if let $crate::__private::Some((ident, rest)) = cursor.ident() { - if ident == stringify!($ident) { - return $crate::__private::Ok(($ident { span: ident.span() }, rest)); - } - } - $crate::__private::Err(cursor.error(concat!( - "expected `", - stringify!($ident), - "`" - ))) - }) - } - } - }; -} - -// Not public API. -#[cfg(not(feature = "parsing"))] -#[doc(hidden)] -#[macro_export] -macro_rules! impl_parse_for_custom_keyword { - ($ident:ident) => {}; -} - -// Not public API. -#[cfg(feature = "printing")] -#[doc(hidden)] -#[macro_export] -macro_rules! impl_to_tokens_for_custom_keyword { - ($ident:ident) => { - impl $crate::__private::ToTokens for $ident { - fn to_tokens(&self, tokens: &mut $crate::__private::TokenStream2) { - let ident = $crate::Ident::new(stringify!($ident), self.span); - $crate::__private::TokenStreamExt::append(tokens, ident); - } - } - }; -} - -// Not public API. -#[cfg(not(feature = "printing"))] -#[doc(hidden)] -#[macro_export] -macro_rules! impl_to_tokens_for_custom_keyword { - ($ident:ident) => {}; -} - -// Not public API. -#[cfg(feature = "clone-impls")] -#[doc(hidden)] -#[macro_export] -macro_rules! 
impl_clone_for_custom_keyword { - ($ident:ident) => { - impl $crate::__private::Copy for $ident {} - - #[allow(clippy::expl_impl_clone_on_copy)] - impl $crate::__private::Clone for $ident { - fn clone(&self) -> Self { - *self - } - } - }; -} - -// Not public API. -#[cfg(not(feature = "clone-impls"))] -#[doc(hidden)] -#[macro_export] -macro_rules! impl_clone_for_custom_keyword { - ($ident:ident) => {}; -} - -// Not public API. -#[cfg(feature = "extra-traits")] -#[doc(hidden)] -#[macro_export] -macro_rules! impl_extra_traits_for_custom_keyword { - ($ident:ident) => { - impl $crate::__private::Debug for $ident { - fn fmt(&self, f: &mut $crate::__private::Formatter) -> $crate::__private::fmt::Result { - $crate::__private::Formatter::write_str( - f, - concat!("Keyword [", stringify!($ident), "]"), - ) - } - } - - impl $crate::__private::Eq for $ident {} - - impl $crate::__private::PartialEq for $ident { - fn eq(&self, _other: &Self) -> $crate::__private::bool { - true - } - } - - impl $crate::__private::Hash for $ident { - fn hash<__H: $crate::__private::Hasher>(&self, _state: &mut __H) {} - } - }; -} - -// Not public API. -#[cfg(not(feature = "extra-traits"))] -#[doc(hidden)] -#[macro_export] -macro_rules! impl_extra_traits_for_custom_keyword { - ($ident:ident) => {}; -} diff --git a/collector/compile-benchmarks/syn-1.0.89/src/custom_punctuation.rs b/collector/compile-benchmarks/syn-1.0.89/src/custom_punctuation.rs deleted file mode 100644 index 118a8453d..000000000 --- a/collector/compile-benchmarks/syn-1.0.89/src/custom_punctuation.rs +++ /dev/null @@ -1,300 +0,0 @@ -/// Define a type that supports parsing and printing a multi-character symbol -/// as if it were a punctuation token. -/// -/// # Usage -/// -/// ``` -/// syn::custom_punctuation!(LeftRightArrow, <=>); -/// ``` -/// -/// The generated syntax tree node supports the following operations just like -/// any built-in punctuation token. -/// -/// - [Peeking] — `input.peek(LeftRightArrow)` -/// -/// - [Parsing] — `input.parse::()?` -/// -/// - [Printing] — `quote!( ... #lrarrow ... )` -/// -/// - Construction from a [`Span`] — `let lrarrow = LeftRightArrow(sp)` -/// -/// - Construction from multiple [`Span`] — `let lrarrow = LeftRightArrow([sp, sp, sp])` -/// -/// - Field access to its spans — `let spans = lrarrow.spans` -/// -/// [Peeking]: crate::parse::ParseBuffer::peek -/// [Parsing]: crate::parse::ParseBuffer::parse -/// [Printing]: quote::ToTokens -/// [`Span`]: proc_macro2::Span -/// -/// # Example -/// -/// ``` -/// use proc_macro2::{TokenStream, TokenTree}; -/// use syn::parse::{Parse, ParseStream, Peek, Result}; -/// use syn::punctuated::Punctuated; -/// use syn::Expr; -/// -/// syn::custom_punctuation!(PathSeparator, ); -/// -/// // expr expr expr ... 
-/// struct PathSegments { -/// segments: Punctuated, -/// } -/// -/// impl Parse for PathSegments { -/// fn parse(input: ParseStream) -> Result { -/// let mut segments = Punctuated::new(); -/// -/// let first = parse_until(input, PathSeparator)?; -/// segments.push_value(syn::parse2(first)?); -/// -/// while input.peek(PathSeparator) { -/// segments.push_punct(input.parse()?); -/// -/// let next = parse_until(input, PathSeparator)?; -/// segments.push_value(syn::parse2(next)?); -/// } -/// -/// Ok(PathSegments { segments }) -/// } -/// } -/// -/// fn parse_until(input: ParseStream, end: E) -> Result { -/// let mut tokens = TokenStream::new(); -/// while !input.is_empty() && !input.peek(end) { -/// let next: TokenTree = input.parse()?; -/// tokens.extend(Some(next)); -/// } -/// Ok(tokens) -/// } -/// -/// fn main() { -/// let input = r#" a::b c::d::e "#; -/// let _: PathSegments = syn::parse_str(input).unwrap(); -/// } -/// ``` -#[macro_export] -macro_rules! custom_punctuation { - ($ident:ident, $($tt:tt)+) => { - pub struct $ident { - pub spans: $crate::custom_punctuation_repr!($($tt)+), - } - - #[doc(hidden)] - #[allow(dead_code, non_snake_case)] - pub fn $ident<__S: $crate::__private::IntoSpans<$crate::custom_punctuation_repr!($($tt)+)>>( - spans: __S, - ) -> $ident { - let _validate_len = 0 $(+ $crate::custom_punctuation_len!(strict, $tt))*; - $ident { - spans: $crate::__private::IntoSpans::into_spans(spans) - } - } - - impl $crate::__private::Default for $ident { - fn default() -> Self { - $ident($crate::__private::Span::call_site()) - } - } - - $crate::impl_parse_for_custom_punctuation!($ident, $($tt)+); - $crate::impl_to_tokens_for_custom_punctuation!($ident, $($tt)+); - $crate::impl_clone_for_custom_punctuation!($ident, $($tt)+); - $crate::impl_extra_traits_for_custom_punctuation!($ident, $($tt)+); - }; -} - -// Not public API. -#[cfg(feature = "parsing")] -#[doc(hidden)] -#[macro_export] -macro_rules! impl_parse_for_custom_punctuation { - ($ident:ident, $($tt:tt)+) => { - impl $crate::token::CustomToken for $ident { - fn peek(cursor: $crate::buffer::Cursor) -> bool { - $crate::token::parsing::peek_punct(cursor, $crate::stringify_punct!($($tt)+)) - } - - fn display() -> &'static $crate::__private::str { - concat!("`", $crate::stringify_punct!($($tt)+), "`") - } - } - - impl $crate::parse::Parse for $ident { - fn parse(input: $crate::parse::ParseStream) -> $crate::parse::Result<$ident> { - let spans: $crate::custom_punctuation_repr!($($tt)+) = - $crate::token::parsing::punct(input, $crate::stringify_punct!($($tt)+))?; - Ok($ident(spans)) - } - } - }; -} - -// Not public API. -#[cfg(not(feature = "parsing"))] -#[doc(hidden)] -#[macro_export] -macro_rules! impl_parse_for_custom_punctuation { - ($ident:ident, $($tt:tt)+) => {}; -} - -// Not public API. -#[cfg(feature = "printing")] -#[doc(hidden)] -#[macro_export] -macro_rules! impl_to_tokens_for_custom_punctuation { - ($ident:ident, $($tt:tt)+) => { - impl $crate::__private::ToTokens for $ident { - fn to_tokens(&self, tokens: &mut $crate::__private::TokenStream2) { - $crate::token::printing::punct($crate::stringify_punct!($($tt)+), &self.spans, tokens) - } - } - }; -} - -// Not public API. -#[cfg(not(feature = "printing"))] -#[doc(hidden)] -#[macro_export] -macro_rules! impl_to_tokens_for_custom_punctuation { - ($ident:ident, $($tt:tt)+) => {}; -} - -// Not public API. -#[cfg(feature = "clone-impls")] -#[doc(hidden)] -#[macro_export] -macro_rules! 
impl_clone_for_custom_punctuation { - ($ident:ident, $($tt:tt)+) => { - impl $crate::__private::Copy for $ident {} - - #[allow(clippy::expl_impl_clone_on_copy)] - impl $crate::__private::Clone for $ident { - fn clone(&self) -> Self { - *self - } - } - }; -} - -// Not public API. -#[cfg(not(feature = "clone-impls"))] -#[doc(hidden)] -#[macro_export] -macro_rules! impl_clone_for_custom_punctuation { - ($ident:ident, $($tt:tt)+) => {}; -} - -// Not public API. -#[cfg(feature = "extra-traits")] -#[doc(hidden)] -#[macro_export] -macro_rules! impl_extra_traits_for_custom_punctuation { - ($ident:ident, $($tt:tt)+) => { - impl $crate::__private::Debug for $ident { - fn fmt(&self, f: &mut $crate::__private::Formatter) -> $crate::__private::fmt::Result { - $crate::__private::Formatter::write_str(f, stringify!($ident)) - } - } - - impl $crate::__private::Eq for $ident {} - - impl $crate::__private::PartialEq for $ident { - fn eq(&self, _other: &Self) -> $crate::__private::bool { - true - } - } - - impl $crate::__private::Hash for $ident { - fn hash<__H: $crate::__private::Hasher>(&self, _state: &mut __H) {} - } - }; -} - -// Not public API. -#[cfg(not(feature = "extra-traits"))] -#[doc(hidden)] -#[macro_export] -macro_rules! impl_extra_traits_for_custom_punctuation { - ($ident:ident, $($tt:tt)+) => {}; -} - -// Not public API. -#[doc(hidden)] -#[macro_export] -macro_rules! custom_punctuation_repr { - ($($tt:tt)+) => { - [$crate::__private::Span; 0 $(+ $crate::custom_punctuation_len!(lenient, $tt))+] - }; -} - -// Not public API. -#[doc(hidden)] -#[macro_export] -#[rustfmt::skip] -macro_rules! custom_punctuation_len { - ($mode:ident, +) => { 1 }; - ($mode:ident, +=) => { 2 }; - ($mode:ident, &) => { 1 }; - ($mode:ident, &&) => { 2 }; - ($mode:ident, &=) => { 2 }; - ($mode:ident, @) => { 1 }; - ($mode:ident, !) => { 1 }; - ($mode:ident, ^) => { 1 }; - ($mode:ident, ^=) => { 2 }; - ($mode:ident, :) => { 1 }; - ($mode:ident, ::) => { 2 }; - ($mode:ident, ,) => { 1 }; - ($mode:ident, /) => { 1 }; - ($mode:ident, /=) => { 2 }; - ($mode:ident, .) => { 1 }; - ($mode:ident, ..) => { 2 }; - ($mode:ident, ...) => { 3 }; - ($mode:ident, ..=) => { 3 }; - ($mode:ident, =) => { 1 }; - ($mode:ident, ==) => { 2 }; - ($mode:ident, >=) => { 2 }; - ($mode:ident, >) => { 1 }; - ($mode:ident, <=) => { 2 }; - ($mode:ident, <) => { 1 }; - ($mode:ident, *=) => { 2 }; - ($mode:ident, !=) => { 2 }; - ($mode:ident, |) => { 1 }; - ($mode:ident, |=) => { 2 }; - ($mode:ident, ||) => { 2 }; - ($mode:ident, #) => { 1 }; - ($mode:ident, ?) => { 1 }; - ($mode:ident, ->) => { 2 }; - ($mode:ident, <-) => { 2 }; - ($mode:ident, %) => { 1 }; - ($mode:ident, %=) => { 2 }; - ($mode:ident, =>) => { 2 }; - ($mode:ident, ;) => { 1 }; - ($mode:ident, <<) => { 2 }; - ($mode:ident, <<=) => { 3 }; - ($mode:ident, >>) => { 2 }; - ($mode:ident, >>=) => { 3 }; - ($mode:ident, *) => { 1 }; - ($mode:ident, -) => { 1 }; - ($mode:ident, -=) => { 2 }; - ($mode:ident, ~) => { 1 }; - (lenient, $tt:tt) => { 0 }; - (strict, $tt:tt) => {{ $crate::custom_punctuation_unexpected!($tt); 0 }}; -} - -// Not public API. -#[doc(hidden)] -#[macro_export] -macro_rules! custom_punctuation_unexpected { - () => {}; -} - -// Not public API. -#[doc(hidden)] -#[macro_export] -macro_rules! 
stringify_punct { - ($($tt:tt)+) => { - concat!($(stringify!($tt)),+) - }; -} diff --git a/collector/compile-benchmarks/syn-1.0.89/src/data.rs b/collector/compile-benchmarks/syn-1.0.89/src/data.rs deleted file mode 100644 index 3b466618f..000000000 --- a/collector/compile-benchmarks/syn-1.0.89/src/data.rs +++ /dev/null @@ -1,493 +0,0 @@ -use super::*; -use crate::punctuated::Punctuated; - -ast_struct! { - /// An enum variant. - /// - /// *This type is available only if Syn is built with the `"derive"` or `"full"` - /// feature.* - #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] - pub struct Variant { - /// Attributes tagged on the variant. - pub attrs: Vec, - - /// Name of the variant. - pub ident: Ident, - - /// Content stored in the variant. - pub fields: Fields, - - /// Explicit discriminant: `Variant = 1` - pub discriminant: Option<(Token![=], Expr)>, - } -} - -ast_enum_of_structs! { - /// Data stored within an enum variant or struct. - /// - /// *This type is available only if Syn is built with the `"derive"` or `"full"` - /// feature.* - /// - /// # Syntax tree enum - /// - /// This type is a [syntax tree enum]. - /// - /// [syntax tree enum]: Expr#syntax-tree-enums - #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] - pub enum Fields { - /// Named fields of a struct or struct variant such as `Point { x: f64, - /// y: f64 }`. - Named(FieldsNamed), - - /// Unnamed fields of a tuple struct or tuple variant such as `Some(T)`. - Unnamed(FieldsUnnamed), - - /// Unit struct or unit variant such as `None`. - Unit, - } -} - -ast_struct! { - /// Named fields of a struct or struct variant such as `Point { x: f64, - /// y: f64 }`. - /// - /// *This type is available only if Syn is built with the `"derive"` or - /// `"full"` feature.* - #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] - pub struct FieldsNamed { - pub brace_token: token::Brace, - pub named: Punctuated, - } -} - -ast_struct! { - /// Unnamed fields of a tuple struct or tuple variant such as `Some(T)`. - /// - /// *This type is available only if Syn is built with the `"derive"` or - /// `"full"` feature.* - #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] - pub struct FieldsUnnamed { - pub paren_token: token::Paren, - pub unnamed: Punctuated, - } -} - -impl Fields { - /// Get an iterator over the borrowed [`Field`] items in this object. This - /// iterator can be used to iterate over a named or unnamed struct or - /// variant's fields uniformly. - pub fn iter(&self) -> punctuated::Iter { - match self { - Fields::Unit => crate::punctuated::empty_punctuated_iter(), - Fields::Named(f) => f.named.iter(), - Fields::Unnamed(f) => f.unnamed.iter(), - } - } - - /// Get an iterator over the mutably borrowed [`Field`] items in this - /// object. This iterator can be used to iterate over a named or unnamed - /// struct or variant's fields uniformly. - pub fn iter_mut(&mut self) -> punctuated::IterMut { - match self { - Fields::Unit => crate::punctuated::empty_punctuated_iter_mut(), - Fields::Named(f) => f.named.iter_mut(), - Fields::Unnamed(f) => f.unnamed.iter_mut(), - } - } - - /// Returns the number of fields. - pub fn len(&self) -> usize { - match self { - Fields::Unit => 0, - Fields::Named(f) => f.named.len(), - Fields::Unnamed(f) => f.unnamed.len(), - } - } - - /// Returns `true` if there are zero fields. 
- pub fn is_empty(&self) -> bool { - match self { - Fields::Unit => true, - Fields::Named(f) => f.named.is_empty(), - Fields::Unnamed(f) => f.unnamed.is_empty(), - } - } -} - -impl IntoIterator for Fields { - type Item = Field; - type IntoIter = punctuated::IntoIter; - - fn into_iter(self) -> Self::IntoIter { - match self { - Fields::Unit => Punctuated::::new().into_iter(), - Fields::Named(f) => f.named.into_iter(), - Fields::Unnamed(f) => f.unnamed.into_iter(), - } - } -} - -impl<'a> IntoIterator for &'a Fields { - type Item = &'a Field; - type IntoIter = punctuated::Iter<'a, Field>; - - fn into_iter(self) -> Self::IntoIter { - self.iter() - } -} - -impl<'a> IntoIterator for &'a mut Fields { - type Item = &'a mut Field; - type IntoIter = punctuated::IterMut<'a, Field>; - - fn into_iter(self) -> Self::IntoIter { - self.iter_mut() - } -} - -ast_struct! { - /// A field of a struct or enum variant. - /// - /// *This type is available only if Syn is built with the `"derive"` or `"full"` - /// feature.* - #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] - pub struct Field { - /// Attributes tagged on the field. - pub attrs: Vec, - - /// Visibility of the field. - pub vis: Visibility, - - /// Name of the field, if any. - /// - /// Fields of tuple structs have no names. - pub ident: Option, - - pub colon_token: Option, - - /// Type of the field. - pub ty: Type, - } -} - -ast_enum_of_structs! { - /// The visibility level of an item: inherited or `pub` or - /// `pub(restricted)`. - /// - /// *This type is available only if Syn is built with the `"derive"` or `"full"` - /// feature.* - /// - /// # Syntax tree enum - /// - /// This type is a [syntax tree enum]. - /// - /// [syntax tree enum]: Expr#syntax-tree-enums - #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] - pub enum Visibility { - /// A public visibility level: `pub`. - Public(VisPublic), - - /// A crate-level visibility: `crate`. - Crate(VisCrate), - - /// A visibility level restricted to some path: `pub(self)` or - /// `pub(super)` or `pub(crate)` or `pub(in some::module)`. - Restricted(VisRestricted), - - /// An inherited visibility, which usually means private. - Inherited, - } -} - -ast_struct! { - /// A public visibility level: `pub`. - /// - /// *This type is available only if Syn is built with the `"derive"` or - /// `"full"` feature.* - #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] - pub struct VisPublic { - pub pub_token: Token![pub], - } -} - -ast_struct! { - /// A crate-level visibility: `crate`. - /// - /// *This type is available only if Syn is built with the `"derive"` or - /// `"full"` feature.* - #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] - pub struct VisCrate { - pub crate_token: Token![crate], - } -} - -ast_struct! { - /// A visibility level restricted to some path: `pub(self)` or - /// `pub(super)` or `pub(crate)` or `pub(in some::module)`. 
-    ///
-    /// *This type is available only if Syn is built with the `"derive"` or
-    /// `"full"` feature.*
-    #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
-    pub struct VisRestricted {
-        pub pub_token: Token![pub],
-        pub paren_token: token::Paren,
-        pub in_token: Option<Token![in]>,
-        pub path: Box<Path>,
-    }
-}
-
-#[cfg(feature = "parsing")]
-pub mod parsing {
-    use super::*;
-    use crate::ext::IdentExt;
-    use crate::parse::discouraged::Speculative;
-    use crate::parse::{Parse, ParseStream, Result};
-
-    #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
-    impl Parse for Variant {
-        fn parse(input: ParseStream) -> Result<Self> {
-            let attrs = input.call(Attribute::parse_outer)?;
-            let _visibility: Visibility = input.parse()?;
-            let ident: Ident = input.parse()?;
-            let fields = if input.peek(token::Brace) {
-                Fields::Named(input.parse()?)
-            } else if input.peek(token::Paren) {
-                Fields::Unnamed(input.parse()?)
-            } else {
-                Fields::Unit
-            };
-            let discriminant = if input.peek(Token![=]) {
-                let eq_token: Token![=] = input.parse()?;
-                let discriminant: Expr = input.parse()?;
-                Some((eq_token, discriminant))
-            } else {
-                None
-            };
-            Ok(Variant {
-                attrs,
-                ident,
-                fields,
-                discriminant,
-            })
-        }
-    }
-
-    #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
-    impl Parse for FieldsNamed {
-        fn parse(input: ParseStream) -> Result<Self> {
-            let content;
-            Ok(FieldsNamed {
-                brace_token: braced!(content in input),
-                named: content.parse_terminated(Field::parse_named)?,
-            })
-        }
-    }
-
-    #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
-    impl Parse for FieldsUnnamed {
-        fn parse(input: ParseStream) -> Result<Self> {
-            let content;
-            Ok(FieldsUnnamed {
-                paren_token: parenthesized!(content in input),
-                unnamed: content.parse_terminated(Field::parse_unnamed)?,
-            })
-        }
-    }
-
-    impl Field {
-        /// Parses a named (braced struct) field.
-        #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
-        pub fn parse_named(input: ParseStream) -> Result<Self> {
-            Ok(Field {
-                attrs: input.call(Attribute::parse_outer)?,
-                vis: input.parse()?,
-                ident: Some(if input.peek(Token![_]) {
-                    input.call(Ident::parse_any)
-                } else {
-                    input.parse()
-                }?),
-                colon_token: Some(input.parse()?),
-                ty: input.parse()?,
-            })
-        }
-
-        /// Parses an unnamed (tuple struct) field.
-        #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
-        pub fn parse_unnamed(input: ParseStream) -> Result<Self> {
-            Ok(Field {
-                attrs: input.call(Attribute::parse_outer)?,
-                vis: input.parse()?,
-                ident: None,
-                colon_token: None,
-                ty: input.parse()?,
-            })
-        }
-    }
-
-    #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
-    impl Parse for Visibility {
-        fn parse(input: ParseStream) -> Result<Self> {
-            // Recognize an empty None-delimited group, as produced by a $:vis
-            // matcher that matched no tokens.
- if input.peek(token::Group) { - let ahead = input.fork(); - let group = crate::group::parse_group(&ahead)?; - if group.content.is_empty() { - input.advance_to(&ahead); - return Ok(Visibility::Inherited); - } - } - - if input.peek(Token![pub]) { - Self::parse_pub(input) - } else if input.peek(Token![crate]) { - Self::parse_crate(input) - } else { - Ok(Visibility::Inherited) - } - } - } - - impl Visibility { - fn parse_pub(input: ParseStream) -> Result { - let pub_token = input.parse::()?; - - if input.peek(token::Paren) { - let ahead = input.fork(); - - let content; - let paren_token = parenthesized!(content in ahead); - if content.peek(Token![crate]) - || content.peek(Token![self]) - || content.peek(Token![super]) - { - let path = content.call(Ident::parse_any)?; - - // Ensure there are no additional tokens within `content`. - // Without explicitly checking, we may misinterpret a tuple - // field as a restricted visibility, causing a parse error. - // e.g. `pub (crate::A, crate::B)` (Issue #720). - if content.is_empty() { - input.advance_to(&ahead); - return Ok(Visibility::Restricted(VisRestricted { - pub_token, - paren_token, - in_token: None, - path: Box::new(Path::from(path)), - })); - } - } else if content.peek(Token![in]) { - let in_token: Token![in] = content.parse()?; - let path = content.call(Path::parse_mod_style)?; - - input.advance_to(&ahead); - return Ok(Visibility::Restricted(VisRestricted { - pub_token, - paren_token, - in_token: Some(in_token), - path: Box::new(path), - })); - } - } - - Ok(Visibility::Public(VisPublic { pub_token })) - } - - fn parse_crate(input: ParseStream) -> Result { - if input.peek2(Token![::]) { - Ok(Visibility::Inherited) - } else { - Ok(Visibility::Crate(VisCrate { - crate_token: input.parse()?, - })) - } - } - - #[cfg(feature = "full")] - pub(crate) fn is_some(&self) -> bool { - match self { - Visibility::Inherited => false, - _ => true, - } - } - } -} - -#[cfg(feature = "printing")] -mod printing { - use super::*; - use crate::print::TokensOrDefault; - use proc_macro2::TokenStream; - use quote::{ToTokens, TokenStreamExt}; - - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] - impl ToTokens for Variant { - fn to_tokens(&self, tokens: &mut TokenStream) { - tokens.append_all(&self.attrs); - self.ident.to_tokens(tokens); - self.fields.to_tokens(tokens); - if let Some((eq_token, disc)) = &self.discriminant { - eq_token.to_tokens(tokens); - disc.to_tokens(tokens); - } - } - } - - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] - impl ToTokens for FieldsNamed { - fn to_tokens(&self, tokens: &mut TokenStream) { - self.brace_token.surround(tokens, |tokens| { - self.named.to_tokens(tokens); - }); - } - } - - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] - impl ToTokens for FieldsUnnamed { - fn to_tokens(&self, tokens: &mut TokenStream) { - self.paren_token.surround(tokens, |tokens| { - self.unnamed.to_tokens(tokens); - }); - } - } - - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] - impl ToTokens for Field { - fn to_tokens(&self, tokens: &mut TokenStream) { - tokens.append_all(&self.attrs); - self.vis.to_tokens(tokens); - if let Some(ident) = &self.ident { - ident.to_tokens(tokens); - TokensOrDefault(&self.colon_token).to_tokens(tokens); - } - self.ty.to_tokens(tokens); - } - } - - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] - impl ToTokens for VisPublic { - fn to_tokens(&self, tokens: &mut TokenStream) { - self.pub_token.to_tokens(tokens); - } - } - - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] - impl ToTokens for 
VisCrate { - fn to_tokens(&self, tokens: &mut TokenStream) { - self.crate_token.to_tokens(tokens); - } - } - - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] - impl ToTokens for VisRestricted { - fn to_tokens(&self, tokens: &mut TokenStream) { - self.pub_token.to_tokens(tokens); - self.paren_token.surround(tokens, |tokens| { - // TODO: If we have a path which is not "self" or "super" or - // "crate", automatically add the "in" token. - self.in_token.to_tokens(tokens); - self.path.to_tokens(tokens); - }); - } - } -} diff --git a/collector/compile-benchmarks/syn-1.0.89/src/derive.rs b/collector/compile-benchmarks/syn-1.0.89/src/derive.rs deleted file mode 100644 index af9bb91b7..000000000 --- a/collector/compile-benchmarks/syn-1.0.89/src/derive.rs +++ /dev/null @@ -1,274 +0,0 @@ -use super::*; -use crate::punctuated::Punctuated; - -ast_struct! { - /// Data structure sent to a `proc_macro_derive` macro. - /// - /// *This type is available only if Syn is built with the `"derive"` feature.* - #[cfg_attr(doc_cfg, doc(cfg(feature = "derive")))] - pub struct DeriveInput { - /// Attributes tagged on the whole struct or enum. - pub attrs: Vec, - - /// Visibility of the struct or enum. - pub vis: Visibility, - - /// Name of the struct or enum. - pub ident: Ident, - - /// Generics required to complete the definition. - pub generics: Generics, - - /// Data within the struct or enum. - pub data: Data, - } -} - -ast_enum_of_structs! { - /// The storage of a struct, enum or union data structure. - /// - /// *This type is available only if Syn is built with the `"derive"` feature.* - /// - /// # Syntax tree enum - /// - /// This type is a [syntax tree enum]. - /// - /// [syntax tree enum]: Expr#syntax-tree-enums - #[cfg_attr(doc_cfg, doc(cfg(feature = "derive")))] - pub enum Data { - /// A struct input to a `proc_macro_derive` macro. - Struct(DataStruct), - - /// An enum input to a `proc_macro_derive` macro. - Enum(DataEnum), - - /// An untagged union input to a `proc_macro_derive` macro. - Union(DataUnion), - } - - do_not_generate_to_tokens -} - -ast_struct! { - /// A struct input to a `proc_macro_derive` macro. - /// - /// *This type is available only if Syn is built with the `"derive"` - /// feature.* - #[cfg_attr(doc_cfg, doc(cfg(feature = "derive")))] - pub struct DataStruct { - pub struct_token: Token![struct], - pub fields: Fields, - pub semi_token: Option, - } -} - -ast_struct! { - /// An enum input to a `proc_macro_derive` macro. - /// - /// *This type is available only if Syn is built with the `"derive"` - /// feature.* - #[cfg_attr(doc_cfg, doc(cfg(feature = "derive")))] - pub struct DataEnum { - pub enum_token: Token![enum], - pub brace_token: token::Brace, - pub variants: Punctuated, - } -} - -ast_struct! { - /// An untagged union input to a `proc_macro_derive` macro. 
- /// - /// *This type is available only if Syn is built with the `"derive"` - /// feature.* - #[cfg_attr(doc_cfg, doc(cfg(feature = "derive")))] - pub struct DataUnion { - pub union_token: Token![union], - pub fields: FieldsNamed, - } -} - -#[cfg(feature = "parsing")] -pub mod parsing { - use super::*; - use crate::parse::{Parse, ParseStream, Result}; - - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] - impl Parse for DeriveInput { - fn parse(input: ParseStream) -> Result { - let attrs = input.call(Attribute::parse_outer)?; - let vis = input.parse::()?; - - let lookahead = input.lookahead1(); - if lookahead.peek(Token![struct]) { - let struct_token = input.parse::()?; - let ident = input.parse::()?; - let generics = input.parse::()?; - let (where_clause, fields, semi) = data_struct(input)?; - Ok(DeriveInput { - attrs, - vis, - ident, - generics: Generics { - where_clause, - ..generics - }, - data: Data::Struct(DataStruct { - struct_token, - fields, - semi_token: semi, - }), - }) - } else if lookahead.peek(Token![enum]) { - let enum_token = input.parse::()?; - let ident = input.parse::()?; - let generics = input.parse::()?; - let (where_clause, brace, variants) = data_enum(input)?; - Ok(DeriveInput { - attrs, - vis, - ident, - generics: Generics { - where_clause, - ..generics - }, - data: Data::Enum(DataEnum { - enum_token, - brace_token: brace, - variants, - }), - }) - } else if lookahead.peek(Token![union]) { - let union_token = input.parse::()?; - let ident = input.parse::()?; - let generics = input.parse::()?; - let (where_clause, fields) = data_union(input)?; - Ok(DeriveInput { - attrs, - vis, - ident, - generics: Generics { - where_clause, - ..generics - }, - data: Data::Union(DataUnion { - union_token, - fields, - }), - }) - } else { - Err(lookahead.error()) - } - } - } - - pub fn data_struct( - input: ParseStream, - ) -> Result<(Option, Fields, Option)> { - let mut lookahead = input.lookahead1(); - let mut where_clause = None; - if lookahead.peek(Token![where]) { - where_clause = Some(input.parse()?); - lookahead = input.lookahead1(); - } - - if where_clause.is_none() && lookahead.peek(token::Paren) { - let fields = input.parse()?; - - lookahead = input.lookahead1(); - if lookahead.peek(Token![where]) { - where_clause = Some(input.parse()?); - lookahead = input.lookahead1(); - } - - if lookahead.peek(Token![;]) { - let semi = input.parse()?; - Ok((where_clause, Fields::Unnamed(fields), Some(semi))) - } else { - Err(lookahead.error()) - } - } else if lookahead.peek(token::Brace) { - let fields = input.parse()?; - Ok((where_clause, Fields::Named(fields), None)) - } else if lookahead.peek(Token![;]) { - let semi = input.parse()?; - Ok((where_clause, Fields::Unit, Some(semi))) - } else { - Err(lookahead.error()) - } - } - - pub fn data_enum( - input: ParseStream, - ) -> Result<( - Option, - token::Brace, - Punctuated, - )> { - let where_clause = input.parse()?; - - let content; - let brace = braced!(content in input); - let variants = content.parse_terminated(Variant::parse)?; - - Ok((where_clause, brace, variants)) - } - - pub fn data_union(input: ParseStream) -> Result<(Option, FieldsNamed)> { - let where_clause = input.parse()?; - let fields = input.parse()?; - Ok((where_clause, fields)) - } -} - -#[cfg(feature = "printing")] -mod printing { - use super::*; - use crate::attr::FilterAttrs; - use crate::print::TokensOrDefault; - use proc_macro2::TokenStream; - use quote::ToTokens; - - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] - impl ToTokens for DeriveInput { - fn 
to_tokens(&self, tokens: &mut TokenStream) { - for attr in self.attrs.outer() { - attr.to_tokens(tokens); - } - self.vis.to_tokens(tokens); - match &self.data { - Data::Struct(d) => d.struct_token.to_tokens(tokens), - Data::Enum(d) => d.enum_token.to_tokens(tokens), - Data::Union(d) => d.union_token.to_tokens(tokens), - } - self.ident.to_tokens(tokens); - self.generics.to_tokens(tokens); - match &self.data { - Data::Struct(data) => match &data.fields { - Fields::Named(fields) => { - self.generics.where_clause.to_tokens(tokens); - fields.to_tokens(tokens); - } - Fields::Unnamed(fields) => { - fields.to_tokens(tokens); - self.generics.where_clause.to_tokens(tokens); - TokensOrDefault(&data.semi_token).to_tokens(tokens); - } - Fields::Unit => { - self.generics.where_clause.to_tokens(tokens); - TokensOrDefault(&data.semi_token).to_tokens(tokens); - } - }, - Data::Enum(data) => { - self.generics.where_clause.to_tokens(tokens); - data.brace_token.surround(tokens, |tokens| { - data.variants.to_tokens(tokens); - }); - } - Data::Union(data) => { - self.generics.where_clause.to_tokens(tokens); - data.fields.to_tokens(tokens); - } - } - } - } -} diff --git a/collector/compile-benchmarks/syn-1.0.89/src/discouraged.rs b/collector/compile-benchmarks/syn-1.0.89/src/discouraged.rs deleted file mode 100644 index a46129b6a..000000000 --- a/collector/compile-benchmarks/syn-1.0.89/src/discouraged.rs +++ /dev/null @@ -1,194 +0,0 @@ -//! Extensions to the parsing API with niche applicability. - -use super::*; - -/// Extensions to the `ParseStream` API to support speculative parsing. -pub trait Speculative { - /// Advance this parse stream to the position of a forked parse stream. - /// - /// This is the opposite operation to [`ParseStream::fork`]. You can fork a - /// parse stream, perform some speculative parsing, then join the original - /// stream to the fork to "commit" the parsing from the fork to the main - /// stream. - /// - /// If you can avoid doing this, you should, as it limits the ability to - /// generate useful errors. That said, it is often the only way to parse - /// syntax of the form `A* B*` for arbitrary syntax `A` and `B`. The problem - /// is that when the fork fails to parse an `A`, it's impossible to tell - /// whether that was because of a syntax error and the user meant to provide - /// an `A`, or that the `A`s are finished and it's time to start parsing - /// `B`s. Use with care. - /// - /// Also note that if `A` is a subset of `B`, `A* B*` can be parsed by - /// parsing `B*` and removing the leading members of `A` from the - /// repetition, bypassing the need to involve the downsides associated with - /// speculative parsing. - /// - /// [`ParseStream::fork`]: ParseBuffer::fork - /// - /// # Example - /// - /// There has been chatter about the possibility of making the colons in the - /// turbofish syntax like `path::to::` no longer required by accepting - /// `path::to` in expression position. Specifically, according to [RFC - /// 2544], [`PathSegment`] parsing should always try to consume a following - /// `<` token as the start of generic arguments, and reset to the `<` if - /// that fails (e.g. the token is acting as a less-than operator). - /// - /// This is the exact kind of parsing behavior which requires the "fork, - /// try, commit" behavior that [`ParseStream::fork`] discourages. With - /// `advance_to`, we can avoid having to parse the speculatively parsed - /// content a second time. 
- /// - /// This change in behavior can be implemented in syn by replacing just the - /// `Parse` implementation for `PathSegment`: - /// - /// ``` - /// # use syn::ext::IdentExt; - /// use syn::parse::discouraged::Speculative; - /// # use syn::parse::{Parse, ParseStream}; - /// # use syn::{Ident, PathArguments, Result, Token}; - /// - /// pub struct PathSegment { - /// pub ident: Ident, - /// pub arguments: PathArguments, - /// } - /// # - /// # impl From for PathSegment - /// # where - /// # T: Into, - /// # { - /// # fn from(ident: T) -> Self { - /// # PathSegment { - /// # ident: ident.into(), - /// # arguments: PathArguments::None, - /// # } - /// # } - /// # } - /// - /// impl Parse for PathSegment { - /// fn parse(input: ParseStream) -> Result { - /// if input.peek(Token![super]) - /// || input.peek(Token![self]) - /// || input.peek(Token![Self]) - /// || input.peek(Token![crate]) - /// { - /// let ident = input.call(Ident::parse_any)?; - /// return Ok(PathSegment::from(ident)); - /// } - /// - /// let ident = input.parse()?; - /// if input.peek(Token![::]) && input.peek3(Token![<]) { - /// return Ok(PathSegment { - /// ident, - /// arguments: PathArguments::AngleBracketed(input.parse()?), - /// }); - /// } - /// if input.peek(Token![<]) && !input.peek(Token![<=]) { - /// let fork = input.fork(); - /// if let Ok(arguments) = fork.parse() { - /// input.advance_to(&fork); - /// return Ok(PathSegment { - /// ident, - /// arguments: PathArguments::AngleBracketed(arguments), - /// }); - /// } - /// } - /// Ok(PathSegment::from(ident)) - /// } - /// } - /// - /// # syn::parse_str::("a").unwrap(); - /// ``` - /// - /// # Drawbacks - /// - /// The main drawback of this style of speculative parsing is in error - /// presentation. Even if the lookahead is the "correct" parse, the error - /// that is shown is that of the "fallback" parse. To use the same example - /// as the turbofish above, take the following unfinished "turbofish": - /// - /// ```text - /// let _ = f<&'a fn(), for<'a> serde::>(); - /// ``` - /// - /// If this is parsed as generic arguments, we can provide the error message - /// - /// ```text - /// error: expected identifier - /// --> src.rs:L:C - /// | - /// L | let _ = f<&'a fn(), for<'a> serde::>(); - /// | ^ - /// ``` - /// - /// but if parsed using the above speculative parsing, it falls back to - /// assuming that the `<` is a less-than when it fails to parse the generic - /// arguments, and tries to interpret the `&'a` as the start of a labelled - /// loop, resulting in the much less helpful error - /// - /// ```text - /// error: expected `:` - /// --> src.rs:L:C - /// | - /// L | let _ = f<&'a fn(), for<'a> serde::>(); - /// | ^^ - /// ``` - /// - /// This can be mitigated with various heuristics (two examples: show both - /// forks' parse errors, or show the one that consumed more tokens), but - /// when you can control the grammar, sticking to something that can be - /// parsed LL(3) and without the LL(*) speculative parsing this makes - /// possible, displaying reasonable errors becomes much more simple. - /// - /// [RFC 2544]: https://github.com/rust-lang/rfcs/pull/2544 - /// [`PathSegment`]: crate::PathSegment - /// - /// # Performance - /// - /// This method performs a cheap fixed amount of work that does not depend - /// on how far apart the two streams are positioned. - /// - /// # Panics - /// - /// The forked stream in the argument of `advance_to` must have been - /// obtained by forking `self`. 
Attempting to advance to any other stream - /// will cause a panic. - fn advance_to(&self, fork: &Self); -} - -impl<'a> Speculative for ParseBuffer<'a> { - fn advance_to(&self, fork: &Self) { - if !crate::buffer::same_scope(self.cursor(), fork.cursor()) { - panic!("Fork was not derived from the advancing parse stream"); - } - - let (self_unexp, self_sp) = inner_unexpected(self); - let (fork_unexp, fork_sp) = inner_unexpected(fork); - if !Rc::ptr_eq(&self_unexp, &fork_unexp) { - match (fork_sp, self_sp) { - // Unexpected set on the fork, but not on `self`, copy it over. - (Some(span), None) => { - self_unexp.set(Unexpected::Some(span)); - } - // Unexpected unset. Use chain to propagate errors from fork. - (None, None) => { - fork_unexp.set(Unexpected::Chain(self_unexp)); - - // Ensure toplevel 'unexpected' tokens from the fork don't - // bubble up the chain by replacing the root `unexpected` - // pointer, only 'unexpected' tokens from existing group - // parsers should bubble. - fork.unexpected - .set(Some(Rc::new(Cell::new(Unexpected::None)))); - } - // Unexpected has been set on `self`. No changes needed. - (_, Some(_)) => {} - } - } - - // See comment on `cell` in the struct definition. - self.cell - .set(unsafe { mem::transmute::>(fork.cursor()) }); - } -} diff --git a/collector/compile-benchmarks/syn-1.0.89/src/error.rs b/collector/compile-benchmarks/syn-1.0.89/src/error.rs deleted file mode 100644 index b505b8947..000000000 --- a/collector/compile-benchmarks/syn-1.0.89/src/error.rs +++ /dev/null @@ -1,413 +0,0 @@ -#[cfg(feature = "parsing")] -use crate::buffer::Cursor; -use crate::thread::ThreadBound; -use proc_macro2::{ - Delimiter, Group, Ident, LexError, Literal, Punct, Spacing, Span, TokenStream, TokenTree, -}; -#[cfg(feature = "printing")] -use quote::ToTokens; -use std::fmt::{self, Debug, Display}; -use std::iter::FromIterator; -use std::slice; -use std::vec; - -/// The result of a Syn parser. -pub type Result = std::result::Result; - -/// Error returned when a Syn parser cannot parse the input tokens. -/// -/// # Error reporting in proc macros -/// -/// The correct way to report errors back to the compiler from a procedural -/// macro is by emitting an appropriately spanned invocation of -/// [`compile_error!`] in the generated code. This produces a better diagnostic -/// message than simply panicking the macro. -/// -/// [`compile_error!`]: std::compile_error! -/// -/// When parsing macro input, the [`parse_macro_input!`] macro handles the -/// conversion to `compile_error!` automatically. -/// -/// ``` -/// # extern crate proc_macro; -/// # -/// use proc_macro::TokenStream; -/// use syn::{parse_macro_input, AttributeArgs, ItemFn}; -/// -/// # const IGNORE: &str = stringify! { -/// #[proc_macro_attribute] -/// # }; -/// pub fn my_attr(args: TokenStream, input: TokenStream) -> TokenStream { -/// let args = parse_macro_input!(args as AttributeArgs); -/// let input = parse_macro_input!(input as ItemFn); -/// -/// /* ... */ -/// # TokenStream::new() -/// } -/// ``` -/// -/// For errors that arise later than the initial parsing stage, the -/// [`.to_compile_error()`] or [`.into_compile_error()`] methods can be used to -/// perform an explicit conversion to `compile_error!`. -/// -/// [`.to_compile_error()`]: Error::to_compile_error -/// [`.into_compile_error()`]: Error::into_compile_error -/// -/// ``` -/// # extern crate proc_macro; -/// # -/// # use proc_macro::TokenStream; -/// # use syn::{parse_macro_input, DeriveInput}; -/// # -/// # const IGNORE: &str = stringify! 
{
-/// #[proc_macro_derive(MyDerive)]
-/// # };
-/// pub fn my_derive(input: TokenStream) -> TokenStream {
-///     let input = parse_macro_input!(input as DeriveInput);
-///
-///     // fn(DeriveInput) -> syn::Result<proc_macro2::TokenStream>
-///     expand::my_derive(input)
-///         .unwrap_or_else(syn::Error::into_compile_error)
-///         .into()
-/// }
-/// #
-/// # mod expand {
-/// #     use proc_macro2::TokenStream;
-/// #     use syn::{DeriveInput, Result};
-/// #
-/// #     pub fn my_derive(input: DeriveInput) -> Result<TokenStream> {
-/// #         unimplemented!()
-/// #     }
-/// # }
-/// ```
-pub struct Error {
-    messages: Vec<ErrorMessage>,
-}
-
-struct ErrorMessage {
-    // Span is implemented as an index into a thread-local interner to keep the
-    // size small. It is not safe to access from a different thread. We want
-    // errors to be Send and Sync to play nicely with the Failure crate, so pin
-    // the span we're given to its original thread and assume it is
-    // Span::call_site if accessed from any other thread.
-    start_span: ThreadBound<Span>,
-    end_span: ThreadBound<Span>,
-    message: String,
-}
-
-#[cfg(test)]
-struct _Test
-where
-    Error: Send + Sync;
-
-impl Error {
-    /// Usually the [`ParseStream::error`] method will be used instead, which
-    /// automatically uses the correct span from the current position of the
-    /// parse stream.
-    ///
-    /// Use `Error::new` when the error needs to be triggered on some span other
-    /// than where the parse stream is currently positioned.
-    ///
-    /// [`ParseStream::error`]: crate::parse::ParseBuffer::error
-    ///
-    /// # Example
-    ///
-    /// ```
-    /// use syn::{Error, Ident, LitStr, Result, Token};
-    /// use syn::parse::ParseStream;
-    ///
-    /// // Parses input that looks like `name = "string"` where the key must be
-    /// // the identifier `name` and the value may be any string literal.
-    /// // Returns the string literal.
-    /// fn parse_name(input: ParseStream) -> Result<LitStr> {
-    ///     let name_token: Ident = input.parse()?;
-    ///     if name_token != "name" {
-    ///         // Trigger an error not on the current position of the stream,
-    ///         // but on the position of the unexpected identifier.
-    ///         return Err(Error::new(name_token.span(), "expected `name`"));
-    ///     }
-    ///     input.parse::<Token![=]>()?;
-    ///     let s: LitStr = input.parse()?;
-    ///     Ok(s)
-    /// }
-    /// ```
-    pub fn new<T: Display>(span: Span, message: T) -> Self {
-        Error {
-            messages: vec![ErrorMessage {
-                start_span: ThreadBound::new(span),
-                end_span: ThreadBound::new(span),
-                message: message.to_string(),
-            }],
-        }
-    }
-
-    /// Creates an error with the specified message spanning the given syntax
-    /// tree node.
-    ///
-    /// Unlike the `Error::new` constructor, this constructor takes an argument
-    /// `tokens` which is a syntax tree node. This allows the resulting `Error`
-    /// to attempt to span all tokens inside of `tokens`. While you would
-    /// typically be able to use the `Spanned` trait with the above `Error::new`
-    /// constructor, implementation limitations today mean that
-    /// `Error::new_spanned` may provide a higher-quality error message on
-    /// stable Rust.
-    ///
-    /// When in doubt it's recommended to stick to `Error::new` (or
-    /// `ParseStream::error`)!
- #[cfg(feature = "printing")] - pub fn new_spanned(tokens: T, message: U) -> Self { - let mut iter = tokens.into_token_stream().into_iter(); - let start = iter.next().map_or_else(Span::call_site, |t| t.span()); - let end = iter.last().map_or(start, |t| t.span()); - Error { - messages: vec![ErrorMessage { - start_span: ThreadBound::new(start), - end_span: ThreadBound::new(end), - message: message.to_string(), - }], - } - } - - /// The source location of the error. - /// - /// Spans are not thread-safe so this function returns `Span::call_site()` - /// if called from a different thread than the one on which the `Error` was - /// originally created. - pub fn span(&self) -> Span { - let start = match self.messages[0].start_span.get() { - Some(span) => *span, - None => return Span::call_site(), - }; - let end = match self.messages[0].end_span.get() { - Some(span) => *span, - None => return Span::call_site(), - }; - start.join(end).unwrap_or(start) - } - - /// Render the error as an invocation of [`compile_error!`]. - /// - /// The [`parse_macro_input!`] macro provides a convenient way to invoke - /// this method correctly in a procedural macro. - /// - /// [`compile_error!`]: std::compile_error! - pub fn to_compile_error(&self) -> TokenStream { - self.messages - .iter() - .map(ErrorMessage::to_compile_error) - .collect() - } - - /// Render the error as an invocation of [`compile_error!`]. - /// - /// [`compile_error!`]: std::compile_error! - /// - /// # Example - /// - /// ``` - /// # extern crate proc_macro; - /// # - /// use proc_macro::TokenStream; - /// use syn::{parse_macro_input, DeriveInput, Error}; - /// - /// # const _: &str = stringify! { - /// #[proc_macro_derive(MyTrait)] - /// # }; - /// pub fn derive_my_trait(input: TokenStream) -> TokenStream { - /// let input = parse_macro_input!(input as DeriveInput); - /// my_trait::expand(input) - /// .unwrap_or_else(Error::into_compile_error) - /// .into() - /// } - /// - /// mod my_trait { - /// use proc_macro2::TokenStream; - /// use syn::{DeriveInput, Result}; - /// - /// pub(crate) fn expand(input: DeriveInput) -> Result { - /// /* ... */ - /// # unimplemented!() - /// } - /// } - /// ``` - pub fn into_compile_error(self) -> TokenStream { - self.to_compile_error() - } - - /// Add another error message to self such that when `to_compile_error()` is - /// called, both errors will be emitted together. 
- pub fn combine(&mut self, another: Error) { - self.messages.extend(another.messages); - } -} - -impl ErrorMessage { - fn to_compile_error(&self) -> TokenStream { - let start = self - .start_span - .get() - .cloned() - .unwrap_or_else(Span::call_site); - let end = self.end_span.get().cloned().unwrap_or_else(Span::call_site); - - // compile_error!($message) - TokenStream::from_iter(vec![ - TokenTree::Ident(Ident::new("compile_error", start)), - TokenTree::Punct({ - let mut punct = Punct::new('!', Spacing::Alone); - punct.set_span(start); - punct - }), - TokenTree::Group({ - let mut group = Group::new(Delimiter::Brace, { - TokenStream::from_iter(vec![TokenTree::Literal({ - let mut string = Literal::string(&self.message); - string.set_span(end); - string - })]) - }); - group.set_span(end); - group - }), - ]) - } -} - -#[cfg(feature = "parsing")] -pub fn new_at(scope: Span, cursor: Cursor, message: T) -> Error { - if cursor.eof() { - Error::new(scope, format!("unexpected end of input, {}", message)) - } else { - let span = crate::buffer::open_span_of_group(cursor); - Error::new(span, message) - } -} - -#[cfg(all(feature = "parsing", any(feature = "full", feature = "derive")))] -pub fn new2(start: Span, end: Span, message: T) -> Error { - Error { - messages: vec![ErrorMessage { - start_span: ThreadBound::new(start), - end_span: ThreadBound::new(end), - message: message.to_string(), - }], - } -} - -impl Debug for Error { - fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - if self.messages.len() == 1 { - formatter - .debug_tuple("Error") - .field(&self.messages[0]) - .finish() - } else { - formatter - .debug_tuple("Error") - .field(&self.messages) - .finish() - } - } -} - -impl Debug for ErrorMessage { - fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - Debug::fmt(&self.message, formatter) - } -} - -impl Display for Error { - fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - formatter.write_str(&self.messages[0].message) - } -} - -impl Clone for Error { - fn clone(&self) -> Self { - Error { - messages: self.messages.clone(), - } - } -} - -impl Clone for ErrorMessage { - fn clone(&self) -> Self { - let start = self - .start_span - .get() - .cloned() - .unwrap_or_else(Span::call_site); - let end = self.end_span.get().cloned().unwrap_or_else(Span::call_site); - ErrorMessage { - start_span: ThreadBound::new(start), - end_span: ThreadBound::new(end), - message: self.message.clone(), - } - } -} - -impl std::error::Error for Error {} - -impl From for Error { - fn from(err: LexError) -> Self { - Error::new(err.span(), "lex error") - } -} - -impl IntoIterator for Error { - type Item = Error; - type IntoIter = IntoIter; - - fn into_iter(self) -> Self::IntoIter { - IntoIter { - messages: self.messages.into_iter(), - } - } -} - -pub struct IntoIter { - messages: vec::IntoIter, -} - -impl Iterator for IntoIter { - type Item = Error; - - fn next(&mut self) -> Option { - Some(Error { - messages: vec![self.messages.next()?], - }) - } -} - -impl<'a> IntoIterator for &'a Error { - type Item = Error; - type IntoIter = Iter<'a>; - - fn into_iter(self) -> Self::IntoIter { - Iter { - messages: self.messages.iter(), - } - } -} - -pub struct Iter<'a> { - messages: slice::Iter<'a, ErrorMessage>, -} - -impl<'a> Iterator for Iter<'a> { - type Item = Error; - - fn next(&mut self) -> Option { - Some(Error { - messages: vec![self.messages.next()?.clone()], - }) - } -} - -impl Extend for Error { - fn extend>(&mut self, iter: T) { - for err in iter { - self.combine(err); - } - } -} 
diff --git a/collector/compile-benchmarks/syn-1.0.89/src/export.rs b/collector/compile-benchmarks/syn-1.0.89/src/export.rs deleted file mode 100644 index d3a087841..000000000 --- a/collector/compile-benchmarks/syn-1.0.89/src/export.rs +++ /dev/null @@ -1,36 +0,0 @@ -pub use std::clone::Clone; -pub use std::cmp::{Eq, PartialEq}; -pub use std::default::Default; -pub use std::fmt::{self, Debug, Formatter}; -pub use std::hash::{Hash, Hasher}; -pub use std::marker::Copy; -pub use std::option::Option::{None, Some}; -pub use std::result::Result::{Err, Ok}; - -#[cfg(feature = "printing")] -pub extern crate quote; - -pub use proc_macro2::{Span, TokenStream as TokenStream2}; - -pub use crate::span::IntoSpans; - -#[cfg(all( - not(all(target_arch = "wasm32", any(target_os = "unknown", target_os = "wasi"))), - feature = "proc-macro" -))] -pub use proc_macro::TokenStream; - -#[cfg(feature = "printing")] -pub use quote::{ToTokens, TokenStreamExt}; - -#[allow(non_camel_case_types)] -pub type bool = help::Bool; -#[allow(non_camel_case_types)] -pub type str = help::Str; - -mod help { - pub type Bool = bool; - pub type Str = str; -} - -pub struct private(pub(crate) ()); diff --git a/collector/compile-benchmarks/syn-1.0.89/src/expr.rs b/collector/compile-benchmarks/syn-1.0.89/src/expr.rs deleted file mode 100644 index 90e383f84..000000000 --- a/collector/compile-benchmarks/syn-1.0.89/src/expr.rs +++ /dev/null @@ -1,3513 +0,0 @@ -use super::*; -use crate::punctuated::Punctuated; -#[cfg(feature = "full")] -use crate::reserved::Reserved; -use proc_macro2::{Span, TokenStream}; -#[cfg(feature = "printing")] -use quote::IdentFragment; -#[cfg(feature = "printing")] -use std::fmt::{self, Display}; -use std::hash::{Hash, Hasher}; -#[cfg(feature = "parsing")] -use std::mem; - -ast_enum_of_structs! { - /// A Rust expression. - /// - /// *This type is available only if Syn is built with the `"derive"` or `"full"` - /// feature, but most of the variants are not available unless "full" is enabled.* - /// - /// # Syntax tree enums - /// - /// This type is a syntax tree enum. In Syn this and other syntax tree enums - /// are designed to be traversed using the following rebinding idiom. - /// - /// ``` - /// # use syn::Expr; - /// # - /// # fn example(expr: Expr) { - /// # const IGNORE: &str = stringify! { - /// let expr: Expr = /* ... */; - /// # }; - /// match expr { - /// Expr::MethodCall(expr) => { - /// /* ... */ - /// } - /// Expr::Cast(expr) => { - /// /* ... */ - /// } - /// Expr::If(expr) => { - /// /* ... */ - /// } - /// - /// /* ... */ - /// # _ => {} - /// # } - /// # } - /// ``` - /// - /// We begin with a variable `expr` of type `Expr` that has no fields - /// (because it is an enum), and by matching on it and rebinding a variable - /// with the same name `expr` we effectively imbue our variable with all of - /// the data fields provided by the variant that it turned out to be. So for - /// example above if we ended up in the `MethodCall` case then we get to use - /// `expr.receiver`, `expr.args` etc; if we ended up in the `If` case we get - /// to use `expr.cond`, `expr.then_branch`, `expr.else_branch`. - /// - /// This approach avoids repeating the variant names twice on every line. - /// - /// ``` - /// # use syn::{Expr, ExprMethodCall}; - /// # - /// # fn example(expr: Expr) { - /// // Repetitive; recommend not doing this. - /// match expr { - /// Expr::MethodCall(ExprMethodCall { method, args, .. 
}) => { - /// # } - /// # _ => {} - /// # } - /// # } - /// ``` - /// - /// In general, the name to which a syntax tree enum variant is bound should - /// be a suitable name for the complete syntax tree enum type. - /// - /// ``` - /// # use syn::{Expr, ExprField}; - /// # - /// # fn example(discriminant: ExprField) { - /// // Binding is called `base` which is the name I would use if I were - /// // assigning `*discriminant.base` without an `if let`. - /// if let Expr::Tuple(base) = *discriminant.base { - /// # } - /// # } - /// ``` - /// - /// A sign that you may not be choosing the right variable names is if you - /// see names getting repeated in your code, like accessing - /// `receiver.receiver` or `pat.pat` or `cond.cond`. - #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] - pub enum Expr { - /// A slice literal expression: `[a, b, c, d]`. - Array(ExprArray), - - /// An assignment expression: `a = compute()`. - Assign(ExprAssign), - - /// A compound assignment expression: `counter += 1`. - AssignOp(ExprAssignOp), - - /// An async block: `async { ... }`. - Async(ExprAsync), - - /// An await expression: `fut.await`. - Await(ExprAwait), - - /// A binary operation: `a + b`, `a * b`. - Binary(ExprBinary), - - /// A blocked scope: `{ ... }`. - Block(ExprBlock), - - /// A box expression: `box f`. - Box(ExprBox), - - /// A `break`, with an optional label to break and an optional - /// expression. - Break(ExprBreak), - - /// A function call expression: `invoke(a, b)`. - Call(ExprCall), - - /// A cast expression: `foo as f64`. - Cast(ExprCast), - - /// A closure expression: `|a, b| a + b`. - Closure(ExprClosure), - - /// A `continue`, with an optional label. - Continue(ExprContinue), - - /// Access of a named struct field (`obj.k`) or unnamed tuple struct - /// field (`obj.0`). - Field(ExprField), - - /// A for loop: `for pat in expr { ... }`. - ForLoop(ExprForLoop), - - /// An expression contained within invisible delimiters. - /// - /// This variant is important for faithfully representing the precedence - /// of expressions and is related to `None`-delimited spans in a - /// `TokenStream`. - Group(ExprGroup), - - /// An `if` expression with an optional `else` block: `if expr { ... } - /// else { ... }`. - /// - /// The `else` branch expression may only be an `If` or `Block` - /// expression, not any of the other types of expression. - If(ExprIf), - - /// A square bracketed indexing expression: `vector[2]`. - Index(ExprIndex), - - /// A `let` guard: `let Some(x) = opt`. - Let(ExprLet), - - /// A literal in place of an expression: `1`, `"foo"`. - Lit(ExprLit), - - /// Conditionless loop: `loop { ... }`. - Loop(ExprLoop), - - /// A macro invocation expression: `format!("{}", q)`. - Macro(ExprMacro), - - /// A `match` expression: `match n { Some(n) => {}, None => {} }`. - Match(ExprMatch), - - /// A method call expression: `x.foo::(a, b)`. - MethodCall(ExprMethodCall), - - /// A parenthesized expression: `(a + b)`. - Paren(ExprParen), - - /// A path like `std::mem::replace` possibly containing generic - /// parameters and a qualified self-type. - /// - /// A plain identifier like `x` is a path of length 1. - Path(ExprPath), - - /// A range expression: `1..2`, `1..`, `..2`, `1..=2`, `..=2`. - Range(ExprRange), - - /// A referencing operation: `&a` or `&mut a`. - Reference(ExprReference), - - /// An array literal constructed from one repeated element: `[0u8; N]`. - Repeat(ExprRepeat), - - /// A `return`, with an optional value to be returned. 
- Return(ExprReturn), - - /// A struct literal expression: `Point { x: 1, y: 1 }`. - /// - /// The `rest` provides the value of the remaining fields as in `S { a: - /// 1, b: 1, ..rest }`. - Struct(ExprStruct), - - /// A try-expression: `expr?`. - Try(ExprTry), - - /// A try block: `try { ... }`. - TryBlock(ExprTryBlock), - - /// A tuple expression: `(a, b, c, d)`. - Tuple(ExprTuple), - - /// A type ascription expression: `foo: f64`. - Type(ExprType), - - /// A unary operation: `!x`, `*x`. - Unary(ExprUnary), - - /// An unsafe block: `unsafe { ... }`. - Unsafe(ExprUnsafe), - - /// Tokens in expression position not interpreted by Syn. - Verbatim(TokenStream), - - /// A while loop: `while expr { ... }`. - While(ExprWhile), - - /// A yield expression: `yield expr`. - Yield(ExprYield), - - // The following is the only supported idiom for exhaustive matching of - // this enum. - // - // match expr { - // Expr::Array(e) => {...} - // Expr::Assign(e) => {...} - // ... - // Expr::Yield(e) => {...} - // - // #[cfg(test)] - // Expr::__TestExhaustive(_) => unimplemented!(), - // #[cfg(not(test))] - // _ => { /* some sane fallback */ } - // } - // - // This way we fail your tests but don't break your library when adding - // a variant. You will be notified by a test failure when a variant is - // added, so that you can add code to handle it, but your library will - // continue to compile and work for downstream users in the interim. - // - // Once `deny(reachable)` is available in rustc, Expr will be - // reimplemented as a non_exhaustive enum. - // https://github.com/rust-lang/rust/issues/44109#issuecomment-521781237 - #[doc(hidden)] - __TestExhaustive(crate::private), - } -} - -ast_struct! { - /// A slice literal expression: `[a, b, c, d]`. - /// - /// *This type is available only if Syn is built with the `"full"` feature.* - #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] - pub struct ExprArray #full { - pub attrs: Vec, - pub bracket_token: token::Bracket, - pub elems: Punctuated, - } -} - -ast_struct! { - /// An assignment expression: `a = compute()`. - /// - /// *This type is available only if Syn is built with the `"full"` feature.* - #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] - pub struct ExprAssign #full { - pub attrs: Vec, - pub left: Box, - pub eq_token: Token![=], - pub right: Box, - } -} - -ast_struct! { - /// A compound assignment expression: `counter += 1`. - /// - /// *This type is available only if Syn is built with the `"full"` feature.* - #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] - pub struct ExprAssignOp #full { - pub attrs: Vec, - pub left: Box, - pub op: BinOp, - pub right: Box, - } -} - -ast_struct! { - /// An async block: `async { ... }`. - /// - /// *This type is available only if Syn is built with the `"full"` feature.* - #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] - pub struct ExprAsync #full { - pub attrs: Vec, - pub async_token: Token![async], - pub capture: Option, - pub block: Block, - } -} - -ast_struct! { - /// An await expression: `fut.await`. - /// - /// *This type is available only if Syn is built with the `"full"` feature.* - #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] - pub struct ExprAwait #full { - pub attrs: Vec, - pub base: Box, - pub dot_token: Token![.], - pub await_token: token::Await, - } -} - -ast_struct! { - /// A binary operation: `a + b`, `a * b`. 
-    ///
-    /// *This type is available only if Syn is built with the `"derive"` or
-    /// `"full"` feature.*
-    #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
-    pub struct ExprBinary {
-        pub attrs: Vec<Attribute>,
-        pub left: Box<Expr>,
-        pub op: BinOp,
-        pub right: Box<Expr>,
-    }
-}
-
-ast_struct! {
-    /// A blocked scope: `{ ... }`.
-    ///
-    /// *This type is available only if Syn is built with the `"full"` feature.*
-    #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))]
-    pub struct ExprBlock #full {
-        pub attrs: Vec<Attribute>,
-        pub label: Option