commit a09712605daef5b7d45f5162d7363fbf54e5a407
parent 706c0c30068f984b28a3701c575dc5bd128ec944
Author: Ben Dean-Kawamura <bdk@mozilla.com>
Date: Wed, 12 Nov 2025 22:25:18 +0000
Bug 1998236 - Prepare for toml 0.9, r=supply-chain-reviewers
Added a build hack to upgrade all 0.5 consumers to 0.9. This is in
preparation for upgrading the `ohttp` crate, which is on toml 0.9.
Added audits for the new versions. I added trusted audits for all the
toml crates. Cargo itself depends on toml; if the Cargo team trusts it, that
seems like good grounds for us to trust it as well.
Differential Revision: https://phabricator.services.mozilla.com/D271295
Diffstat:
130 files changed, 22096 insertions(+), 7553 deletions(-)
diff --git a/Cargo.lock b/Cargo.lock
@@ -4617,7 +4617,7 @@ dependencies = [
"tinystr",
"tokio",
"tokio-util",
- "toml",
+ "toml 0.5.999",
"tracing",
"unic-langid",
"unic-langid-impl",
@@ -4818,7 +4818,7 @@ dependencies = [
"serde_derive",
"strum",
"thiserror 2.0.12",
- "toml",
+ "toml 0.5.999",
]
[[package]]
@@ -5005,7 +5005,7 @@ dependencies = [
"pkcs11-bindings",
"serde",
"serde_derive",
- "toml",
+ "toml 0.5.999",
]
[[package]]
@@ -5179,7 +5179,7 @@ dependencies = [
"serde",
"serde_derive",
"thiserror 1.999.999",
- "toml",
+ "toml 0.5.999",
]
[[package]]
@@ -6328,6 +6328,15 @@ dependencies = [
]
[[package]]
+name = "serde_spanned"
+version = "1.0.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e24345aa0fe688594e73770a5f6d1b216508b4f93484c0026d521acd30134392"
+dependencies = [
+ "serde_core",
+]
+
+[[package]]
name = "serde_urlencoded"
version = "0.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -6654,7 +6663,7 @@ dependencies = [
"thin-vec",
"to_shmem",
"to_shmem_derive",
- "toml",
+ "toml 0.5.999",
"uluru",
"void",
"walkdir",
@@ -7063,14 +7072,51 @@ dependencies = [
[[package]]
name = "toml"
-version = "0.5.11"
+version = "0.5.999"
+dependencies = [
+ "toml 0.9.8",
+]
+
+[[package]]
+name = "toml"
+version = "0.9.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f4f7f0dd8d50a853a531c426359045b1998f04219d88799810762cd4ad314234"
+checksum = "f0dc8b1fb61449e27716ec0e1bdf0f6b8f3e8f6b05391e8497b8b6d7804ea6d8"
dependencies = [
- "serde",
+ "indexmap",
+ "serde_core",
+ "serde_spanned",
+ "toml_datetime",
+ "toml_parser",
+ "toml_writer",
+ "winnow",
]
[[package]]
+name = "toml_datetime"
+version = "0.7.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f2cdb639ebbc97961c51720f858597f7f24c4fc295327923af55b74c3c724533"
+dependencies = [
+ "serde_core",
+]
+
+[[package]]
+name = "toml_parser"
+version = "1.0.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c0cbe268d35bdb4bb5a56a2de88d0ad0eb70af5384a99d648cd4b3d04039800e"
+dependencies = [
+ "winnow",
+]
+
+[[package]]
+name = "toml_writer"
+version = "1.0.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "df8b2b54733674ad286d16267dcfc7a71ed5c776e4ac7aa3c3e2561f7c637bf2"
+
+[[package]]
name = "topological-sort"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -7320,7 +7366,7 @@ dependencies = [
"indexmap",
"serde",
"textwrap",
- "toml",
+ "toml 0.5.999",
"uniffi",
"uniffi_bindgen",
"uniffi_pipeline",
@@ -7354,7 +7400,7 @@ dependencies = [
"serde",
"tempfile",
"textwrap",
- "toml",
+ "toml 0.5.999",
"uniffi_internal_macros",
"uniffi_meta",
"uniffi_pipeline",
@@ -7438,7 +7484,7 @@ dependencies = [
"quote",
"serde",
"syn",
- "toml",
+ "toml 0.5.999",
"uniffi_meta",
]
@@ -8158,9 +8204,9 @@ dependencies = [
[[package]]
name = "winnow"
-version = "0.7.9"
+version = "0.7.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d9fb597c990f03753e08d3c29efbfcf2019a003b4bf4ba19225c158e1549f0f3"
+checksum = "21a0236b59786fed61e2a80582dd500fe61f18b5dca67a4a067d0bc9039339cf"
dependencies = [
"memchr",
]
diff --git a/Cargo.toml b/Cargo.toml
@@ -198,6 +198,9 @@ hashbrown_0_15 = { package = "hashbrown", path = "build/rust/hashbrown" }
# Patch `thiserror` 1 to 2.
thiserror = { path = "build/rust/thiserror" }
+# Patch `toml` 0.5 to 0.9
+toml = { path = "build/rust/toml" }
+
# Patch thin_vec with not-yet-published changes.
thin-vec = { path = "third_party/rust/thin-vec" }
diff --git a/build/rust/toml/Cargo.toml b/build/rust/toml/Cargo.toml
@@ -0,0 +1,17 @@
+[package]
+name = "toml"
+version = "0.5.999"
+edition = "2018"
+license = "MIT/Apache-2.0"
+
+[lib]
+path = "lib.rs"
+
+[dependencies.toml]
+version = "0.9.8"
+default-features = false
+
+[features]
+default = ["toml/default"]
+indexmap = ["toml/indexmap"]
+preserve-order = ["toml/preserve-order"]
diff --git a/build/rust/toml/lib.rs b/build/rust/toml/lib.rs
@@ -0,0 +1,5 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+pub use toml::*;
diff --git a/supply-chain/audits.toml b/supply-chain/audits.toml
@@ -5611,6 +5611,12 @@ who = "Mike Hommey <mh+mozilla@glandium.org>"
criteria = "safe-to-run"
delta = "0.1.9 -> 0.1.10"
+[[audits.serde_spanned]]
+who = "Ben Dean-Kawamura <bdk@mozilla.com>"
+criteria = "safe-to-deploy"
+version = "1.0.3"
+notes = "Relatively simple Serde trait implementations. No IO or unsafe code."
+
[[audits.serde_with]]
who = "Mike Hommey <mh+mozilla@glandium.org>"
criteria = "safe-to-deploy"
@@ -7950,6 +7956,30 @@ user-id = 1 # Alex Crichton (alexcrichton)
start = "2019-05-16"
end = "2024-05-06"
+[[trusted.toml]]
+criteria = "safe-to-deploy"
+user-id = 6743 # Ed Page (epage)
+start = "2022-12-14"
+end = "2026-11-12"
+
+[[trusted.toml_datetime]]
+criteria = "safe-to-deploy"
+user-id = 6743 # Ed Page (epage)
+start = "2022-10-21"
+end = "2026-11-12"
+
+[[trusted.toml_parser]]
+criteria = "safe-to-deploy"
+user-id = 6743 # Ed Page (epage)
+start = "2025-07-08"
+end = "2026-11-12"
+
+[[trusted.toml_writer]]
+criteria = "safe-to-deploy"
+user-id = 6743 # Ed Page (epage)
+start = "2025-07-08"
+end = "2026-11-12"
+
[[trusted.unicode-ident]]
criteria = "safe-to-deploy"
user-id = 3618 # David Tolnay (dtolnay)
diff --git a/supply-chain/imports.lock b/supply-chain/imports.lock
@@ -735,11 +735,32 @@ user-login = "Darksonn"
user-name = "Alice Ryhl"
[[publisher.toml]]
-version = "0.5.7"
-when = "2020-10-11"
-user-id = 1
-user-login = "alexcrichton"
-user-name = "Alex Crichton"
+version = "0.9.8"
+when = "2025-10-09"
+user-id = 6743
+user-login = "epage"
+user-name = "Ed Page"
+
+[[publisher.toml_datetime]]
+version = "0.7.3"
+when = "2025-10-09"
+user-id = 6743
+user-login = "epage"
+user-name = "Ed Page"
+
+[[publisher.toml_parser]]
+version = "1.0.4"
+when = "2025-10-09"
+user-id = 6743
+user-login = "epage"
+user-name = "Ed Page"
+
+[[publisher.toml_writer]]
+version = "1.0.4"
+when = "2025-10-09"
+user-id = 6743
+user-login = "epage"
+user-name = "Ed Page"
[[publisher.unicode-ident]]
version = "1.0.6"
@@ -944,8 +965,8 @@ user-login = "kennykerr"
user-name = "Kenny Kerr"
[[publisher.winnow]]
-version = "0.7.9"
-when = "2025-05-02"
+version = "0.7.13"
+when = "2025-08-22"
user-id = 6743
user-login = "epage"
user-name = "Ed Page"
diff --git a/third_party/rust/serde_spanned/.cargo-checksum.json b/third_party/rust/serde_spanned/.cargo-checksum.json
@@ -0,0 +1 @@
+{"files":{"Cargo.lock":"f904c415629658ccf733a67cc5417802dbef3fe0ae1cdaadd28a4f92689a2c8a","Cargo.toml":"1eafa226645f35d7d9aeb097e95306a31f8250b7d105aae69e2f5edf06ec0398","LICENSE-APACHE":"c6596eb7be8581c18be736c846fb9173b69eccf6ef94c5135893ec56bd92ba08","LICENSE-MIT":"6efb0476a1cc085077ed49357026d8c173bf33017278ef440f222fb9cbcb66e6","README.md":"dd1b53b524e4a6685a0afec2cdef745ec4da2b1eef8097c26afaf10bcae73860","src/de.rs":"d3e241d806acffbc55ea6cb3235f88c9dc44ec2f8dce3b33a26bd326923d2ec6","src/lib.rs":"9177c41606d5a98e104b7dcceabb8b341d92a40089d75ffab271fe400aefbeac","src/spanned.rs":"63ef6c841a03fd302b260b0af621db0b9677f5fb761fbeb1ac6a3877997b673a"},"package":"e24345aa0fe688594e73770a5f6d1b216508b4f93484c0026d521acd30134392"}
+\ No newline at end of file
diff --git a/third_party/rust/serde_spanned/Cargo.lock b/third_party/rust/serde_spanned/Cargo.lock
@@ -0,0 +1,105 @@
+# This file is automatically @generated by Cargo.
+# It is not intended for manual editing.
+version = 3
+
+[[package]]
+name = "erased-serde"
+version = "0.4.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e004d887f51fcb9fef17317a2f3525c887d8aa3f4f50fed920816a688284a5b7"
+dependencies = [
+ "serde",
+ "typeid",
+]
+
+[[package]]
+name = "proc-macro2"
+version = "1.0.101"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "89ae43fd86e4158d6db51ad8e2b80f313af9cc74f5c0e03ccb87de09998732de"
+dependencies = [
+ "unicode-ident",
+]
+
+[[package]]
+name = "quote"
+version = "1.0.40"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1885c039570dc00dcb4ff087a89e185fd56bae234ddc7f056a945bf36467248d"
+dependencies = [
+ "proc-macro2",
+]
+
+[[package]]
+name = "serde"
+version = "1.0.225"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "fd6c24dee235d0da097043389623fb913daddf92c76e9f5a1db88607a0bcbd1d"
+dependencies = [
+ "serde_core",
+]
+
+[[package]]
+name = "serde-untagged"
+version = "0.1.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f9faf48a4a2d2693be24c6289dbe26552776eb7737074e6722891fadbe6c5058"
+dependencies = [
+ "erased-serde",
+ "serde",
+ "serde_core",
+ "typeid",
+]
+
+[[package]]
+name = "serde_core"
+version = "1.0.225"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "659356f9a0cb1e529b24c01e43ad2bdf520ec4ceaf83047b83ddcc2251f96383"
+dependencies = [
+ "serde_derive",
+]
+
+[[package]]
+name = "serde_derive"
+version = "1.0.225"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0ea936adf78b1f766949a4977b91d2f5595825bd6ec079aa9543ad2685fc4516"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "serde_spanned"
+version = "1.0.3"
+dependencies = [
+ "serde",
+ "serde-untagged",
+ "serde_core",
+ "serde_derive",
+]
+
+[[package]]
+name = "syn"
+version = "2.0.106"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ede7c438028d4436d71104916910f5bb611972c5cfd7f89b8300a8186e6fada6"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "unicode-ident",
+]
+
+[[package]]
+name = "typeid"
+version = "1.0.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bc7d623258602320d5c55d1bc22793b57daff0ec7efc270ea7d55ce1d5f5471c"
+
+[[package]]
+name = "unicode-ident"
+version = "1.0.19"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f63a545481291138910575129486daeaf8ac54aee4387fe7906919f7830c7d9d"
diff --git a/third_party/rust/serde_spanned/Cargo.toml b/third_party/rust/serde_spanned/Cargo.toml
@@ -0,0 +1,190 @@
+# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
+#
+# When uploading crates to the registry Cargo will automatically
+# "normalize" Cargo.toml files for maximal compatibility
+# with all versions of Cargo and also rewrite `path` dependencies
+# to registry (e.g., crates.io) dependencies.
+#
+# If you are reading this file be aware that the original Cargo.toml
+# will likely look very different (and much more reasonable).
+# See Cargo.toml.orig for the original contents.
+
+[package]
+edition = "2021"
+rust-version = "1.76"
+name = "serde_spanned"
+version = "1.0.3"
+build = false
+include = [
+ "build.rs",
+ "src/**/*",
+ "Cargo.toml",
+ "Cargo.lock",
+ "LICENSE*",
+ "README.md",
+ "examples/**/*",
+]
+autolib = false
+autobins = false
+autoexamples = false
+autotests = false
+autobenches = false
+description = "Serde-compatible spanned Value"
+readme = "README.md"
+keywords = [
+ "serde",
+ "span",
+]
+categories = [
+ "encoding",
+ "parser-implementations",
+ "parsing",
+ "config",
+]
+license = "MIT OR Apache-2.0"
+repository = "https://github.com/toml-rs/toml"
+
+[package.metadata.docs.rs]
+all-features = true
+rustdoc-args = ["--generate-link-to-definition"]
+
+[[package.metadata.release.pre-release-replacements]]
+file = "CHANGELOG.md"
+search = "Unreleased"
+replace = "{{version}}"
+min = 1
+
+[[package.metadata.release.pre-release-replacements]]
+file = "CHANGELOG.md"
+search = '\.\.\.HEAD'
+replace = "...{{tag_name}}"
+exactly = 1
+
+[[package.metadata.release.pre-release-replacements]]
+file = "CHANGELOG.md"
+search = "ReleaseDate"
+replace = "{{date}}"
+min = 1
+
+[[package.metadata.release.pre-release-replacements]]
+file = "CHANGELOG.md"
+search = "<!-- next-header -->"
+replace = """
+<!-- next-header -->
+## [Unreleased] - ReleaseDate
+"""
+exactly = 1
+
+[[package.metadata.release.pre-release-replacements]]
+file = "CHANGELOG.md"
+search = "<!-- next-url -->"
+replace = """
+<!-- next-url -->
+[Unreleased]: https://github.com/toml-rs/toml/compare/{{tag_name}}...HEAD"""
+exactly = 1
+
+[features]
+alloc = ["serde_core?/alloc"]
+default = [
+ "std",
+ "serde",
+]
+serde = ["dep:serde_core"]
+std = [
+ "alloc",
+ "serde_core?/std",
+]
+
+[lib]
+name = "serde_spanned"
+path = "src/lib.rs"
+
+[dependencies.serde_core]
+version = "1.0.225"
+optional = true
+default-features = false
+
+[dev-dependencies.serde]
+version = "1"
+
+[dev-dependencies.serde-untagged]
+version = "0.1"
+
+[dev-dependencies.serde_derive]
+version = "1"
+
+[lints.clippy]
+bool_assert_comparison = "allow"
+branches_sharing_code = "allow"
+checked_conversions = "warn"
+collapsible_else_if = "allow"
+create_dir = "warn"
+dbg_macro = "warn"
+debug_assert_with_mut_call = "warn"
+doc_markdown = "warn"
+empty_enum = "warn"
+enum_glob_use = "warn"
+expl_impl_clone_on_copy = "warn"
+explicit_deref_methods = "warn"
+explicit_into_iter_loop = "warn"
+fallible_impl_from = "warn"
+filter_map_next = "warn"
+flat_map_option = "warn"
+float_cmp_const = "warn"
+fn_params_excessive_bools = "warn"
+from_iter_instead_of_collect = "warn"
+get_first = "allow"
+if_same_then_else = "allow"
+implicit_clone = "warn"
+imprecise_flops = "warn"
+inconsistent_struct_constructor = "warn"
+inefficient_to_string = "warn"
+infinite_loop = "warn"
+invalid_upcast_comparisons = "warn"
+large_digit_groups = "warn"
+large_stack_arrays = "warn"
+large_types_passed_by_value = "warn"
+let_and_return = "allow"
+linkedlist = "warn"
+lossy_float_literal = "warn"
+macro_use_imports = "warn"
+mem_forget = "warn"
+mutex_integer = "warn"
+needless_bool = "allow"
+needless_continue = "allow"
+needless_for_each = "warn"
+negative_feature_names = "warn"
+path_buf_push_overwrite = "warn"
+ptr_as_ptr = "warn"
+rc_mutex = "warn"
+redundant_feature_names = "warn"
+ref_option_ref = "warn"
+rest_pat_in_fully_bound_structs = "warn"
+result_large_err = "allow"
+same_functions_in_if_condition = "warn"
+self_named_module_files = "warn"
+semicolon_if_nothing_returned = "warn"
+str_to_string = "warn"
+string_add = "warn"
+string_add_assign = "warn"
+string_lit_as_bytes = "warn"
+string_to_string = "warn"
+todo = "warn"
+trait_duplication_in_bounds = "warn"
+uninlined_format_args = "warn"
+use_self = "warn"
+verbose_file_reads = "warn"
+wildcard_imports = "warn"
+zero_sized_map_values = "warn"
+
+[lints.rust]
+unnameable_types = "allow"
+unreachable_pub = "warn"
+unsafe_op_in_unsafe_fn = "warn"
+unused_lifetimes = "warn"
+unused_macro_rules = "warn"
+unused_qualifications = "warn"
+
+[lints.rust.rust_2018_idioms]
+level = "warn"
+priority = -1
diff --git a/third_party/rust/serde_spanned/LICENSE-APACHE b/third_party/rust/serde_spanned/LICENSE-APACHE
@@ -0,0 +1,202 @@
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "{}"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright {yyyy} {name of copyright owner}
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+
diff --git a/third_party/rust/serde_spanned/LICENSE-MIT b/third_party/rust/serde_spanned/LICENSE-MIT
@@ -0,0 +1,19 @@
+Copyright (c) Individual contributors
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/third_party/rust/serde_spanned/README.md b/third_party/rust/serde_spanned/README.md
@@ -0,0 +1,27 @@
+# serde_spanned
+
+[](https://crates.io/crates/serde_spanned)
+[](https://docs.rs/serde_spanned)
+
+A [serde]-compatible spanned Value
+
+This allows capturing the location, in bytes, for a value in the original parsed document for
+compatible deserializers.
+
+[serde]: https://serde.rs/
+
+## License
+
+Licensed under either of
+
+* Apache License, Version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or <https://www.apache.org/licenses/LICENSE-2.0>)
+* MIT license ([LICENSE-MIT](LICENSE-MIT) or <https://opensource.org/license/mit>)
+
+at your option.
+
+### Contribution
+
+Unless you explicitly state otherwise, any contribution intentionally
+submitted for inclusion in the work by you, as defined in the Apache-2.0
+license, shall be dual-licensed as above, without any additional terms or
+conditions.
diff --git a/third_party/rust/serde_spanned/src/de.rs b/third_party/rust/serde_spanned/src/de.rs
@@ -0,0 +1,81 @@
+//! Deserialization support for [`Spanned`]
+
+use serde_core::de::value::BorrowedStrDeserializer;
+use serde_core::de::IntoDeserializer as _;
+
+use crate::Spanned;
+
+/// Check if deserializing a [`Spanned`]
+pub fn is_spanned(name: &'static str) -> bool {
+ crate::spanned::is_spanned(name)
+}
+
+/// Deserializer / format support for emitting [`Spanned`]
+pub struct SpannedDeserializer<'de, T, E>
+where
+ T: serde_core::de::IntoDeserializer<'de, E>,
+ E: serde_core::de::Error,
+{
+ start: Option<usize>,
+ end: Option<usize>,
+ value: Option<T>,
+ _lifetime: core::marker::PhantomData<&'de ()>,
+ _error: core::marker::PhantomData<E>,
+}
+
+impl<'de, T, E> SpannedDeserializer<'de, T, E>
+where
+ T: serde_core::de::IntoDeserializer<'de, E>,
+ E: serde_core::de::Error,
+{
+ /// Create a deserializer to emit [`Spanned`]
+ pub fn new(value: T, span: core::ops::Range<usize>) -> Self {
+ Self {
+ start: Some(span.start),
+ end: Some(span.end),
+ value: Some(value),
+ _lifetime: Default::default(),
+ _error: Default::default(),
+ }
+ }
+}
+
+impl<'de, T, E> serde_core::de::MapAccess<'de> for SpannedDeserializer<'de, T, E>
+where
+ T: serde_core::de::IntoDeserializer<'de, E>,
+ E: serde_core::de::Error,
+{
+ type Error = E;
+ fn next_key_seed<K>(&mut self, seed: K) -> Result<Option<K::Value>, Self::Error>
+ where
+ K: serde_core::de::DeserializeSeed<'de>,
+ {
+ if self.start.is_some() {
+ seed.deserialize(BorrowedStrDeserializer::new(Spanned::<T>::START_FIELD))
+ .map(Some)
+ } else if self.end.is_some() {
+ seed.deserialize(BorrowedStrDeserializer::new(Spanned::<T>::END_FIELD))
+ .map(Some)
+ } else if self.value.is_some() {
+ seed.deserialize(BorrowedStrDeserializer::new(Spanned::<T>::VALUE_FIELD))
+ .map(Some)
+ } else {
+ Ok(None)
+ }
+ }
+
+ fn next_value_seed<V>(&mut self, seed: V) -> Result<V::Value, Self::Error>
+ where
+ V: serde_core::de::DeserializeSeed<'de>,
+ {
+ if let Some(start) = self.start.take() {
+ seed.deserialize(start.into_deserializer())
+ } else if let Some(end) = self.end.take() {
+ seed.deserialize(end.into_deserializer())
+ } else if let Some(value) = self.value.take() {
+ seed.deserialize(value.into_deserializer())
+ } else {
+ panic!("next_value_seed called before next_key_seed")
+ }
+ }
+}
diff --git a/third_party/rust/serde_spanned/src/lib.rs b/third_party/rust/serde_spanned/src/lib.rs
@@ -0,0 +1,32 @@
+//! A [serde]-compatible spanned Value
+//!
+//! This allows capturing the location, in bytes, for a value in the original parsed document for
+//! compatible deserializers.
+//!
+//! [serde]: https://serde.rs/
+
+#![cfg_attr(docsrs, feature(doc_cfg))]
+#![cfg_attr(all(not(feature = "std"), not(test)), no_std)]
+#![warn(missing_docs)]
+#![warn(clippy::std_instead_of_core)]
+#![warn(clippy::std_instead_of_alloc)]
+// Makes rustc abort compilation if there are any unsafe blocks in the crate.
+// Presence of this annotation is picked up by tools such as cargo-geiger
+// and lets them ensure that there is indeed no unsafe code as opposed to
+// something they couldn't detect (e.g. unsafe added via macro expansion, etc).
+#![forbid(unsafe_code)]
+#![warn(clippy::print_stderr)]
+#![warn(clippy::print_stdout)]
+
+#[cfg(feature = "alloc")]
+#[allow(unused_extern_crates)]
+extern crate alloc;
+
+mod spanned;
+pub use crate::spanned::Spanned;
+#[cfg(feature = "serde")]
+pub mod de;
+
+#[doc = include_str!("../README.md")]
+#[cfg(doctest)]
+pub struct ReadmeDoctests;
diff --git a/third_party/rust/serde_spanned/src/spanned.rs b/third_party/rust/serde_spanned/src/spanned.rs
@@ -0,0 +1,261 @@
+use core::cmp::Ordering;
+use core::hash::{Hash, Hasher};
+
+// Currently serde itself doesn't have a spanned type, so we map our `Spanned`
+// to a special value in the serde data model. Namely one with these special
+// fields/struct names.
+//
+// In general, supported deserializers should catch this and not literally emit
+// these strings but rather emit `Spanned` as they're intended.
+#[cfg(feature = "serde")]
+pub(crate) const NAME: &str = "$__serde_spanned_private_Spanned";
+#[cfg(feature = "serde")]
+pub(crate) const START_FIELD: &str = "$__serde_spanned_private_start";
+#[cfg(feature = "serde")]
+pub(crate) const END_FIELD: &str = "$__serde_spanned_private_end";
+#[cfg(feature = "serde")]
+pub(crate) const VALUE_FIELD: &str = "$__serde_spanned_private_value";
+#[cfg(feature = "serde")]
+pub(crate) fn is_spanned(name: &'static str) -> bool {
+ name == NAME
+}
+
+/// A spanned value, indicating the range at which it is defined in the source.
+#[derive(Clone, Debug)]
+pub struct Spanned<T> {
+ /// Byte range
+ span: core::ops::Range<usize>,
+ /// The spanned value.
+ value: T,
+}
+
+impl<T> Spanned<T> {
+ /// Create a spanned value encompassing the given byte range.
+ ///
+ /// # Example
+ ///
+ /// Transposing a `Spanned<Enum<T>>` into `Enum<Spanned<T>>`:
+ ///
+ /// ```
+ /// use serde::de::{Deserialize, Deserializer};
+ /// use serde_untagged::UntaggedEnumVisitor;
+ /// use toml::Spanned;
+ ///
+ /// pub enum Dependency {
+ /// Simple(Spanned<String>),
+ /// Detailed(Spanned<DetailedDependency>),
+ /// }
+ ///
+ /// impl<'de> Deserialize<'de> for Dependency {
+ /// fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
+ /// where
+ /// D: Deserializer<'de>,
+ /// {
+ /// enum DependencyKind {
+ /// Simple(String),
+ /// Detailed(DetailedDependency),
+ /// }
+ ///
+ /// impl<'de> Deserialize<'de> for DependencyKind {
+ /// fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
+ /// where
+ /// D: Deserializer<'de>,
+ /// {
+ /// UntaggedEnumVisitor::new()
+ /// .expecting(
+ /// "a version string like \"0.9.8\" or a \
+ /// detailed dependency like { version = \"0.9.8\" }",
+ /// )
+ /// .string(|value| Ok(DependencyKind::Simple(value.to_owned())))
+ /// .map(|value| value.deserialize().map(DependencyKind::Detailed))
+ /// .deserialize(deserializer)
+ /// }
+ /// }
+ ///
+ /// let spanned: Spanned<DependencyKind> = Deserialize::deserialize(deserializer)?;
+ /// let range = spanned.span();
+ /// Ok(match spanned.into_inner() {
+ /// DependencyKind::Simple(simple) => Dependency::Simple(Spanned::new(range, simple)),
+ /// DependencyKind::Detailed(detailed) => Dependency::Detailed(Spanned::new(range, detailed)),
+ /// })
+ /// }
+ /// }
+ /// #
+ /// # type DetailedDependency = std::collections::BTreeMap<String, String>;
+ /// ```
+ pub fn new(range: core::ops::Range<usize>, value: T) -> Self {
+ Self { span: range, value }
+ }
+
+ /// Byte range
+ pub fn span(&self) -> core::ops::Range<usize> {
+ self.span.clone()
+ }
+
+ /// Consumes the spanned value and returns the contained value.
+ pub fn into_inner(self) -> T {
+ self.value
+ }
+
+ /// Returns a reference to the contained value.
+ pub fn get_ref(&self) -> &T {
+ &self.value
+ }
+
+ /// Returns a mutable reference to the contained value.
+ pub fn get_mut(&mut self) -> &mut T {
+ &mut self.value
+ }
+}
+
+#[cfg(feature = "serde")]
+impl<T> Spanned<T> {
+ pub(crate) const START_FIELD: &str = START_FIELD;
+ pub(crate) const END_FIELD: &str = END_FIELD;
+ pub(crate) const VALUE_FIELD: &str = VALUE_FIELD;
+}
+
+impl<T: core::fmt::Display> core::fmt::Display for Spanned<T> {
+ fn fmt(&self, fmt: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
+ self.get_ref().fmt(fmt)
+ }
+}
+
+#[cfg(feature = "alloc")]
+#[allow(unused_qualifications)]
+impl core::borrow::Borrow<str> for Spanned<alloc::string::String> {
+ fn borrow(&self) -> &str {
+ self.get_ref()
+ }
+}
+
+#[cfg(feature = "alloc")]
+impl core::borrow::Borrow<str> for Spanned<alloc::borrow::Cow<'_, str>> {
+ fn borrow(&self) -> &str {
+ self.get_ref()
+ }
+}
+
+impl<T> AsRef<T> for Spanned<T> {
+ fn as_ref(&self) -> &T {
+ self.get_ref()
+ }
+}
+
+impl<T> AsMut<T> for Spanned<T> {
+ fn as_mut(&mut self) -> &mut T {
+ self.get_mut()
+ }
+}
+
+impl<T: PartialEq> PartialEq for Spanned<T> {
+ fn eq(&self, other: &Self) -> bool {
+ self.value.eq(&other.value)
+ }
+}
+
+impl<T: Eq> Eq for Spanned<T> {}
+
+impl<T: Hash> Hash for Spanned<T> {
+ fn hash<H: Hasher>(&self, state: &mut H) {
+ self.value.hash(state);
+ }
+}
+
+impl<T: PartialOrd> PartialOrd for Spanned<T> {
+ fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
+ self.value.partial_cmp(&other.value)
+ }
+}
+
+impl<T: Ord> Ord for Spanned<T> {
+ fn cmp(&self, other: &Self) -> Ordering {
+ self.value.cmp(&other.value)
+ }
+}
+
+#[cfg(feature = "serde")]
+impl<'de, T> serde_core::de::Deserialize<'de> for Spanned<T>
+where
+ T: serde_core::de::Deserialize<'de>,
+{
+ fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
+ where
+ D: serde_core::de::Deserializer<'de>,
+ {
+ struct SpannedVisitor<T>(::core::marker::PhantomData<T>);
+
+ impl<'de, T> serde_core::de::Visitor<'de> for SpannedVisitor<T>
+ where
+ T: serde_core::de::Deserialize<'de>,
+ {
+ type Value = Spanned<T>;
+
+ fn expecting(&self, formatter: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
+ formatter.write_str("a spanned value")
+ }
+
+ fn visit_map<V>(self, mut visitor: V) -> Result<Spanned<T>, V::Error>
+ where
+ V: serde_core::de::MapAccess<'de>,
+ {
+ let mut start: Option<usize> = None;
+ let mut end: Option<usize> = None;
+ let mut value: Option<T> = None;
+ while let Some(key) = visitor.next_key()? {
+ match key {
+ START_FIELD => {
+ if start.is_some() {
+ return Err(serde_core::de::Error::duplicate_field(START_FIELD));
+ }
+ start = Some(visitor.next_value()?);
+ }
+ END_FIELD => {
+ if end.is_some() {
+ return Err(serde_core::de::Error::duplicate_field(END_FIELD));
+ }
+ end = Some(visitor.next_value()?);
+ }
+ VALUE_FIELD => {
+ if value.is_some() {
+ return Err(serde_core::de::Error::duplicate_field(VALUE_FIELD));
+ }
+ value = Some(visitor.next_value()?);
+ }
+ field => {
+ return Err(serde_core::de::Error::unknown_field(
+ field,
+ &[START_FIELD, END_FIELD, VALUE_FIELD],
+ ));
+ }
+ }
+ }
+ match (start, end, value) {
+ (Some(start), Some(end), Some(value)) => Ok(Spanned {
+ span: start..end,
+ value,
+ }),
+ (None, _, _) => Err(serde_core::de::Error::missing_field(START_FIELD)),
+ (_, None, _) => Err(serde_core::de::Error::missing_field(END_FIELD)),
+ (_, _, None) => Err(serde_core::de::Error::missing_field(VALUE_FIELD)),
+ }
+ }
+ }
+
+ static FIELDS: [&str; 3] = [START_FIELD, END_FIELD, VALUE_FIELD];
+
+ let visitor = SpannedVisitor(::core::marker::PhantomData);
+
+ deserializer.deserialize_struct(NAME, &FIELDS, visitor)
+ }
+}
+
+#[cfg(feature = "serde")]
+impl<T: serde_core::ser::Serialize> serde_core::ser::Serialize for Spanned<T> {
+ fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
+ where
+ S: serde_core::ser::Serializer,
+ {
+ self.value.serialize(serializer)
+ }
+}
diff --git a/third_party/rust/toml/.cargo-checksum.json b/third_party/rust/toml/.cargo-checksum.json
@@ -1 +1 @@
-{"files":{"Cargo.lock":"f310922cd8870e9f05600a24ea667cf258fd2f9452e052c2e8c63ff9561be060","Cargo.toml":"c26985583d0c680bafc99c11b4a99f5c8e63942c63a33c17588a5e9d3b101ef4","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"378f5840b258e2779c39418f3f2d7b2ba96f1c7917dd6be0713f88305dbda397","README.md":"a915ead738822da2d74b883c7519d8e558299286bb823e26c7fdece901c3267e","examples/decode.rs":"5c1bb83eef7fdf41f7d7873db4681492ce181000669aadee8895ca21e85f0aed","examples/enum_external.rs":"360618cced5c6339afbbc8799df2c047aaeeb1fa7d840dc777c2ffd83871883e","examples/toml2json.rs":"b0256e8393f91633abaa5b5c1ce496b73efee14a81b51e4f242aae967eaf9548","src/datetime.rs":"fe76c72cc5da6bccbd75ae3c7cb6658e92877631a93af4b88dcf54be6b6aeaed","src/de.rs":"6955dc090b657169f95e899f125c7fa6a668c2ab9a07b8c1bf330854e7e37087","src/lib.rs":"d599207a47e5fef74a0eeeb12d412429c305554816a89193ebbb8242f9585681","src/macros.rs":"506f00f2c12dd973a074d3c3263b0a97288eab4bda431d76d7a2cc95c63be0e0","src/map.rs":"ffc5539c8884baea62ed0e2ea89b018797c5e2d285f778f1d0e124a69ec29ddc","src/ser.rs":"e7f1e703efc1d30b0aca55c8d3f6bd62bb163652e0680cd5aeea7bf7217ba90e","src/spanned.rs":"eb370fed2aae511150d7ece47d38c95f11519db8e0b0748524338943579fe876","src/tokens.rs":"62c38be95ac994e6ae42ff983ac3ed66d3924decfd10e929455bf9168d9a1410","src/value.rs":"185ddf60a8e85472449b03e73f7353c3374f20cc720e32cdc33c750c2871a08b","tests/enum_external_deserialize.rs":"32515d5fa80e36efed6006da6514b5054c5c903960d9d884866d608ce9d23c35"},"package":"f4f7f0dd8d50a853a531c426359045b1998f04219d88799810762cd4ad314234"}
-\ No newline at end of file
+{"files":{"Cargo.lock":"5eebad880856e5bbdb9780d7a3afd45d7f6395fb9227fb2f35ef9cea9190a12e","Cargo.toml":"d838a0909e213ebce5daae6a7b7e761031b4f0a8bb99177150108eb04adfa7c8","LICENSE-APACHE":"c6596eb7be8581c18be736c846fb9173b69eccf6ef94c5135893ec56bd92ba08","LICENSE-MIT":"6efb0476a1cc085077ed49357026d8c173bf33017278ef440f222fb9cbcb66e6","README.md":"e601aefac55b2f026b055f87b9f0ad3f6145cb641a9e5c935b92d34787ed643f","examples/decode.rs":"5742451499c0cbe481aa90c116cc219db12b3ca5b3734a4d19b558958d97501d","examples/enum_external.rs":"3875b625e537459e3fc161eeb7b204e16282331efc0fc4a0837fe9c50646b9a5","examples/toml2json.rs":"6472dbff3bd8809aa1e66598e03a8068d997bc8afd09531c6d9da10fecacaf7e","src/de/deserializer/array.rs":"8fd74ca8f9e27d385133d3c81d2e1bd897bda5bb2a7e064da989c7f731c0cc3b","src/de/deserializer/key.rs":"8df2621359f20709b4ff0dc1abbeb9ca58578d92a3e3746f8eec5fd58a3eb8a4","src/de/deserializer/mod.rs":"ced5a182b9c34274951d04f0bf9451be1c65b1d3499ed55897d3a014092c76e2","src/de/deserializer/table.rs":"4a410c6c5ef0df6efd886a8c1806c1284b016e011c12111459116ff64272e955","src/de/deserializer/table_enum.rs":"cd969f58f3767b624879a5aed34567b52e14d0017de960a7e270d237113a79a8","src/de/deserializer/value.rs":"b40887b2ff53e5418589f089eb1718707ea459936b1686be8e34b280febd5a8a","src/de/error.rs":"528546478bbf611285729ae9105052b4e390646d6725c30634329eb3cab3f794","src/de/mod.rs":"7f1c75ffc08f6938873fe4a3b79e6586bef6feba3b3af4f47e13a8ca5fa597ba","src/de/parser/array.rs":"b9b140283f10fe93e7d644d0836e426a9eca4e5c267e7b564bc3d1fa644f2fae","src/de/parser/dearray.rs":"e4e67bb5366da643a49d1a31214fe4f9b746b5016bec439f114062dd6e3e83c8","src/de/parser/debug.rs":"d74042b87745c6e02e647b68514ab620580faeee7b00d880fff7d62fbf7b8cee","src/de/parser/detable.rs":"d6e5498a19046695d8f8447b111a632d41ab7060808753a5b1950a36e0159faa","src/de/parser/devalue.rs":"4d0fca1bdb99279032163baab25e64f8c0340fd713c237d2f860ecc4db01d247","src/de/parser/document.rs":"cf7c0194df75bdf853104ebd4cba66e368930284d803a53f28e6932c8a3
6bc9f","src/de/parser/inline_table.rs":"cda1f41b0d0126f447846197519c2f9be828a2e326de66f6f065ac70004af27b","src/de/parser/key.rs":"3d2059ab16af81b027b02f792ba957080d80a367ac765f30fea53dc80c1a8fe5","src/de/parser/mod.rs":"c2c82bbc61aa84f4a35820f3ba543e404c5894411bb658105864985206489d49","src/de/parser/value.rs":"b95bd3bf05060d0eed3bd3fefc0109ecc2176543a2144a235d14a46599bbeb1c","src/lib.rs":"33e6ee691679492cd257d0eb32bab4876eb501fd18749a846c212820554c7e88","src/macros.rs":"91a1469399c446852155011f8411f50009ed23d8eb4a849419c4d048c1deb1db","src/map.rs":"e4fc73f295a7c88da6d07cd80e788b762bed5affdb0252b133317bff48130010","src/ser/document/array.rs":"134fe4b3ce5dad4af91c5971597c1c2a990c04e25ff0bc7c76e8f36708681de7","src/ser/document/array_of_tables.rs":"a74bf074e116c41634d30d10f1a2c2c32f8f82bbed3f985fcf9866be8de3d3b6","src/ser/document/buffer.rs":"e3b549ce91844732453de0b5221a89f3d301ad67cc384c8e8f058b9420bf0b24","src/ser/document/map.rs":"72171f6a8b2899b8f12b90a565771ac8cce91902ae10c91ddd09a322b1d8908b","src/ser/document/mod.rs":"f9b083034c1c51c6352ed31c1fbbd5afc9725b85ea2718698a063d1398b2e418","src/ser/document/strategy.rs":"4d1e0c1b68a5d3597d5cd35b9b4aa24eb27dce7e50972b5cfcc65e8596fcb343","src/ser/error.rs":"f58f4f60796d6fe1a17edf76fb563c7d92ac13346a9526d6695d0e67a2ae7e2f","src/ser/mod.rs":"e6a87be00c29dcfd839c06e6a6b2c69f76ee9ab41e6a7751fb96f45f13bc09bb","src/ser/style.rs":"828ab1f4c43f34ca2c8fb3ba5bbbe1987536bec3788b9a07f82f78e436f84c13","src/ser/value/array.rs":"343a0d4bc2f02701189c37567aa690307157612c8c204eebfe3974031af5e08c","src/ser/value/key.rs":"6f730c3123424c90cb1cf5e5f109ae5dfd570e173f52401c291f972a29b6e40a","src/ser/value/map.rs":"198afdc2145983888d684db61c01fd26dfbdc1e27cf2669233bf9ec9da57875f","src/ser/value/mod.rs":"ff9d8750ed3340373e9224ef0c25438284c6f6900ddee027156255f04c497e78","src/table.rs":"7aa1cead31721c4d0bc2f606ba67a463dbfc095a83490d01f08699369716d65d","src/value.rs":"fce12d46485c895b06442c536f3297894474605a1889150e4cfa4c459be28d78"},"package":"f0dc8
b1fb61449e27716ec0e1bdf0f6b8f3e8f6b05391e8497b8b6d7804ea6d8"}
+\ No newline at end of file
diff --git a/third_party/rust/toml/Cargo.lock b/third_party/rust/toml/Cargo.lock
@@ -3,68 +3,426 @@
version = 3
[[package]]
+name = "aho-corasick"
+version = "1.1.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8e60d3430d3a69478ad0993f19238d2df97c507009a52b3c10addcd7f6bcb916"
+dependencies = [
+ "memchr",
+]
+
+[[package]]
+name = "anstream"
+version = "0.6.20"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3ae563653d1938f79b1ab1b5e668c87c76a9930414574a6583a7b7e11a8e6192"
+dependencies = [
+ "anstyle",
+ "anstyle-parse",
+ "anstyle-query",
+ "anstyle-wincon",
+ "colorchoice",
+ "is_terminal_polyfill",
+ "utf8parse",
+]
+
+[[package]]
+name = "anstyle"
+version = "1.0.11"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "862ed96ca487e809f1c8e5a8447f6ee2cf102f846893800b20cebdf541fc6bbd"
+
+[[package]]
+name = "anstyle-parse"
+version = "0.2.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4e7644824f0aa2c7b9384579234ef10eb7efb6a0deb83f9630a49594dd9c15c2"
+dependencies = [
+ "utf8parse",
+]
+
+[[package]]
+name = "anstyle-query"
+version = "1.1.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9e231f6134f61b71076a3eab506c379d4f36122f2af15a9ff04415ea4c3339e2"
+dependencies = [
+ "windows-sys 0.60.2",
+]
+
+[[package]]
+name = "anstyle-wincon"
+version = "3.0.10"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3e0633414522a32ffaac8ac6cc8f748e090c5717661fddeea04219e2344f5f2a"
+dependencies = [
+ "anstyle",
+ "once_cell_polyfill",
+ "windows-sys 0.60.2",
+]
+
+[[package]]
name = "autocfg"
-version = "1.1.0"
+version = "1.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa"
+checksum = "c08606f8c3cbf4ce6ec8e28fb0014a2c086708fe954eaa885384a6165172e7e8"
+
+[[package]]
+name = "bstr"
+version = "1.12.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "234113d19d0d7d613b40e86fb654acf958910802bcceab913a4f9e7cda03b1a4"
+dependencies = [
+ "memchr",
+ "serde",
+]
+
+[[package]]
+name = "chrono"
+version = "0.4.42"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "145052bdd345b87320e369255277e3fb5152762ad123a901ef5c262dd38fe8d2"
+dependencies = [
+ "num-traits",
+]
+
+[[package]]
+name = "colorchoice"
+version = "1.0.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b05b61dc5112cbb17e4b6cd61790d9845d13888356391624cbe7e41efeac1e75"
+
+[[package]]
+name = "crossbeam-deque"
+version = "0.8.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9dd111b7b7f7d55b72c0a6ae361660ee5853c9af73f70c3c2ef6858b950e2e51"
+dependencies = [
+ "crossbeam-epoch",
+ "crossbeam-utils",
+]
+
+[[package]]
+name = "crossbeam-epoch"
+version = "0.9.18"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5b82ac4a3c2ca9c3460964f020e1402edd5753411d7737aa39c3714ad1b5420e"
+dependencies = [
+ "crossbeam-utils",
+]
+
+[[package]]
+name = "crossbeam-utils"
+version = "0.8.21"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d0a5c400df2834b80a4c3327b3aad3a4c4cd4de0629063962b03235697506a28"
+
+[[package]]
+name = "either"
+version = "1.15.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "48c757948c5ede0e46177b7add2e67155f70e33c07fea8284df6576da70b3719"
+
+[[package]]
+name = "equivalent"
+version = "1.0.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "877a4ace8713b0bcf2a4e7eec82529c029f1d0619886d18145fea96c3ffe5c0f"
+
+[[package]]
+name = "erased-serde"
+version = "0.4.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e004d887f51fcb9fef17317a2f3525c887d8aa3f4f50fed920816a688284a5b7"
+dependencies = [
+ "serde",
+ "typeid",
+]
+
+[[package]]
+name = "foldhash"
+version = "0.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "77ce24cb58228fbb8aa041425bb1050850ac19177686ea6e0f41a70416f56fdb"
+
+[[package]]
+name = "globset"
+version = "0.4.16"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "54a1028dfc5f5df5da8a56a73e6c153c9a9708ec57232470703592a3f18e49f5"
+dependencies = [
+ "aho-corasick",
+ "bstr",
+ "log",
+ "regex-automata",
+ "regex-syntax",
+]
[[package]]
name = "hashbrown"
-version = "0.12.3"
+version = "0.16.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888"
+checksum = "5419bdc4f6a9207fbeba6d11b604d481addf78ecd10c11ad51e76c2f6482748d"
+
+[[package]]
+name = "ignore"
+version = "0.4.23"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6d89fd380afde86567dfba715db065673989d6253f42b88179abd3eae47bda4b"
+dependencies = [
+ "crossbeam-deque",
+ "globset",
+ "log",
+ "memchr",
+ "regex-automata",
+ "same-file",
+ "walkdir",
+ "winapi-util",
+]
+
+[[package]]
+name = "include_dir"
+version = "0.7.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "923d117408f1e49d914f1a379a309cffe4f18c05cf4e3d12e613a15fc81bd0dd"
+dependencies = [
+ "include_dir_macros",
+]
+
+[[package]]
+name = "include_dir_macros"
+version = "0.7.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7cab85a7ed0bd5f0e76d93846e0147172bed2e2d3f859bcc33a8d9699cad1a75"
+dependencies = [
+ "proc-macro2",
+ "quote",
+]
[[package]]
name = "indexmap"
-version = "1.9.2"
+version = "2.11.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1885e79c1fc4b10f0e172c475f458b7f7b93061064d98c3293e98c5ba0c8b399"
+checksum = "4b0f83760fb341a774ed326568e19f5a863af4a952def8c39f9ab92fd95b88e5"
dependencies = [
- "autocfg",
+ "equivalent",
"hashbrown",
]
[[package]]
+name = "is_terminal_polyfill"
+version = "1.70.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7943c866cc5cd64cbc25b2e01621d07fa8eb2a1a23160ee81ce38704e97b8ecf"
+
+[[package]]
+name = "itertools"
+version = "0.14.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2b192c782037fadd9cfa75548310488aabdbf3d2da73885b31bd0abd03351285"
+dependencies = [
+ "either",
+]
+
+[[package]]
name = "itoa"
-version = "1.0.5"
+version = "1.0.15"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4a5f13b858c8d314ee3e8f639011f7ccefe71f97f96e50151fb991f267928e2c"
+
+[[package]]
+name = "json-write"
+version = "0.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "fad582f4b9e86b6caa621cabeb0963332d92eea04729ab12892c2533951e6440"
+checksum = "4085027557b9a870495ab8b7b411e14576055491d7220c88b11b7e2ba198b297"
+
+[[package]]
+name = "lexarg"
+version = "0.0.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "84373d01a60bb462a2e7caa28796669692d38ba075028a656bd626ac211062d4"
+dependencies = [
+ "lexarg-error",
+ "lexarg-parser",
+]
+
+[[package]]
+name = "lexarg-error"
+version = "0.0.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "178daf11ee95fa4bf31ff1af878dda4c637dc08a37aabdd63ae3683481fd13a9"
+dependencies = [
+ "lexarg-parser",
+]
+
+[[package]]
+name = "lexarg-parser"
+version = "0.0.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "67367be9ace12a2d51c03741b5280ff3029a833c49a4ec1193223a1a8cfbc863"
+
+[[package]]
+name = "libtest-json"
+version = "0.0.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a8f58e30343a6bfe0fb896b0bb92cf482c34ed047c637aac4b561d0b251b638c"
+dependencies = [
+ "json-write",
+]
+
+[[package]]
+name = "libtest-lexarg"
+version = "0.0.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "914e11e515e22a1b2a0aac76c0615e6417d470789c2f0433a0598a5b6aae491f"
+dependencies = [
+ "lexarg",
+ "lexarg-error",
+]
+
+[[package]]
+name = "libtest2-harness"
+version = "0.0.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1ca907aef7f70aceecb01e88588062946dd0af0727d80a68d9a1e928dacf07c9"
+dependencies = [
+ "anstream",
+ "anstyle",
+ "lexarg-error",
+ "lexarg-parser",
+ "libtest-json",
+ "libtest-lexarg",
+]
+
+[[package]]
+name = "libtest2-mimic"
+version = "0.0.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a91f49bdcba4db89919ae87029787d2102f051e5dcf635df29e3bfdf55b2a4ac"
+dependencies = [
+ "libtest-json",
+ "libtest2-harness",
+]
+
+[[package]]
+name = "log"
+version = "0.4.28"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "34080505efa8e45a4b816c349525ebe327ceaa8559756f0356cba97ef3bf7432"
+
+[[package]]
+name = "memchr"
+version = "2.7.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "32a282da65faaf38286cf3be983213fcf1d2e2a58700e808f83f4ea9a4804bc0"
+
+[[package]]
+name = "normalize-line-endings"
+version = "0.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "61807f77802ff30975e01f4f071c8ba10c022052f98b3294119f3e615d13e5be"
+
+[[package]]
+name = "num-traits"
+version = "0.2.19"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "071dfc062690e90b734c0b2273ce72ad0ffa95f0c74596bc250dcfd960262841"
+dependencies = [
+ "autocfg",
+]
+
+[[package]]
+name = "once_cell_polyfill"
+version = "1.70.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a4895175b425cb1f87721b59f0f286c2092bd4af812243672510e1ac53e2e0ad"
[[package]]
name = "proc-macro2"
-version = "1.0.50"
+version = "1.0.101"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6ef7d57beacfaf2d8aee5937dab7b7f28de3cb8b1828479bb5de2a7106f2bae2"
+checksum = "89ae43fd86e4158d6db51ad8e2b80f313af9cc74f5c0e03ccb87de09998732de"
dependencies = [
"unicode-ident",
]
[[package]]
name = "quote"
-version = "1.0.23"
+version = "1.0.40"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8856d8364d252a14d474036ea1358d63c9e6965c8e5c1885c18f73d70bff9c7b"
+checksum = "1885c039570dc00dcb4ff087a89e185fd56bae234ddc7f056a945bf36467248d"
dependencies = [
"proc-macro2",
]
[[package]]
+name = "regex-automata"
+version = "0.4.10"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6b9458fa0bfeeac22b5ca447c63aaf45f28439a709ccd244698632f9aa6394d6"
+dependencies = [
+ "aho-corasick",
+ "memchr",
+ "regex-syntax",
+]
+
+[[package]]
+name = "regex-syntax"
+version = "0.8.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "caf4aa5b0f434c91fe5c7f1ecb6a5ece2130b02ad2a590589dda5146df959001"
+
+[[package]]
name = "ryu"
-version = "1.0.12"
+version = "1.0.20"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7b4b9743ed687d4b4bcedf9ff5eaa7398495ae14e61cba0a295704edbc7decde"
+checksum = "28d3b2b1366ec20994f1fd18c3c594f05c5dd4bc44d8bb0c1c632c8d6829481f"
+
+[[package]]
+name = "same-file"
+version = "1.0.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502"
+dependencies = [
+ "winapi-util",
+]
[[package]]
name = "serde"
-version = "1.0.152"
+version = "1.0.225"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bb7d1f0d3021d347a83e556fc4683dea2ea09d87bccdf88ff5c12545d89d5efb"
+checksum = "fd6c24dee235d0da097043389623fb913daddf92c76e9f5a1db88607a0bcbd1d"
+dependencies = [
+ "serde_core",
+ "serde_derive",
+]
+
+[[package]]
+name = "serde-untagged"
+version = "0.1.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f9faf48a4a2d2693be24c6289dbe26552776eb7737074e6722891fadbe6c5058"
+dependencies = [
+ "erased-serde",
+ "serde",
+ "serde_core",
+ "typeid",
+]
+
+[[package]]
+name = "serde_core"
+version = "1.0.225"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "659356f9a0cb1e529b24c01e43ad2bdf520ec4ceaf83047b83ddcc2251f96383"
+dependencies = [
+ "serde_derive",
+]
[[package]]
name = "serde_derive"
-version = "1.0.152"
+version = "1.0.225"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "af487d118eecd09402d70a5d72551860e788df87b464af30e5ea6a38c75c541e"
+checksum = "0ea936adf78b1f766949a4977b91d2f5595825bd6ec079aa9543ad2685fc4516"
dependencies = [
"proc-macro2",
"quote",
@@ -73,20 +431,59 @@ dependencies = [
[[package]]
name = "serde_json"
-version = "1.0.91"
+version = "1.0.145"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "877c235533714907a8c2464236f5c4b2a17262ef1bd71f38f35ea592c8da6883"
+checksum = "402a6f66d8c709116cf22f558eab210f5a50187f702eb4d7e5ef38d9a7f1c79c"
dependencies = [
"itoa",
+ "memchr",
"ryu",
"serde",
+ "serde_core",
+]
+
+[[package]]
+name = "serde_spanned"
+version = "1.0.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e24345aa0fe688594e73770a5f6d1b216508b4f93484c0026d521acd30134392"
+dependencies = [
+ "serde_core",
+]
+
+[[package]]
+name = "similar"
+version = "2.7.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bbbb5d9659141646ae647b42fe094daf6c6192d1620870b449d9557f748b2daa"
+
+[[package]]
+name = "snapbox"
+version = "0.6.21"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "96dcfc4581e3355d70ac2ee14cfdf81dce3d85c85f1ed9e2c1d3013f53b3436b"
+dependencies = [
+ "anstream",
+ "anstyle",
+ "normalize-line-endings",
+ "similar",
+ "snapbox-macros",
+]
+
+[[package]]
+name = "snapbox-macros"
+version = "0.3.10"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "16569f53ca23a41bb6f62e0a5084aa1661f4814a67fa33696a79073e03a664af"
+dependencies = [
+ "anstream",
]
[[package]]
name = "syn"
-version = "1.0.107"
+version = "2.0.106"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1f4064b5b16e03ae50984a5a8ed5d4f8803e6bc1fd170a3cda91a1be4b18e3f5"
+checksum = "ede7c438028d4436d71104916910f5bb611972c5cfd7f89b8300a8186e6fada6"
dependencies = [
"proc-macro2",
"quote",
@@ -95,16 +492,222 @@ dependencies = [
[[package]]
name = "toml"
-version = "0.5.11"
+version = "0.9.8"
dependencies = [
+ "anstream",
+ "anstyle",
+ "foldhash",
"indexmap",
+ "itertools",
"serde",
- "serde_derive",
+ "serde-untagged",
+ "serde_core",
"serde_json",
+ "serde_spanned",
+ "snapbox",
+ "toml-test-data",
+ "toml-test-harness",
+ "toml_datetime",
+ "toml_parser",
+ "toml_writer",
+ "walkdir",
+ "winnow",
]
[[package]]
+name = "toml-test"
+version = "2.0.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "36ecba8b17b5b0e3fe952335b981ce2fb477b3bff362d01d7db18d52b1b6733b"
+dependencies = [
+ "chrono",
+ "ryu",
+ "serde",
+ "serde_json",
+]
+
+[[package]]
+name = "toml-test-data"
+version = "2.3.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ea600d3ed690c00626705b301d30e25787300d80d9dc0b582097f4d6308599c3"
+dependencies = [
+ "include_dir",
+]
+
+[[package]]
+name = "toml-test-harness"
+version = "1.3.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "99a0b9e5066013f8b5ac9c5c9402605b222636e1ab115074977bf27e745e4252"
+dependencies = [
+ "ignore",
+ "libtest2-mimic",
+ "snapbox",
+ "toml-test",
+ "toml-test-data",
+]
+
+[[package]]
+name = "toml_datetime"
+version = "0.7.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f2cdb639ebbc97961c51720f858597f7f24c4fc295327923af55b74c3c724533"
+dependencies = [
+ "serde_core",
+]
+
+[[package]]
+name = "toml_parser"
+version = "1.0.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c0cbe268d35bdb4bb5a56a2de88d0ad0eb70af5384a99d648cd4b3d04039800e"
+dependencies = [
+ "anstream",
+ "anstyle",
+ "winnow",
+]
+
+[[package]]
+name = "toml_writer"
+version = "1.0.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "df8b2b54733674ad286d16267dcfc7a71ed5c776e4ac7aa3c3e2561f7c637bf2"
+
+[[package]]
+name = "typeid"
+version = "1.0.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bc7d623258602320d5c55d1bc22793b57daff0ec7efc270ea7d55ce1d5f5471c"
+
+[[package]]
name = "unicode-ident"
-version = "1.0.6"
+version = "1.0.19"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f63a545481291138910575129486daeaf8ac54aee4387fe7906919f7830c7d9d"
+
+[[package]]
+name = "utf8parse"
+version = "0.2.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821"
+
+[[package]]
+name = "walkdir"
+version = "2.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "29790946404f91d9c5d06f9874efddea1dc06c5efe94541a7d6863108e3a5e4b"
+dependencies = [
+ "same-file",
+ "winapi-util",
+]
+
+[[package]]
+name = "winapi-util"
+version = "0.1.11"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c2a7b1c03c876122aa43f3020e6c3c3ee5c05081c9a00739faf7503aeba10d22"
+dependencies = [
+ "windows-sys 0.61.0",
+]
+
+[[package]]
+name = "windows-link"
+version = "0.1.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5e6ad25900d524eaabdbbb96d20b4311e1e7ae1699af4fb28c17ae66c80d798a"
+
+[[package]]
+name = "windows-link"
+version = "0.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "45e46c0661abb7180e7b9c281db115305d49ca1709ab8242adf09666d2173c65"
+
+[[package]]
+name = "windows-sys"
+version = "0.60.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f2f500e4d28234f72040990ec9d39e3a6b950f9f22d3dba18416c35882612bcb"
+dependencies = [
+ "windows-targets",
+]
+
+[[package]]
+name = "windows-sys"
+version = "0.61.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e201184e40b2ede64bc2ea34968b28e33622acdbbf37104f0e4a33f7abe657aa"
+dependencies = [
+ "windows-link 0.2.0",
+]
+
+[[package]]
+name = "windows-targets"
+version = "0.53.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d5fe6031c4041849d7c496a8ded650796e7b6ecc19df1a431c1a363342e5dc91"
+dependencies = [
+ "windows-link 0.1.3",
+ "windows_aarch64_gnullvm",
+ "windows_aarch64_msvc",
+ "windows_i686_gnu",
+ "windows_i686_gnullvm",
+ "windows_i686_msvc",
+ "windows_x86_64_gnu",
+ "windows_x86_64_gnullvm",
+ "windows_x86_64_msvc",
+]
+
+[[package]]
+name = "windows_aarch64_gnullvm"
+version = "0.53.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "86b8d5f90ddd19cb4a147a5fa63ca848db3df085e25fee3cc10b39b6eebae764"
+
+[[package]]
+name = "windows_aarch64_msvc"
+version = "0.53.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c7651a1f62a11b8cbd5e0d42526e55f2c99886c77e007179efff86c2b137e66c"
+
+[[package]]
+name = "windows_i686_gnu"
+version = "0.53.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c1dc67659d35f387f5f6c479dc4e28f1d4bb90ddd1a5d3da2e5d97b42d6272c3"
+
+[[package]]
+name = "windows_i686_gnullvm"
+version = "0.53.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9ce6ccbdedbf6d6354471319e781c0dfef054c81fbc7cf83f338a4296c0cae11"
+
+[[package]]
+name = "windows_i686_msvc"
+version = "0.53.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "581fee95406bb13382d2f65cd4a908ca7b1e4c2f1917f143ba16efe98a589b5d"
+
+[[package]]
+name = "windows_x86_64_gnu"
+version = "0.53.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2e55b5ac9ea33f2fc1716d1742db15574fd6fc8dadc51caab1c16a3d3b4190ba"
+
+[[package]]
+name = "windows_x86_64_gnullvm"
+version = "0.53.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0a6e035dd0599267ce1ee132e51c27dd29437f63325753051e71dd9e42406c57"
+
+[[package]]
+name = "windows_x86_64_msvc"
+version = "0.53.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "271414315aff87387382ec3d271b52d7ae78726f5d44ac98b4f4030c91880486"
+
+[[package]]
+name = "winnow"
+version = "0.7.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "84a22b9f218b40614adcb3f4ff08b703773ad44fa9423e4e0d346d5db86e4ebc"
+checksum = "21a0236b59786fed61e2a80582dd500fe61f18b5dca67a4a067d0bc9039339cf"
diff --git a/third_party/rust/toml/Cargo.toml b/third_party/rust/toml/Cargo.toml
@@ -10,31 +10,35 @@
# See Cargo.toml.orig for the original contents.
[package]
-edition = "2018"
-rust-version = "1.48.0"
+edition = "2021"
+rust-version = "1.76"
name = "toml"
-version = "0.5.11"
-authors = ["Alex Crichton <alex@alexcrichton.com>"]
+version = "0.9.8"
+build = false
include = [
+ "build.rs",
"src/**/*",
"Cargo.toml",
+ "Cargo.lock",
"LICENSE*",
"README.md",
"examples/**/*",
- "benches/**/*",
- "tests/**/*",
]
+autolib = false
+autobins = false
+autoexamples = false
+autotests = false
+autobenches = false
description = """
A native Rust encoder and decoder of TOML-formatted files and streams. Provides
implementations of the standard Serialize/Deserialize traits for TOML data to
facilitate deserializing and serializing Rust structures.
"""
-homepage = "https://github.com/toml-rs/toml"
-documentation = "https://docs.rs/toml"
readme = "README.md"
keywords = [
"encoding",
"toml",
+ "no_std",
]
categories = [
"encoding",
@@ -42,9 +46,13 @@ categories = [
"parsing",
"config",
]
-license = "MIT/Apache-2.0"
+license = "MIT OR Apache-2.0"
repository = "https://github.com/toml-rs/toml"
+[package.metadata.docs.rs]
+all-features = true
+rustdoc-args = ["--generate-link-to-definition"]
+
[[package.metadata.release.pre-release-replacements]]
file = "CHANGELOG.md"
search = "Unreleased"
@@ -77,22 +85,230 @@ file = "CHANGELOG.md"
search = "<!-- next-url -->"
replace = """
<!-- next-url -->
-[Unreleased]: https://github.com/toml-rs/toml_edit/compare/{{tag_name}}...HEAD"""
+[Unreleased]: https://github.com/toml-rs/toml/compare/{{tag_name}}...HEAD"""
exactly = 1
+[features]
+debug = [
+ "std",
+ "toml_parser?/debug",
+ "dep:anstream",
+ "dep:anstyle",
+]
+default = [
+ "std",
+ "serde",
+ "parse",
+ "display",
+]
+display = ["dep:toml_writer"]
+fast_hash = [
+ "preserve_order",
+ "dep:foldhash",
+]
+parse = [
+ "dep:toml_parser",
+ "dep:winnow",
+]
+preserve_order = [
+ "dep:indexmap",
+ "std",
+]
+serde = [
+ "dep:serde_core",
+ "toml_datetime/serde",
+ "serde_spanned/serde",
+]
+std = [
+ "indexmap?/std",
+ "serde_core?/std",
+ "toml_parser?/std",
+ "toml_writer?/std",
+ "toml_datetime/std",
+ "serde_spanned/std",
+]
+unbounded = []
+
+[lib]
+name = "toml"
+path = "src/lib.rs"
+
+[[example]]
+name = "decode"
+path = "examples/decode.rs"
+required-features = [
+ "parse",
+ "display",
+ "serde",
+]
+
+[[example]]
+name = "enum_external"
+path = "examples/enum_external.rs"
+required-features = [
+ "parse",
+ "display",
+ "serde",
+]
+
+[[example]]
+name = "toml2json"
+path = "examples/toml2json.rs"
+required-features = [
+ "parse",
+ "display",
+ "serde",
+]
+
+[dependencies.anstream]
+version = "0.6.20"
+optional = true
+
+[dependencies.anstyle]
+version = "1.0.11"
+optional = true
+
+[dependencies.foldhash]
+version = "0.2.0"
+optional = true
+default-features = false
+
[dependencies.indexmap]
-version = "1.0"
+version = "2.11.4"
optional = true
+default-features = false
+
+[dependencies.serde_core]
+version = "1.0.225"
+features = ["alloc"]
+optional = true
+default-features = false
+
+[dependencies.serde_spanned]
+version = "1.0.3"
+features = ["alloc"]
+default-features = false
-[dependencies.serde]
-version = "1.0.97"
+[dependencies.toml_datetime]
+version = "0.7.3"
+features = ["alloc"]
+default-features = false
-[dev-dependencies.serde_derive]
-version = "1.0"
+[dependencies.toml_parser]
+version = "1.0.4"
+features = ["alloc"]
+optional = true
+default-features = false
+
+[dependencies.toml_writer]
+version = "1.0.4"
+features = ["alloc"]
+optional = true
+default-features = false
+
+[dependencies.winnow]
+version = "0.7.13"
+optional = true
+default-features = false
+
+[dev-dependencies.itertools]
+version = "0.14.0"
+
+[dev-dependencies.serde]
+version = "1.0.225"
+features = ["derive"]
+
+[dev-dependencies.serde-untagged]
+version = "0.1.9"
[dev-dependencies.serde_json]
-version = "1.0"
+version = "1.0.145"
-[features]
-default = []
-preserve_order = ["indexmap"]
+[dev-dependencies.snapbox]
+version = "0.6.21"
+
+[dev-dependencies.toml-test-data]
+version = "2.3.3"
+
+[dev-dependencies.toml-test-harness]
+version = "1.3.3"
+features = ["snapshot"]
+
+[dev-dependencies.walkdir]
+version = "2.5.0"
+
+[lints.clippy]
+bool_assert_comparison = "allow"
+branches_sharing_code = "allow"
+checked_conversions = "warn"
+collapsible_else_if = "allow"
+create_dir = "warn"
+dbg_macro = "warn"
+debug_assert_with_mut_call = "warn"
+doc_markdown = "warn"
+empty_enum = "warn"
+enum_glob_use = "warn"
+expl_impl_clone_on_copy = "warn"
+explicit_deref_methods = "warn"
+explicit_into_iter_loop = "warn"
+fallible_impl_from = "warn"
+filter_map_next = "warn"
+flat_map_option = "warn"
+float_cmp_const = "warn"
+fn_params_excessive_bools = "warn"
+from_iter_instead_of_collect = "warn"
+get_first = "allow"
+if_same_then_else = "allow"
+implicit_clone = "warn"
+imprecise_flops = "warn"
+inconsistent_struct_constructor = "warn"
+inefficient_to_string = "warn"
+infinite_loop = "warn"
+invalid_upcast_comparisons = "warn"
+large_digit_groups = "warn"
+large_stack_arrays = "warn"
+large_types_passed_by_value = "warn"
+let_and_return = "allow"
+linkedlist = "warn"
+lossy_float_literal = "warn"
+macro_use_imports = "warn"
+mem_forget = "warn"
+mutex_integer = "warn"
+needless_bool = "allow"
+needless_continue = "allow"
+needless_for_each = "warn"
+negative_feature_names = "warn"
+path_buf_push_overwrite = "warn"
+ptr_as_ptr = "warn"
+rc_mutex = "warn"
+redundant_feature_names = "warn"
+ref_option_ref = "warn"
+rest_pat_in_fully_bound_structs = "warn"
+result_large_err = "allow"
+same_functions_in_if_condition = "warn"
+self_named_module_files = "warn"
+semicolon_if_nothing_returned = "warn"
+str_to_string = "warn"
+string_add = "warn"
+string_add_assign = "warn"
+string_lit_as_bytes = "warn"
+string_to_string = "warn"
+todo = "warn"
+trait_duplication_in_bounds = "warn"
+uninlined_format_args = "warn"
+use_self = "warn"
+verbose_file_reads = "warn"
+wildcard_imports = "warn"
+zero_sized_map_values = "warn"
+
+[lints.rust]
+unnameable_types = "allow"
+unreachable_pub = "warn"
+unsafe_op_in_unsafe_fn = "warn"
+unused_lifetimes = "warn"
+unused_macro_rules = "warn"
+unused_qualifications = "warn"
+
+[lints.rust.rust_2018_idioms]
+level = "warn"
+priority = -1
diff --git a/third_party/rust/toml/LICENSE-APACHE b/third_party/rust/toml/LICENSE-APACHE
@@ -1,201 +1,202 @@
- Apache License
- Version 2.0, January 2004
- http://www.apache.org/licenses/
-
-TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
-1. Definitions.
-
- "License" shall mean the terms and conditions for use, reproduction,
- and distribution as defined by Sections 1 through 9 of this document.
-
- "Licensor" shall mean the copyright owner or entity authorized by
- the copyright owner that is granting the License.
-
- "Legal Entity" shall mean the union of the acting entity and all
- other entities that control, are controlled by, or are under common
- control with that entity. For the purposes of this definition,
- "control" means (i) the power, direct or indirect, to cause the
- direction or management of such entity, whether by contract or
- otherwise, or (ii) ownership of fifty percent (50%) or more of the
- outstanding shares, or (iii) beneficial ownership of such entity.
-
- "You" (or "Your") shall mean an individual or Legal Entity
- exercising permissions granted by this License.
-
- "Source" form shall mean the preferred form for making modifications,
- including but not limited to software source code, documentation
- source, and configuration files.
-
- "Object" form shall mean any form resulting from mechanical
- transformation or translation of a Source form, including but
- not limited to compiled object code, generated documentation,
- and conversions to other media types.
-
- "Work" shall mean the work of authorship, whether in Source or
- Object form, made available under the License, as indicated by a
- copyright notice that is included in or attached to the work
- (an example is provided in the Appendix below).
-
- "Derivative Works" shall mean any work, whether in Source or Object
- form, that is based on (or derived from) the Work and for which the
- editorial revisions, annotations, elaborations, or other modifications
- represent, as a whole, an original work of authorship. For the purposes
- of this License, Derivative Works shall not include works that remain
- separable from, or merely link (or bind by name) to the interfaces of,
- the Work and Derivative Works thereof.
-
- "Contribution" shall mean any work of authorship, including
- the original version of the Work and any modifications or additions
- to that Work or Derivative Works thereof, that is intentionally
- submitted to Licensor for inclusion in the Work by the copyright owner
- or by an individual or Legal Entity authorized to submit on behalf of
- the copyright owner. For the purposes of this definition, "submitted"
- means any form of electronic, verbal, or written communication sent
- to the Licensor or its representatives, including but not limited to
- communication on electronic mailing lists, source code control systems,
- and issue tracking systems that are managed by, or on behalf of, the
- Licensor for the purpose of discussing and improving the Work, but
- excluding communication that is conspicuously marked or otherwise
- designated in writing by the copyright owner as "Not a Contribution."
-
- "Contributor" shall mean Licensor and any individual or Legal Entity
- on behalf of whom a Contribution has been received by Licensor and
- subsequently incorporated within the Work.
-
-2. Grant of Copyright License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- copyright license to reproduce, prepare Derivative Works of,
- publicly display, publicly perform, sublicense, and distribute the
- Work and such Derivative Works in Source or Object form.
-
-3. Grant of Patent License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- (except as stated in this section) patent license to make, have made,
- use, offer to sell, sell, import, and otherwise transfer the Work,
- where such license applies only to those patent claims licensable
- by such Contributor that are necessarily infringed by their
- Contribution(s) alone or by combination of their Contribution(s)
- with the Work to which such Contribution(s) was submitted. If You
- institute patent litigation against any entity (including a
- cross-claim or counterclaim in a lawsuit) alleging that the Work
- or a Contribution incorporated within the Work constitutes direct
- or contributory patent infringement, then any patent licenses
- granted to You under this License for that Work shall terminate
- as of the date such litigation is filed.
-
-4. Redistribution. You may reproduce and distribute copies of the
- Work or Derivative Works thereof in any medium, with or without
- modifications, and in Source or Object form, provided that You
- meet the following conditions:
-
- (a) You must give any other recipients of the Work or
- Derivative Works a copy of this License; and
-
- (b) You must cause any modified files to carry prominent notices
- stating that You changed the files; and
-
- (c) You must retain, in the Source form of any Derivative Works
- that You distribute, all copyright, patent, trademark, and
- attribution notices from the Source form of the Work,
- excluding those notices that do not pertain to any part of
- the Derivative Works; and
-
- (d) If the Work includes a "NOTICE" text file as part of its
- distribution, then any Derivative Works that You distribute must
- include a readable copy of the attribution notices contained
- within such NOTICE file, excluding those notices that do not
- pertain to any part of the Derivative Works, in at least one
- of the following places: within a NOTICE text file distributed
- as part of the Derivative Works; within the Source form or
- documentation, if provided along with the Derivative Works; or,
- within a display generated by the Derivative Works, if and
- wherever such third-party notices normally appear. The contents
- of the NOTICE file are for informational purposes only and
- do not modify the License. You may add Your own attribution
- notices within Derivative Works that You distribute, alongside
- or as an addendum to the NOTICE text from the Work, provided
- that such additional attribution notices cannot be construed
- as modifying the License.
-
- You may add Your own copyright statement to Your modifications and
- may provide additional or different license terms and conditions
- for use, reproduction, or distribution of Your modifications, or
- for any such Derivative Works as a whole, provided Your use,
- reproduction, and distribution of the Work otherwise complies with
- the conditions stated in this License.
-
-5. Submission of Contributions. Unless You explicitly state otherwise,
- any Contribution intentionally submitted for inclusion in the Work
- by You to the Licensor shall be under the terms and conditions of
- this License, without any additional terms or conditions.
- Notwithstanding the above, nothing herein shall supersede or modify
- the terms of any separate license agreement you may have executed
- with Licensor regarding such Contributions.
-
-6. Trademarks. This License does not grant permission to use the trade
- names, trademarks, service marks, or product names of the Licensor,
- except as required for reasonable and customary use in describing the
- origin of the Work and reproducing the content of the NOTICE file.
-
-7. Disclaimer of Warranty. Unless required by applicable law or
- agreed to in writing, Licensor provides the Work (and each
- Contributor provides its Contributions) on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
- implied, including, without limitation, any warranties or conditions
- of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
- PARTICULAR PURPOSE. You are solely responsible for determining the
- appropriateness of using or redistributing the Work and assume any
- risks associated with Your exercise of permissions under this License.
-
-8. Limitation of Liability. In no event and under no legal theory,
- whether in tort (including negligence), contract, or otherwise,
- unless required by applicable law (such as deliberate and grossly
- negligent acts) or agreed to in writing, shall any Contributor be
- liable to You for damages, including any direct, indirect, special,
- incidental, or consequential damages of any character arising as a
- result of this License or out of the use or inability to use the
- Work (including but not limited to damages for loss of goodwill,
- work stoppage, computer failure or malfunction, or any and all
- other commercial damages or losses), even if such Contributor
- has been advised of the possibility of such damages.
-
-9. Accepting Warranty or Additional Liability. While redistributing
- the Work or Derivative Works thereof, You may choose to offer,
- and charge a fee for, acceptance of support, warranty, indemnity,
- or other liability obligations and/or rights consistent with this
- License. However, in accepting such obligations, You may act only
- on Your own behalf and on Your sole responsibility, not on behalf
- of any other Contributor, and only if You agree to indemnify,
- defend, and hold each Contributor harmless for any liability
- incurred by, or claims asserted against, such Contributor by reason
- of your accepting any such warranty or additional liability.
-
-END OF TERMS AND CONDITIONS
-
-APPENDIX: How to apply the Apache License to your work.
-
- To apply the Apache License to your work, attach the following
- boilerplate notice, with the fields enclosed by brackets "[]"
- replaced with your own identifying information. (Don't include
- the brackets!) The text should be enclosed in the appropriate
- comment syntax for the file format. We also recommend that a
- file or class name and description of purpose be included on the
- same "printed page" as the copyright notice for easier
- identification within third-party archives.
-
-Copyright [yyyy] [name of copyright owner]
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "{}"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright {yyyy} {name of copyright owner}
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+
diff --git a/third_party/rust/toml/LICENSE-MIT b/third_party/rust/toml/LICENSE-MIT
@@ -1,25 +1,19 @@
-Copyright (c) 2014 Alex Crichton
+Copyright (c) Individual contributors
-Permission is hereby granted, free of charge, to any
-person obtaining a copy of this software and associated
-documentation files (the "Software"), to deal in the
-Software without restriction, including without
-limitation the rights to use, copy, modify, merge,
-publish, distribute, sublicense, and/or sell copies of
-the Software, and to permit persons to whom the Software
-is furnished to do so, subject to the following
-conditions:
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
-The above copyright notice and this permission notice
-shall be included in all copies or substantial portions
-of the Software.
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
-ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
-TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
-PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
-SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
-CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
-OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
-IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
-DEALINGS IN THE SOFTWARE.
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/third_party/rust/toml/README.md b/third_party/rust/toml/README.md
@@ -5,25 +5,24 @@
A [serde]-compatible [TOML][toml] decoder and encoder for Rust.
-For format-preserving edits or finer control over output, see [toml_edit]
+For format-preserving editing or finer control over output, see [toml_edit]
[serde]: https://serde.rs/
[toml]: https://github.com/toml-lang/toml
[toml_edit]: https://docs.rs/toml_edit
-# License
+## License
-This project is licensed under either of
+Licensed under either of
- * Apache License, Version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or
- http://www.apache.org/licenses/LICENSE-2.0)
- * MIT license ([LICENSE-MIT](LICENSE-MIT) or
- http://opensource.org/licenses/MIT)
+* Apache License, Version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or <https://www.apache.org/licenses/LICENSE-2.0>)
+* MIT license ([LICENSE-MIT](LICENSE-MIT) or <https://opensource.org/license/mit>)
at your option.
### Contribution
-Unless you explicitly state otherwise, any contribution intentionally submitted
-for inclusion in toml-rs by you, as defined in the Apache-2.0 license, shall be
-dual licensed as above, without any additional terms or conditions.
+Unless you explicitly state otherwise, any contribution intentionally
+submitted for inclusion in the work by you, as defined in the Apache-2.0
+license, shall be dual-licensed as above, without any additional terms or
+conditions.
diff --git a/third_party/rust/toml/examples/decode.rs b/third_party/rust/toml/examples/decode.rs
@@ -4,7 +4,7 @@
#![deny(warnings)]
#![allow(dead_code)]
-use serde_derive::Deserialize;
+use serde::Deserialize;
/// This is what we're going to decode into. Each field is optional, meaning
/// that it doesn't have to be present in TOML.
@@ -50,5 +50,5 @@ fn main() {
"#;
let decoded: Config = toml::from_str(toml_str).unwrap();
- println!("{:#?}", decoded);
+ println!("{decoded:#?}");
}
diff --git a/third_party/rust/toml/examples/enum_external.rs b/third_party/rust/toml/examples/enum_external.rs
@@ -4,7 +4,7 @@
#![deny(warnings)]
#![allow(dead_code)]
-use serde_derive::Deserialize;
+use serde::Deserialize;
/// This is what we're going to decode into.
#[derive(Debug, Deserialize)]
@@ -41,5 +41,5 @@ fn main() {
]"#;
let decoded: Config = toml::from_str(toml_str).unwrap();
- println!("{:#?}", decoded);
+ println!("{decoded:#?}");
}
diff --git a/third_party/rust/toml/examples/toml2json.rs b/third_party/rust/toml/examples/toml2json.rs
@@ -1,7 +1,4 @@
-#![deny(warnings)]
-
use std::env;
-use std::fs::File;
use std::io;
use std::io::prelude::*;
@@ -10,22 +7,21 @@ use toml::Value as Toml;
fn main() {
let mut args = env::args();
- let mut input = String::new();
- if args.len() > 1 {
+ let input = if args.len() > 1 {
let name = args.nth(1).unwrap();
- File::open(&name)
- .and_then(|mut f| f.read_to_string(&mut input))
- .unwrap();
+ std::fs::read_to_string(name).unwrap()
} else {
+ let mut input = String::new();
io::stdin().read_to_string(&mut input).unwrap();
- }
+ input
+ };
match input.parse() {
Ok(toml) => {
let json = convert(toml);
println!("{}", serde_json::to_string_pretty(&json).unwrap());
}
- Err(error) => println!("failed to parse TOML: {}", error),
+ Err(error) => println!("failed to parse TOML: {error}"),
}
}
diff --git a/third_party/rust/toml/src/datetime.rs b/third_party/rust/toml/src/datetime.rs
@@ -1,544 +0,0 @@
-use std::error;
-use std::fmt;
-use std::str::{self, FromStr};
-
-use serde::{de, ser};
-
-/// A parsed TOML datetime value
-///
-/// This structure is intended to represent the datetime primitive type that can
-/// be encoded into TOML documents. This type is a parsed version that contains
-/// all metadata internally.
-///
-/// Currently this type is intentionally conservative and only supports
-/// `to_string` as an accessor. Over time though it's intended that it'll grow
-/// more support!
-///
-/// Note that if you're using `Deserialize` to deserialize a TOML document, you
-/// can use this as a placeholder for where you're expecting a datetime to be
-/// specified.
-///
-/// Also note though that while this type implements `Serialize` and
-/// `Deserialize` it's only recommended to use this type with the TOML format,
-/// otherwise encoded in other formats it may look a little odd.
-///
-/// Depending on how the option values are used, this struct will correspond
-/// with one of the following four datetimes from the [TOML v1.0.0 spec]:
-///
-/// | `date` | `time` | `offset` | TOML type |
-/// | --------- | --------- | --------- | ------------------ |
-/// | `Some(_)` | `Some(_)` | `Some(_)` | [Offset Date-Time] |
-/// | `Some(_)` | `Some(_)` | `None` | [Local Date-Time] |
-/// | `Some(_)` | `None` | `None` | [Local Date] |
-/// | `None` | `Some(_)` | `None` | [Local Time] |
-///
-/// **1. Offset Date-Time**: If all the optional values are used, `Datetime`
-/// corresponds to an [Offset Date-Time]. From the TOML v1.0.0 spec:
-///
-/// > To unambiguously represent a specific instant in time, you may use an
-/// > RFC 3339 formatted date-time with offset.
-/// >
-/// > ```toml
-/// > odt1 = 1979-05-27T07:32:00Z
-/// > odt2 = 1979-05-27T00:32:00-07:00
-/// > odt3 = 1979-05-27T00:32:00.999999-07:00
-/// > ```
-/// >
-/// > For the sake of readability, you may replace the T delimiter between date
-/// > and time with a space character (as permitted by RFC 3339 section 5.6).
-/// >
-/// > ```toml
-/// > odt4 = 1979-05-27 07:32:00Z
-/// > ```
-///
-/// **2. Local Date-Time**: If `date` and `time` are given but `offset` is
-/// `None`, `Datetime` corresponds to a [Local Date-Time]. From the spec:
-///
-/// > If you omit the offset from an RFC 3339 formatted date-time, it will
-/// > represent the given date-time without any relation to an offset or
-/// > timezone. It cannot be converted to an instant in time without additional
-/// > information. Conversion to an instant, if required, is implementation-
-/// > specific.
-/// >
-/// > ```toml
-/// > ldt1 = 1979-05-27T07:32:00
-/// > ldt2 = 1979-05-27T00:32:00.999999
-/// > ```
-///
-/// **3. Local Date**: If only `date` is given, `Datetime` corresponds to a
-/// [Local Date]; see the docs for [`Date`].
-///
-/// **4. Local Time**: If only `time` is given, `Datetime` corresponds to a
-/// [Local Time]; see the docs for [`Time`].
-///
-/// [TOML v1.0.0 spec]: https://toml.io/en/v1.0.0
-/// [Offset Date-Time]: https://toml.io/en/v1.0.0#offset-date-time
-/// [Local Date-Time]: https://toml.io/en/v1.0.0#local-date-time
-/// [Local Date]: https://toml.io/en/v1.0.0#local-date
-/// [Local Time]: https://toml.io/en/v1.0.0#local-time
-#[derive(PartialEq, Eq, Clone)]
-pub struct Datetime {
- /// Optional date.
- /// Required for: *Offset Date-Time*, *Local Date-Time*, *Local Date*.
- pub date: Option<Date>,
-
- /// Optional time.
- /// Required for: *Offset Date-Time*, *Local Date-Time*, *Local Time*.
- pub time: Option<Time>,
-
- /// Optional offset.
- /// Required for: *Offset Date-Time*.
- pub offset: Option<Offset>,
-}
-
-/// Error returned from parsing a `Datetime` in the `FromStr` implementation.
-#[derive(Debug, Clone)]
-pub struct DatetimeParseError {
- _private: (),
-}
-
-// Currently serde itself doesn't have a datetime type, so we map our `Datetime`
-// to a special valid in the serde data model. Namely one with these special
-// fields/struct names.
-//
-// In general the TOML encoder/decoder will catch this and not literally emit
-// these strings but rather emit datetimes as they're intended.
-pub const FIELD: &str = "$__toml_private_datetime";
-pub const NAME: &str = "$__toml_private_Datetime";
-
-/// A parsed TOML date value
-///
-/// May be part of a [`Datetime`]. Alone, `Date` corresponds to a [Local Date].
-/// From the TOML v1.0.0 spec:
-///
-/// > If you include only the date portion of an RFC 3339 formatted date-time,
-/// > it will represent that entire day without any relation to an offset or
-/// > timezone.
-/// >
-/// > ```toml
-/// > ld1 = 1979-05-27
-/// > ```
-///
-/// [Local Date]: https://toml.io/en/v1.0.0#local-date
-#[derive(PartialEq, Eq, Clone)]
-pub struct Date {
- /// Year: four digits
- pub year: u16,
- /// Month: 1 to 12
- pub month: u8,
- /// Day: 1 to {28, 29, 30, 31} (based on month/year)
- pub day: u8,
-}
-
-/// A parsed TOML time value
-///
-/// May be part of a [`Datetime`]. Alone, `Time` corresponds to a [Local Time].
-/// From the TOML v1.0.0 spec:
-///
-/// > If you include only the time portion of an RFC 3339 formatted date-time,
-/// > it will represent that time of day without any relation to a specific
-/// > day or any offset or timezone.
-/// >
-/// > ```toml
-/// > lt1 = 07:32:00
-/// > lt2 = 00:32:00.999999
-/// > ```
-/// >
-/// > Millisecond precision is required. Further precision of fractional
-/// > seconds is implementation-specific. If the value contains greater
-/// > precision than the implementation can support, the additional precision
-/// > must be truncated, not rounded.
-///
-/// [Local Time]: https://toml.io/en/v1.0.0#local-time
-#[derive(PartialEq, Eq, Clone)]
-pub struct Time {
- /// Hour: 0 to 23
- pub hour: u8,
- /// Minute: 0 to 59
- pub minute: u8,
- /// Second: 0 to {58, 59, 60} (based on leap second rules)
- pub second: u8,
- /// Nanosecond: 0 to 999_999_999
- pub nanosecond: u32,
-}
-
-/// A parsed TOML time offset
-///
-#[derive(PartialEq, Eq, Clone)]
-pub enum Offset {
- /// > A suffix which, when applied to a time, denotes a UTC offset of 00:00;
- /// > often spoken "Zulu" from the ICAO phonetic alphabet representation of
- /// > the letter "Z". --- [RFC 3339 section 2]
- ///
- /// [RFC 3339 section 2]: https://datatracker.ietf.org/doc/html/rfc3339#section-2
- Z,
-
- /// Offset between local time and UTC
- Custom {
- /// Hours: -12 to +12
- hours: i8,
-
- /// Minutes: 0 to 59
- minutes: u8,
- },
-}
-
-impl fmt::Debug for Datetime {
- fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- fmt::Display::fmt(self, f)
- }
-}
-
-impl fmt::Display for Datetime {
- fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- if let Some(ref date) = self.date {
- write!(f, "{}", date)?;
- }
- if let Some(ref time) = self.time {
- if self.date.is_some() {
- write!(f, "T")?;
- }
- write!(f, "{}", time)?;
- }
- if let Some(ref offset) = self.offset {
- write!(f, "{}", offset)?;
- }
- Ok(())
- }
-}
-
-impl fmt::Display for Date {
- fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- write!(f, "{:04}-{:02}-{:02}", self.year, self.month, self.day)
- }
-}
-
-impl fmt::Display for Time {
- fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- write!(f, "{:02}:{:02}:{:02}", self.hour, self.minute, self.second)?;
- if self.nanosecond != 0 {
- let s = format!("{:09}", self.nanosecond);
- write!(f, ".{}", s.trim_end_matches('0'))?;
- }
- Ok(())
- }
-}
-
-impl fmt::Display for Offset {
- fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- match *self {
- Offset::Z => write!(f, "Z"),
- Offset::Custom { hours, minutes } => write!(f, "{:+03}:{:02}", hours, minutes),
- }
- }
-}
-
-impl FromStr for Datetime {
- type Err = DatetimeParseError;
-
- fn from_str(date: &str) -> Result<Datetime, DatetimeParseError> {
- // Accepted formats:
- //
- // 0000-00-00T00:00:00.00Z
- // 0000-00-00T00:00:00.00
- // 0000-00-00
- // 00:00:00.00
- if date.len() < 3 {
- return Err(DatetimeParseError { _private: () });
- }
- let mut offset_allowed = true;
- let mut chars = date.chars();
-
- // First up, parse the full date if we can
- let full_date = if chars.clone().nth(2) == Some(':') {
- offset_allowed = false;
- None
- } else {
- let y1 = u16::from(digit(&mut chars)?);
- let y2 = u16::from(digit(&mut chars)?);
- let y3 = u16::from(digit(&mut chars)?);
- let y4 = u16::from(digit(&mut chars)?);
-
- match chars.next() {
- Some('-') => {}
- _ => return Err(DatetimeParseError { _private: () }),
- }
-
- let m1 = digit(&mut chars)?;
- let m2 = digit(&mut chars)?;
-
- match chars.next() {
- Some('-') => {}
- _ => return Err(DatetimeParseError { _private: () }),
- }
-
- let d1 = digit(&mut chars)?;
- let d2 = digit(&mut chars)?;
-
- let date = Date {
- year: y1 * 1000 + y2 * 100 + y3 * 10 + y4,
- month: m1 * 10 + m2,
- day: d1 * 10 + d2,
- };
-
- if date.month < 1 || date.month > 12 {
- return Err(DatetimeParseError { _private: () });
- }
- if date.day < 1 || date.day > 31 {
- return Err(DatetimeParseError { _private: () });
- }
-
- Some(date)
- };
-
- // Next parse the "partial-time" if available
- let next = chars.clone().next();
- let partial_time = if full_date.is_some()
- && (next == Some('T') || next == Some('t') || next == Some(' '))
- {
- chars.next();
- true
- } else {
- full_date.is_none()
- };
-
- let time = if partial_time {
- let h1 = digit(&mut chars)?;
- let h2 = digit(&mut chars)?;
- match chars.next() {
- Some(':') => {}
- _ => return Err(DatetimeParseError { _private: () }),
- }
- let m1 = digit(&mut chars)?;
- let m2 = digit(&mut chars)?;
- match chars.next() {
- Some(':') => {}
- _ => return Err(DatetimeParseError { _private: () }),
- }
- let s1 = digit(&mut chars)?;
- let s2 = digit(&mut chars)?;
-
- let mut nanosecond = 0;
- if chars.clone().next() == Some('.') {
- chars.next();
- let whole = chars.as_str();
-
- let mut end = whole.len();
- for (i, byte) in whole.bytes().enumerate() {
- match byte {
- b'0'..=b'9' => {
- if i < 9 {
- let p = 10_u32.pow(8 - i as u32);
- nanosecond += p * u32::from(byte - b'0');
- }
- }
- _ => {
- end = i;
- break;
- }
- }
- }
- if end == 0 {
- return Err(DatetimeParseError { _private: () });
- }
- chars = whole[end..].chars();
- }
-
- let time = Time {
- hour: h1 * 10 + h2,
- minute: m1 * 10 + m2,
- second: s1 * 10 + s2,
- nanosecond,
- };
-
- if time.hour > 24 {
- return Err(DatetimeParseError { _private: () });
- }
- if time.minute > 59 {
- return Err(DatetimeParseError { _private: () });
- }
- if time.second > 59 {
- return Err(DatetimeParseError { _private: () });
- }
- if time.nanosecond > 999_999_999 {
- return Err(DatetimeParseError { _private: () });
- }
-
- Some(time)
- } else {
- offset_allowed = false;
- None
- };
-
- // And finally, parse the offset
- let offset = if offset_allowed {
- let next = chars.clone().next();
- if next == Some('Z') || next == Some('z') {
- chars.next();
- Some(Offset::Z)
- } else if next.is_none() {
- None
- } else {
- let sign = match next {
- Some('+') => 1,
- Some('-') => -1,
- _ => return Err(DatetimeParseError { _private: () }),
- };
- chars.next();
- let h1 = digit(&mut chars)? as i8;
- let h2 = digit(&mut chars)? as i8;
- match chars.next() {
- Some(':') => {}
- _ => return Err(DatetimeParseError { _private: () }),
- }
- let m1 = digit(&mut chars)?;
- let m2 = digit(&mut chars)?;
-
- Some(Offset::Custom {
- hours: sign * (h1 * 10 + h2),
- minutes: m1 * 10 + m2,
- })
- }
- } else {
- None
- };
-
- // Return an error if we didn't hit eof, otherwise return our parsed
- // date
- if chars.next().is_some() {
- return Err(DatetimeParseError { _private: () });
- }
-
- Ok(Datetime {
- date: full_date,
- time,
- offset,
- })
- }
-}
-
-fn digit(chars: &mut str::Chars<'_>) -> Result<u8, DatetimeParseError> {
- match chars.next() {
- Some(c) if ('0'..='9').contains(&c) => Ok(c as u8 - b'0'),
- _ => Err(DatetimeParseError { _private: () }),
- }
-}
-
-impl ser::Serialize for Datetime {
- fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
- where
- S: ser::Serializer,
- {
- use serde::ser::SerializeStruct;
-
- let mut s = serializer.serialize_struct(NAME, 1)?;
- s.serialize_field(FIELD, &self.to_string())?;
- s.end()
- }
-}
-
-impl<'de> de::Deserialize<'de> for Datetime {
- fn deserialize<D>(deserializer: D) -> Result<Datetime, D::Error>
- where
- D: de::Deserializer<'de>,
- {
- struct DatetimeVisitor;
-
- impl<'de> de::Visitor<'de> for DatetimeVisitor {
- type Value = Datetime;
-
- fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {
- formatter.write_str("a TOML datetime")
- }
-
- fn visit_map<V>(self, mut visitor: V) -> Result<Datetime, V::Error>
- where
- V: de::MapAccess<'de>,
- {
- let value = visitor.next_key::<DatetimeKey>()?;
- if value.is_none() {
- return Err(de::Error::custom("datetime key not found"));
- }
- let v: DatetimeFromString = visitor.next_value()?;
- Ok(v.value)
- }
- }
-
- static FIELDS: [&str; 1] = [FIELD];
- deserializer.deserialize_struct(NAME, &FIELDS, DatetimeVisitor)
- }
-}
-
-struct DatetimeKey;
-
-impl<'de> de::Deserialize<'de> for DatetimeKey {
- fn deserialize<D>(deserializer: D) -> Result<DatetimeKey, D::Error>
- where
- D: de::Deserializer<'de>,
- {
- struct FieldVisitor;
-
- impl<'de> de::Visitor<'de> for FieldVisitor {
- type Value = ();
-
- fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {
- formatter.write_str("a valid datetime field")
- }
-
- fn visit_str<E>(self, s: &str) -> Result<(), E>
- where
- E: de::Error,
- {
- if s == FIELD {
- Ok(())
- } else {
- Err(de::Error::custom("expected field with custom name"))
- }
- }
- }
-
- deserializer.deserialize_identifier(FieldVisitor)?;
- Ok(DatetimeKey)
- }
-}
-
-pub struct DatetimeFromString {
- pub value: Datetime,
-}
-
-impl<'de> de::Deserialize<'de> for DatetimeFromString {
- fn deserialize<D>(deserializer: D) -> Result<DatetimeFromString, D::Error>
- where
- D: de::Deserializer<'de>,
- {
- struct Visitor;
-
- impl<'de> de::Visitor<'de> for Visitor {
- type Value = DatetimeFromString;
-
- fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {
- formatter.write_str("string containing a datetime")
- }
-
- fn visit_str<E>(self, s: &str) -> Result<DatetimeFromString, E>
- where
- E: de::Error,
- {
- match s.parse() {
- Ok(date) => Ok(DatetimeFromString { value: date }),
- Err(e) => Err(de::Error::custom(e)),
- }
- }
- }
-
- deserializer.deserialize_str(Visitor)
- }
-}
-
-impl fmt::Display for DatetimeParseError {
- fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- "failed to parse datetime".fmt(f)
- }
-}
-
-impl error::Error for DatetimeParseError {}
diff --git a/third_party/rust/toml/src/de.rs b/third_party/rust/toml/src/de.rs
@@ -1,2246 +0,0 @@
-//! Deserializing TOML into Rust structures.
-//!
-//! This module contains all the Serde support for deserializing TOML documents
-//! into Rust structures. Note that some top-level functions here are also
-//! provided at the top of the crate.
-
-use std::borrow::Cow;
-use std::collections::HashMap;
-use std::error;
-use std::f64;
-use std::fmt;
-use std::iter;
-use std::marker::PhantomData;
-use std::str;
-use std::vec;
-
-use serde::de;
-use serde::de::value::BorrowedStrDeserializer;
-use serde::de::IntoDeserializer;
-
-use crate::datetime;
-use crate::spanned;
-use crate::tokens::{Error as TokenError, Span, Token, Tokenizer};
-
-/// Type Alias for a TOML Table pair
-type TablePair<'a> = ((Span, Cow<'a, str>), Value<'a>);
-
-/// Deserializes a byte slice into a type.
-///
-/// This function will attempt to interpret `bytes` as UTF-8 data and then
-/// deserialize `T` from the TOML document provided.
-pub fn from_slice<'de, T>(bytes: &'de [u8]) -> Result<T, Error>
-where
- T: de::Deserialize<'de>,
-{
- match str::from_utf8(bytes) {
- Ok(s) => from_str(s),
- Err(e) => Err(Error::custom(None, e.to_string())),
- }
-}
-
-/// Deserializes a string into a type.
-///
-/// This function will attempt to interpret `s` as a TOML document and
-/// deserialize `T` from the document.
-///
-/// # Examples
-///
-/// ```
-/// use serde_derive::Deserialize;
-///
-/// #[derive(Deserialize)]
-/// struct Config {
-/// title: String,
-/// owner: Owner,
-/// }
-///
-/// #[derive(Deserialize)]
-/// struct Owner {
-/// name: String,
-/// }
-///
-/// let config: Config = toml::from_str(r#"
-/// title = 'TOML Example'
-///
-/// [owner]
-/// name = 'Lisa'
-/// "#).unwrap();
-///
-/// assert_eq!(config.title, "TOML Example");
-/// assert_eq!(config.owner.name, "Lisa");
-/// ```
-pub fn from_str<'de, T>(s: &'de str) -> Result<T, Error>
-where
- T: de::Deserialize<'de>,
-{
- let mut d = Deserializer::new(s);
- let ret = T::deserialize(&mut d)?;
- d.end()?;
- Ok(ret)
-}
-
-/// Errors that can occur when deserializing a type.
-#[derive(Debug, PartialEq, Eq, Clone)]
-pub struct Error {
- inner: Box<ErrorInner>,
-}
-
-#[derive(Debug, PartialEq, Eq, Clone)]
-struct ErrorInner {
- kind: ErrorKind,
- line: Option<usize>,
- col: usize,
- at: Option<usize>,
- message: String,
- key: Vec<String>,
-}
-
-/// Errors that can occur when deserializing a type.
-#[derive(Debug, PartialEq, Eq, Clone)]
-#[non_exhaustive]
-enum ErrorKind {
- /// EOF was reached when looking for a value
- UnexpectedEof,
-
- /// An invalid character not allowed in a string was found
- InvalidCharInString(char),
-
- /// An invalid character was found as an escape
- InvalidEscape(char),
-
- /// An invalid character was found in a hex escape
- InvalidHexEscape(char),
-
- /// An invalid escape value was specified in a hex escape in a string.
- ///
- /// Valid values are in the plane of unicode codepoints.
- InvalidEscapeValue(u32),
-
- /// A newline in a string was encountered when one was not allowed.
- NewlineInString,
-
- /// An unexpected character was encountered, typically when looking for a
- /// value.
- Unexpected(char),
-
- /// An unterminated string was found where EOF was found before the ending
- /// EOF mark.
- UnterminatedString,
-
- /// A newline was found in a table key.
- NewlineInTableKey,
-
- /// A number failed to parse
- NumberInvalid,
-
- /// A date or datetime was invalid
- DateInvalid,
-
- /// Wanted one sort of token, but found another.
- Wanted {
- /// Expected token type
- expected: &'static str,
- /// Actually found token type
- found: &'static str,
- },
-
- /// A duplicate table definition was found.
- DuplicateTable(String),
-
- /// A previously defined table was redefined as an array.
- RedefineAsArray,
-
- /// An empty table key was found.
- EmptyTableKey,
-
- /// Multiline strings are not allowed for key
- MultilineStringKey,
-
- /// A custom error which could be generated when deserializing a particular
- /// type.
- Custom,
-
- /// A tuple with a certain number of elements was expected but something
- /// else was found.
- ExpectedTuple(usize),
-
- /// Expected table keys to be in increasing tuple index order, but something
- /// else was found.
- ExpectedTupleIndex {
- /// Expected index.
- expected: usize,
- /// Key that was specified.
- found: String,
- },
-
- /// An empty table was expected but entries were found
- ExpectedEmptyTable,
-
- /// Dotted key attempted to extend something that is not a table.
- DottedKeyInvalidType,
-
- /// An unexpected key was encountered.
- ///
- /// Used when deserializing a struct with a limited set of fields.
- UnexpectedKeys {
- /// The unexpected keys.
- keys: Vec<String>,
- /// Keys that may be specified.
- available: &'static [&'static str],
- },
-
- /// Unquoted string was found when quoted one was expected
- UnquotedString,
-}
-
-/// Deserialization implementation for TOML.
-pub struct Deserializer<'a> {
- require_newline_after_table: bool,
- allow_duplciate_after_longer_table: bool,
- input: &'a str,
- tokens: Tokenizer<'a>,
-}
-
-impl<'de, 'b> de::Deserializer<'de> for &'b mut Deserializer<'de> {
- type Error = Error;
-
- fn deserialize_any<V>(self, visitor: V) -> Result<V::Value, Error>
- where
- V: de::Visitor<'de>,
- {
- let mut tables = self.tables()?;
- let table_indices = build_table_indices(&tables);
- let table_pindices = build_table_pindices(&tables);
-
- let res = visitor.visit_map(MapVisitor {
- values: Vec::new().into_iter().peekable(),
- next_value: None,
- depth: 0,
- cur: 0,
- cur_parent: 0,
- max: tables.len(),
- table_indices: &table_indices,
- table_pindices: &table_pindices,
- tables: &mut tables,
- array: false,
- de: self,
- });
- res.map_err(|mut err| {
- // Errors originating from this library (toml), have an offset
- // attached to them already. Other errors, like those originating
- // from serde (like "missing field") or from a custom deserializer,
- // do not have offsets on them. Here, we do a best guess at their
- // location, by attributing them to the "current table" (the last
- // item in `tables`).
- err.fix_offset(|| tables.last().map(|table| table.at));
- err.fix_linecol(|at| self.to_linecol(at));
- err
- })
- }
-
- // Called when the type to deserialize is an enum, as opposed to a field in the type.
- fn deserialize_enum<V>(
- self,
- _name: &'static str,
- _variants: &'static [&'static str],
- visitor: V,
- ) -> Result<V::Value, Error>
- where
- V: de::Visitor<'de>,
- {
- let (value, name) = self.string_or_table()?;
- match value.e {
- E::String(val) => visitor.visit_enum(val.into_deserializer()),
- E::InlineTable(values) => {
- if values.len() != 1 {
- Err(Error::from_kind(
- Some(value.start),
- ErrorKind::Wanted {
- expected: "exactly 1 element",
- found: if values.is_empty() {
- "zero elements"
- } else {
- "more than 1 element"
- },
- },
- ))
- } else {
- visitor.visit_enum(InlineTableDeserializer {
- values: values.into_iter(),
- next_value: None,
- })
- }
- }
- E::DottedTable(_) => visitor.visit_enum(DottedTableDeserializer {
- name: name.expect("Expected table header to be passed."),
- value,
- }),
- e => Err(Error::from_kind(
- Some(value.start),
- ErrorKind::Wanted {
- expected: "string or table",
- found: e.type_name(),
- },
- )),
- }
- }
-
- fn deserialize_struct<V>(
- self,
- name: &'static str,
- fields: &'static [&'static str],
- visitor: V,
- ) -> Result<V::Value, Error>
- where
- V: de::Visitor<'de>,
- {
- if name == spanned::NAME && fields == [spanned::START, spanned::END, spanned::VALUE] {
- let start = 0;
- let end = self.input.len();
-
- let res = visitor.visit_map(SpannedDeserializer {
- phantom_data: PhantomData,
- start: Some(start),
- value: Some(self),
- end: Some(end),
- });
- return res;
- }
-
- self.deserialize_any(visitor)
- }
-
- serde::forward_to_deserialize_any! {
- bool u8 u16 u32 u64 i8 i16 i32 i64 f32 f64 char str string seq
- bytes byte_buf map unit newtype_struct
- ignored_any unit_struct tuple_struct tuple option identifier
- }
-}
-
-// Builds a datastructure that allows for efficient sublinear lookups.
-// The returned HashMap contains a mapping from table header (like [a.b.c])
-// to list of tables with that precise name. The tables are being identified
-// by their index in the passed slice. We use a list as the implementation
-// uses this data structure for arrays as well as tables,
-// so if any top level [[name]] array contains multiple entries,
-// there are multiple entries in the list.
-// The lookup is performed in the `SeqAccess` implementation of `MapVisitor`.
-// The lists are ordered, which we exploit in the search code by using
-// bisection.
-fn build_table_indices<'de>(tables: &[Table<'de>]) -> HashMap<Vec<Cow<'de, str>>, Vec<usize>> {
- let mut res = HashMap::new();
- for (i, table) in tables.iter().enumerate() {
- let header = table.header.iter().map(|v| v.1.clone()).collect::<Vec<_>>();
- res.entry(header).or_insert_with(Vec::new).push(i);
- }
- res
-}
-
-// Builds a datastructure that allows for efficient sublinear lookups.
-// The returned HashMap contains a mapping from table header (like [a.b.c])
-// to list of tables whose name at least starts with the specified
-// name. So searching for [a.b] would give both [a.b.c.d] as well as [a.b.e].
-// The tables are being identified by their index in the passed slice.
-//
-// A list is used for two reasons: First, the implementation also
-// stores arrays in the same data structure and any top level array
-// of size 2 or greater creates multiple entries in the list with the
-// same shared name. Second, there can be multiple tables sharing
-// the same prefix.
-//
-// The lookup is performed in the `MapAccess` implementation of `MapVisitor`.
-// The lists are ordered, which we exploit in the search code by using
-// bisection.
-fn build_table_pindices<'de>(tables: &[Table<'de>]) -> HashMap<Vec<Cow<'de, str>>, Vec<usize>> {
- let mut res = HashMap::new();
- for (i, table) in tables.iter().enumerate() {
- let header = table.header.iter().map(|v| v.1.clone()).collect::<Vec<_>>();
- for len in 0..=header.len() {
- res.entry(header[..len].to_owned())
- .or_insert_with(Vec::new)
- .push(i);
- }
- }
- res
-}
-
-fn headers_equal<'a, 'b>(hdr_a: &[(Span, Cow<'a, str>)], hdr_b: &[(Span, Cow<'b, str>)]) -> bool {
- if hdr_a.len() != hdr_b.len() {
- return false;
- }
- hdr_a.iter().zip(hdr_b.iter()).all(|(h1, h2)| h1.1 == h2.1)
-}
-
-struct Table<'a> {
- at: usize,
- header: Vec<(Span, Cow<'a, str>)>,
- values: Option<Vec<TablePair<'a>>>,
- array: bool,
-}
-
-struct MapVisitor<'de, 'b> {
- values: iter::Peekable<vec::IntoIter<TablePair<'de>>>,
- next_value: Option<TablePair<'de>>,
- depth: usize,
- cur: usize,
- cur_parent: usize,
- max: usize,
- table_indices: &'b HashMap<Vec<Cow<'de, str>>, Vec<usize>>,
- table_pindices: &'b HashMap<Vec<Cow<'de, str>>, Vec<usize>>,
- tables: &'b mut [Table<'de>],
- array: bool,
- de: &'b mut Deserializer<'de>,
-}
-
-impl<'de, 'b> de::MapAccess<'de> for MapVisitor<'de, 'b> {
- type Error = Error;
-
- fn next_key_seed<K>(&mut self, seed: K) -> Result<Option<K::Value>, Error>
- where
- K: de::DeserializeSeed<'de>,
- {
- if self.cur_parent == self.max || self.cur == self.max {
- return Ok(None);
- }
-
- loop {
- assert!(self.next_value.is_none());
- if let Some((key, value)) = self.values.next() {
- let ret = seed.deserialize(StrDeserializer::spanned(key.clone()))?;
- self.next_value = Some((key, value));
- return Ok(Some(ret));
- }
-
- let next_table = {
- let prefix_stripped = self.tables[self.cur_parent].header[..self.depth]
- .iter()
- .map(|v| v.1.clone())
- .collect::<Vec<_>>();
- self.table_pindices
- .get(&prefix_stripped)
- .and_then(|entries| {
- let start = entries.binary_search(&self.cur).unwrap_or_else(|v| v);
- if start == entries.len() || entries[start] < self.cur {
- return None;
- }
- entries[start..]
- .iter()
- .filter_map(|i| if *i < self.max { Some(*i) } else { None })
- .map(|i| (i, &self.tables[i]))
- .find(|(_, table)| table.values.is_some())
- .map(|p| p.0)
- })
- };
-
- let pos = match next_table {
- Some(pos) => pos,
- None => return Ok(None),
- };
- self.cur = pos;
-
- // Test to see if we're duplicating our parent's table, and if so
- // then this is an error in the toml format
- if self.cur_parent != pos {
- if headers_equal(
- &self.tables[self.cur_parent].header,
- &self.tables[pos].header,
- ) {
- let at = self.tables[pos].at;
- let name = self.tables[pos]
- .header
- .iter()
- .map(|k| k.1.to_owned())
- .collect::<Vec<_>>()
- .join(".");
- return Err(self.de.error(at, ErrorKind::DuplicateTable(name)));
- }
-
- // If we're here we know we should share the same prefix, and if
- // the longer table was defined first then we want to narrow
- // down our parent's length if possible to ensure that we catch
- // duplicate tables defined afterwards.
- if !self.de.allow_duplciate_after_longer_table {
- let parent_len = self.tables[self.cur_parent].header.len();
- let cur_len = self.tables[pos].header.len();
- if cur_len < parent_len {
- self.cur_parent = pos;
- }
- }
- }
-
- let table = &mut self.tables[pos];
-
- // If we're not yet at the appropriate depth for this table then we
- // just next the next portion of its header and then continue
- // decoding.
- if self.depth != table.header.len() {
- let key = &table.header[self.depth];
- let key = seed.deserialize(StrDeserializer::spanned(key.clone()))?;
- return Ok(Some(key));
- }
-
- // Rule out cases like:
- //
- // [[foo.bar]]
- // [[foo]]
- if table.array {
- let kind = ErrorKind::RedefineAsArray;
- return Err(self.de.error(table.at, kind));
- }
-
- self.values = table
- .values
- .take()
- .expect("Unable to read table values")
- .into_iter()
- .peekable();
- }
- }
-
- fn next_value_seed<V>(&mut self, seed: V) -> Result<V::Value, Error>
- where
- V: de::DeserializeSeed<'de>,
- {
- if let Some((k, v)) = self.next_value.take() {
- match seed.deserialize(ValueDeserializer::new(v)) {
- Ok(v) => return Ok(v),
- Err(mut e) => {
- e.add_key_context(&k.1);
- return Err(e);
- }
- }
- }
-
- let array =
- self.tables[self.cur].array && self.depth == self.tables[self.cur].header.len() - 1;
- self.cur += 1;
- let res = seed.deserialize(MapVisitor {
- values: Vec::new().into_iter().peekable(),
- next_value: None,
- depth: self.depth + if array { 0 } else { 1 },
- cur_parent: self.cur - 1,
- cur: 0,
- max: self.max,
- array,
- table_indices: self.table_indices,
- table_pindices: self.table_pindices,
- tables: &mut *self.tables,
- de: &mut *self.de,
- });
- res.map_err(|mut e| {
- e.add_key_context(&self.tables[self.cur - 1].header[self.depth].1);
- e
- })
- }
-}
-
-impl<'de, 'b> de::SeqAccess<'de> for MapVisitor<'de, 'b> {
- type Error = Error;
-
- fn next_element_seed<K>(&mut self, seed: K) -> Result<Option<K::Value>, Error>
- where
- K: de::DeserializeSeed<'de>,
- {
- assert!(self.next_value.is_none());
- assert!(self.values.next().is_none());
-
- if self.cur_parent == self.max {
- return Ok(None);
- }
-
- let header_stripped = self.tables[self.cur_parent]
- .header
- .iter()
- .map(|v| v.1.clone())
- .collect::<Vec<_>>();
- let start_idx = self.cur_parent + 1;
- let next = self
- .table_indices
- .get(&header_stripped)
- .and_then(|entries| {
- let start = entries.binary_search(&start_idx).unwrap_or_else(|v| v);
- if start == entries.len() || entries[start] < start_idx {
- return None;
- }
- entries[start..]
- .iter()
- .filter_map(|i| if *i < self.max { Some(*i) } else { None })
- .map(|i| (i, &self.tables[i]))
- .find(|(_, table)| table.array)
- .map(|p| p.0)
- })
- .unwrap_or(self.max);
-
- let ret = seed.deserialize(MapVisitor {
- values: self.tables[self.cur_parent]
- .values
- .take()
- .expect("Unable to read table values")
- .into_iter()
- .peekable(),
- next_value: None,
- depth: self.depth + 1,
- cur_parent: self.cur_parent,
- max: next,
- cur: 0,
- array: false,
- table_indices: self.table_indices,
- table_pindices: self.table_pindices,
- tables: self.tables,
- de: self.de,
- })?;
- self.cur_parent = next;
- Ok(Some(ret))
- }
-}
-
-impl<'de, 'b> de::Deserializer<'de> for MapVisitor<'de, 'b> {
- type Error = Error;
-
- fn deserialize_any<V>(self, visitor: V) -> Result<V::Value, Error>
- where
- V: de::Visitor<'de>,
- {
- if self.array {
- visitor.visit_seq(self)
- } else {
- visitor.visit_map(self)
- }
- }
-
- // `None` is interpreted as a missing field so be sure to implement `Some`
- // as a present field.
- fn deserialize_option<V>(self, visitor: V) -> Result<V::Value, Error>
- where
- V: de::Visitor<'de>,
- {
- visitor.visit_some(self)
- }
-
- fn deserialize_newtype_struct<V>(
- self,
- _name: &'static str,
- visitor: V,
- ) -> Result<V::Value, Error>
- where
- V: de::Visitor<'de>,
- {
- visitor.visit_newtype_struct(self)
- }
-
- fn deserialize_struct<V>(
- mut self,
- name: &'static str,
- fields: &'static [&'static str],
- visitor: V,
- ) -> Result<V::Value, Error>
- where
- V: de::Visitor<'de>,
- {
- if name == spanned::NAME
- && fields == [spanned::START, spanned::END, spanned::VALUE]
- && !(self.array && self.values.peek().is_some())
- {
- // TODO we can't actually emit spans here for the *entire* table/array
- // due to the format that toml uses. Setting the start and end to 0 is
- // *detectable* (and no reasonable span would look like that),
- // it would be better to expose this in the API via proper
- // ADTs like Option<T>.
- let start = 0;
- let end = 0;
-
- let res = visitor.visit_map(SpannedDeserializer {
- phantom_data: PhantomData,
- start: Some(start),
- value: Some(self),
- end: Some(end),
- });
- return res;
- }
-
- self.deserialize_any(visitor)
- }
-
- fn deserialize_enum<V>(
- self,
- _name: &'static str,
- _variants: &'static [&'static str],
- visitor: V,
- ) -> Result<V::Value, Error>
- where
- V: de::Visitor<'de>,
- {
- if self.tables.len() != 1 {
- return Err(Error::custom(
- Some(self.cur),
- "enum table must contain exactly one table".into(),
- ));
- }
- let table = &mut self.tables[0];
- let values = table.values.take().expect("table has no values?");
- if table.header.is_empty() {
- return Err(self.de.error(self.cur, ErrorKind::EmptyTableKey));
- }
- let name = table.header[table.header.len() - 1].1.to_owned();
- visitor.visit_enum(DottedTableDeserializer {
- name,
- value: Value {
- e: E::DottedTable(values),
- start: 0,
- end: 0,
- },
- })
- }
-
- serde::forward_to_deserialize_any! {
- bool u8 u16 u32 u64 i8 i16 i32 i64 f32 f64 char str string seq
- bytes byte_buf map unit identifier
- ignored_any unit_struct tuple_struct tuple
- }
-}
-
-struct StrDeserializer<'a> {
- span: Option<Span>,
- key: Cow<'a, str>,
-}
-
-impl<'a> StrDeserializer<'a> {
- fn spanned(inner: (Span, Cow<'a, str>)) -> StrDeserializer<'a> {
- StrDeserializer {
- span: Some(inner.0),
- key: inner.1,
- }
- }
- fn new(key: Cow<'a, str>) -> StrDeserializer<'a> {
- StrDeserializer { span: None, key }
- }
-}
-
-impl<'a> de::IntoDeserializer<'a, Error> for StrDeserializer<'a> {
- type Deserializer = Self;
-
- fn into_deserializer(self) -> Self::Deserializer {
- self
- }
-}
-
-impl<'de> de::Deserializer<'de> for StrDeserializer<'de> {
- type Error = Error;
-
- fn deserialize_any<V>(self, visitor: V) -> Result<V::Value, Error>
- where
- V: de::Visitor<'de>,
- {
- match self.key {
- Cow::Borrowed(s) => visitor.visit_borrowed_str(s),
- Cow::Owned(s) => visitor.visit_string(s),
- }
- }
-
- fn deserialize_struct<V>(
- self,
- name: &'static str,
- fields: &'static [&'static str],
- visitor: V,
- ) -> Result<V::Value, Error>
- where
- V: de::Visitor<'de>,
- {
- if name == spanned::NAME && fields == [spanned::START, spanned::END, spanned::VALUE] {
- if let Some(span) = self.span {
- return visitor.visit_map(SpannedDeserializer {
- phantom_data: PhantomData,
- start: Some(span.start),
- value: Some(StrDeserializer::new(self.key)),
- end: Some(span.end),
- });
- }
- }
- self.deserialize_any(visitor)
- }
-
- serde::forward_to_deserialize_any! {
- bool u8 u16 u32 u64 i8 i16 i32 i64 f32 f64 char str string seq
- bytes byte_buf map option unit newtype_struct
- ignored_any unit_struct tuple_struct tuple enum identifier
- }
-}
-
-struct ValueDeserializer<'a> {
- value: Value<'a>,
- validate_struct_keys: bool,
-}
-
-impl<'a> ValueDeserializer<'a> {
- fn new(value: Value<'a>) -> ValueDeserializer<'a> {
- ValueDeserializer {
- value,
- validate_struct_keys: false,
- }
- }
-
- fn with_struct_key_validation(mut self) -> Self {
- self.validate_struct_keys = true;
- self
- }
-}
-
-impl<'de> de::Deserializer<'de> for ValueDeserializer<'de> {
- type Error = Error;
-
- fn deserialize_any<V>(self, visitor: V) -> Result<V::Value, Error>
- where
- V: de::Visitor<'de>,
- {
- let start = self.value.start;
- let res = match self.value.e {
- E::Integer(i) => visitor.visit_i64(i),
- E::Boolean(b) => visitor.visit_bool(b),
- E::Float(f) => visitor.visit_f64(f),
- E::String(Cow::Borrowed(s)) => visitor.visit_borrowed_str(s),
- E::String(Cow::Owned(s)) => visitor.visit_string(s),
- E::Datetime(s) => visitor.visit_map(DatetimeDeserializer {
- date: s,
- visited: false,
- }),
- E::Array(values) => {
- let mut s = de::value::SeqDeserializer::new(values.into_iter());
- let ret = visitor.visit_seq(&mut s)?;
- s.end()?;
- Ok(ret)
- }
- E::InlineTable(values) | E::DottedTable(values) => {
- visitor.visit_map(InlineTableDeserializer {
- values: values.into_iter(),
- next_value: None,
- })
- }
- };
- res.map_err(|mut err| {
- // Attribute the error to whatever value returned the error.
- err.fix_offset(|| Some(start));
- err
- })
- }
-
- fn deserialize_struct<V>(
- self,
- name: &'static str,
- fields: &'static [&'static str],
- visitor: V,
- ) -> Result<V::Value, Error>
- where
- V: de::Visitor<'de>,
- {
- if name == datetime::NAME && fields == [datetime::FIELD] {
- if let E::Datetime(s) = self.value.e {
- return visitor.visit_map(DatetimeDeserializer {
- date: s,
- visited: false,
- });
- }
- }
-
- if self.validate_struct_keys {
- match self.value.e {
- E::InlineTable(ref values) | E::DottedTable(ref values) => {
- let extra_fields = values
- .iter()
- .filter_map(|key_value| {
- let (ref key, ref _val) = *key_value;
- if !fields.contains(&&*(key.1)) {
- Some(key.clone())
- } else {
- None
- }
- })
- .collect::<Vec<_>>();
-
- if !extra_fields.is_empty() {
- return Err(Error::from_kind(
- Some(self.value.start),
- ErrorKind::UnexpectedKeys {
- keys: extra_fields
- .iter()
- .map(|k| k.1.to_string())
- .collect::<Vec<_>>(),
- available: fields,
- },
- ));
- }
- }
- _ => {}
- }
- }
-
- if name == spanned::NAME && fields == [spanned::START, spanned::END, spanned::VALUE] {
- let start = self.value.start;
- let end = self.value.end;
-
- return visitor.visit_map(SpannedDeserializer {
- phantom_data: PhantomData,
- start: Some(start),
- value: Some(self.value),
- end: Some(end),
- });
- }
-
- self.deserialize_any(visitor)
- }
-
- // `None` is interpreted as a missing field so be sure to implement `Some`
- // as a present field.
- fn deserialize_option<V>(self, visitor: V) -> Result<V::Value, Error>
- where
- V: de::Visitor<'de>,
- {
- visitor.visit_some(self)
- }
-
- fn deserialize_enum<V>(
- self,
- _name: &'static str,
- _variants: &'static [&'static str],
- visitor: V,
- ) -> Result<V::Value, Error>
- where
- V: de::Visitor<'de>,
- {
- match self.value.e {
- E::String(val) => visitor.visit_enum(val.into_deserializer()),
- E::InlineTable(values) => {
- if values.len() != 1 {
- Err(Error::from_kind(
- Some(self.value.start),
- ErrorKind::Wanted {
- expected: "exactly 1 element",
- found: if values.is_empty() {
- "zero elements"
- } else {
- "more than 1 element"
- },
- },
- ))
- } else {
- visitor.visit_enum(InlineTableDeserializer {
- values: values.into_iter(),
- next_value: None,
- })
- }
- }
- e => Err(Error::from_kind(
- Some(self.value.start),
- ErrorKind::Wanted {
- expected: "string or inline table",
- found: e.type_name(),
- },
- )),
- }
- }
-
- fn deserialize_newtype_struct<V>(
- self,
- _name: &'static str,
- visitor: V,
- ) -> Result<V::Value, Error>
- where
- V: de::Visitor<'de>,
- {
- visitor.visit_newtype_struct(self)
- }
-
- serde::forward_to_deserialize_any! {
- bool u8 u16 u32 u64 i8 i16 i32 i64 f32 f64 char str string seq
- bytes byte_buf map unit identifier
- ignored_any unit_struct tuple_struct tuple
- }
-}
-
-impl<'de, 'b> de::IntoDeserializer<'de, Error> for MapVisitor<'de, 'b> {
- type Deserializer = MapVisitor<'de, 'b>;
-
- fn into_deserializer(self) -> Self::Deserializer {
- self
- }
-}
-
-impl<'de, 'b> de::IntoDeserializer<'de, Error> for &'b mut Deserializer<'de> {
- type Deserializer = Self;
-
- fn into_deserializer(self) -> Self::Deserializer {
- self
- }
-}
-
-impl<'de> de::IntoDeserializer<'de, Error> for Value<'de> {
- type Deserializer = ValueDeserializer<'de>;
-
- fn into_deserializer(self) -> Self::Deserializer {
- ValueDeserializer::new(self)
- }
-}
-
-struct SpannedDeserializer<'de, T: de::IntoDeserializer<'de, Error>> {
- phantom_data: PhantomData<&'de ()>,
- start: Option<usize>,
- end: Option<usize>,
- value: Option<T>,
-}
-
-impl<'de, T> de::MapAccess<'de> for SpannedDeserializer<'de, T>
-where
- T: de::IntoDeserializer<'de, Error>,
-{
- type Error = Error;
-
- fn next_key_seed<K>(&mut self, seed: K) -> Result<Option<K::Value>, Error>
- where
- K: de::DeserializeSeed<'de>,
- {
- if self.start.is_some() {
- seed.deserialize(BorrowedStrDeserializer::new(spanned::START))
- .map(Some)
- } else if self.end.is_some() {
- seed.deserialize(BorrowedStrDeserializer::new(spanned::END))
- .map(Some)
- } else if self.value.is_some() {
- seed.deserialize(BorrowedStrDeserializer::new(spanned::VALUE))
- .map(Some)
- } else {
- Ok(None)
- }
- }
-
- fn next_value_seed<V>(&mut self, seed: V) -> Result<V::Value, Error>
- where
- V: de::DeserializeSeed<'de>,
- {
- if let Some(start) = self.start.take() {
- seed.deserialize(start.into_deserializer())
- } else if let Some(end) = self.end.take() {
- seed.deserialize(end.into_deserializer())
- } else if let Some(value) = self.value.take() {
- seed.deserialize(value.into_deserializer())
- } else {
- panic!("next_value_seed called before next_key_seed")
- }
- }
-}
-
-struct DatetimeDeserializer<'a> {
- visited: bool,
- date: &'a str,
-}
-
-impl<'de> de::MapAccess<'de> for DatetimeDeserializer<'de> {
- type Error = Error;
-
- fn next_key_seed<K>(&mut self, seed: K) -> Result<Option<K::Value>, Error>
- where
- K: de::DeserializeSeed<'de>,
- {
- if self.visited {
- return Ok(None);
- }
- self.visited = true;
- seed.deserialize(DatetimeFieldDeserializer).map(Some)
- }
-
- fn next_value_seed<V>(&mut self, seed: V) -> Result<V::Value, Error>
- where
- V: de::DeserializeSeed<'de>,
- {
- seed.deserialize(StrDeserializer::new(self.date.into()))
- }
-}
-
-struct DatetimeFieldDeserializer;
-
-impl<'de> de::Deserializer<'de> for DatetimeFieldDeserializer {
- type Error = Error;
-
- fn deserialize_any<V>(self, visitor: V) -> Result<V::Value, Error>
- where
- V: de::Visitor<'de>,
- {
- visitor.visit_borrowed_str(datetime::FIELD)
- }
-
- serde::forward_to_deserialize_any! {
- bool u8 u16 u32 u64 i8 i16 i32 i64 f32 f64 char str string seq
- bytes byte_buf map struct option unit newtype_struct
- ignored_any unit_struct tuple_struct tuple enum identifier
- }
-}
-
-struct DottedTableDeserializer<'a> {
- name: Cow<'a, str>,
- value: Value<'a>,
-}
-
-impl<'de> de::EnumAccess<'de> for DottedTableDeserializer<'de> {
- type Error = Error;
- type Variant = TableEnumDeserializer<'de>;
-
- fn variant_seed<V>(self, seed: V) -> Result<(V::Value, Self::Variant), Self::Error>
- where
- V: de::DeserializeSeed<'de>,
- {
- let (name, value) = (self.name, self.value);
- seed.deserialize(StrDeserializer::new(name))
- .map(|val| (val, TableEnumDeserializer { value }))
- }
-}
-
-struct InlineTableDeserializer<'a> {
- values: vec::IntoIter<TablePair<'a>>,
- next_value: Option<Value<'a>>,
-}
-
-impl<'de> de::MapAccess<'de> for InlineTableDeserializer<'de> {
- type Error = Error;
-
- fn next_key_seed<K>(&mut self, seed: K) -> Result<Option<K::Value>, Error>
- where
- K: de::DeserializeSeed<'de>,
- {
- let (key, value) = match self.values.next() {
- Some(pair) => pair,
- None => return Ok(None),
- };
- self.next_value = Some(value);
- seed.deserialize(StrDeserializer::spanned(key)).map(Some)
- }
-
- fn next_value_seed<V>(&mut self, seed: V) -> Result<V::Value, Error>
- where
- V: de::DeserializeSeed<'de>,
- {
- let value = self.next_value.take().expect("Unable to read table values");
- seed.deserialize(ValueDeserializer::new(value))
- }
-}
-
-impl<'de> de::EnumAccess<'de> for InlineTableDeserializer<'de> {
- type Error = Error;
- type Variant = TableEnumDeserializer<'de>;
-
- fn variant_seed<V>(mut self, seed: V) -> Result<(V::Value, Self::Variant), Self::Error>
- where
- V: de::DeserializeSeed<'de>,
- {
- let (key, value) = match self.values.next() {
- Some(pair) => pair,
- None => {
- return Err(Error::from_kind(
- None, // FIXME: How do we get an offset here?
- ErrorKind::Wanted {
- expected: "table with exactly 1 entry",
- found: "empty table",
- },
- ));
- }
- };
-
- seed.deserialize(StrDeserializer::new(key.1))
- .map(|val| (val, TableEnumDeserializer { value }))
- }
-}
-
-/// Deserializes table values into enum variants.
-struct TableEnumDeserializer<'a> {
- value: Value<'a>,
-}
-
-impl<'de> de::VariantAccess<'de> for TableEnumDeserializer<'de> {
- type Error = Error;
-
- fn unit_variant(self) -> Result<(), Self::Error> {
- match self.value.e {
- E::InlineTable(values) | E::DottedTable(values) => {
- if values.is_empty() {
- Ok(())
- } else {
- Err(Error::from_kind(
- Some(self.value.start),
- ErrorKind::ExpectedEmptyTable,
- ))
- }
- }
- e => Err(Error::from_kind(
- Some(self.value.start),
- ErrorKind::Wanted {
- expected: "table",
- found: e.type_name(),
- },
- )),
- }
- }
-
- fn newtype_variant_seed<T>(self, seed: T) -> Result<T::Value, Self::Error>
- where
- T: de::DeserializeSeed<'de>,
- {
- seed.deserialize(ValueDeserializer::new(self.value))
- }
-
- fn tuple_variant<V>(self, len: usize, visitor: V) -> Result<V::Value, Self::Error>
- where
- V: de::Visitor<'de>,
- {
- match self.value.e {
- E::InlineTable(values) | E::DottedTable(values) => {
- let tuple_values = values
- .into_iter()
- .enumerate()
- .map(|(index, (key, value))| match key.1.parse::<usize>() {
- Ok(key_index) if key_index == index => Ok(value),
- Ok(_) | Err(_) => Err(Error::from_kind(
- Some(key.0.start),
- ErrorKind::ExpectedTupleIndex {
- expected: index,
- found: key.1.to_string(),
- },
- )),
- })
- // Fold all values into a `Vec`, or return the first error.
- .fold(Ok(Vec::with_capacity(len)), |result, value_result| {
- result.and_then(move |mut tuple_values| match value_result {
- Ok(value) => {
- tuple_values.push(value);
- Ok(tuple_values)
- }
- // `Result<de::Value, Self::Error>` to `Result<Vec<_>, Self::Error>`
- Err(e) => Err(e),
- })
- })?;
-
- if tuple_values.len() == len {
- de::Deserializer::deserialize_seq(
- ValueDeserializer::new(Value {
- e: E::Array(tuple_values),
- start: self.value.start,
- end: self.value.end,
- }),
- visitor,
- )
- } else {
- Err(Error::from_kind(
- Some(self.value.start),
- ErrorKind::ExpectedTuple(len),
- ))
- }
- }
- e => Err(Error::from_kind(
- Some(self.value.start),
- ErrorKind::Wanted {
- expected: "table",
- found: e.type_name(),
- },
- )),
- }
- }
-
- fn struct_variant<V>(
- self,
- fields: &'static [&'static str],
- visitor: V,
- ) -> Result<V::Value, Self::Error>
- where
- V: de::Visitor<'de>,
- {
- de::Deserializer::deserialize_struct(
- ValueDeserializer::new(self.value).with_struct_key_validation(),
- "", // TODO: this should be the variant name
- fields,
- visitor,
- )
- }
-}
-
-impl<'a> Deserializer<'a> {
- /// Creates a new deserializer which will be deserializing the string
- /// provided.
- pub fn new(input: &'a str) -> Deserializer<'a> {
- Deserializer {
- tokens: Tokenizer::new(input),
- input,
- require_newline_after_table: true,
- allow_duplciate_after_longer_table: false,
- }
- }
-
- #[doc(hidden)]
- #[deprecated(since = "0.5.11")]
- pub fn end(&mut self) -> Result<(), Error> {
- Ok(())
- }
-
- #[doc(hidden)]
- #[deprecated(since = "0.5.11")]
- pub fn set_require_newline_after_table(&mut self, require: bool) {
- self.require_newline_after_table = require;
- }
-
- #[doc(hidden)]
- #[deprecated(since = "0.5.11")]
- pub fn set_allow_duplicate_after_longer_table(&mut self, allow: bool) {
- self.allow_duplciate_after_longer_table = allow;
- }
-
- fn tables(&mut self) -> Result<Vec<Table<'a>>, Error> {
- let mut tables = Vec::new();
- let mut cur_table = Table {
- at: 0,
- header: Vec::new(),
- values: None,
- array: false,
- };
-
- while let Some(line) = self.line()? {
- match line {
- Line::Table {
- at,
- mut header,
- array,
- } => {
- if !cur_table.header.is_empty() || cur_table.values.is_some() {
- tables.push(cur_table);
- }
- cur_table = Table {
- at,
- header: Vec::new(),
- values: Some(Vec::new()),
- array,
- };
- loop {
- let part = header.next().map_err(|e| self.token_error(e));
- match part? {
- Some(part) => cur_table.header.push(part),
- None => break,
- }
- }
- }
- Line::KeyValue(key, value) => {
- if cur_table.values.is_none() {
- cur_table.values = Some(Vec::new());
- }
- self.add_dotted_key(key, value, cur_table.values.as_mut().unwrap())?;
- }
- }
- }
- if !cur_table.header.is_empty() || cur_table.values.is_some() {
- tables.push(cur_table);
- }
- Ok(tables)
- }
-
- fn line(&mut self) -> Result<Option<Line<'a>>, Error> {
- loop {
- self.eat_whitespace()?;
- if self.eat_comment()? {
- continue;
- }
- if self.eat(Token::Newline)? {
- continue;
- }
- break;
- }
-
- match self.peek()? {
- Some((_, Token::LeftBracket)) => self.table_header().map(Some),
- Some(_) => self.key_value().map(Some),
- None => Ok(None),
- }
- }
-
- fn table_header(&mut self) -> Result<Line<'a>, Error> {
- let start = self.tokens.current();
- self.expect(Token::LeftBracket)?;
- let array = self.eat(Token::LeftBracket)?;
- let ret = Header::new(self.tokens.clone(), array, self.require_newline_after_table);
- if self.require_newline_after_table {
- self.tokens.skip_to_newline();
- } else {
- loop {
- match self.next()? {
- Some((_, Token::RightBracket)) => {
- if array {
- self.eat(Token::RightBracket)?;
- }
- break;
- }
- Some((_, Token::Newline)) | None => break,
- _ => {}
- }
- }
- self.eat_whitespace()?;
- }
- Ok(Line::Table {
- at: start,
- header: ret,
- array,
- })
- }
-
- fn key_value(&mut self) -> Result<Line<'a>, Error> {
- let key = self.dotted_key()?;
- self.eat_whitespace()?;
- self.expect(Token::Equals)?;
- self.eat_whitespace()?;
-
- let value = self.value()?;
- self.eat_whitespace()?;
- if !self.eat_comment()? {
- self.eat_newline_or_eof()?;
- }
-
- Ok(Line::KeyValue(key, value))
- }
-
- fn value(&mut self) -> Result<Value<'a>, Error> {
- let at = self.tokens.current();
- let value = match self.next()? {
- Some((Span { start, end }, Token::String { val, .. })) => Value {
- e: E::String(val),
- start,
- end,
- },
- Some((Span { start, end }, Token::Keylike("true"))) => Value {
- e: E::Boolean(true),
- start,
- end,
- },
- Some((Span { start, end }, Token::Keylike("false"))) => Value {
- e: E::Boolean(false),
- start,
- end,
- },
- Some((span, Token::Keylike(key))) => self.parse_keylike(at, span, key)?,
- Some((span, Token::Plus)) => self.number_leading_plus(span)?,
- Some((Span { start, .. }, Token::LeftBrace)) => {
- self.inline_table().map(|(Span { end, .. }, table)| Value {
- e: E::InlineTable(table),
- start,
- end,
- })?
- }
- Some((Span { start, .. }, Token::LeftBracket)) => {
- self.array().map(|(Span { end, .. }, array)| Value {
- e: E::Array(array),
- start,
- end,
- })?
- }
- Some(token) => {
- return Err(self.error(
- at,
- ErrorKind::Wanted {
- expected: "a value",
- found: token.1.describe(),
- },
- ));
- }
- None => return Err(self.eof()),
- };
- Ok(value)
- }
-
- fn parse_keylike(&mut self, at: usize, span: Span, key: &'a str) -> Result<Value<'a>, Error> {
- if key == "inf" || key == "nan" {
- return self.number_or_date(span, key);
- }
-
- let first_char = key.chars().next().expect("key should not be empty here");
- match first_char {
- '-' | '0'..='9' => self.number_or_date(span, key),
- _ => Err(self.error(at, ErrorKind::UnquotedString)),
- }
- }
-
- fn number_or_date(&mut self, span: Span, s: &'a str) -> Result<Value<'a>, Error> {
- if s.contains('T')
- || s.contains('t')
- || (s.len() > 1 && s[1..].contains('-') && !s.contains("e-") && !s.contains("E-"))
- {
- self.datetime(span, s, false)
- .map(|(Span { start, end }, d)| Value {
- e: E::Datetime(d),
- start,
- end,
- })
- } else if self.eat(Token::Colon)? {
- self.datetime(span, s, true)
- .map(|(Span { start, end }, d)| Value {
- e: E::Datetime(d),
- start,
- end,
- })
- } else {
- self.number(span, s)
- }
- }
-
- /// Returns a string or table value type.
- ///
- /// Used to deserialize enums. Unit enums may be represented as a string or a table, all other
- /// structures (tuple, newtype, struct) must be represented as a table.
- fn string_or_table(&mut self) -> Result<(Value<'a>, Option<Cow<'a, str>>), Error> {
- match self.peek()? {
- Some((span, Token::LeftBracket)) => {
- let tables = self.tables()?;
- if tables.len() != 1 {
- return Err(Error::from_kind(
- Some(span.start),
- ErrorKind::Wanted {
- expected: "exactly 1 table",
- found: if tables.is_empty() {
- "zero tables"
- } else {
- "more than 1 table"
- },
- },
- ));
- }
-
- let table = tables
- .into_iter()
- .next()
- .expect("Expected exactly one table");
- let header = table
- .header
- .last()
- .expect("Expected at least one header value for table.");
-
- let start = table.at;
- let end = table
- .values
- .as_ref()
- .and_then(|values| values.last())
- .map(|&(_, ref val)| val.end)
- .unwrap_or_else(|| header.1.len());
- Ok((
- Value {
- e: E::DottedTable(table.values.unwrap_or_default()),
- start,
- end,
- },
- Some(header.1.clone()),
- ))
- }
- Some(_) => self.value().map(|val| (val, None)),
- None => Err(self.eof()),
- }
- }
-
- fn number(&mut self, Span { start, end }: Span, s: &'a str) -> Result<Value<'a>, Error> {
- let to_integer = |f| Value {
- e: E::Integer(f),
- start,
- end,
- };
- if let Some(value) = s.strip_prefix("0x") {
- self.integer(value, 16).map(to_integer)
- } else if let Some(value) = s.strip_prefix("0o") {
- self.integer(value, 8).map(to_integer)
- } else if let Some(value) = s.strip_prefix("0b") {
- self.integer(value, 2).map(to_integer)
- } else if s.contains('e') || s.contains('E') {
- self.float(s, None).map(|f| Value {
- e: E::Float(f),
- start,
- end,
- })
- } else if self.eat(Token::Period)? {
- let at = self.tokens.current();
- match self.next()? {
- Some((Span { start, end }, Token::Keylike(after))) => {
- self.float(s, Some(after)).map(|f| Value {
- e: E::Float(f),
- start,
- end,
- })
- }
- _ => Err(self.error(at, ErrorKind::NumberInvalid)),
- }
- } else if s == "inf" {
- Ok(Value {
- e: E::Float(f64::INFINITY),
- start,
- end,
- })
- } else if s == "-inf" {
- Ok(Value {
- e: E::Float(f64::NEG_INFINITY),
- start,
- end,
- })
- } else if s == "nan" {
- Ok(Value {
- e: E::Float(f64::NAN),
- start,
- end,
- })
- } else if s == "-nan" {
- Ok(Value {
- e: E::Float(-f64::NAN),
- start,
- end,
- })
- } else {
- self.integer(s, 10).map(to_integer)
- }
- }
-
- fn number_leading_plus(&mut self, Span { start, .. }: Span) -> Result<Value<'a>, Error> {
- let start_token = self.tokens.current();
- match self.next()? {
- Some((Span { end, .. }, Token::Keylike(s))) => self.number(Span { start, end }, s),
- _ => Err(self.error(start_token, ErrorKind::NumberInvalid)),
- }
- }
-
- fn integer(&self, s: &'a str, radix: u32) -> Result<i64, Error> {
- let allow_sign = radix == 10;
- let allow_leading_zeros = radix != 10;
- let (prefix, suffix) = self.parse_integer(s, allow_sign, allow_leading_zeros, radix)?;
- let start = self.tokens.substr_offset(s);
- if !suffix.is_empty() {
- return Err(self.error(start, ErrorKind::NumberInvalid));
- }
- i64::from_str_radix(prefix.replace('_', "").trim_start_matches('+'), radix)
- .map_err(|_e| self.error(start, ErrorKind::NumberInvalid))
- }
-
- fn parse_integer(
- &self,
- s: &'a str,
- allow_sign: bool,
- allow_leading_zeros: bool,
- radix: u32,
- ) -> Result<(&'a str, &'a str), Error> {
- let start = self.tokens.substr_offset(s);
-
- let mut first = true;
- let mut first_zero = false;
- let mut underscore = false;
- let mut end = s.len();
- for (i, c) in s.char_indices() {
- let at = i + start;
- if i == 0 && (c == '+' || c == '-') && allow_sign {
- continue;
- }
-
- if c == '0' && first {
- first_zero = true;
- } else if c.is_digit(radix) {
- if !first && first_zero && !allow_leading_zeros {
- return Err(self.error(at, ErrorKind::NumberInvalid));
- }
- underscore = false;
- } else if c == '_' && first {
- return Err(self.error(at, ErrorKind::NumberInvalid));
- } else if c == '_' && !underscore {
- underscore = true;
- } else {
- end = i;
- break;
- }
- first = false;
- }
- if first || underscore {
- return Err(self.error(start, ErrorKind::NumberInvalid));
- }
- Ok((&s[..end], &s[end..]))
- }
-
- fn float(&mut self, s: &'a str, after_decimal: Option<&'a str>) -> Result<f64, Error> {
- let (integral, mut suffix) = self.parse_integer(s, true, false, 10)?;
- let start = self.tokens.substr_offset(integral);
-
- let mut fraction = None;
- if let Some(after) = after_decimal {
- if !suffix.is_empty() {
- return Err(self.error(start, ErrorKind::NumberInvalid));
- }
- let (a, b) = self.parse_integer(after, false, true, 10)?;
- fraction = Some(a);
- suffix = b;
- }
-
- let mut exponent = None;
- if suffix.starts_with('e') || suffix.starts_with('E') {
- let (a, b) = if suffix.len() == 1 {
- self.eat(Token::Plus)?;
- match self.next()? {
- Some((_, Token::Keylike(s))) => self.parse_integer(s, false, true, 10)?,
- _ => return Err(self.error(start, ErrorKind::NumberInvalid)),
- }
- } else {
- self.parse_integer(&suffix[1..], true, true, 10)?
- };
- if !b.is_empty() {
- return Err(self.error(start, ErrorKind::NumberInvalid));
- }
- exponent = Some(a);
- } else if !suffix.is_empty() {
- return Err(self.error(start, ErrorKind::NumberInvalid));
- }
-
- let mut number = integral
- .trim_start_matches('+')
- .chars()
- .filter(|c| *c != '_')
- .collect::<String>();
- if let Some(fraction) = fraction {
- number.push('.');
- number.extend(fraction.chars().filter(|c| *c != '_'));
- }
- if let Some(exponent) = exponent {
- number.push('E');
- number.extend(exponent.chars().filter(|c| *c != '_'));
- }
- number
- .parse()
- .map_err(|_e| self.error(start, ErrorKind::NumberInvalid))
- .and_then(|n: f64| {
- if n.is_finite() {
- Ok(n)
- } else {
- Err(self.error(start, ErrorKind::NumberInvalid))
- }
- })
- }
-
- fn datetime(
- &mut self,
- mut span: Span,
- date: &'a str,
- colon_eaten: bool,
- ) -> Result<(Span, &'a str), Error> {
- let start = self.tokens.substr_offset(date);
-
- // Check for space separated date and time.
- let mut lookahead = self.tokens.clone();
- if let Ok(Some((_, Token::Whitespace(" ")))) = lookahead.next() {
- // Check if hour follows.
- if let Ok(Some((_, Token::Keylike(_)))) = lookahead.next() {
- self.next()?; // skip space
- self.next()?; // skip keylike hour
- }
- }
-
- if colon_eaten || self.eat(Token::Colon)? {
- // minutes
- match self.next()? {
- Some((_, Token::Keylike(_))) => {}
- _ => return Err(self.error(start, ErrorKind::DateInvalid)),
- }
- // Seconds
- self.expect(Token::Colon)?;
- match self.next()? {
- Some((Span { end, .. }, Token::Keylike(_))) => {
- span.end = end;
- }
- _ => return Err(self.error(start, ErrorKind::DateInvalid)),
- }
- // Fractional seconds
- if self.eat(Token::Period)? {
- match self.next()? {
- Some((Span { end, .. }, Token::Keylike(_))) => {
- span.end = end;
- }
- _ => return Err(self.error(start, ErrorKind::DateInvalid)),
- }
- }
-
- // offset
- if self.eat(Token::Plus)? {
- match self.next()? {
- Some((Span { end, .. }, Token::Keylike(_))) => {
- span.end = end;
- }
- _ => return Err(self.error(start, ErrorKind::DateInvalid)),
- }
- }
- if self.eat(Token::Colon)? {
- match self.next()? {
- Some((Span { end, .. }, Token::Keylike(_))) => {
- span.end = end;
- }
- _ => return Err(self.error(start, ErrorKind::DateInvalid)),
- }
- }
- }
-
- let end = self.tokens.current();
- Ok((span, &self.tokens.input()[start..end]))
- }
-
- // TODO(#140): shouldn't buffer up this entire table in memory, it'd be
- // great to defer parsing everything until later.
- fn inline_table(&mut self) -> Result<(Span, Vec<TablePair<'a>>), Error> {
- let mut ret = Vec::new();
- self.eat_whitespace()?;
- if let Some(span) = self.eat_spanned(Token::RightBrace)? {
- return Ok((span, ret));
- }
- loop {
- let key = self.dotted_key()?;
- self.eat_whitespace()?;
- self.expect(Token::Equals)?;
- self.eat_whitespace()?;
- let value = self.value()?;
- self.add_dotted_key(key, value, &mut ret)?;
-
- self.eat_whitespace()?;
- if let Some(span) = self.eat_spanned(Token::RightBrace)? {
- return Ok((span, ret));
- }
- self.expect(Token::Comma)?;
- self.eat_whitespace()?;
- }
- }
-
- // TODO(#140): shouldn't buffer up this entire array in memory, it'd be
- // great to defer parsing everything until later.
- fn array(&mut self) -> Result<(Span, Vec<Value<'a>>), Error> {
- let mut ret = Vec::new();
-
- let intermediate = |me: &mut Deserializer<'_>| {
- loop {
- me.eat_whitespace()?;
- if !me.eat(Token::Newline)? && !me.eat_comment()? {
- break;
- }
- }
- Ok(())
- };
-
- loop {
- intermediate(self)?;
- if let Some(span) = self.eat_spanned(Token::RightBracket)? {
- return Ok((span, ret));
- }
- let value = self.value()?;
- ret.push(value);
- intermediate(self)?;
- if !self.eat(Token::Comma)? {
- break;
- }
- }
- intermediate(self)?;
- let span = self.expect_spanned(Token::RightBracket)?;
- Ok((span, ret))
- }
-
- fn table_key(&mut self) -> Result<(Span, Cow<'a, str>), Error> {
- self.tokens.table_key().map_err(|e| self.token_error(e))
- }
-
- fn dotted_key(&mut self) -> Result<Vec<(Span, Cow<'a, str>)>, Error> {
- let mut result = vec![self.table_key()?];
- self.eat_whitespace()?;
- while self.eat(Token::Period)? {
- self.eat_whitespace()?;
- result.push(self.table_key()?);
- self.eat_whitespace()?;
- }
- Ok(result)
- }
-
- /// Stores a value in the appropriate hierarchical structure positioned based on the dotted key.
- ///
- /// Given the following definition: `multi.part.key = "value"`, `multi` and `part` are
- /// intermediate parts which are mapped to the relevant fields in the deserialized type's data
- /// hierarchy.
- ///
- /// # Parameters
- ///
- /// * `key_parts`: Each segment of the dotted key, e.g. `part.one` maps to
- /// `vec![Cow::Borrowed("part"), Cow::Borrowed("one")].`
- /// * `value`: The parsed value.
- /// * `values`: The `Vec` to store the value in.
- fn add_dotted_key(
- &self,
- mut key_parts: Vec<(Span, Cow<'a, str>)>,
- value: Value<'a>,
- values: &mut Vec<TablePair<'a>>,
- ) -> Result<(), Error> {
- let key = key_parts.remove(0);
- if key_parts.is_empty() {
- values.push((key, value));
- return Ok(());
- }
- match values.iter_mut().find(|&&mut (ref k, _)| *k.1 == key.1) {
- Some(&mut (
- _,
- Value {
- e: E::DottedTable(ref mut v),
- ..
- },
- )) => {
- return self.add_dotted_key(key_parts, value, v);
- }
- Some(&mut (_, Value { start, .. })) => {
- return Err(self.error(start, ErrorKind::DottedKeyInvalidType));
- }
- None => {}
- }
- // The start/end value is somewhat misleading here.
- let table_values = Value {
- e: E::DottedTable(Vec::new()),
- start: value.start,
- end: value.end,
- };
- values.push((key, table_values));
- let last_i = values.len() - 1;
- if let (
- _,
- Value {
- e: E::DottedTable(ref mut v),
- ..
- },
- ) = values[last_i]
- {
- self.add_dotted_key(key_parts, value, v)?;
- }
- Ok(())
- }
-
- fn eat_whitespace(&mut self) -> Result<(), Error> {
- self.tokens
- .eat_whitespace()
- .map_err(|e| self.token_error(e))
- }
-
- fn eat_comment(&mut self) -> Result<bool, Error> {
- self.tokens.eat_comment().map_err(|e| self.token_error(e))
- }
-
- fn eat_newline_or_eof(&mut self) -> Result<(), Error> {
- self.tokens
- .eat_newline_or_eof()
- .map_err(|e| self.token_error(e))
- }
-
- fn eat(&mut self, expected: Token<'a>) -> Result<bool, Error> {
- self.tokens.eat(expected).map_err(|e| self.token_error(e))
- }
-
- fn eat_spanned(&mut self, expected: Token<'a>) -> Result<Option<Span>, Error> {
- self.tokens
- .eat_spanned(expected)
- .map_err(|e| self.token_error(e))
- }
-
- fn expect(&mut self, expected: Token<'a>) -> Result<(), Error> {
- self.tokens
- .expect(expected)
- .map_err(|e| self.token_error(e))
- }
-
- fn expect_spanned(&mut self, expected: Token<'a>) -> Result<Span, Error> {
- self.tokens
- .expect_spanned(expected)
- .map_err(|e| self.token_error(e))
- }
-
- fn next(&mut self) -> Result<Option<(Span, Token<'a>)>, Error> {
- self.tokens.next().map_err(|e| self.token_error(e))
- }
-
- fn peek(&mut self) -> Result<Option<(Span, Token<'a>)>, Error> {
- self.tokens.peek().map_err(|e| self.token_error(e))
- }
-
- fn eof(&self) -> Error {
- self.error(self.input.len(), ErrorKind::UnexpectedEof)
- }
-
- fn token_error(&self, error: TokenError) -> Error {
- match error {
- TokenError::InvalidCharInString(at, ch) => {
- self.error(at, ErrorKind::InvalidCharInString(ch))
- }
- TokenError::InvalidEscape(at, ch) => self.error(at, ErrorKind::InvalidEscape(ch)),
- TokenError::InvalidEscapeValue(at, v) => {
- self.error(at, ErrorKind::InvalidEscapeValue(v))
- }
- TokenError::InvalidHexEscape(at, ch) => self.error(at, ErrorKind::InvalidHexEscape(ch)),
- TokenError::NewlineInString(at) => self.error(at, ErrorKind::NewlineInString),
- TokenError::Unexpected(at, ch) => self.error(at, ErrorKind::Unexpected(ch)),
- TokenError::UnterminatedString(at) => self.error(at, ErrorKind::UnterminatedString),
- TokenError::NewlineInTableKey(at) => self.error(at, ErrorKind::NewlineInTableKey),
- TokenError::Wanted {
- at,
- expected,
- found,
- } => self.error(at, ErrorKind::Wanted { expected, found }),
- TokenError::MultilineStringKey(at) => self.error(at, ErrorKind::MultilineStringKey),
- }
- }
-
- fn error(&self, at: usize, kind: ErrorKind) -> Error {
- let mut err = Error::from_kind(Some(at), kind);
- err.fix_linecol(|at| self.to_linecol(at));
- err
- }
-
- /// Converts a byte offset from an error message to a (line, column) pair
- ///
- /// All indexes are 0-based.
- fn to_linecol(&self, offset: usize) -> (usize, usize) {
- let mut cur = 0;
- // Use split_terminator instead of lines so that if there is a `\r`,
- // it is included in the offset calculation. The `+1` values below
- // account for the `\n`.
- for (i, line) in self.input.split_terminator('\n').enumerate() {
- if cur + line.len() + 1 > offset {
- return (i, offset - cur);
- }
- cur += line.len() + 1;
- }
- (self.input.lines().count(), 0)
- }
-}
-
-impl Error {
- /// Produces a (line, column) pair of the position of the error if available
- ///
- /// All indexes are 0-based.
- pub fn line_col(&self) -> Option<(usize, usize)> {
- self.inner.line.map(|line| (line, self.inner.col))
- }
-
- fn from_kind(at: Option<usize>, kind: ErrorKind) -> Error {
- Error {
- inner: Box::new(ErrorInner {
- kind,
- line: None,
- col: 0,
- at,
- message: String::new(),
- key: Vec::new(),
- }),
- }
- }
-
- fn custom(at: Option<usize>, s: String) -> Error {
- Error {
- inner: Box::new(ErrorInner {
- kind: ErrorKind::Custom,
- line: None,
- col: 0,
- at,
- message: s,
- key: Vec::new(),
- }),
- }
- }
-
- pub(crate) fn add_key_context(&mut self, key: &str) {
- self.inner.key.insert(0, key.to_string());
- }
-
- fn fix_offset<F>(&mut self, f: F)
- where
- F: FnOnce() -> Option<usize>,
- {
- // An existing offset is always better positioned than anything we
- // might want to add later.
- if self.inner.at.is_none() {
- self.inner.at = f();
- }
- }
-
- fn fix_linecol<F>(&mut self, f: F)
- where
- F: FnOnce(usize) -> (usize, usize),
- {
- if let Some(at) = self.inner.at {
- let (line, col) = f(at);
- self.inner.line = Some(line);
- self.inner.col = col;
- }
- }
-}
-
-impl std::convert::From<Error> for std::io::Error {
- fn from(e: Error) -> Self {
- std::io::Error::new(std::io::ErrorKind::InvalidData, e.to_string())
- }
-}
-
-impl fmt::Display for Error {
- fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- match &self.inner.kind {
- ErrorKind::UnexpectedEof => "unexpected eof encountered".fmt(f)?,
- ErrorKind::InvalidCharInString(c) => write!(
- f,
- "invalid character in string: `{}`",
- c.escape_default().collect::<String>()
- )?,
- ErrorKind::InvalidEscape(c) => write!(
- f,
- "invalid escape character in string: `{}`",
- c.escape_default().collect::<String>()
- )?,
- ErrorKind::InvalidHexEscape(c) => write!(
- f,
- "invalid hex escape character in string: `{}`",
- c.escape_default().collect::<String>()
- )?,
- ErrorKind::InvalidEscapeValue(c) => write!(f, "invalid escape value: `{}`", c)?,
- ErrorKind::NewlineInString => "newline in string found".fmt(f)?,
- ErrorKind::Unexpected(ch) => write!(
- f,
- "unexpected character found: `{}`",
- ch.escape_default().collect::<String>()
- )?,
- ErrorKind::UnterminatedString => "unterminated string".fmt(f)?,
- ErrorKind::NewlineInTableKey => "found newline in table key".fmt(f)?,
- ErrorKind::Wanted { expected, found } => {
- write!(f, "expected {}, found {}", expected, found)?
- }
- ErrorKind::NumberInvalid => "invalid number".fmt(f)?,
- ErrorKind::DateInvalid => "invalid date".fmt(f)?,
- ErrorKind::DuplicateTable(ref s) => {
- write!(f, "redefinition of table `{}`", s)?;
- }
- ErrorKind::RedefineAsArray => "table redefined as array".fmt(f)?,
- ErrorKind::EmptyTableKey => "empty table key found".fmt(f)?,
- ErrorKind::MultilineStringKey => "multiline strings are not allowed for key".fmt(f)?,
- ErrorKind::Custom => self.inner.message.fmt(f)?,
- ErrorKind::ExpectedTuple(l) => write!(f, "expected table with length {}", l)?,
- ErrorKind::ExpectedTupleIndex {
- expected,
- ref found,
- } => write!(f, "expected table key `{}`, but was `{}`", expected, found)?,
- ErrorKind::ExpectedEmptyTable => "expected empty table".fmt(f)?,
- ErrorKind::DottedKeyInvalidType => {
- "dotted key attempted to extend non-table type".fmt(f)?
- }
- ErrorKind::UnexpectedKeys {
- ref keys,
- available,
- } => write!(
- f,
- "unexpected keys in table: `{:?}`, available keys: `{:?}`",
- keys, available
- )?,
- ErrorKind::UnquotedString => write!(
- f,
- "invalid TOML value, did you mean to use a quoted string?"
- )?,
- }
-
- if !self.inner.key.is_empty() {
- write!(f, " for key `")?;
- for (i, k) in self.inner.key.iter().enumerate() {
- if i > 0 {
- write!(f, ".")?;
- }
- write!(f, "{}", k)?;
- }
- write!(f, "`")?;
- }
-
- if let Some(line) = self.inner.line {
- write!(f, " at line {} column {}", line + 1, self.inner.col + 1)?;
- }
-
- Ok(())
- }
-}
-
-impl error::Error for Error {}
-
-impl de::Error for Error {
- fn custom<T: fmt::Display>(msg: T) -> Error {
- Error::custom(None, msg.to_string())
- }
-}
-
-enum Line<'a> {
- Table {
- at: usize,
- header: Header<'a>,
- array: bool,
- },
- KeyValue(Vec<(Span, Cow<'a, str>)>, Value<'a>),
-}
-
-struct Header<'a> {
- first: bool,
- array: bool,
- require_newline_after_table: bool,
- tokens: Tokenizer<'a>,
-}
-
-impl<'a> Header<'a> {
- fn new(tokens: Tokenizer<'a>, array: bool, require_newline_after_table: bool) -> Header<'a> {
- Header {
- first: true,
- array,
- tokens,
- require_newline_after_table,
- }
- }
-
- fn next(&mut self) -> Result<Option<(Span, Cow<'a, str>)>, TokenError> {
- self.tokens.eat_whitespace()?;
-
- if self.first || self.tokens.eat(Token::Period)? {
- self.first = false;
- self.tokens.eat_whitespace()?;
- self.tokens.table_key().map(Some)
- } else {
- self.tokens.expect(Token::RightBracket)?;
- if self.array {
- self.tokens.expect(Token::RightBracket)?;
- }
-
- self.tokens.eat_whitespace()?;
- if self.require_newline_after_table && !self.tokens.eat_comment()? {
- self.tokens.eat_newline_or_eof()?;
- }
- Ok(None)
- }
- }
-}
-
-#[derive(Debug)]
-struct Value<'a> {
- e: E<'a>,
- start: usize,
- end: usize,
-}
-
-#[derive(Debug)]
-enum E<'a> {
- Integer(i64),
- Float(f64),
- Boolean(bool),
- String(Cow<'a, str>),
- Datetime(&'a str),
- Array(Vec<Value<'a>>),
- InlineTable(Vec<TablePair<'a>>),
- DottedTable(Vec<TablePair<'a>>),
-}
-
-impl<'a> E<'a> {
- fn type_name(&self) -> &'static str {
- match *self {
- E::String(..) => "string",
- E::Integer(..) => "integer",
- E::Float(..) => "float",
- E::Boolean(..) => "boolean",
- E::Datetime(..) => "datetime",
- E::Array(..) => "array",
- E::InlineTable(..) => "inline table",
- E::DottedTable(..) => "dotted table",
- }
- }
-}
diff --git a/third_party/rust/toml/src/de/deserializer/array.rs b/third_party/rust/toml/src/de/deserializer/array.rs
@@ -0,0 +1,89 @@
+use serde_spanned::Spanned;
+
+use crate::de::DeArray;
+use crate::de::DeValue;
+use crate::de::Error;
+
+pub(crate) struct ArrayDeserializer<'i> {
+ input: DeArray<'i>,
+ span: core::ops::Range<usize>,
+}
+
+impl<'i> ArrayDeserializer<'i> {
+ pub(crate) fn new(input: DeArray<'i>, span: core::ops::Range<usize>) -> Self {
+ Self { input, span }
+ }
+}
+
+impl<'de> serde_core::Deserializer<'de> for ArrayDeserializer<'de> {
+ type Error = Error;
+
+ fn deserialize_any<V>(self, visitor: V) -> Result<V::Value, Self::Error>
+ where
+ V: serde_core::de::Visitor<'de>,
+ {
+ visitor.visit_seq(ArraySeqAccess::new(self.input))
+ }
+
+ fn deserialize_struct<V>(
+ self,
+ name: &'static str,
+ _fields: &'static [&'static str],
+ visitor: V,
+ ) -> Result<V::Value, Error>
+ where
+ V: serde_core::de::Visitor<'de>,
+ {
+ if serde_spanned::de::is_spanned(name) {
+ let span = self.span.clone();
+ return visitor.visit_map(super::SpannedDeserializer::new(self, span));
+ }
+
+ self.deserialize_any(visitor)
+ }
+
+ serde_core::forward_to_deserialize_any! {
+ bool u8 u16 u32 u64 i8 i16 i32 i64 f32 f64 char str string seq
+ bytes byte_buf map option unit newtype_struct
+ ignored_any unit_struct tuple_struct tuple enum identifier
+ }
+}
+
+impl<'de> serde_core::de::IntoDeserializer<'de, Error> for ArrayDeserializer<'de> {
+ type Deserializer = Self;
+
+ fn into_deserializer(self) -> Self::Deserializer {
+ self
+ }
+}
+
+pub(crate) struct ArraySeqAccess<'i> {
+ iter: alloc::vec::IntoIter<Spanned<DeValue<'i>>>,
+}
+
+impl<'i> ArraySeqAccess<'i> {
+ pub(crate) fn new(input: DeArray<'i>) -> Self {
+ Self {
+ iter: input.into_iter(),
+ }
+ }
+}
+
+impl<'de> serde_core::de::SeqAccess<'de> for ArraySeqAccess<'de> {
+ type Error = Error;
+
+ fn next_element_seed<T>(&mut self, seed: T) -> Result<Option<T::Value>, Self::Error>
+ where
+ T: serde_core::de::DeserializeSeed<'de>,
+ {
+ match self.iter.next() {
+ Some(v) => {
+ let span = v.span();
+ let v = v.into_inner();
+ seed.deserialize(crate::de::ValueDeserializer::with_parts(v, span))
+ .map(Some)
+ }
+ None => Ok(None),
+ }
+ }
+}
diff --git a/third_party/rust/toml/src/de/deserializer/key.rs b/third_party/rust/toml/src/de/deserializer/key.rs
@@ -0,0 +1,250 @@
+use serde_core::de::IntoDeserializer;
+
+use crate::de::DeString;
+use crate::de::Error;
+
+pub(crate) struct KeyDeserializer<'i> {
+ span: Option<core::ops::Range<usize>>,
+ key: DeString<'i>,
+}
+
+impl<'i> KeyDeserializer<'i> {
+ pub(crate) fn new(key: DeString<'i>, span: Option<core::ops::Range<usize>>) -> Self {
+ KeyDeserializer { span, key }
+ }
+}
+
+impl<'de> IntoDeserializer<'de, Error> for KeyDeserializer<'de> {
+ type Deserializer = Self;
+
+ fn into_deserializer(self) -> Self::Deserializer {
+ self
+ }
+}
+
+impl<'de> serde_core::de::Deserializer<'de> for KeyDeserializer<'de> {
+ type Error = Error;
+
+ fn deserialize_any<V>(self, visitor: V) -> Result<V::Value, Error>
+ where
+ V: serde_core::de::Visitor<'de>,
+ {
+ self.key.into_deserializer().deserialize_any(visitor)
+ }
+
+ fn deserialize_bool<V>(self, visitor: V) -> Result<V::Value, Self::Error>
+ where
+ V: serde_core::de::Visitor<'de>,
+ {
+ let key: bool = self.key.parse().map_err(serde_core::de::Error::custom)?;
+ key.into_deserializer().deserialize_bool(visitor)
+ }
+
+ fn deserialize_i8<V>(self, visitor: V) -> Result<V::Value, Self::Error>
+ where
+ V: serde_core::de::Visitor<'de>,
+ {
+ let key: i8 = self.key.parse().map_err(serde_core::de::Error::custom)?;
+ key.into_deserializer().deserialize_i8(visitor)
+ }
+
+ fn deserialize_i16<V>(self, visitor: V) -> Result<V::Value, Self::Error>
+ where
+ V: serde_core::de::Visitor<'de>,
+ {
+ let key: i16 = self.key.parse().map_err(serde_core::de::Error::custom)?;
+ key.into_deserializer().deserialize_i16(visitor)
+ }
+
+ fn deserialize_i32<V>(self, visitor: V) -> Result<V::Value, Self::Error>
+ where
+ V: serde_core::de::Visitor<'de>,
+ {
+ let key: i32 = self.key.parse().map_err(serde_core::de::Error::custom)?;
+ key.into_deserializer().deserialize_i32(visitor)
+ }
+
+ fn deserialize_i64<V>(self, visitor: V) -> Result<V::Value, Self::Error>
+ where
+ V: serde_core::de::Visitor<'de>,
+ {
+ let key: i64 = self.key.parse().map_err(serde_core::de::Error::custom)?;
+ key.into_deserializer().deserialize_i64(visitor)
+ }
+
+ fn deserialize_i128<V>(self, visitor: V) -> Result<V::Value, Self::Error>
+ where
+ V: serde_core::de::Visitor<'de>,
+ {
+ let key: i128 = self.key.parse().map_err(serde_core::de::Error::custom)?;
+ key.into_deserializer().deserialize_i128(visitor)
+ }
+
+ fn deserialize_u8<V>(self, visitor: V) -> Result<V::Value, Self::Error>
+ where
+ V: serde_core::de::Visitor<'de>,
+ {
+ let key: u8 = self.key.parse().map_err(serde_core::de::Error::custom)?;
+ key.into_deserializer().deserialize_u8(visitor)
+ }
+
+ fn deserialize_u16<V>(self, visitor: V) -> Result<V::Value, Self::Error>
+ where
+ V: serde_core::de::Visitor<'de>,
+ {
+ let key: u16 = self.key.parse().map_err(serde_core::de::Error::custom)?;
+ key.into_deserializer().deserialize_u16(visitor)
+ }
+
+ fn deserialize_u32<V>(self, visitor: V) -> Result<V::Value, Self::Error>
+ where
+ V: serde_core::de::Visitor<'de>,
+ {
+ let key: u32 = self.key.parse().map_err(serde_core::de::Error::custom)?;
+ key.into_deserializer().deserialize_u32(visitor)
+ }
+
+ fn deserialize_u64<V>(self, visitor: V) -> Result<V::Value, Self::Error>
+ where
+ V: serde_core::de::Visitor<'de>,
+ {
+ let key: u64 = self.key.parse().map_err(serde_core::de::Error::custom)?;
+ key.into_deserializer().deserialize_u64(visitor)
+ }
+
+ fn deserialize_u128<V>(self, visitor: V) -> Result<V::Value, Self::Error>
+ where
+ V: serde_core::de::Visitor<'de>,
+ {
+ let key: u128 = self.key.parse().map_err(serde_core::de::Error::custom)?;
+ key.into_deserializer().deserialize_u128(visitor)
+ }
+
+ fn deserialize_char<V>(self, visitor: V) -> Result<V::Value, Self::Error>
+ where
+ V: serde_core::de::Visitor<'de>,
+ {
+ let key: char = self.key.parse().map_err(serde_core::de::Error::custom)?;
+ key.into_deserializer().deserialize_char(visitor)
+ }
+
+ fn deserialize_enum<V>(
+ self,
+ name: &str,
+ variants: &'static [&'static str],
+ visitor: V,
+ ) -> Result<V::Value, Self::Error>
+ where
+ V: serde_core::de::Visitor<'de>,
+ {
+ let _ = name;
+ let _ = variants;
+ visitor.visit_enum(self)
+ }
+
+ fn deserialize_struct<V>(
+ self,
+ name: &'static str,
+ _fields: &'static [&'static str],
+ visitor: V,
+ ) -> Result<V::Value, Error>
+ where
+ V: serde_core::de::Visitor<'de>,
+ {
+ if serde_spanned::de::is_spanned(name) {
+ if let Some(span) = self.span.clone() {
+ return visitor.visit_map(super::SpannedDeserializer::new(self.key, span));
+ } else {
+ return Err(Error::custom("value is missing a span", None));
+ }
+ }
+ self.deserialize_any(visitor)
+ }
+
+ fn deserialize_newtype_struct<V>(
+ self,
+ _name: &'static str,
+ visitor: V,
+ ) -> Result<V::Value, Error>
+ where
+ V: serde_core::de::Visitor<'de>,
+ {
+ visitor.visit_newtype_struct(self)
+ }
+
+ serde_core::forward_to_deserialize_any! {
+ f32 f64 str string seq
+ bytes byte_buf map option unit
+ ignored_any unit_struct tuple_struct tuple identifier
+ }
+}
+
+impl<'de> serde_core::de::EnumAccess<'de> for KeyDeserializer<'de> {
+ type Error = Error;
+ type Variant = UnitOnly<Self::Error>;
+
+ fn variant_seed<T>(self, seed: T) -> Result<(T::Value, Self::Variant), Self::Error>
+ where
+ T: serde_core::de::DeserializeSeed<'de>,
+ {
+ seed.deserialize(self).map(unit_only)
+ }
+}
+
+pub(crate) struct UnitOnly<E> {
+ marker: core::marker::PhantomData<E>,
+}
+
+fn unit_only<T, E>(t: T) -> (T, UnitOnly<E>) {
+ (
+ t,
+ UnitOnly {
+ marker: core::marker::PhantomData,
+ },
+ )
+}
+
+impl<'de, E> serde_core::de::VariantAccess<'de> for UnitOnly<E>
+where
+ E: serde_core::de::Error,
+{
+ type Error = E;
+
+ fn unit_variant(self) -> Result<(), Self::Error> {
+ Ok(())
+ }
+
+ fn newtype_variant_seed<T>(self, _seed: T) -> Result<T::Value, Self::Error>
+ where
+ T: serde_core::de::DeserializeSeed<'de>,
+ {
+ Err(serde_core::de::Error::invalid_type(
+ serde_core::de::Unexpected::UnitVariant,
+ &"newtype variant",
+ ))
+ }
+
+ fn tuple_variant<V>(self, _len: usize, _visitor: V) -> Result<V::Value, Self::Error>
+ where
+ V: serde_core::de::Visitor<'de>,
+ {
+ Err(serde_core::de::Error::invalid_type(
+ serde_core::de::Unexpected::UnitVariant,
+ &"tuple variant",
+ ))
+ }
+
+ fn struct_variant<V>(
+ self,
+ _fields: &'static [&'static str],
+ _visitor: V,
+ ) -> Result<V::Value, Self::Error>
+ where
+ V: serde_core::de::Visitor<'de>,
+ {
+ Err(serde_core::de::Error::invalid_type(
+ serde_core::de::Unexpected::UnitVariant,
+ &"struct variant",
+ ))
+ }
+}
diff --git a/third_party/rust/toml/src/de/deserializer/mod.rs b/third_party/rust/toml/src/de/deserializer/mod.rs
@@ -0,0 +1,177 @@
+//! Deserializing TOML into Rust structures.
+//!
+//! This module contains all the Serde support for deserializing TOML documents
+//! into Rust structures. Note that some top-level functions here are also
+//! provided at the top of the crate.
+
+mod array;
+mod key;
+mod table;
+mod table_enum;
+mod value;
+
+pub use value::ValueDeserializer;
+
+use crate::de::DeTable;
+use crate::de::DeValue;
+use crate::de::Error;
+use array::ArrayDeserializer;
+use key::KeyDeserializer;
+use serde_spanned::de::SpannedDeserializer;
+use serde_spanned::Spanned;
+use table::TableDeserializer;
+use table_enum::TableEnumDeserializer;
+use toml_datetime::de::DatetimeDeserializer;
+
+/// Deserialization for TOML [documents][crate::Table].
+///
+/// To deserializes TOML values, instead of documents, see [`ValueDeserializer`].
+pub struct Deserializer<'i> {
+ span: core::ops::Range<usize>,
+ root: DeTable<'i>,
+ raw: Option<&'i str>,
+}
+
+impl<'i> Deserializer<'i> {
+ /// Parse a TOML document
+ pub fn parse(raw: &'i str) -> Result<Self, Error> {
+ let root = DeTable::parse(raw)?;
+ let span = root.span();
+ let root = root.into_inner();
+ Ok(Self {
+ span,
+ root,
+ raw: Some(raw),
+ })
+ }
+
+ /// Deprecated, replaced with [`Deserializer::parse`]
+ #[deprecated(since = "0.9.0", note = "replaced with `Deserializer::parse`")]
+ pub fn new(raw: &'i str) -> Result<Self, Error> {
+ Self::parse(raw)
+ }
+
+ fn into_table_de(self) -> ValueDeserializer<'i> {
+ ValueDeserializer::with_parts(DeValue::Table(self.root), self.span)
+ }
+}
+
+impl<'i> From<Spanned<DeTable<'i>>> for Deserializer<'i> {
+ fn from(root: Spanned<DeTable<'i>>) -> Self {
+ let span = root.span();
+ let root = root.into_inner();
+ Self {
+ span,
+ root,
+ raw: None,
+ }
+ }
+}
+
+impl<'de> serde_core::Deserializer<'de> for Deserializer<'de> {
+ type Error = Error;
+
+ fn deserialize_any<V>(self, visitor: V) -> Result<V::Value, Self::Error>
+ where
+ V: serde_core::de::Visitor<'de>,
+ {
+ let raw = self.raw;
+ self.into_table_de()
+ .deserialize_any(visitor)
+ .map_err(|mut e: Self::Error| {
+ e.set_input(raw);
+ e
+ })
+ }
+
+ // `None` is interpreted as a missing field so be sure to implement `Some`
+ // as a present field.
+ fn deserialize_option<V>(self, visitor: V) -> Result<V::Value, Error>
+ where
+ V: serde_core::de::Visitor<'de>,
+ {
+ let raw = self.raw;
+ self.into_table_de()
+ .deserialize_option(visitor)
+ .map_err(|mut e: Self::Error| {
+ e.set_input(raw);
+ e
+ })
+ }
+
+ fn deserialize_newtype_struct<V>(
+ self,
+ name: &'static str,
+ visitor: V,
+ ) -> Result<V::Value, Error>
+ where
+ V: serde_core::de::Visitor<'de>,
+ {
+ let raw = self.raw;
+ self.into_table_de()
+ .deserialize_newtype_struct(name, visitor)
+ .map_err(|mut e: Self::Error| {
+ e.set_input(raw);
+ e
+ })
+ }
+
+ fn deserialize_struct<V>(
+ self,
+ name: &'static str,
+ fields: &'static [&'static str],
+ visitor: V,
+ ) -> Result<V::Value, Error>
+ where
+ V: serde_core::de::Visitor<'de>,
+ {
+ let raw = self.raw;
+ self.into_table_de()
+ .deserialize_struct(name, fields, visitor)
+ .map_err(|mut e: Self::Error| {
+ e.set_input(raw);
+ e
+ })
+ }
+
+ // Called when the type to deserialize is an enum, as opposed to a field in the type.
+ fn deserialize_enum<V>(
+ self,
+ name: &'static str,
+ variants: &'static [&'static str],
+ visitor: V,
+ ) -> Result<V::Value, Error>
+ where
+ V: serde_core::de::Visitor<'de>,
+ {
+ let raw = self.raw;
+ self.into_table_de()
+ .deserialize_enum(name, variants, visitor)
+ .map_err(|mut e: Self::Error| {
+ e.set_input(raw);
+ e
+ })
+ }
+
+ serde_core::forward_to_deserialize_any! {
+ bool u8 u16 u32 u64 i8 i16 i32 i64 f32 f64 char str string seq
+ bytes byte_buf map unit
+ ignored_any unit_struct tuple_struct tuple identifier
+ }
+}
+
+impl<'de> serde_core::de::IntoDeserializer<'de, Error> for Deserializer<'de> {
+ type Deserializer = Self;
+
+ fn into_deserializer(self) -> Self::Deserializer {
+ self
+ }
+}
+
+impl<'de> serde_core::de::IntoDeserializer<'de, Error> for Spanned<DeTable<'de>> {
+ type Deserializer = Deserializer<'de>;
+
+ fn into_deserializer(self) -> Self::Deserializer {
+ Deserializer::from(self)
+ }
+}
diff --git a/third_party/rust/toml/src/de/deserializer/table.rs b/third_party/rust/toml/src/de/deserializer/table.rs
@@ -0,0 +1,216 @@
+use serde_core::de::IntoDeserializer;
+use serde_spanned::Spanned;
+
+use crate::de::DeString;
+use crate::de::DeTable;
+use crate::de::DeValue;
+use crate::de::Error;
+use crate::map::IntoIter;
+
+pub(crate) struct TableDeserializer<'i> {
+ span: core::ops::Range<usize>,
+ items: DeTable<'i>,
+}
+
+impl<'i> TableDeserializer<'i> {
+ pub(crate) fn new(items: DeTable<'i>, span: core::ops::Range<usize>) -> Self {
+ Self { span, items }
+ }
+}
+
+impl<'de> serde_core::Deserializer<'de> for TableDeserializer<'de> {
+ type Error = Error;
+
+ fn deserialize_any<V>(self, visitor: V) -> Result<V::Value, Self::Error>
+ where
+ V: serde_core::de::Visitor<'de>,
+ {
+ visitor.visit_map(TableMapAccess::new(self))
+ }
+
+ // `None` is interpreted as a missing field so be sure to implement `Some`
+ // as a present field.
+ fn deserialize_option<V>(self, visitor: V) -> Result<V::Value, Error>
+ where
+ V: serde_core::de::Visitor<'de>,
+ {
+ visitor.visit_some(self)
+ }
+
+ fn deserialize_newtype_struct<V>(
+ self,
+ _name: &'static str,
+ visitor: V,
+ ) -> Result<V::Value, Error>
+ where
+ V: serde_core::de::Visitor<'de>,
+ {
+ visitor.visit_newtype_struct(self)
+ }
+
+ fn deserialize_struct<V>(
+ self,
+ name: &'static str,
+ _fields: &'static [&'static str],
+ visitor: V,
+ ) -> Result<V::Value, Error>
+ where
+ V: serde_core::de::Visitor<'de>,
+ {
+ if serde_spanned::de::is_spanned(name) {
+ let span = self.span.clone();
+ return visitor.visit_map(super::SpannedDeserializer::new(self, span));
+ }
+
+ self.deserialize_any(visitor)
+ }
+
+ // Called when the type to deserialize is an enum, as opposed to a field in the type.
+ fn deserialize_enum<V>(
+ self,
+ _name: &'static str,
+ _variants: &'static [&'static str],
+ visitor: V,
+ ) -> Result<V::Value, Error>
+ where
+ V: serde_core::de::Visitor<'de>,
+ {
+ if self.items.is_empty() {
+ Err(Error::custom(
+ "wanted exactly 1 element, found 0 elements",
+ Some(self.span),
+ ))
+ } else if self.items.len() != 1 {
+ Err(Error::custom(
+ "wanted exactly 1 element, more than 1 element",
+ Some(self.span),
+ ))
+ } else {
+ visitor.visit_enum(TableMapAccess::new(self))
+ }
+ }
+
+ serde_core::forward_to_deserialize_any! {
+ bool u8 u16 u32 u64 i8 i16 i32 i64 f32 f64 char str string seq
+ bytes byte_buf map unit
+ ignored_any unit_struct tuple_struct tuple identifier
+ }
+}
+
+impl<'de> IntoDeserializer<'de, Error> for TableDeserializer<'de> {
+ type Deserializer = Self;
+
+ fn into_deserializer(self) -> Self::Deserializer {
+ self
+ }
+}
+
+pub(crate) struct TableMapAccess<'i> {
+ iter: IntoIter<Spanned<DeString<'i>>, Spanned<DeValue<'i>>>,
+ span: core::ops::Range<usize>,
+ value: Option<(Spanned<DeString<'i>>, Spanned<DeValue<'i>>)>,
+}
+
+impl<'i> TableMapAccess<'i> {
+ pub(crate) fn new(input: TableDeserializer<'i>) -> Self {
+ Self {
+ iter: input.items.into_iter(),
+ span: input.span,
+ value: None,
+ }
+ }
+}
+
+impl<'de> serde_core::de::MapAccess<'de> for TableMapAccess<'de> {
+ type Error = Error;
+
+ fn next_key_seed<K>(&mut self, seed: K) -> Result<Option<K::Value>, Self::Error>
+ where
+ K: serde_core::de::DeserializeSeed<'de>,
+ {
+ match self.iter.next() {
+ Some((k, v)) => {
+ let key_span = k.span();
+ let ret = seed
+ .deserialize(super::KeyDeserializer::new(
+ k.clone().into_inner(),
+ Some(key_span.clone()),
+ ))
+ .map(Some)
+ .map_err(|mut e: Self::Error| {
+ if e.span().is_none() {
+ e.set_span(Some(key_span));
+ }
+ e
+ });
+ self.value = Some((k, v));
+ ret
+ }
+ None => Ok(None),
+ }
+ }
+
+ fn next_value_seed<V>(&mut self, seed: V) -> Result<V::Value, Self::Error>
+ where
+ V: serde_core::de::DeserializeSeed<'de>,
+ {
+ match self.value.take() {
+ Some((k, v)) => {
+ let span = v.span();
+ seed.deserialize(crate::de::ValueDeserializer::with_parts(
+ v.into_inner(),
+ span.clone(),
+ ))
+ .map_err(|mut e: Self::Error| {
+ if e.span().is_none() {
+ e.set_span(Some(span));
+ }
+ e.add_key(k.into_inner().into_owned());
+ e
+ })
+ }
+ None => {
+ panic!("no more values in next_value_seed, internal error in ValueDeserializer")
+ }
+ }
+ }
+}
+
+impl<'de> serde_core::de::EnumAccess<'de> for TableMapAccess<'de> {
+ type Error = Error;
+ type Variant = super::TableEnumDeserializer<'de>;
+
+ fn variant_seed<V>(mut self, seed: V) -> Result<(V::Value, Self::Variant), Self::Error>
+ where
+ V: serde_core::de::DeserializeSeed<'de>,
+ {
+ let (key, value) = match self.iter.next() {
+ Some(pair) => pair,
+ None => {
+ return Err(Error::custom(
+ "expected table with exactly 1 entry, found empty table",
+ Some(self.span),
+ ));
+ }
+ };
+
+ let key_span = key.span();
+ let val = seed
+ .deserialize(super::KeyDeserializer::new(
+ key.into_inner(),
+ Some(key_span.clone()),
+ ))
+ .map_err(|mut e: Self::Error| {
+ if e.span().is_none() {
+ e.set_span(Some(key_span));
+ }
+ e
+ })?;
+
+ let value_span = value.span();
+ let value = value.into_inner();
+ let variant = super::TableEnumDeserializer::new(value, value_span);
+
+ Ok((val, variant))
+ }
+}
diff --git a/third_party/rust/toml/src/de/deserializer/table_enum.rs b/third_party/rust/toml/src/de/deserializer/table_enum.rs
@@ -0,0 +1,124 @@
+use crate::alloc_prelude::*;
+use crate::de::DeArray;
+use crate::de::DeValue;
+use crate::de::Error;
+
+/// Deserializes table values into enum variants.
+pub(crate) struct TableEnumDeserializer<'i> {
+ value: DeValue<'i>,
+ span: core::ops::Range<usize>,
+}
+
+impl<'i> TableEnumDeserializer<'i> {
+ pub(crate) fn new(value: DeValue<'i>, span: core::ops::Range<usize>) -> Self {
+ TableEnumDeserializer { value, span }
+ }
+}
+
+impl<'de> serde_core::de::VariantAccess<'de> for TableEnumDeserializer<'de> {
+ type Error = Error;
+
+ fn unit_variant(self) -> Result<(), Self::Error> {
+ match self.value {
+ DeValue::Array(values) => {
+ if values.is_empty() {
+ Ok(())
+ } else {
+ Err(Error::custom("expected empty array", Some(self.span)))
+ }
+ }
+ DeValue::Table(values) => {
+ if values.is_empty() {
+ Ok(())
+ } else {
+ Err(Error::custom("expected empty table", Some(self.span)))
+ }
+ }
+ e => Err(Error::custom(
+ format!("expected table, found {}", e.type_str()),
+ Some(self.span),
+ )),
+ }
+ }
+
+ fn newtype_variant_seed<T>(self, seed: T) -> Result<T::Value, Self::Error>
+ where
+ T: serde_core::de::DeserializeSeed<'de>,
+ {
+ seed.deserialize(super::ValueDeserializer::with_parts(self.value, self.span))
+ }
+
+ fn tuple_variant<V>(self, len: usize, visitor: V) -> Result<V::Value, Self::Error>
+ where
+ V: serde_core::de::Visitor<'de>,
+ {
+ match self.value {
+ DeValue::Array(values) => {
+ let values_span = self.span.clone();
+ let tuple_values = values;
+
+ if tuple_values.len() == len {
+ serde_core::de::Deserializer::deserialize_seq(
+ super::ArrayDeserializer::new(tuple_values, values_span),
+ visitor,
+ )
+ } else {
+ Err(Error::custom(
+ format!("expected tuple with length {len}"),
+ Some(values_span),
+ ))
+ }
+ }
+ DeValue::Table(values) => {
+ let values_span = self.span.clone();
+ let tuple_values: Result<DeArray<'_>, _> = values
+ .into_iter()
+ .enumerate()
+ .map(
+ |(index, (key, value))| match key.get_ref().parse::<usize>() {
+ Ok(key_index) if key_index == index => Ok(value),
+ Ok(_) | Err(_) => Err(Error::custom(
+ format!("expected table key `{index}`, but was `{key}`"),
+ Some(key.span()),
+ )),
+ },
+ )
+ .collect();
+ let tuple_values = tuple_values?;
+
+ if tuple_values.len() == len {
+ serde_core::de::Deserializer::deserialize_seq(
+ super::ArrayDeserializer::new(tuple_values, values_span),
+ visitor,
+ )
+ } else {
+ Err(Error::custom(
+ format!("expected tuple with length {len}"),
+ Some(values_span),
+ ))
+ }
+ }
+ e => Err(Error::custom(
+ format!("expected table, found {}", e.type_str()),
+ Some(self.span),
+ )),
+ }
+ }
+
+ fn struct_variant<V>(
+ self,
+ fields: &'static [&'static str],
+ visitor: V,
+ ) -> Result<V::Value, Self::Error>
+ where
+ V: serde_core::de::Visitor<'de>,
+ {
+ serde_core::de::Deserializer::deserialize_struct(
+ super::ValueDeserializer::with_parts(self.value, self.span)
+ .with_struct_key_validation(),
+ "", // TODO: this should be the variant name
+ fields,
+ visitor,
+ )
+ }
+}
diff --git a/third_party/rust/toml/src/de/deserializer/value.rs b/third_party/rust/toml/src/de/deserializer/value.rs
@@ -0,0 +1,301 @@
+use serde_core::de::IntoDeserializer as _;
+use serde_spanned::Spanned;
+
+use super::ArrayDeserializer;
+use super::DatetimeDeserializer;
+use super::TableDeserializer;
+use crate::alloc_prelude::*;
+use crate::de::DeString;
+use crate::de::DeTable;
+use crate::de::DeValue;
+use crate::de::Error;
+
+/// Deserialization implementation for TOML [values][crate::Value].
+///
+/// # Example
+///
+/// ```
+/// # #[cfg(feature = "parse")] {
+/// # #[cfg(feature = "display")] {
+/// use serde::Deserialize;
+///
+/// #[derive(Deserialize)]
+/// struct Config {
+/// title: String,
+/// owner: Owner,
+/// }
+///
+/// #[derive(Deserialize)]
+/// struct Owner {
+/// name: String,
+/// }
+///
+/// let value = r#"{ title = 'TOML Example', owner = { name = 'Lisa' } }"#;
+/// let deserializer = toml::de::ValueDeserializer::parse(value).unwrap();
+/// let config = Config::deserialize(deserializer).unwrap();
+/// assert_eq!(config.title, "TOML Example");
+/// assert_eq!(config.owner.name, "Lisa");
+/// # }
+/// # }
+/// ```
+pub struct ValueDeserializer<'i> {
+ // Byte range of `input` within the original document, used for error spans.
+ span: core::ops::Range<usize>,
+ // The already-parsed value being deserialized.
+ input: DeValue<'i>,
+ // When true, `deserialize_struct` rejects table keys not listed in `fields`.
+ validate_struct_keys: bool,
+}
+
+impl<'i> ValueDeserializer<'i> {
+ /// Parse a TOML value
+ pub fn parse(raw: &'i str) -> Result<Self, Error> {
+ let input = DeValue::parse(raw)?;
+ let span = input.span();
+ let input = input.into_inner();
+ Ok(Self::with_parts(input, span))
+ }
+
+ /// Deprecated, replaced with [`ValueDeserializer::parse`]
+ #[deprecated(since = "0.9.0", note = "replaced with `ValueDeserializer::parse`")]
+ pub fn new(raw: &'i str) -> Result<Self, Error> {
+ Self::parse(raw)
+ }
+
+ // Build a deserializer from an already-parsed value and its source span.
+ pub(crate) fn with_parts(input: DeValue<'i>, span: core::ops::Range<usize>) -> Self {
+ Self {
+ input,
+ span,
+ validate_struct_keys: false,
+ }
+ }
+
+ // Enable strict key checking for struct deserialization (see
+ // `validate_struct_keys` below); used for struct enum variants.
+ pub(crate) fn with_struct_key_validation(mut self) -> Self {
+ self.validate_struct_keys = true;
+ self
+ }
+}
+
+impl<'i> From<Spanned<DeValue<'i>>> for ValueDeserializer<'i> {
+ fn from(root: Spanned<DeValue<'i>>) -> Self {
+ let span = root.span();
+ let root = root.into_inner();
+ Self::with_parts(root, span)
+ }
+}
+
+impl<'de> serde_core::Deserializer<'de> for ValueDeserializer<'de> {
+ type Error = Error;
+
+ fn deserialize_any<V>(self, visitor: V) -> Result<V::Value, Self::Error>
+ where
+ V: serde_core::de::Visitor<'de>,
+ {
+ let span = self.span.clone();
+ match self.input {
+ // Borrowed strings are passed through without copying.
+ DeValue::String(DeString::Owned(v)) => visitor.visit_string(v),
+ DeValue::String(DeString::Borrowed(v)) => visitor.visit_str(v),
+ DeValue::Integer(v) => {
+ // Try progressively wider integer types; `DeValue` keeps the
+ // digits as text, so values beyond `i64`/`u64` are still
+ // representable via the 128-bit visitors.
+ if let Some(v) = v.to_i64() {
+ visitor.visit_i64(v)
+ } else if let Some(v) = v.to_u64() {
+ visitor.visit_u64(v)
+ } else if let Some(v) = v.to_i128() {
+ visitor.visit_i128(v)
+ } else if let Some(v) = v.to_u128() {
+ visitor.visit_u128(v)
+ } else {
+ Err(Error::custom("integer number overflowed", None))
+ }
+ }
+ DeValue::Float(v) => {
+ if let Some(v) = v.to_f64() {
+ visitor.visit_f64(v)
+ } else {
+ Err(Error::custom("floating-point number overflowed", None))
+ }
+ }
+ DeValue::Boolean(v) => visitor.visit_bool(v),
+ // Datetimes are surfaced as a special single-field map.
+ DeValue::Datetime(v) => visitor.visit_map(DatetimeDeserializer::new(v)),
+ DeValue::Array(v) => ArrayDeserializer::new(v, span.clone()).deserialize_any(visitor),
+ DeValue::Table(v) => TableDeserializer::new(v, span.clone()).deserialize_any(visitor),
+ }
+ .map_err(|mut e: Self::Error| {
+ // Attach this value's span to errors that don't already carry one.
+ if e.span().is_none() {
+ e.set_span(Some(span));
+ }
+ e
+ })
+ }
+
+ fn deserialize_u128<V>(self, visitor: V) -> Result<V::Value, Error>
+ where
+ V: serde_core::de::Visitor<'de>,
+ {
+ self.deserialize_any(visitor)
+ }
+
+ fn deserialize_i128<V>(self, visitor: V) -> Result<V::Value, Error>
+ where
+ V: serde_core::de::Visitor<'de>,
+ {
+ self.deserialize_any(visitor)
+ }
+
+ // `None` is interpreted as a missing field so be sure to implement `Some`
+ // as a present field.
+ fn deserialize_option<V>(self, visitor: V) -> Result<V::Value, Error>
+ where
+ V: serde_core::de::Visitor<'de>,
+ {
+ let span = self.span.clone();
+ visitor.visit_some(self).map_err(|mut e: Self::Error| {
+ if e.span().is_none() {
+ e.set_span(Some(span));
+ }
+ e
+ })
+ }
+
+ fn deserialize_newtype_struct<V>(
+ self,
+ _name: &'static str,
+ visitor: V,
+ ) -> Result<V::Value, Error>
+ where
+ V: serde_core::de::Visitor<'de>,
+ {
+ let span = self.span.clone();
+ visitor
+ .visit_newtype_struct(self)
+ .map_err(|mut e: Self::Error| {
+ if e.span().is_none() {
+ e.set_span(Some(span));
+ }
+ e
+ })
+ }
+
+ fn deserialize_struct<V>(
+ self,
+ name: &'static str,
+ fields: &'static [&'static str],
+ visitor: V,
+ ) -> Result<V::Value, Error>
+ where
+ V: serde_core::de::Visitor<'de>,
+ {
+ // `serde_spanned::Spanned` is detected by its magic struct name and is
+ // fed the value plus its source span.
+ if serde_spanned::de::is_spanned(name) {
+ let span = self.span.clone();
+ return visitor.visit_map(super::SpannedDeserializer::new(self, span));
+ }
+
+ // Likewise, `toml_datetime::Datetime` is detected by name so a
+ // datetime value can deserialize into it directly.
+ if toml_datetime::de::is_datetime(name) {
+ let span = self.span.clone();
+ if let DeValue::Datetime(d) = self.input {
+ return visitor.visit_map(DatetimeDeserializer::new(d)).map_err(
+ |mut e: Self::Error| {
+ if e.span().is_none() {
+ e.set_span(Some(span));
+ }
+ e
+ },
+ );
+ }
+ }
+
+ // Opt-in strictness (struct enum variants): reject unknown table keys.
+ if self.validate_struct_keys {
+ let span = self.span.clone();
+ match &self.input {
+ DeValue::Table(values) => validate_struct_keys(values, fields),
+ _ => Ok(()),
+ }
+ .map_err(|mut e: Self::Error| {
+ if e.span().is_none() {
+ e.set_span(Some(span));
+ }
+ e
+ })?;
+ }
+
+ self.deserialize_any(visitor)
+ }
+
+ // Called when the type to deserialize is an enum, as opposed to a field in the type.
+ fn deserialize_enum<V>(
+ self,
+ name: &'static str,
+ variants: &'static [&'static str],
+ visitor: V,
+ ) -> Result<V::Value, Error>
+ where
+ V: serde_core::de::Visitor<'de>,
+ {
+ let span = self.span.clone();
+ match self.input {
+ // A bare string selects a unit variant by name.
+ DeValue::String(v) => visitor.visit_enum(v.into_deserializer()),
+ // A table carries the variant name as key and its payload as value.
+ DeValue::Table(v) => {
+ TableDeserializer::new(v, span.clone()).deserialize_enum(name, variants, visitor)
+ }
+ _ => Err(Error::custom("wanted string or table", Some(span.clone()))),
+ }
+ .map_err(|mut e: Self::Error| {
+ if e.span().is_none() {
+ e.set_span(Some(span));
+ }
+ e
+ })
+ }
+
+ serde_core::forward_to_deserialize_any! {
+ bool u8 u16 u32 u64 i8 i16 i32 i64 f32 f64 char str string seq
+ bytes byte_buf map unit
+ ignored_any unit_struct tuple_struct tuple identifier
+ }
+}
+
+impl<'de> serde_core::de::IntoDeserializer<'de, Error> for ValueDeserializer<'de> {
+ type Deserializer = Self;
+
+ fn into_deserializer(self) -> Self::Deserializer {
+ self
+ }
+}
+
+impl<'de> serde_core::de::IntoDeserializer<'de, Error> for Spanned<DeValue<'de>> {
+ type Deserializer = ValueDeserializer<'de>;
+
+ fn into_deserializer(self) -> Self::Deserializer {
+ ValueDeserializer::from(self)
+ }
+}
+
+/// Errors if `table` contains any key not listed in `fields`.
+///
+/// The error message names every unexpected key, and its span points at the
+/// first one.
+pub(crate) fn validate_struct_keys(
+ table: &DeTable<'_>,
+ fields: &'static [&'static str],
+) -> Result<(), Error> {
+ let extra_fields = table
+ .keys()
+ .filter_map(|key| {
+ if !fields.contains(&key.get_ref().as_ref()) {
+ Some(key.clone())
+ } else {
+ None
+ }
+ })
+ .collect::<Vec<_>>();
+
+ if extra_fields.is_empty() {
+ Ok(())
+ } else {
+ Err(Error::custom(
+ format!(
+ "unexpected keys in table: {}, available keys: {}",
+ extra_fields
+ .iter()
+ .map(|k| k.get_ref().as_ref())
+ .collect::<Vec<_>>()
+ .join(", "),
+ fields.join(", "),
+ ),
+ Some(extra_fields[0].span()),
+ ))
+ }
+}
diff --git a/third_party/rust/toml/src/de/error.rs b/third_party/rust/toml/src/de/error.rs
@@ -0,0 +1,315 @@
+use crate::alloc_prelude::*;
+
+/// Errors that can occur when deserializing a type.
+#[derive(Debug, Clone, Eq, PartialEq, Hash)]
+pub struct Error {
+ // Human-readable description of what went wrong.
+ message: String,
+ // The source document, when available; enables the annotated snippet in
+ // the `Display` impl. Shared via `Arc` so clones are cheap.
+ input: Option<alloc::sync::Arc<str>>,
+ // Key path (outermost first) to the value that failed, shown when no
+ // source snippet can be rendered.
+ keys: Vec<String>,
+ // Byte range of the offending text within `input`.
+ span: Option<core::ops::Range<usize>>,
+}
+
+impl Error {
+ // Convert a low-level parser error into our error type, flattening the
+ // parser's "expected" alternatives into a single message string.
+ #[cfg(feature = "parse")]
+ pub(crate) fn new(input: alloc::sync::Arc<str>, error: toml_parser::ParseError) -> Self {
+ let mut message = String::new();
+ message.push_str(error.description());
+ if let Some(expected) = error.expected() {
+ message.push_str(", expected ");
+ if expected.is_empty() {
+ message.push_str("nothing");
+ } else {
+ for (i, expected) in expected.iter().enumerate() {
+ if i != 0 {
+ message.push_str(", ");
+ }
+ match expected {
+ toml_parser::Expected::Literal(desc) => {
+ message.push_str(&render_literal(desc));
+ }
+ toml_parser::Expected::Description(desc) => message.push_str(desc),
+ // Future-proofing against new `Expected` variants.
+ _ => message.push_str("etc"),
+ }
+ }
+ }
+ }
+
+ let span = error.unexpected().map(|span| span.start()..span.end());
+
+ Self {
+ message,
+ input: Some(input),
+ keys: Vec::new(),
+ span,
+ }
+ }
+
+ // Build an error from an arbitrary message and optional span; no source
+ // document is attached (callers may add one via `set_input`).
+ pub(crate) fn custom<T>(msg: T, span: Option<core::ops::Range<usize>>) -> Self
+ where
+ T: core::fmt::Display,
+ {
+ Self {
+ message: msg.to_string(),
+ input: None,
+ keys: Vec::new(),
+ span,
+ }
+ }
+
+ // Prepend a key as the error propagates outward, so `keys` ends up
+ // ordered from outermost to innermost.
+ pub(crate) fn add_key(&mut self, key: String) {
+ self.keys.insert(0, key);
+ }
+
+ /// What went wrong
+ pub fn message(&self) -> &str {
+ &self.message
+ }
+
+ /// The start/end index into the original document where the error occurred
+ pub fn span(&self) -> Option<core::ops::Range<usize>> {
+ self.span.clone()
+ }
+
+ pub(crate) fn set_span(&mut self, span: Option<core::ops::Range<usize>>) {
+ self.span = span;
+ }
+
+ /// Provide the encoded TOML the error applies to
+ pub fn set_input(&mut self, input: Option<&str>) {
+ self.input = input.map(|s| s.into());
+ }
+}
+
+#[cfg(feature = "serde")]
+impl serde_core::de::Error for Error {
+ fn custom<T>(msg: T) -> Self
+ where
+ T: core::fmt::Display,
+ {
+ Self::custom(msg.to_string(), None)
+ }
+}
+
+// Render an expected literal for the error message: special-cases newline and
+// backtick, escapes control characters, and backtick-quotes everything else.
+fn render_literal(literal: &str) -> String {
+ match literal {
+ "\n" => "newline".to_owned(),
+ "`" => "'`'".to_owned(),
+ s if s.chars().all(|c| c.is_ascii_control()) => {
+ format!("`{}`", s.escape_debug())
+ }
+ s => format!("`{s}`"),
+ }
+}
+
+/// Displays a TOML parse error
+///
+/// # Example
+///
+/// TOML parse error at line 1, column 10
+/// |
+/// 1 | 00:32:00.a999999
+/// | ^
+/// Unexpected `a`
+/// Expected `digit`
+/// While parsing a Time
+/// While parsing a Date-Time
+impl core::fmt::Display for Error {
+ fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
+ // `context` records whether we rendered the annotated source snippet;
+ // if so, the key path would be redundant and is omitted below.
+ let mut context = false;
+ if let (Some(input), Some(span)) = (&self.input, self.span()) {
+ context = true;
+
+ let (line, column) = translate_position(input.as_bytes(), span.start);
+ let line_num = line + 1;
+ let col_num = column + 1;
+ // Width of the line-number gutter, e.g. 3 for line 100.
+ let gutter = line_num.to_string().len();
+ let content = input.split('\n').nth(line).expect("valid line number");
+ let highlight_len = span.end - span.start;
+ // Allow highlight to go one past the line
+ let highlight_len = highlight_len.min(content.len().saturating_sub(column));
+
+ writeln!(f, "TOML parse error at line {line_num}, column {col_num}")?;
+ // |
+ for _ in 0..=gutter {
+ write!(f, " ")?;
+ }
+ writeln!(f, "|")?;
+
+ // 1 | 00:32:00.a999999
+ write!(f, "{line_num} | ")?;
+ writeln!(f, "{content}")?;
+
+ // | ^
+ for _ in 0..=gutter {
+ write!(f, " ")?;
+ }
+ write!(f, "|")?;
+ for _ in 0..=column {
+ write!(f, " ")?;
+ }
+ // The span will be empty at eof, so we need to make sure we always print at least
+ // one `^`
+ write!(f, "^")?;
+ for _ in 1..highlight_len {
+ write!(f, "^")?;
+ }
+ writeln!(f)?;
+ }
+ writeln!(f, "{}", self.message)?;
+ if !context && !self.keys.is_empty() {
+ writeln!(f, "in `{}`", self.keys.join("."))?;
+ }
+
+ Ok(())
+ }
+}
+
+#[cfg(feature = "std")]
+impl std::error::Error for Error {}
+#[cfg(not(feature = "std"))]
+#[cfg(feature = "serde")]
+impl serde_core::de::StdError for Error {}
+
+// Map a byte `index` into `input` to a 0-indexed (line, column) pair.
+//
+// The column is counted in `char`s when the line is valid UTF-8, falling back
+// to a byte offset otherwise. An `index` past the end of `input` is clamped to
+// the last byte and the overshoot is added back onto the column, so
+// past-the-end positions (e.g. an error at EOF) still render sensibly.
+fn translate_position(input: &[u8], index: usize) -> (usize, usize) {
+ if input.is_empty() {
+ return (0, index);
+ }
+
+ let safe_index = index.min(input.len() - 1);
+ let column_offset = index - safe_index;
+ let index = safe_index;
+
+ // Scan backwards for the newline preceding `index` to find the line start.
+ let nl = input[0..index]
+ .iter()
+ .rev()
+ .enumerate()
+ .find(|(_, b)| **b == b'\n')
+ .map(|(nl, _)| index - nl - 1);
+ let line_start = match nl {
+ Some(nl) => nl + 1,
+ None => 0,
+ };
+ let line = input[0..line_start].iter().filter(|b| **b == b'\n').count();
+
+ let column = core::str::from_utf8(&input[line_start..=index])
+ .map(|s| s.chars().count() - 1)
+ .unwrap_or_else(|_| index - line_start);
+ let column = column + column_offset;
+
+ (line, column)
+}
+
+/// Adapter that collects `toml_parser` errors into a sink `S`: either
+/// `Option<Error>` (keep only the first error) or `Vec<Error>` (keep all),
+/// see the `ErrorSink` impls below.
+#[cfg(feature = "parse")]
+pub(crate) struct TomlSink<'i, S> {
+ source: toml_parser::Source<'i>,
+ // Lazily-created shared copy of the source text, allocated only once the
+ // first error is reported.
+ input: Option<alloc::sync::Arc<str>>,
+ sink: S,
+}
+
+#[cfg(feature = "parse")]
+impl<'i, S: Default> TomlSink<'i, S> {
+ pub(crate) fn new(source: toml_parser::Source<'i>) -> Self {
+ Self {
+ source,
+ input: None,
+ sink: Default::default(),
+ }
+ }
+
+ pub(crate) fn into_inner(self) -> S {
+ self.sink
+ }
+}
+
+#[cfg(feature = "parse")]
+impl<'i> toml_parser::ErrorSink for TomlSink<'i, Option<Error>> {
+ // First-error-wins: later reports are ignored once the sink is occupied.
+ fn report_error(&mut self, error: toml_parser::ParseError) {
+ if self.sink.is_none() {
+ let input = self
+ .input
+ .get_or_insert_with(|| alloc::sync::Arc::from(self.source.input()));
+ let error = Error::new(input.clone(), error);
+ self.sink = Some(error);
+ }
+ }
+}
+
+#[cfg(feature = "parse")]
+impl<'i> toml_parser::ErrorSink for TomlSink<'i, Vec<Error>> {
+ // Collect every reported error, in order.
+ fn report_error(&mut self, error: toml_parser::ParseError) {
+ let input = self
+ .input
+ .get_or_insert_with(|| alloc::sync::Arc::from(self.source.input()));
+ let error = Error::new(input.clone(), error);
+ self.sink.push(error);
+ }
+}
+
+#[cfg(test)]
+mod test_translate_position {
+ use super::*;
+
+ #[test]
+ fn empty() {
+ let input = b"";
+ let index = 0;
+ let position = translate_position(&input[..], index);
+ assert_eq!(position, (0, 0));
+ }
+
+ #[test]
+ fn start() {
+ let input = b"Hello";
+ let index = 0;
+ let position = translate_position(&input[..], index);
+ assert_eq!(position, (0, 0));
+ }
+
+ #[test]
+ fn end() {
+ let input = b"Hello";
+ let index = input.len() - 1;
+ let position = translate_position(&input[..], index);
+ assert_eq!(position, (0, input.len() - 1));
+ }
+
+ #[test]
+ fn after() {
+ let input = b"Hello";
+ let index = input.len();
+ let position = translate_position(&input[..], index);
+ assert_eq!(position, (0, input.len()));
+ }
+
+ #[test]
+ fn first_line() {
+ let input = b"Hello\nWorld\n";
+ let index = 2;
+ let position = translate_position(&input[..], index);
+ assert_eq!(position, (0, 2));
+ }
+
+ #[test]
+ fn end_of_line() {
+ let input = b"Hello\nWorld\n";
+ let index = 5;
+ let position = translate_position(&input[..], index);
+ assert_eq!(position, (0, 5));
+ }
+
+ #[test]
+ fn start_of_second_line() {
+ let input = b"Hello\nWorld\n";
+ let index = 6;
+ let position = translate_position(&input[..], index);
+ assert_eq!(position, (1, 0));
+ }
+
+ #[test]
+ fn second_line() {
+ let input = b"Hello\nWorld\n";
+ let index = 8;
+ let position = translate_position(&input[..], index);
+ assert_eq!(position, (1, 2));
+ }
+}
diff --git a/third_party/rust/toml/src/de/mod.rs b/third_party/rust/toml/src/de/mod.rs
@@ -0,0 +1,93 @@
+//! Deserializing TOML into Rust structures.
+//!
+//! This module contains all the Serde support for deserializing TOML documents
+//! into Rust structures. Note that some top-level functions here are also
+//! provided at the top of the crate.
+
+// Serde-driven deserializers require both the parser and serde support;
+// the parser-level `De*` types only need the `parse` feature.
+#[cfg(feature = "parse")]
+#[cfg(feature = "serde")]
+mod deserializer;
+mod error;
+#[cfg(feature = "parse")]
+mod parser;
+
+#[cfg(feature = "parse")]
+#[cfg(feature = "serde")]
+pub use deserializer::Deserializer;
+#[cfg(feature = "parse")]
+#[cfg(feature = "serde")]
+pub use deserializer::ValueDeserializer;
+#[cfg(feature = "parse")]
+pub use parser::DeArray;
+#[cfg(feature = "parse")]
+pub use parser::DeFloat;
+#[cfg(feature = "parse")]
+pub use parser::DeInteger;
+#[cfg(feature = "parse")]
+pub use parser::DeString;
+#[cfg(feature = "parse")]
+pub use parser::DeTable;
+#[cfg(feature = "parse")]
+pub use parser::DeValue;
+
+pub use error::Error;
+
+use crate::alloc_prelude::*;
+
+/// Deserializes a string into a type.
+///
+/// This function will attempt to interpret `s` as a TOML document and
+/// deserialize `T` from the document.
+///
+/// To deserialize TOML values, instead of documents, see [`ValueDeserializer`].
+///
+/// # Examples
+///
+/// ```
+/// use serde::Deserialize;
+///
+/// #[derive(Deserialize)]
+/// struct Config {
+/// title: String,
+/// owner: Owner,
+/// }
+///
+/// #[derive(Deserialize)]
+/// struct Owner {
+/// name: String,
+/// }
+///
+/// let config: Config = toml::from_str(r#"
+/// title = 'TOML Example'
+///
+/// [owner]
+/// name = 'Lisa'
+/// "#).unwrap();
+///
+/// assert_eq!(config.title, "TOML Example");
+/// assert_eq!(config.owner.name, "Lisa");
+/// ```
+#[cfg(feature = "parse")]
+#[cfg(feature = "serde")]
+pub fn from_str<'de, T>(s: &'de str) -> Result<T, Error>
+where
+ T: serde_core::de::Deserialize<'de>,
+{
+ T::deserialize(Deserializer::parse(s)?)
+}
+
+/// Deserializes bytes into a type.
+///
+/// This function will attempt to interpret `s` as a TOML document and
+/// deserialize `T` from the document.
+///
+/// To deserialize TOML values, instead of documents, see [`ValueDeserializer`].
+#[cfg(feature = "parse")]
+#[cfg(feature = "serde")]
+pub fn from_slice<'de, T>(s: &'de [u8]) -> Result<T, Error>
+where
+ T: serde_core::de::Deserialize<'de>,
+{
+ // Validate UTF-8 up front, then defer to the string-based entry point.
+ let s = core::str::from_utf8(s).map_err(|e| Error::custom(e.to_string(), None))?;
+ from_str(s)
+}
diff --git a/third_party/rust/toml/src/de/parser/array.rs b/third_party/rust/toml/src/de/parser/array.rs
@@ -0,0 +1,124 @@
+use serde_spanned::Spanned;
+
+use crate::de::parser::inline_table::on_inline_table;
+use crate::de::parser::value::on_scalar;
+use crate::de::{DeArray, DeValue};
+
+use crate::de::parser::prelude::*;
+
+/// ```bnf
+/// ;; Array
+///
+/// array = array-open array-values array-close
+/// array-values = ws-comment-newline val ws-comment-newline array-sep array-values
+/// array-values =/ ws-comment-newline val ws-comment-newline [ array-sep ]
+/// ```
+///
+/// Consumes parser events following `open_event` (the `[` token) up to and
+/// including the matching `ArrayClose`, building a spanned `DeValue::Array`.
+pub(crate) fn on_array<'i>(
+ open_event: &toml_parser::parser::Event,
+ input: &mut Input<'_>,
+ source: toml_parser::Source<'i>,
+ errors: &mut dyn ErrorSink,
+) -> Spanned<DeValue<'i>> {
+ #[cfg(feature = "debug")]
+ let _scope = TraceScope::new("array::on_array");
+ let mut result = DeArray::new();
+ // Track the span of the last consumed event so the array's overall span is
+ // correct even when we bail out before seeing `ArrayClose`.
+ let mut close_span = open_event.span();
+
+ let mut state = State::default();
+ state.open(open_event);
+ while let Some(event) = input.next_token() {
+ close_span = event.span();
+ match event.kind() {
+ // Structural events that cannot occur inside an array: abort and
+ // let the caller resume from here.
+ EventKind::StdTableOpen
+ | EventKind::ArrayTableOpen
+ | EventKind::InlineTableClose
+ | EventKind::SimpleKey
+ | EventKind::KeySep
+ | EventKind::KeyValSep
+ | EventKind::StdTableClose
+ | EventKind::ArrayTableClose => {
+ #[cfg(feature = "debug")]
+ trace(
+ &format!("unexpected {event:?}"),
+ anstyle::AnsiColor::Red.on_default(),
+ );
+ break;
+ }
+ // Error tokens are skipped; presumably the parser has already
+ // reported them through `errors` — TODO confirm against toml_parser.
+ EventKind::Error => {
+ #[cfg(feature = "debug")]
+ trace(
+ &format!("unexpected {event:?}"),
+ anstyle::AnsiColor::Red.on_default(),
+ );
+ continue;
+ }
+ // Nested values recurse into their respective handlers.
+ EventKind::InlineTableOpen => {
+ let value = on_inline_table(event, input, source, errors);
+ state.capture_value(event, value);
+ }
+ EventKind::ArrayOpen => {
+ let value = on_array(event, input, source, errors);
+ state.capture_value(event, value);
+ }
+ EventKind::Scalar => {
+ let value = on_scalar(event, source, errors);
+ state.capture_value(event, value);
+ }
+ // A `,` commits the pending value to the array.
+ EventKind::ValueSep => {
+ state.finish_value(event, &mut result);
+ state.sep_value(event);
+ }
+ EventKind::Whitespace | EventKind::Comment | EventKind::Newline => {
+ state.whitespace(event);
+ }
+ // `]` commits any trailing value (no trailing `,` required) and ends
+ // the loop.
+ EventKind::ArrayClose => {
+ state.finish_value(event, &mut result);
+ state.close(open_event, event, &mut result);
+ break;
+ }
+ }
+ }
+
+ let span = open_event.span().start()..close_span.end();
+
+ Spanned::new(span, DeValue::Array(result))
+}
+
+/// Per-array parse state: the value awaiting a separator/close, and where any
+/// trailing whitespace after the last separator begins.
+#[derive(Default)]
+struct State<'i> {
+ current_value: Option<Spanned<DeValue<'i>>>,
+ trailing_start: Option<usize>,
+}
+
+impl<'i> State<'i> {
+ fn open(&mut self, _open_event: &toml_parser::parser::Event) {}
+
+ fn whitespace(&mut self, _event: &toml_parser::parser::Event) {}
+
+ // Record a newly-parsed element; it is pushed into the array only when the
+ // following separator or close is seen (`finish_value`).
+ fn capture_value(&mut self, _event: &toml_parser::parser::Event, value: Spanned<DeValue<'i>>) {
+ self.trailing_start = None;
+ self.current_value = Some(value);
+ }
+
+ fn finish_value(&mut self, _event: &toml_parser::parser::Event, result: &mut DeArray<'i>) {
+ #[cfg(feature = "debug")]
+ let _scope = TraceScope::new("array::finish_value");
+ if let Some(value) = self.current_value.take() {
+ result.push(value);
+ }
+ }
+
+ fn sep_value(&mut self, event: &toml_parser::parser::Event) {
+ self.trailing_start = Some(event.span().end());
+ }
+
+ fn close(
+ &mut self,
+ _open_event: &toml_parser::parser::Event,
+ _close_event: &toml_parser::parser::Event,
+ _result: &mut DeArray<'i>,
+ ) {
+ #[cfg(feature = "debug")]
+ let _scope = TraceScope::new("array::close");
+ }
+}
diff --git a/third_party/rust/toml/src/de/parser/dearray.rs b/third_party/rust/toml/src/de/parser/dearray.rs
@@ -0,0 +1,140 @@
+use serde_spanned::Spanned;
+
+use crate::alloc_prelude::*;
+use crate::de::DeValue;
+
+/// Type representing a TOML array, payload of the `DeValue::Array` variant
+#[derive(Clone)]
+pub struct DeArray<'i> {
+ items: Vec<Spanned<DeValue<'i>>>,
+ // NOTE(review): presumably marks arrays built from `[[...]]` array-of-tables
+ // syntax as opposed to inline `[...]` arrays; the setter is called outside
+ // this file — confirm against the table parser.
+ array_of_tables: bool,
+}
+
+impl<'i> DeArray<'i> {
+ /// Constructs a new, empty `DeArray`.
+ ///
+ /// This will not allocate until elements are pushed onto it.
+ pub const fn new() -> Self {
+ Self {
+ items: Vec::new(),
+ array_of_tables: false,
+ }
+ }
+
+ /// Appends an element to the back of a collection.
+ ///
+ /// # Panics
+ ///
+ /// Panics if the new capacity exceeds `isize::MAX` _bytes_.
+ pub fn push(&mut self, value: Spanned<DeValue<'i>>) {
+ self.items.push(value);
+ }
+}
+
+impl DeArray<'_> {
+ pub(crate) fn is_array_of_tables(&self) -> bool {
+ self.array_of_tables
+ }
+
+ pub(crate) fn set_array_of_tables(&mut self, yes: bool) {
+ self.array_of_tables = yes;
+ }
+}
+
+// The remaining impls make `DeArray` behave like a `Vec`/slice of spanned
+// values: deref, borrowing, indexing, iteration, and collection.
+
+impl<'i> core::ops::Deref for DeArray<'i> {
+ type Target = [Spanned<DeValue<'i>>];
+
+ #[inline]
+ fn deref(&self) -> &[Spanned<DeValue<'i>>] {
+ self.items.as_slice()
+ }
+}
+
+impl<'i> core::ops::DerefMut for DeArray<'i> {
+ #[inline]
+ fn deref_mut(&mut self) -> &mut [Spanned<DeValue<'i>>] {
+ self.items.as_mut_slice()
+ }
+}
+
+impl<'i> AsRef<[Spanned<DeValue<'i>>]> for DeArray<'i> {
+ fn as_ref(&self) -> &[Spanned<DeValue<'i>>] {
+ &self.items
+ }
+}
+
+impl<'i> AsMut<[Spanned<DeValue<'i>>]> for DeArray<'i> {
+ fn as_mut(&mut self) -> &mut [Spanned<DeValue<'i>>] {
+ &mut self.items
+ }
+}
+
+impl<'i> core::borrow::Borrow<[Spanned<DeValue<'i>>]> for DeArray<'i> {
+ fn borrow(&self) -> &[Spanned<DeValue<'i>>] {
+ &self.items[..]
+ }
+}
+
+impl<'i> core::borrow::BorrowMut<[Spanned<DeValue<'i>>]> for DeArray<'i> {
+ fn borrow_mut(&mut self) -> &mut [Spanned<DeValue<'i>>] {
+ &mut self.items[..]
+ }
+}
+
+impl<'i, I: core::slice::SliceIndex<[Spanned<DeValue<'i>>]>> core::ops::Index<I> for DeArray<'i> {
+ type Output = I::Output;
+
+ #[inline]
+ fn index(&self, index: I) -> &Self::Output {
+ self.items.index(index)
+ }
+}
+
+impl<'a, 'i> IntoIterator for &'a DeArray<'i> {
+ type Item = &'a Spanned<DeValue<'i>>;
+
+ type IntoIter = core::slice::Iter<'a, Spanned<DeValue<'i>>>;
+
+ fn into_iter(self) -> Self::IntoIter {
+ self.iter()
+ }
+}
+
+impl<'i> IntoIterator for DeArray<'i> {
+ type Item = Spanned<DeValue<'i>>;
+
+ type IntoIter = alloc::vec::IntoIter<Spanned<DeValue<'i>>>;
+
+ #[inline]
+ fn into_iter(self) -> Self::IntoIter {
+ self.items.into_iter()
+ }
+}
+
+impl<'i> FromIterator<Spanned<DeValue<'i>>> for DeArray<'i> {
+ #[inline]
+ #[track_caller]
+ fn from_iter<I: IntoIterator<Item = Spanned<DeValue<'i>>>>(iter: I) -> Self {
+ Self {
+ items: iter.into_iter().collect(),
+ array_of_tables: false,
+ }
+ }
+}
+
+impl Default for DeArray<'static> {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ items: Default::default(),
+ array_of_tables: false,
+ }
+ }
+}
+
+impl core::fmt::Debug for DeArray<'_> {
+ // Debug output hides the `array_of_tables` flag and shows only the items.
+ #[inline]
+ fn fmt(&self, formatter: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
+ self.items.fmt(formatter)
+ }
+}
diff --git a/third_party/rust/toml/src/de/parser/debug.rs b/third_party/rust/toml/src/de/parser/debug.rs
@@ -0,0 +1,98 @@
+/// RAII tracer for the `debug` feature: prints `> text` on construction and
+/// `< text` on drop, indenting nested scopes via the global `DEBUG_DEPTH`.
+pub(crate) struct TraceScope {
+ text: String,
+ style: anstyle::Style,
+ guard: DebugDepthGuard,
+}
+
+impl TraceScope {
+ pub(crate) fn new(text: impl core::fmt::Display) -> Self {
+ let text = text.to_string();
+ let style = anstyle::Style::new();
+ trace(&format!("> {text}"), style);
+ Self {
+ text,
+ style,
+ guard: DEBUG_DEPTH.scoped(),
+ }
+ }
+}
+
+impl Drop for TraceScope {
+ fn drop(&mut self) {
+ let text = &self.text;
+ let style = self.style;
+ // Decrement the depth *before* printing so the closing line aligns
+ // with the opening one (take() hands the decrement to a temporary
+ // guard that is dropped immediately).
+ drop(self.guard.take());
+ trace(&format!("< {text}"), style);
+ }
+}
+
+// Print one indented, styled trace line at the current nesting depth.
+pub(crate) fn trace(text: &str, style: anstyle::Style) {
+ #![allow(unexpected_cfgs)] // HACK: fixed in newer versions
+ let depth = DEBUG_DEPTH.depth();
+ anstream::eprintln!("{:depth$}{style}{text}{style:#}", "");
+}
+
+/// Global nesting-depth counter backing the trace indentation.
+pub(crate) struct DebugDepth(core::sync::atomic::AtomicUsize);
+
+impl DebugDepth {
+ // Increment the depth for the lifetime of the returned guard.
+ pub(crate) fn scoped(&self) -> DebugDepthGuard {
+ DebugDepthGuard::new()
+ }
+
+ pub(crate) fn enter_unchecked(&self) -> usize {
+ self.0.fetch_add(1, core::sync::atomic::Ordering::SeqCst)
+ }
+
+ pub(crate) fn exit_unchecked(&self) {
+ let _ = self.0.fetch_sub(1, core::sync::atomic::Ordering::SeqCst);
+ }
+
+ pub(crate) fn depth(&self) -> usize {
+ self.0.load(core::sync::atomic::Ordering::SeqCst)
+ }
+}
+
+static DEBUG_DEPTH: DebugDepth = DebugDepth(core::sync::atomic::AtomicUsize::new(0));
+
+/// Guard that decrements `DEBUG_DEPTH` exactly once on drop; `inc` records
+/// whether this particular guard still owns the pending decrement.
+pub(crate) struct DebugDepthGuard {
+ depth: usize,
+ inc: bool,
+}
+
+impl DebugDepthGuard {
+ pub(crate) fn new() -> Self {
+ let depth = DEBUG_DEPTH.enter_unchecked();
+ Self { depth, inc: true }
+ }
+
+ // Transfer the decrement responsibility to the returned guard; `self`
+ // will no longer decrement when dropped.
+ fn take(&mut self) -> Self {
+ let depth = self.depth;
+ let inc = self.inc;
+ self.inc = false;
+ Self { depth, inc }
+ }
+}
+
+impl Drop for DebugDepthGuard {
+ fn drop(&mut self) {
+ if self.inc {
+ DEBUG_DEPTH.exit_unchecked();
+ }
+ }
+}
+
+impl AsRef<usize> for DebugDepthGuard {
+ #[inline(always)]
+ fn as_ref(&self) -> &usize {
+ &self.depth
+ }
+}
+
+impl core::ops::Deref for DebugDepthGuard {
+ type Target = usize;
+
+ #[inline(always)]
+ fn deref(&self) -> &Self::Target {
+ &self.depth
+ }
+}
diff --git a/third_party/rust/toml/src/de/parser/detable.rs b/third_party/rust/toml/src/de/parser/detable.rs
@@ -0,0 +1,47 @@
+use alloc::borrow::Cow;
+
+use serde_spanned::Spanned;
+
+use crate::alloc_prelude::*;
+use crate::de::DeString;
+use crate::de::DeValue;
+use crate::map::Map;
+
+/// Type representing a TOML table, payload of the `Value::Table` variant.
+///
+/// By default its entries are stored in
+/// [lexicographic order](https://doc.rust-lang.org/std/primitive.str.html#impl-Ord-for-str)
+/// of the keys. Enable the `preserve_order` feature to store entries in the order they appear in
+/// the source file.
+pub type DeTable<'i> = Map<Spanned<DeString<'i>>, Spanned<DeValue<'i>>>;
+
+impl<'i> DeTable<'i> {
+ /// Parse a TOML document
+ ///
+ /// Returns the first parse error, if any; the partially-parsed value is
+ /// discarded (see [`DeTable::parse_recoverable`] to keep it).
+ pub fn parse(input: &'i str) -> Result<Spanned<Self>, crate::de::Error> {
+ let source = toml_parser::Source::new(input);
+ let mut errors = crate::de::error::TomlSink::<Option<_>>::new(source);
+ let value = crate::de::parser::parse_document(source, &mut errors);
+ if let Some(err) = errors.into_inner() {
+ Err(err)
+ } else {
+ Ok(value)
+ }
+ }
+
+ /// Parse a TOML document, with best effort recovery on error
+ ///
+ /// Always returns the (possibly partial) parsed table, plus every error
+ /// encountered.
+ pub fn parse_recoverable(input: &'i str) -> (Spanned<Self>, Vec<crate::de::Error>) {
+ let source = toml_parser::Source::new(input);
+ let mut errors = crate::de::error::TomlSink::<Vec<_>>::new(source);
+ let value = crate::de::parser::parse_document(source, &mut errors);
+ (value, errors.into_inner())
+ }
+
+ /// Ensure no data is borrowed
+ ///
+ /// Converts every key and value from `Cow::Borrowed` into `Cow::Owned`,
+ /// detaching the table from the input string's lifetime.
+ pub fn make_owned(&mut self) {
+ self.mut_entries(|k, v| {
+ let owned = core::mem::take(k.get_mut());
+ *k.get_mut() = Cow::Owned(owned.into_owned());
+ v.get_mut().make_owned();
+ });
+ }
+}
diff --git a/third_party/rust/toml/src/de/parser/devalue.rs b/third_party/rust/toml/src/de/parser/devalue.rs
@@ -0,0 +1,384 @@
+//! Definition of a TOML [value][DeValue] for deserialization
+
+use alloc::borrow::Cow;
+use core::mem::discriminant;
+use core::ops;
+
+use serde_spanned::Spanned;
+use toml_datetime::Datetime;
+
+use crate::alloc_prelude::*;
+use crate::de::DeArray;
+use crate::de::DeTable;
+
+/// Type representing a TOML string, payload of the `DeValue::String` variant
+pub type DeString<'i> = Cow<'i, str>;
+
+/// Represents a TOML integer
+#[derive(Clone, Debug)]
+pub struct DeInteger<'i> {
+    // `from_str_radix`-compatible digits, without any `0x`/`0o`/`0b` prefix
+    // (the prefix is re-attached by the `Display` impl).
+    pub(crate) inner: DeString<'i>,
+    // Numeric base of `inner`; one of 2, 8, 10, or 16 (see `Display`).
+    pub(crate) radix: u32,
+}
+
+impl DeInteger<'_> {
+    // Checked conversions: `None` when the digits are invalid for the radix
+    // or the value overflows the target width.
+    pub(crate) fn to_u64(&self) -> Option<u64> {
+        u64::from_str_radix(self.inner.as_ref(), self.radix).ok()
+    }
+    pub(crate) fn to_i64(&self) -> Option<i64> {
+        i64::from_str_radix(self.inner.as_ref(), self.radix).ok()
+    }
+    pub(crate) fn to_u128(&self) -> Option<u128> {
+        u128::from_str_radix(self.inner.as_ref(), self.radix).ok()
+    }
+    pub(crate) fn to_i128(&self) -> Option<i128> {
+        i128::from_str_radix(self.inner.as_ref(), self.radix).ok()
+    }
+
+    /// [`from_str_radix`][i64::from_str_radix]-compatible representation of an integer
+    ///
+    /// Requires [`DeInteger::radix`] to interpret
+    ///
+    /// See [`Display`][std::fmt::Display] for a representation that includes the radix
+    pub fn as_str(&self) -> &str {
+        self.inner.as_ref()
+    }
+
+    /// Numeric base of [`DeInteger::as_str`]
+    ///
+    /// One of 2, 8, 10, or 16.
+    pub fn radix(&self) -> u32 {
+        self.radix
+    }
+}
+
+impl Default for DeInteger<'_> {
+    fn default() -> Self {
+        // Decimal zero.
+        Self {
+            inner: DeString::Borrowed("0"),
+            radix: 10,
+        }
+    }
+}
+
+impl core::fmt::Display for DeInteger<'_> {
+    fn fmt(&self, formatter: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
+        // Re-attach the base prefix that `as_str` omits.
+        match self.radix {
+            2 => "0b".fmt(formatter)?,
+            8 => "0o".fmt(formatter)?,
+            10 => {}
+            16 => "0x".fmt(formatter)?,
+            _ => {
+                // The parser only ever constructs these four radices.
+                unreachable!(
+                    "we should only ever have 2, 8, 10, and 16 radix, not {}",
+                    self.radix
+                )
+            }
+        }
+        self.as_str().fmt(formatter)?;
+        Ok(())
+    }
+}
+
+/// Represents a TOML float
+#[derive(Clone, Debug)]
+pub struct DeFloat<'i> {
+    // `FromStr`-compatible textual representation of the float.
+    pub(crate) inner: DeString<'i>,
+}
+
+impl DeFloat<'_> {
+    pub(crate) fn to_f64(&self) -> Option<f64> {
+        let f: f64 = self.inner.as_ref().parse().ok()?;
+        // Reject finite literals that overflowed to infinity during parsing;
+        // only an explicit `inf` in the source may produce an infinite value.
+        if f.is_infinite() && !self.as_str().contains("inf") {
+            None
+        } else {
+            Some(f)
+        }
+    }
+
+    /// [`FromStr`][std::str::FromStr]-compatible representation of a float
+    pub fn as_str(&self) -> &str {
+        self.inner.as_ref()
+    }
+}
+
+impl Default for DeFloat<'_> {
+    fn default() -> Self {
+        // Zero, spelled as a valid TOML float.
+        Self {
+            inner: DeString::Borrowed("0.0"),
+        }
+    }
+}
+
+impl core::fmt::Display for DeFloat<'_> {
+    fn fmt(&self, formatter: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
+        // Floats display exactly as written in the source.
+        self.as_str().fmt(formatter)?;
+        Ok(())
+    }
+}
+
+/// Representation of a TOML value.
+///
+/// String payloads are `Cow`s that may borrow from the parsed input; use
+/// [`DeValue::make_owned`] to detach a value from the input buffer.
+#[derive(Clone, Debug)]
+pub enum DeValue<'i> {
+    /// Represents a TOML string
+    String(DeString<'i>),
+    /// Represents a TOML integer
+    Integer(DeInteger<'i>),
+    /// Represents a TOML float
+    Float(DeFloat<'i>),
+    /// Represents a TOML boolean
+    Boolean(bool),
+    /// Represents a TOML datetime
+    Datetime(Datetime),
+    /// Represents a TOML array
+    Array(DeArray<'i>),
+    /// Represents a TOML table
+    Table(DeTable<'i>),
+}
+
+impl<'i> DeValue<'i> {
+    /// Parse a TOML value
+    ///
+    /// # Errors
+    ///
+    /// Returns a parse error if one was reported; for best-effort partial
+    /// output see [`DeValue::parse_recoverable`].
+    pub fn parse(input: &'i str) -> Result<Spanned<Self>, crate::de::Error> {
+        let source = toml_parser::Source::new(input);
+        // `Option` sink: at most one error is retained.
+        let mut errors = crate::de::error::TomlSink::<Option<_>>::new(source);
+        let value = crate::de::parser::parse_value(source, &mut errors);
+        if let Some(err) = errors.into_inner() {
+            Err(err)
+        } else {
+            Ok(value)
+        }
+    }
+
+    /// Parse a TOML value, with best effort recovery on error
+    pub fn parse_recoverable(input: &'i str) -> (Spanned<Self>, Vec<crate::de::Error>) {
+        let source = toml_parser::Source::new(input);
+        // `Vec` sink: all reported errors are collected.
+        let mut errors = crate::de::error::TomlSink::<Vec<_>>::new(source);
+        let value = crate::de::parser::parse_value(source, &mut errors);
+        (value, errors.into_inner())
+    }
+
+    /// Ensure no data is borrowed
+    pub fn make_owned(&mut self) {
+        match self {
+            DeValue::String(v) => {
+                // Swap the possibly-borrowed string for an owned copy.
+                let owned = core::mem::take(v);
+                *v = Cow::Owned(owned.into_owned());
+            }
+            // NOTE(review): `Integer` and `Float` also hold a `DeString` that
+            // may borrow from the input, but they are not converted here —
+            // confirm against upstream whether skipping them is intentional.
+            DeValue::Integer(..)
+            | DeValue::Float(..)
+            | DeValue::Boolean(..)
+            | DeValue::Datetime(..) => {}
+            DeValue::Array(v) => {
+                // Recurse into each element.
+                for e in v.iter_mut() {
+                    e.get_mut().make_owned();
+                }
+            }
+            DeValue::Table(v) => v.make_owned(),
+        }
+    }
+
+    /// Index into a TOML array or map. A string index can be used to access a
+    /// value in a map, and a usize index can be used to access an element of an
+    /// array.
+    ///
+    /// Returns `None` if the type of `self` does not match the type of the
+    /// index, for example if the index is a string and `self` is an array or a
+    /// number. Also returns `None` if the given key does not exist in the map
+    /// or the given index is not within the bounds of the array.
+    pub fn get<I: Index>(&self, index: I) -> Option<&Spanned<Self>> {
+        index.index(self)
+    }
+
+    /// Extracts the integer value if it is an integer.
+    pub fn as_integer(&self) -> Option<&DeInteger<'i>> {
+        match self {
+            DeValue::Integer(i) => Some(i),
+            _ => None,
+        }
+    }
+
+    /// Tests whether this value is an integer.
+    pub fn is_integer(&self) -> bool {
+        self.as_integer().is_some()
+    }
+
+    /// Extracts the float value if it is a float.
+    pub fn as_float(&self) -> Option<&DeFloat<'i>> {
+        match self {
+            DeValue::Float(f) => Some(f),
+            _ => None,
+        }
+    }
+
+    /// Tests whether this value is a float.
+    pub fn is_float(&self) -> bool {
+        self.as_float().is_some()
+    }
+
+    /// Extracts the boolean value if it is a boolean.
+    pub fn as_bool(&self) -> Option<bool> {
+        match *self {
+            DeValue::Boolean(b) => Some(b),
+            _ => None,
+        }
+    }
+
+    /// Tests whether this value is a boolean.
+    pub fn is_bool(&self) -> bool {
+        self.as_bool().is_some()
+    }
+
+    /// Extracts the string of this value if it is a string.
+    pub fn as_str(&self) -> Option<&str> {
+        match *self {
+            DeValue::String(ref s) => Some(&**s),
+            _ => None,
+        }
+    }
+
+    /// Tests if this value is a string.
+    pub fn is_str(&self) -> bool {
+        self.as_str().is_some()
+    }
+
+    /// Extracts the datetime value if it is a datetime.
+    ///
+    /// Note that a parsed TOML value will only contain ISO 8601 dates. An
+    /// example date is:
+    ///
+    /// ```notrust
+    /// 1979-05-27T07:32:00Z
+    /// ```
+    pub fn as_datetime(&self) -> Option<&Datetime> {
+        match *self {
+            DeValue::Datetime(ref s) => Some(s),
+            _ => None,
+        }
+    }
+
+    /// Tests whether this value is a datetime.
+    pub fn is_datetime(&self) -> bool {
+        self.as_datetime().is_some()
+    }
+
+    /// Extracts the array value if it is an array.
+    pub fn as_array(&self) -> Option<&DeArray<'i>> {
+        match *self {
+            DeValue::Array(ref s) => Some(s),
+            _ => None,
+        }
+    }
+
+    // Crate-internal mutable accessor used while building the document.
+    pub(crate) fn as_array_mut(&mut self) -> Option<&mut DeArray<'i>> {
+        match self {
+            DeValue::Array(s) => Some(s),
+            _ => None,
+        }
+    }
+
+    /// Tests whether this value is an array.
+    pub fn is_array(&self) -> bool {
+        self.as_array().is_some()
+    }
+
+    /// Extracts the table value if it is a table.
+    pub fn as_table(&self) -> Option<&DeTable<'i>> {
+        match *self {
+            DeValue::Table(ref s) => Some(s),
+            _ => None,
+        }
+    }
+
+    // Crate-internal mutable accessor used while building the document.
+    pub(crate) fn as_table_mut(&mut self) -> Option<&mut DeTable<'i>> {
+        match self {
+            DeValue::Table(s) => Some(s),
+            _ => None,
+        }
+    }
+
+    /// Tests whether this value is a table.
+    pub fn is_table(&self) -> bool {
+        self.as_table().is_some()
+    }
+
+    /// Tests whether this and another value have the same type.
+    pub fn same_type(&self, other: &DeValue<'_>) -> bool {
+        // Compares enum variants only, not payloads.
+        discriminant(self) == discriminant(other)
+    }
+
+    /// Returns a human-readable representation of the type of this value.
+    pub fn type_str(&self) -> &'static str {
+        match *self {
+            DeValue::String(..) => "string",
+            DeValue::Integer(..) => "integer",
+            DeValue::Float(..) => "float",
+            DeValue::Boolean(..) => "boolean",
+            DeValue::Datetime(..) => "datetime",
+            DeValue::Array(..) => "array",
+            DeValue::Table(..) => "table",
+        }
+    }
+}
+
+impl<I> ops::Index<I> for DeValue<'_>
+where
+    I: Index,
+{
+    type Output = Spanned<Self>;
+
+    /// # Panics
+    ///
+    /// Panics when the index is not present; use [`DeValue::get`] for a
+    /// non-panicking lookup.
+    fn index(&self, index: I) -> &Spanned<Self> {
+        self.get(index).expect("index not found")
+    }
+}
+
+/// Types that can be used to index a [`DeValue`]
+///
+/// Currently this is implemented for `usize` to index arrays and `str` to index
+/// tables.
+///
+/// This trait is sealed and not intended for implementation outside of the
+/// `toml` crate.
+pub trait Index: Sealed {
+    #[doc(hidden)]
+    fn index<'r, 'i>(&self, val: &'r DeValue<'i>) -> Option<&'r Spanned<DeValue<'i>>>;
+}
+
+/// An implementation detail that should not be implemented, this will change in
+/// the future and break code otherwise.
+#[doc(hidden)]
+pub trait Sealed {}
+// The only permitted index types: array positions, table keys, and
+// references to either.
+impl Sealed for usize {}
+impl Sealed for str {}
+impl Sealed for String {}
+impl<T: Sealed + ?Sized> Sealed for &T {}
+
+impl Index for usize {
+    // Indexes arrays only; a non-array value or out-of-bounds index
+    // yields `None`.
+    fn index<'r, 'i>(&self, val: &'r DeValue<'i>) -> Option<&'r Spanned<DeValue<'i>>> {
+        match *val {
+            DeValue::Array(ref a) => a.get(*self),
+            _ => None,
+        }
+    }
+}
+
+impl Index for str {
+    // Indexes tables only; a non-table value or missing key yields `None`.
+    fn index<'r, 'i>(&self, val: &'r DeValue<'i>) -> Option<&'r Spanned<DeValue<'i>>> {
+        match *val {
+            DeValue::Table(ref a) => a.get(self),
+            _ => None,
+        }
+    }
+}
+
+impl Index for String {
+    // Delegates to the `str` implementation.
+    fn index<'r, 'i>(&self, val: &'r DeValue<'i>) -> Option<&'r Spanned<DeValue<'i>>> {
+        self[..].index(val)
+    }
+}
+
+impl<T> Index for &T
+where
+    T: Index + ?Sized,
+{
+    // Transparent delegation through a reference.
+    fn index<'r, 'i>(&self, val: &'r DeValue<'i>) -> Option<&'r Spanned<DeValue<'i>>> {
+        (**self).index(val)
+    }
+}
diff --git a/third_party/rust/toml/src/de/parser/document.rs b/third_party/rust/toml/src/de/parser/document.rs
@@ -0,0 +1,459 @@
+use serde_spanned::Spanned;
+
+use crate::alloc_prelude::*;
+use crate::de::parser::key::on_key;
+use crate::de::parser::prelude::*;
+use crate::de::parser::value::value;
+use crate::de::DeString;
+use crate::de::DeValue;
+use crate::de::{DeArray, DeTable};
+use crate::map::Entry;
+
+/// ```bnf
+/// ;; TOML
+///
+/// toml = expression *( newline expression )
+///
+/// expression = ( ( ws comment ) /
+///                ( ws keyval ws [ comment ] ) /
+///                ( ws table ws [ comment ] ) /
+///                ws )
+/// ```
+pub(crate) fn document<'i>(
+    input: &mut Input<'_>,
+    source: toml_parser::Source<'i>,
+    errors: &mut dyn ErrorSink,
+) -> Spanned<DeTable<'i>> {
+    #[cfg(feature = "debug")]
+    let _scope = TraceScope::new("document::document");
+    let mut state = State::default();
+    while let Some(event) = input.next_token() {
+        match event.kind() {
+            // Events that cannot begin a top-level expression are skipped.
+            // NOTE(review): presumably `toml_parser` already reported these
+            // as errors — confirm.
+            EventKind::InlineTableOpen
+            | EventKind::InlineTableClose
+            | EventKind::ArrayOpen
+            | EventKind::ArrayClose
+            | EventKind::Scalar
+            | EventKind::ValueSep
+            | EventKind::Error
+            | EventKind::KeySep
+            | EventKind::KeyValSep
+            | EventKind::StdTableClose
+            | EventKind::ArrayTableClose => {
+                #[cfg(feature = "debug")]
+                trace(
+                    &format!("unexpected {event:?}"),
+                    anstyle::AnsiColor::Red.on_default(),
+                );
+                continue;
+            }
+            EventKind::StdTableOpen | EventKind::ArrayTableOpen => {
+                // A new `[table]` / `[[table]]` header closes the table that
+                // was being filled.
+                state.finish_table(errors);
+
+                let header = on_table(event, input, source, errors);
+
+                state.start_table(header, errors);
+            }
+            EventKind::SimpleKey => {
+                let (path, key) = on_key(event, input, source, errors);
+                let Some(key) = key else {
+                    break;
+                };
+                // The key must be followed by `=`, with at most one
+                // whitespace token in between; anything else aborts.
+                let Some(next_event) = input.next_token() else {
+                    break;
+                };
+                let keyval_event = if next_event.kind() == EventKind::Whitespace {
+                    let Some(next_event) = input.next_token() else {
+                        break;
+                    };
+                    next_event
+                } else {
+                    next_event
+                };
+                if keyval_event.kind() != EventKind::KeyValSep {
+                    break;
+                }
+
+                // Skip a single whitespace token after the `=`.
+                if input
+                    .first()
+                    .map(|e| e.kind() == EventKind::Whitespace)
+                    .unwrap_or(false)
+                {
+                    let _ = input.next_token();
+                }
+                let value = value(input, source, errors);
+
+                state.capture_key_value(path, key, value, errors);
+            }
+            EventKind::Whitespace | EventKind::Comment | EventKind::Newline => {
+                state.capture_trailing(event);
+            }
+        }
+    }
+
+    // Close the final table before handing back the root.
+    state.finish_table(errors);
+
+    // The document root carries a default (empty) span.
+    let span = Default::default();
+    Spanned::new(span, state.root)
+}
+
+/// ```bnf
+/// ;; Standard Table
+///
+/// std-table = std-table-open key *( table-key-sep key) std-table-close
+///
+/// ;; Array Table
+///
+/// array-table = array-table-open key *( table-key-sep key) array-table-close
+/// ```
+///
+/// Consumes events up to and including the closing bracket and returns the
+/// parsed header.
+fn on_table<'i>(
+    open_event: &toml_parser::parser::Event,
+    input: &mut Input<'_>,
+    source: toml_parser::Source<'i>,
+    errors: &mut dyn ErrorSink,
+) -> TableHeader<'i> {
+    #[cfg(feature = "debug")]
+    let _scope = TraceScope::new("document::on_table");
+    let is_array = open_event.kind() == EventKind::ArrayTableOpen;
+    let mut current_path = None;
+    let mut current_key = None;
+    let mut current_span = open_event.span();
+    // Whitespace spans around the key.
+    // NOTE(review): tracked but never returned in `TableHeader` — confirm
+    // whether they are intentionally unused.
+    let mut current_prefix = None;
+    let mut current_suffix = None;
+
+    while let Some(event) = input.next_token() {
+        match event.kind() {
+            // Events that cannot appear inside a table header are skipped.
+            EventKind::InlineTableOpen
+            | EventKind::InlineTableClose
+            | EventKind::ArrayOpen
+            | EventKind::ArrayClose
+            | EventKind::Scalar
+            | EventKind::ValueSep
+            | EventKind::Error
+            | EventKind::KeySep
+            | EventKind::KeyValSep
+            | EventKind::StdTableOpen
+            | EventKind::ArrayTableOpen
+            | EventKind::Comment
+            | EventKind::Newline => {
+                #[cfg(feature = "debug")]
+                trace(
+                    &format!("unexpected {event:?}"),
+                    anstyle::AnsiColor::Red.on_default(),
+                );
+                continue;
+            }
+            EventKind::ArrayTableClose | EventKind::StdTableClose => {
+                // Extend the header span through the closing bracket.
+                current_span = current_span.append(event.span());
+                break;
+            }
+            EventKind::SimpleKey => {
+                current_prefix.get_or_insert_with(|| event.span().before());
+                let (path, key) = on_key(event, input, source, errors);
+                current_path = Some(path);
+                current_key = key;
+                current_suffix.get_or_insert_with(|| event.span().after());
+            }
+            EventKind::Whitespace => {
+                // Whitespace after the key is a suffix; before it, a prefix.
+                if current_key.is_some() {
+                    current_suffix = Some(event.span());
+                } else {
+                    current_prefix = Some(event.span());
+                }
+            }
+        }
+    }
+
+    TableHeader {
+        path: current_path.unwrap_or_default(),
+        key: current_key,
+        span: current_span,
+        is_array,
+    }
+}
+
+// Parsed `[table]` / `[[table]]` header.
+struct TableHeader<'i> {
+    // Parent keys: everything before the final key part.
+    path: Vec<Spanned<DeString<'i>>>,
+    // Final key part; `None` when the header contained no valid key.
+    key: Option<Spanned<DeString<'i>>>,
+    // Span of the whole header, brackets included.
+    span: toml_parser::Span,
+    // `true` for `[[array.of.tables]]` headers.
+    is_array: bool,
+}
+
+#[derive(Default)]
+struct State<'i> {
+    // Finished document root; completed tables are moved in by `finish_table`.
+    root: DeTable<'i>,
+    // Key-values accumulated under the current table header.
+    current_table: DeTable<'i>,
+    // Header being filled; `None` while still in the document root.
+    current_header: Option<TableHeader<'i>>,
+    // Count of table headers seen.
+    // NOTE(review): incremented in `start_table` but not read in this file.
+    current_position: usize,
+}
+
+impl<'i> State<'i> {
+    // Top-level trivia (whitespace/comments/newlines) is not recorded.
+    fn capture_trailing(&mut self, _event: &toml_parser::parser::Event) {}
+
+    // Insert `key = value` (with dotted-key `path`) into the current table.
+    fn capture_key_value(
+        &mut self,
+        path: Vec<Spanned<DeString<'i>>>,
+        key: Spanned<DeString<'i>>,
+        value: Spanned<DeValue<'i>>,
+        errors: &mut dyn ErrorSink,
+    ) {
+        #[cfg(feature = "debug")]
+        let _scope = TraceScope::new("document::capture_key_value");
+        #[cfg(feature = "debug")]
+        trace(
+            &format!(
+                "path={:?}",
+                path.iter().map(|k| k.get_ref()).collect::<Vec<_>>()
+            ),
+            anstyle::AnsiColor::Blue.on_default(),
+        );
+        #[cfg(feature = "debug")]
+        trace(
+            &format!("key={key}",),
+            anstyle::AnsiColor::Blue.on_default(),
+        );
+        #[cfg(feature = "debug")]
+        trace(
+            &format!("value={value:?}",),
+            anstyle::AnsiColor::Blue.on_default(),
+        );
+
+        // Key-values always descend their path as a dotted key.
+        let dotted = true;
+        let Some(parent_table) = descend_path(&mut self.current_table, &path, dotted, errors)
+        else {
+            return;
+        };
+        // "Likewise, using dotted keys to redefine tables already defined in [table] form is not allowed"
+        // NOTE(review): a dotted parent reached by an empty path (or a
+        // non-dotted parent reached by a dotted path) signals a mix of the
+        // two definition forms — confirm against the TOML spec tests.
+        let mixed_table_types = parent_table.is_dotted() == path.is_empty();
+        if mixed_table_types {
+            let key_span = get_key_span(&key);
+            errors.report_error(ParseError::new("duplicate key").with_unexpected(key_span));
+            return;
+        }
+        let key_span = get_key_span(&key);
+        match parent_table.entry(key) {
+            Entry::Vacant(o) => {
+                o.insert(value);
+            }
+            Entry::Occupied(existing) => {
+                // "Since tables cannot be defined more than once, redefining such tables using a [table] header is not allowed"
+                let old_span = get_key_span(existing.key());
+                errors.report_error(
+                    ParseError::new("duplicate key")
+                        .with_unexpected(key_span)
+                        .with_context(old_span),
+                );
+            }
+        }
+    }
+
+    // Move the table built since the last header into its slot in `root`.
+    fn finish_table(&mut self, errors: &mut dyn ErrorSink) {
+        #[cfg(feature = "debug")]
+        let _scope = TraceScope::new("document::finish_table");
+        let prev_table = core::mem::take(&mut self.current_table);
+        if let Some(header) = self.current_header.take() {
+            let Some(key) = &header.key else {
+                // Header had no usable key; drop the contents.
+                // NOTE(review): presumably an error was already reported for
+                // the malformed header — confirm.
+                return;
+            };
+            let header_span = header.span.start()..header.span.end();
+            let prev_table = Spanned::new(header_span.clone(), DeValue::Table(prev_table));
+
+            let parent_key = &header.path;
+            let dotted = false;
+            let Some(parent_table) = descend_path(&mut self.root, parent_key, dotted, errors)
+            else {
+                return;
+            };
+            #[cfg(feature = "debug")]
+            trace(
+                &format!("key={key}",),
+                anstyle::AnsiColor::Blue.on_default(),
+            );
+            if header.is_array {
+                // `[[key]]`: append to (or create) the array of tables.
+                let entry = parent_table.entry(key.clone()).or_insert_with(|| {
+                    let mut array = DeArray::new();
+                    array.set_array_of_tables(true);
+                    Spanned::new(header_span, DeValue::Array(array))
+                });
+                // Only arrays created by `[[...]]` headers may be extended.
+                let Some(array) = entry
+                    .as_mut()
+                    .as_array_mut()
+                    .filter(|a| a.is_array_of_tables())
+                else {
+                    let key_span = get_key_span(key);
+                    let old_span = entry.span();
+                    let old_span = toml_parser::Span::new_unchecked(old_span.start, old_span.end);
+                    errors.report_error(
+                        ParseError::new("duplicate key")
+                            .with_unexpected(key_span)
+                            .with_context(old_span),
+                    );
+                    return;
+                };
+                array.push(prev_table);
+            } else {
+                // `start_table` removed any previous entry for this key, so
+                // the insert cannot clash.
+                let existing = parent_table.insert(key.clone(), prev_table);
+                debug_assert!(existing.is_none());
+            }
+        } else {
+            // No header yet: everything seen so far belongs to the root.
+            self.root = prev_table;
+        }
+    }
+
+    // Begin collecting key-values for a new table header.
+    fn start_table(&mut self, header: TableHeader<'i>, errors: &mut dyn ErrorSink) {
+        if !header.is_array {
+            // 1. Look up the table on start to ensure the duplicate_key error points to the right line
+            // 2. Ensure any child tables from an implicit table are preserved
+            let root = &mut self.root;
+            if let (Some(parent_table), Some(key)) =
+                (descend_path(root, &header.path, false, errors), &header.key)
+            {
+                if let Some((old_key, old_value)) = parent_table.remove_entry(key) {
+                    match old_value.into_inner() {
+                        // Re-opening an implicit (non-dotted) table is fine:
+                        // adopt it, keeping its children.
+                        DeValue::Table(t) if t.is_implicit() && !t.is_dotted() => {
+                            self.current_table = t;
+                        }
+                        // Since tables cannot be defined more than once, redefining such tables using a [table] header is not allowed. Likewise, using dotted keys to redefine tables already defined in [table] form is not allowed.
+                        old_value => {
+                            let old_span = get_key_span(&old_key);
+                            let key_span = get_key_span(key);
+                            errors.report_error(
+                                ParseError::new("duplicate key")
+                                    .with_unexpected(key_span)
+                                    .with_context(old_span),
+                            );
+
+                            // Best-effort recovery: keep filling the existing
+                            // table despite the duplicate definition.
+                            if let DeValue::Table(t) = old_value {
+                                self.current_table = t;
+                            }
+                        }
+                    }
+                }
+            }
+        }
+
+        self.current_position += 1;
+        // A header makes this table explicit and non-dotted.
+        self.current_table.set_implicit(false);
+        self.current_table.set_dotted(false);
+        self.current_header = Some(header);
+    }
+}
+
+// Walk down `path` from `table`, creating implicit tables as needed, and
+// return the table the final key should be inserted into.
+//
+// Reports an error and returns `None` when the path crosses a value that
+// cannot be extended: a plain (non-table) array, an inline table, a scalar,
+// or — when `dotted` — an explicit table.
+fn descend_path<'t, 'i>(
+    mut table: &'t mut DeTable<'i>,
+    path: &[Spanned<DeString<'i>>],
+    dotted: bool,
+    errors: &mut dyn ErrorSink,
+) -> Option<&'t mut DeTable<'i>> {
+    #[cfg(feature = "debug")]
+    let _scope = TraceScope::new("document::descend_path");
+    #[cfg(feature = "debug")]
+    trace(
+        &format!(
+            "path={:?}",
+            path.iter().map(|k| k.get_ref()).collect::<Vec<_>>()
+        ),
+        anstyle::AnsiColor::Blue.on_default(),
+    );
+    for key in path.iter() {
+        table = match table.entry(key.clone()) {
+            Entry::Vacant(entry) => {
+                // Create an implicit intermediate table for this key part.
+                let mut new_table = DeTable::new();
+                new_table.set_implicit(true);
+                new_table.set_dotted(dotted);
+
+                let value = DeValue::Table(new_table);
+                let value = Spanned::new(key.span(), value);
+                let value = entry.insert(value);
+                value.as_mut().as_table_mut().unwrap()
+            }
+            Entry::Occupied(entry) => {
+                let spanned = entry.into_mut();
+                let old_span = spanned.span();
+                match spanned.as_mut() {
+                    DeValue::Array(ref mut array) => {
+                        // Only arrays of tables can be descended into; the
+                        // descent continues in the array's last element.
+                        if !array.is_array_of_tables() {
+                            let old_span =
+                                toml_parser::Span::new_unchecked(old_span.start, old_span.end);
+                            let key_span = get_key_span(key);
+                            errors.report_error(
+                                ParseError::new(
+                                    "cannot extend value of type array with a dotted key",
+                                )
+                                .with_unexpected(key_span)
+                                .with_context(old_span),
+                            );
+                            return None;
+                        }
+
+                        // An array of tables always holds at least the table
+                        // its `[[...]]` header created.
+                        debug_assert!(!array.is_empty());
+
+                        let index = array.len() - 1;
+                        let last_child = array.get_mut(index).unwrap();
+
+                        match last_child.as_mut() {
+                            DeValue::Table(table) => table,
+                            existing => {
+                                let old_span =
+                                    toml_parser::Span::new_unchecked(old_span.start, old_span.end);
+                                let key_span = get_key_span(key);
+                                errors.report_error(
+                                    ParseError::new(format!(
+                                        "cannot extend value of type {} with a dotted key",
+                                        existing.type_str()
+                                    ))
+                                    .with_unexpected(key_span)
+                                    .with_context(old_span),
+                                );
+                                return None;
+                            }
+                        }
+                    }
+                    DeValue::Table(ref mut sweet_child_of_mine) => {
+                        // Inline tables are closed once written and may not
+                        // be extended afterwards.
+                        if sweet_child_of_mine.is_inline() {
+                            let key_span = get_key_span(key);
+                            errors.report_error(
+                                ParseError::new(
+                                    "cannot extend value of type inline table with a dotted key",
+                                )
+                                .with_unexpected(key_span),
+                            );
+                            return None;
+                        }
+                        // Since tables cannot be defined more than once, redefining such tables using a
+                        // [table] header is not allowed. Likewise, using dotted keys to redefine tables
+                        // already defined in [table] form is not allowed.
+                        if dotted && !sweet_child_of_mine.is_implicit() {
+                            let key_span = get_key_span(key);
+                            errors.report_error(
+                                ParseError::new("duplicate key").with_unexpected(key_span),
+                            );
+                            return None;
+                        }
+                        sweet_child_of_mine
+                    }
+                    existing => {
+                        // Scalars and other values cannot be descended into.
+                        let old_span =
+                            toml_parser::Span::new_unchecked(old_span.start, old_span.end);
+                        let key_span = get_key_span(key);
+                        errors.report_error(
+                            ParseError::new(format!(
+                                "cannot extend value of type {} with a dotted key",
+                                existing.type_str()
+                            ))
+                            .with_unexpected(key_span)
+                            .with_context(old_span),
+                        );
+                        return None;
+                    }
+                }
+            }
+        };
+    }
+    Some(table)
+}
+
+// Convert a `Spanned` key's `Range<usize>` span into the `toml_parser::Span`
+// used for error reporting.
+fn get_key_span(key: &Spanned<DeString<'_>>) -> toml_parser::Span {
+    let key_span = key.span();
+    toml_parser::Span::new_unchecked(key_span.start, key_span.end)
+}
diff --git a/third_party/rust/toml/src/de/parser/inline_table.rs b/third_party/rust/toml/src/de/parser/inline_table.rs
@@ -0,0 +1,238 @@
+use serde_spanned::Spanned;
+
+use crate::alloc_prelude::*;
+use crate::de::parser::array::on_array;
+use crate::de::parser::key::on_key;
+use crate::de::parser::prelude::*;
+use crate::de::parser::value::on_scalar;
+use crate::de::DeString;
+use crate::de::DeTable;
+use crate::de::DeValue;
+use crate::map::Entry;
+
+/// ```bnf
+/// ;; Inline Table
+///
+/// inline-table = inline-table-open inline-table-keyvals inline-table-close
+/// ```
+///
+/// Consumes events through the closing `}` (or until an unrecoverable
+/// event) and returns the table with the full `{ ... }` span.
+pub(crate) fn on_inline_table<'i>(
+    open_event: &toml_parser::parser::Event,
+    input: &mut Input<'_>,
+    source: toml_parser::Source<'i>,
+    errors: &mut dyn ErrorSink,
+) -> Spanned<DeValue<'i>> {
+    #[cfg(feature = "debug")]
+    let _scope = TraceScope::new("inline_table::on_inline_table");
+    let mut result = DeTable::new();
+    // Inline tables are closed once written and reject later extension.
+    result.set_inline(true);
+    // Track the last consumed span so unterminated tables still get one.
+    let mut close_span = open_event.span();
+
+    let mut state = State::default();
+    while let Some(event) = input.next_token() {
+        close_span = event.span();
+        match event.kind() {
+            // Events that terminate the inline table unrecoverably.
+            EventKind::StdTableOpen
+            | EventKind::ArrayTableOpen
+            | EventKind::StdTableClose
+            | EventKind::ArrayClose
+            | EventKind::ArrayTableClose
+            | EventKind::KeySep => {
+                #[cfg(feature = "debug")]
+                trace(
+                    &format!("unexpected {event:?}"),
+                    anstyle::AnsiColor::Red.on_default(),
+                );
+                break;
+            }
+            EventKind::Error => {
+                #[cfg(feature = "debug")]
+                trace(
+                    &format!("unexpected {event:?}"),
+                    anstyle::AnsiColor::Red.on_default(),
+                );
+                continue;
+            }
+            EventKind::SimpleKey => {
+                let (path, key) = on_key(event, input, source, errors);
+                state.capture_key(event, path, key);
+            }
+            EventKind::KeyValSep => {
+                state.finish_key(event);
+            }
+            EventKind::InlineTableOpen => {
+                // Nested inline table: recurse.
+                let value = on_inline_table(event, input, source, errors);
+                state.capture_value(event, value);
+            }
+            EventKind::ArrayOpen => {
+                let value = on_array(event, input, source, errors);
+                state.capture_value(event, value);
+            }
+            EventKind::Scalar => {
+                let value = on_scalar(event, source, errors);
+                state.capture_value(event, value);
+            }
+            EventKind::ValueSep => {
+                // `,` commits the pending key/value pair.
+                state.finish_value(event, &mut result, errors);
+            }
+            EventKind::Whitespace | EventKind::Comment | EventKind::Newline => {
+                state.whitespace(event);
+            }
+            EventKind::InlineTableClose => {
+                // `}` commits the final pair and ends the table.
+                state.finish_value(event, &mut result, errors);
+                state.close(open_event, event, &mut result);
+                break;
+            }
+        }
+    }
+
+    let span = open_event.span().start()..close_span.end();
+
+    Spanned::new(span, DeValue::Table(result))
+}
+
+#[derive(Default)]
+struct State<'i> {
+    // Pending dotted path and final key, awaiting their value.
+    current_key: Option<(Vec<Spanned<DeString<'i>>>, Spanned<DeString<'i>>)>,
+    // Set by `finish_key`, cleared by `finish_value`.
+    // NOTE(review): never read in this file — confirm it is still needed.
+    seen_keyval_sep: bool,
+    // Value captured since the last separator.
+    current_value: Option<Spanned<DeValue<'i>>>,
+}
+
+impl<'i> State<'i> {
+    // Trivia inside the inline table is not recorded.
+    fn whitespace(&mut self, _event: &toml_parser::parser::Event) {}
+
+    // Remember the dotted path and final key of the next pair.
+    fn capture_key(
+        &mut self,
+        _event: &toml_parser::parser::Event,
+        path: Vec<Spanned<DeString<'i>>>,
+        key: Option<Spanned<DeString<'i>>>,
+    ) {
+        if let Some(key) = key {
+            self.current_key = Some((path, key));
+        }
+    }
+
+    // `=` was seen after the key.
+    fn finish_key(&mut self, _event: &toml_parser::parser::Event) {
+        self.seen_keyval_sep = true;
+    }
+
+    // Remember the value of the pending pair.
+    fn capture_value(&mut self, _event: &toml_parser::parser::Event, value: Spanned<DeValue<'i>>) {
+        self.current_value = Some(value);
+    }
+
+    // Commit the pending key/value pair (if complete) into `result`.
+    fn finish_value(
+        &mut self,
+        _event: &toml_parser::parser::Event,
+        result: &mut DeTable<'i>,
+        errors: &mut dyn ErrorSink,
+    ) {
+        #[cfg(feature = "debug")]
+        let _scope = TraceScope::new("inline_table::finish_value");
+        self.seen_keyval_sep = false;
+        if let (Some((path, key)), Some(value)) =
+            (self.current_key.take(), self.current_value.take())
+        {
+            let Some(table) = descend_path(result, &path, true, errors) else {
+                return;
+            };
+
+            // "Likewise, using dotted keys to redefine tables already defined in [table] form is not allowed"
+            // NOTE(review): same dotted/empty-path mix check as in
+            // document.rs — confirm against the TOML spec tests.
+            let mixed_table_types = table.is_dotted() == path.is_empty();
+            if mixed_table_types {
+                let key_span = get_key_span(&key);
+                errors.report_error(ParseError::new("duplicate key").with_unexpected(key_span));
+            } else {
+                let key_span = get_key_span(&key);
+                match table.entry(key) {
+                    Entry::Vacant(o) => {
+                        o.insert(value);
+                    }
+                    Entry::Occupied(o) => {
+                        // Keys may not be repeated within one inline table.
+                        let old_span = get_key_span(o.key());
+                        errors.report_error(
+                            ParseError::new("duplicate key")
+                                .with_unexpected(key_span)
+                                .with_context(old_span),
+                        );
+                    }
+                }
+            }
+        }
+    }
+
+    // Hook for the closing `}`; currently only traces in debug builds.
+    fn close(
+        &mut self,
+        _open_event: &toml_parser::parser::Event,
+        _close_event: &toml_parser::parser::Event,
+        _result: &mut DeTable<'i>,
+    ) {
+        #[cfg(feature = "debug")]
+        let _scope = TraceScope::new("inline_table::close");
+    }
+}
+
+// Walk down `path` inside an inline table, creating implicit inline tables
+// as needed; returns `None` (after reporting an error) when the path crosses
+// a non-table value or redefines an explicit table.
+fn descend_path<'a, 'i>(
+    mut table: &'a mut DeTable<'i>,
+    path: &'a [Spanned<DeString<'i>>],
+    dotted: bool,
+    errors: &mut dyn ErrorSink,
+) -> Option<&'a mut DeTable<'i>> {
+    #[cfg(feature = "debug")]
+    let _scope = TraceScope::new("inline_table::descend_path");
+    #[cfg(feature = "debug")]
+    trace(
+        &format!(
+            "key={:?}",
+            path.iter().map(|k| k.get_ref()).collect::<Vec<_>>()
+        ),
+        anstyle::AnsiColor::Blue.on_default(),
+    );
+    for key in path.iter() {
+        table = match table.entry(key.clone()) {
+            Entry::Vacant(entry) => {
+                // Create an implicit intermediate table; it is marked inline
+                // since it lives inside an inline table.
+                let mut new_table = DeTable::new();
+                new_table.set_implicit(true);
+                new_table.set_dotted(dotted);
+                new_table.set_inline(true);
+                let value = DeValue::Table(new_table);
+                let value = Spanned::new(key.span(), value);
+                let value = entry.insert(value);
+                value.as_mut().as_table_mut().unwrap()
+            }
+            Entry::Occupied(entry) => {
+                let spanned = entry.into_mut();
+                match spanned.as_mut() {
+                    DeValue::Table(ref mut sweet_child_of_mine) => {
+                        // Since tables cannot be defined more than once, redefining such tables using a
+                        // [table] header is not allowed. Likewise, using dotted keys to redefine tables
+                        // already defined in [table] form is not allowed.
+                        if dotted && !sweet_child_of_mine.is_implicit() {
+                            let key_span = get_key_span(key);
+                            errors.report_error(
+                                ParseError::new("duplicate key").with_unexpected(key_span),
+                            );
+                            return None;
+                        }
+                        sweet_child_of_mine
+                    }
+                    item => {
+                        // Scalars, arrays, etc. cannot be descended into.
+                        let key_span = get_key_span(key);
+                        errors.report_error(
+                            ParseError::new(format!(
+                                "cannot extend value of type {} with a dotted key",
+                                item.type_str()
+                            ))
+                            .with_unexpected(key_span),
+                        );
+                        return None;
+                    }
+                }
+            }
+        };
+    }
+    Some(table)
+}
+
+// Convert a `Spanned` key's `Range<usize>` span into the `toml_parser::Span`
+// used for error reporting.
+fn get_key_span(key: &Spanned<DeString<'_>>) -> toml_parser::Span {
+    let key_span = key.span();
+    toml_parser::Span::new_unchecked(key_span.start, key_span.end)
+}
diff --git a/third_party/rust/toml/src/de/parser/key.rs b/third_party/rust/toml/src/de/parser/key.rs
@@ -0,0 +1,123 @@
+use serde_spanned::Spanned;
+
+use crate::alloc_prelude::*;
+use crate::de::parser::prelude::*;
+use crate::de::DeString;
+
+/// ```bnf
+/// key = simple-key / dotted-key
+/// dotted-key = simple-key 1*( dot-sep simple-key )
+/// ```
+///
+/// Returns the dotted path (all parts but the last) and the final key part;
+/// the key is `None` when the recursion limit was exceeded.
+pub(crate) fn on_key<'i>(
+    key_event: &toml_parser::parser::Event,
+    input: &mut Input<'_>,
+    source: toml_parser::Source<'i>,
+    errors: &mut dyn ErrorSink,
+) -> (Vec<Spanned<DeString<'i>>>, Option<Spanned<DeString<'i>>>) {
+    #[cfg(feature = "debug")]
+    let _scope = TraceScope::new("key::on_key");
+    let mut result_path = Vec::new();
+    let mut result_key = None;
+
+    let mut state = State::new(key_event);
+    // Only keep consuming events when a `.` separator follows.
+    if more_key(input) {
+        while let Some(event) = input.next_token() {
+            match event.kind() {
+                // Events that cannot appear inside a key are skipped.
+                EventKind::StdTableOpen
+                | EventKind::ArrayTableOpen
+                | EventKind::InlineTableOpen
+                | EventKind::InlineTableClose
+                | EventKind::ArrayOpen
+                | EventKind::ArrayClose
+                | EventKind::Scalar
+                | EventKind::ValueSep
+                | EventKind::Comment
+                | EventKind::Newline
+                | EventKind::KeyValSep
+                | EventKind::StdTableClose
+                | EventKind::ArrayTableClose
+                | EventKind::Error => {
+                    #[cfg(feature = "debug")]
+                    trace(
+                        &format!("unexpected {event:?}"),
+                        anstyle::AnsiColor::Red.on_default(),
+                    );
+                    continue;
+                }
+                EventKind::SimpleKey => {
+                    state.current_key = Some(*event);
+
+                    // Stop once no further `.` separator follows.
+                    if !more_key(input) {
+                        break;
+                    }
+                }
+                EventKind::Whitespace => {
+                    state.whitespace(event);
+                }
+                EventKind::KeySep => {
+                    // `.` commits the pending key part onto the path.
+                    state.close_key(&mut result_path, &mut result_key, source, errors);
+                }
+            }
+        }
+    }
+
+    // Commit the last (or only) key part.
+    state.close_key(&mut result_path, &mut result_key, source, errors);
+
+    // Guard against pathological dotted-key depth.
+    #[cfg(not(feature = "unbounded"))]
+    if super::LIMIT <= result_path.len() as u32 {
+        errors.report_error(ParseError::new("recursion limit"));
+        return (Vec::new(), None);
+    }
+
+    (result_path, result_key)
+}
+
+/// Peek ahead (without consuming) to see whether the dotted key continues:
+/// the next token is a `.` separator, optionally preceded by exactly one
+/// whitespace token.
+fn more_key(input: &Input<'_>) -> bool {
+    let first = input.get(0).map(|e| e.kind());
+    let second = input.get(1).map(|e| e.kind());
+    // Collapse the original `if`/`else if`/`else` returning literal booleans
+    // (clippy::needless_bool) into a single expression.
+    first == Some(EventKind::KeySep)
+        || (first == Some(EventKind::Whitespace) && second == Some(EventKind::KeySep))
+}
+
+// Tracks the most recent simple-key event while scanning a (dotted) key.
+struct State {
+    // The key part seen but not yet committed by `close_key`.
+    current_key: Option<toml_parser::parser::Event>,
+}
+
+impl State {
+    fn new(key_event: &toml_parser::parser::Event) -> Self {
+        Self {
+            current_key: Some(*key_event),
+        }
+    }
+
+    // Whitespace between key parts carries no information here.
+    fn whitespace(&mut self, _event: &toml_parser::parser::Event) {}
+
+    // Decode the pending key part; the previous "last" key (if any) is
+    // shifted onto the path and the new part becomes the candidate last key.
+    fn close_key<'i>(
+        &mut self,
+        result_path: &mut Vec<Spanned<DeString<'i>>>,
+        result_key: &mut Option<Spanned<DeString<'i>>>,
+        source: toml_parser::Source<'i>,
+        errors: &mut dyn ErrorSink,
+    ) {
+        let Some(key) = self.current_key.take() else {
+            return;
+        };
+
+        let key_span = key.span();
+        let key_span = key_span.start()..key_span.end();
+
+        // NOTE(review): the event was produced from this `source`, so the
+        // lookup is assumed infallible — confirm the `Source::get` contract.
+        let raw = source.get(key).unwrap();
+        // Unescape/unquote the raw key text.
+        let mut decoded = alloc::borrow::Cow::Borrowed("");
+        raw.decode_key(&mut decoded, errors);
+
+        let key = Spanned::new(key_span, decoded);
+        if let Some(last_key) = result_key.replace(key) {
+            result_path.push(last_key);
+        }
+    }
+}
diff --git a/third_party/rust/toml/src/de/parser/mod.rs b/third_party/rust/toml/src/de/parser/mod.rs
@@ -0,0 +1,85 @@
+#![allow(clippy::type_complexity)]
+
+use serde_spanned::Spanned;
+#[cfg(not(feature = "unbounded"))]
+use toml_parser::parser::RecursionGuard;
+use toml_parser::parser::ValidateWhitespace;
+
+pub use dearray::DeArray;
+pub use detable::DeTable;
+pub use devalue::DeFloat;
+pub use devalue::DeInteger;
+pub use devalue::DeString;
+pub use devalue::DeValue;
+
+use crate::alloc_prelude::*;
+
+pub(crate) mod array;
+pub(crate) mod dearray;
+#[cfg(feature = "debug")]
+pub(crate) mod debug;
+pub(crate) mod detable;
+pub(crate) mod devalue;
+pub(crate) mod document;
+pub(crate) mod inline_table;
+pub(crate) mod key;
+pub(crate) mod value;
+
+pub(crate) fn parse_document<'i>(
+ source: toml_parser::Source<'i>,
+ errors: &mut dyn prelude::ErrorSink,
+) -> Spanned<DeTable<'i>> {
+ let tokens = source.lex().into_vec();
+
+ let mut events = Vec::with_capacity(tokens.len());
+ let mut receiver = ValidateWhitespace::new(&mut events, source);
+ #[cfg(not(feature = "unbounded"))]
+ let mut receiver = RecursionGuard::new(&mut receiver, LIMIT);
+ #[cfg(not(feature = "unbounded"))]
+ let receiver = &mut receiver;
+ #[cfg(feature = "unbounded")]
+ let receiver = &mut receiver;
+ toml_parser::parser::parse_document(&tokens, receiver, errors);
+
+ let mut input = prelude::Input::new(&events);
+ let doc = document::document(&mut input, source, errors);
+ doc
+}
+
+pub(crate) fn parse_value<'i>(
+ source: toml_parser::Source<'i>,
+ errors: &mut dyn prelude::ErrorSink,
+) -> Spanned<DeValue<'i>> {
+ let tokens = source.lex().into_vec();
+
+ let mut events = Vec::with_capacity(tokens.len());
+ let mut receiver = ValidateWhitespace::new(&mut events, source);
+ #[cfg(not(feature = "unbounded"))]
+ let mut receiver = RecursionGuard::new(&mut receiver, LIMIT);
+ #[cfg(not(feature = "unbounded"))]
+ let receiver = &mut receiver;
+ #[cfg(feature = "unbounded")]
+ let receiver = &mut receiver;
+ toml_parser::parser::parse_value(&tokens, receiver, errors);
+
+ let mut input = prelude::Input::new(&events);
+ let value = value::value(&mut input, source, errors);
+ value
+}
+
+#[cfg(not(feature = "unbounded"))]
+const LIMIT: u32 = 80;
+
+pub(crate) mod prelude {
+ pub(crate) use toml_parser::parser::EventKind;
+ pub(crate) use toml_parser::ErrorSink;
+ pub(crate) use toml_parser::ParseError;
+ pub(crate) use winnow::stream::Stream as _;
+
+ pub(crate) type Input<'i> = winnow::stream::TokenSlice<'i, toml_parser::parser::Event>;
+
+ #[cfg(feature = "debug")]
+ pub(crate) use super::debug::trace;
+ #[cfg(feature = "debug")]
+ pub(crate) use super::debug::TraceScope;
+}
diff --git a/third_party/rust/toml/src/de/parser/value.rs b/third_party/rust/toml/src/de/parser/value.rs
@@ -0,0 +1,111 @@
+use serde_spanned::Spanned;
+
+use crate::alloc_prelude::*;
+use crate::de::parser::array::on_array;
+use crate::de::parser::inline_table::on_inline_table;
+use crate::de::parser::prelude::*;
+use crate::de::DeFloat;
+use crate::de::DeInteger;
+use crate::de::DeValue;
+
+/// ```bnf
+/// val = string / boolean / array / inline-table / date-time / float / integer
+/// ```
+pub(crate) fn value<'i>(
+ input: &mut Input<'_>,
+ source: toml_parser::Source<'i>,
+ errors: &mut dyn ErrorSink,
+) -> Spanned<DeValue<'i>> {
+ #[cfg(feature = "debug")]
+ let _scope = TraceScope::new("value");
+ if let Some(event) = input.next_token() {
+ match event.kind() {
+ EventKind::StdTableOpen
+ | EventKind::ArrayTableOpen
+ | EventKind::InlineTableClose
+ | EventKind::ArrayClose
+ | EventKind::ValueSep
+ | EventKind::Comment
+ | EventKind::Newline
+ | EventKind::Error
+ | EventKind::SimpleKey
+ | EventKind::KeySep
+ | EventKind::KeyValSep
+ | EventKind::StdTableClose
+ | EventKind::ArrayTableClose => {
+ #[cfg(feature = "debug")]
+ trace(
+ &format!("unexpected {event:?}"),
+ anstyle::AnsiColor::Red.on_default(),
+ );
+ }
+ EventKind::Whitespace => {
+ #[cfg(feature = "debug")]
+ trace(
+ &format!("unexpected {event:?}"),
+ anstyle::AnsiColor::Red.on_default(),
+ );
+ }
+ EventKind::InlineTableOpen => {
+ return on_inline_table(event, input, source, errors);
+ }
+ EventKind::ArrayOpen => {
+ return on_array(event, input, source, errors);
+ }
+ EventKind::Scalar => {
+ return on_scalar(event, source, errors);
+ }
+ }
+ }
+
+ Spanned::new(0..0, DeValue::Integer(Default::default()))
+}
+
+pub(crate) fn on_scalar<'i>(
+ event: &toml_parser::parser::Event,
+ source: toml_parser::Source<'i>,
+ errors: &mut dyn ErrorSink,
+) -> Spanned<DeValue<'i>> {
+ #[cfg(feature = "debug")]
+ let _scope = TraceScope::new("on_scalar");
+ let value_span = event.span();
+ let value_span = value_span.start()..value_span.end();
+
+ let raw = source.get(event).unwrap();
+ let mut decoded = alloc::borrow::Cow::Borrowed("");
+ let kind = raw.decode_scalar(&mut decoded, errors);
+ match kind {
+ toml_parser::decoder::ScalarKind::String => {
+ Spanned::new(value_span, DeValue::String(decoded))
+ }
+ toml_parser::decoder::ScalarKind::Boolean(value) => {
+ Spanned::new(value_span, DeValue::Boolean(value))
+ }
+ toml_parser::decoder::ScalarKind::DateTime => {
+ let value = match decoded.parse::<toml_datetime::Datetime>() {
+ Ok(value) => value,
+ Err(err) => {
+ errors.report_error(
+ ParseError::new(err.to_string()).with_unexpected(event.span()),
+ );
+ toml_datetime::Datetime {
+ date: None,
+ time: None,
+ offset: None,
+ }
+ }
+ };
+ Spanned::new(value_span, DeValue::Datetime(value))
+ }
+ toml_parser::decoder::ScalarKind::Float => {
+ Spanned::new(value_span, DeValue::Float(DeFloat { inner: decoded }))
+ }
+ toml_parser::decoder::ScalarKind::Integer(radix) => Spanned::new(
+ value_span,
+ DeValue::Integer(DeInteger {
+ inner: decoded,
+ radix: radix.value(),
+ }),
+ ),
+ }
+}
diff --git a/third_party/rust/toml/src/lib.rs b/third_party/rust/toml/src/lib.rs
@@ -5,8 +5,6 @@
//! ```toml
//! [package]
//! name = "toml"
-//! version = "0.4.2"
-//! authors = ["Alex Crichton <alex@alexcrichton.com>"]
//!
//! [dependencies]
//! serde = "1.0"
@@ -17,9 +15,11 @@
//!
//! ## TOML values
//!
-//! A value in TOML is represented with the [`Value`] enum in this crate:
+//! A TOML document is represented with the [`Table`] type which maps `String` to the [`Value`] enum:
//!
-//! ```rust,ignore
+#![cfg_attr(not(feature = "default"), doc = " ```ignore")]
+#![cfg_attr(feature = "default", doc = " ```")]
+//! # use toml::value::{Datetime, Array, Table};
//! pub enum Value {
//! String(String),
//! Integer(i64),
@@ -31,25 +31,22 @@
//! }
//! ```
//!
-//! TOML is similar to JSON with the notable addition of a [`Datetime`]
-//! type. In general, TOML and JSON are interchangeable in terms of
-//! formats.
-//!
//! ## Parsing TOML
//!
-//! The easiest way to parse a TOML document is via the [`Value`] type:
+//! The easiest way to parse a TOML document is via the [`Table`] type:
//!
-//! ```rust
-//! use toml::Value;
+#![cfg_attr(not(feature = "default"), doc = " ```ignore")]
+#![cfg_attr(feature = "default", doc = " ```")]
+//! use toml::Table;
//!
-//! let value = "foo = 'bar'".parse::<Value>().unwrap();
+//! let value = "foo = 'bar'".parse::<Table>().unwrap();
//!
//! assert_eq!(value["foo"].as_str(), Some("bar"));
//! ```
//!
-//! The [`Value`] type implements a number of convenience methods and
+//! The [`Table`] type implements a number of convenience methods and
//! traits; the example above uses [`FromStr`] to parse a [`str`] into a
-//! [`Value`].
+//! [`Table`].
//!
//! ## Deserialization and Serialization
//!
@@ -57,23 +54,27 @@
//! implementations of the `Deserialize`, `Serialize`, `Deserializer`, and
//! `Serializer` traits. Namely, you'll find:
//!
+//! * `Deserialize for Table`
+//! * `Serialize for Table`
//! * `Deserialize for Value`
//! * `Serialize for Value`
//! * `Deserialize for Datetime`
//! * `Serialize for Datetime`
//! * `Deserializer for de::Deserializer`
//! * `Serializer for ser::Serializer`
+//! * `Deserializer for Table`
//! * `Deserializer for Value`
//!
//! This means that you can use Serde to deserialize/serialize the
-//! [`Value`] type as well as the [`Datetime`] type in this crate. You can also
-//! use the [`Deserializer`], [`Serializer`], or [`Value`] type itself to act as
+//! [`Table`] type as well as [`Value`] and [`Datetime`] type in this crate. You can also
+//! use the [`Deserializer`], [`Serializer`], or [`Table`] type itself to act as
//! a deserializer/serializer for arbitrary types.
//!
//! An example of deserializing with TOML is:
//!
-//! ```rust
-//! use serde_derive::Deserialize;
+#![cfg_attr(not(feature = "default"), doc = " ```ignore")]
+#![cfg_attr(feature = "default", doc = " ```")]
+//! use serde::Deserialize;
//!
//! #[derive(Deserialize)]
//! struct Config {
@@ -88,26 +89,25 @@
//! travis: Option<String>,
//! }
//!
-//! fn main() {
-//! let config: Config = toml::from_str(r#"
-//! ip = '127.0.0.1'
+//! let config: Config = toml::from_str(r#"
+//! ip = '127.0.0.1'
//!
-//! [keys]
-//! github = 'xxxxxxxxxxxxxxxxx'
-//! travis = 'yyyyyyyyyyyyyyyyy'
-//! "#).unwrap();
+//! [keys]
+//! github = 'xxxxxxxxxxxxxxxxx'
+//! travis = 'yyyyyyyyyyyyyyyyy'
+//! "#).unwrap();
//!
-//! assert_eq!(config.ip, "127.0.0.1");
-//! assert_eq!(config.port, None);
-//! assert_eq!(config.keys.github, "xxxxxxxxxxxxxxxxx");
-//! assert_eq!(config.keys.travis.as_ref().unwrap(), "yyyyyyyyyyyyyyyyy");
-//! }
+//! assert_eq!(config.ip, "127.0.0.1");
+//! assert_eq!(config.port, None);
+//! assert_eq!(config.keys.github, "xxxxxxxxxxxxxxxxx");
+//! assert_eq!(config.keys.travis.as_ref().unwrap(), "yyyyyyyyyyyyyyyyy");
//! ```
//!
//! You can serialize types in a similar fashion:
//!
-//! ```rust
-//! use serde_derive::Serialize;
+#![cfg_attr(not(feature = "default"), doc = " ```ignore")]
+#![cfg_attr(feature = "default", doc = " ```")]
+//! use serde::Serialize;
//!
//! #[derive(Serialize)]
//! struct Config {
@@ -122,18 +122,16 @@
//! travis: Option<String>,
//! }
//!
-//! fn main() {
-//! let config = Config {
-//! ip: "127.0.0.1".to_string(),
-//! port: None,
-//! keys: Keys {
-//! github: "xxxxxxxxxxxxxxxxx".to_string(),
-//! travis: Some("yyyyyyyyyyyyyyyyy".to_string()),
-//! },
-//! };
-//!
-//! let toml = toml::to_string(&config).unwrap();
-//! }
+//! let config = Config {
+//! ip: "127.0.0.1".to_string(),
+//! port: None,
+//! keys: Keys {
+//! github: "xxxxxxxxxxxxxxxxx".to_string(),
+//! travis: Some("yyyyyyyyyyyyyyyyy".to_string()),
+//! },
+//! };
+//!
+//! let toml = toml::to_string(&config).unwrap();
//! ```
//!
//! [TOML]: https://github.com/toml-lang/toml
@@ -141,36 +139,66 @@
//! [`serde`]: https://serde.rs/
//! [serde]: https://serde.rs/
-#![deny(missing_docs)]
-#![warn(rust_2018_idioms)]
+#![cfg_attr(docsrs, feature(doc_cfg))]
+#![cfg_attr(all(not(feature = "std"), not(test)), no_std)]
+#![warn(clippy::std_instead_of_core)]
+#![warn(clippy::std_instead_of_alloc)]
// Makes rustc abort compilation if there are any unsafe blocks in the crate.
// Presence of this annotation is picked up by tools such as cargo-geiger
// and lets them ensure that there is indeed no unsafe code as opposed to
// something they couldn't detect (e.g. unsafe added via macro expansion, etc).
#![forbid(unsafe_code)]
+#![warn(missing_docs)]
+#![warn(clippy::print_stderr)]
+#![warn(clippy::print_stdout)]
+
+#[allow(unused_extern_crates)]
+extern crate alloc;
+
+pub(crate) mod alloc_prelude {
+ pub(crate) use alloc::borrow::ToOwned as _;
+ pub(crate) use alloc::format;
+ pub(crate) use alloc::string::String;
+ pub(crate) use alloc::string::ToString as _;
+ pub(crate) use alloc::vec::Vec;
+}
pub mod map;
+#[cfg(feature = "serde")]
pub mod value;
-#[doc(no_inline)]
-pub use crate::value::Value;
-mod datetime;
-pub mod ser;
-#[doc(no_inline)]
-pub use crate::ser::{to_string, to_string_pretty, to_vec, Serializer};
pub mod de;
-#[doc(no_inline)]
-pub use crate::de::{from_slice, from_str, Deserializer};
-mod tokens;
+#[cfg(feature = "serde")]
+pub mod ser;
#[doc(hidden)]
+#[cfg(feature = "serde")]
pub mod macros;
-mod spanned;
-pub use crate::spanned::Spanned;
+#[cfg(feature = "serde")]
+mod table;
-// Just for rustdoc
-#[allow(unused_imports)]
-use crate::datetime::Datetime;
+#[doc(inline)]
+#[cfg(feature = "parse")]
+#[cfg(feature = "serde")]
+pub use crate::de::{from_slice, from_str, Deserializer};
+#[doc(inline)]
+#[cfg(feature = "display")]
+#[cfg(feature = "serde")]
+pub use crate::ser::{to_string, to_string_pretty, Serializer};
+#[doc(inline)]
+#[cfg(feature = "serde")]
+pub use crate::value::Value;
+pub use serde_spanned::Spanned;
+#[cfg(feature = "serde")]
+pub use table::Table;
+
+// Shortcuts for the module doc-comment
#[allow(unused_imports)]
use core::str::FromStr;
+#[allow(unused_imports)]
+use toml_datetime::Datetime;
+
+#[doc = include_str!("../README.md")]
+#[cfg(doctest)]
+pub struct ReadmeDoctests;
diff --git a/third_party/rust/toml/src/macros.rs b/third_party/rust/toml/src/macros.rs
@@ -1,20 +1,14 @@
-pub use serde::de::{Deserialize, IntoDeserializer};
+pub use serde_core::de::{Deserialize, IntoDeserializer};
+use crate::alloc_prelude::*;
use crate::value::{Array, Table, Value};
-/// Construct a [`toml::Value`] from TOML syntax.
-///
-/// [`toml::Value`]: value/enum.Value.html
+/// Construct a [`Table`] from TOML syntax.
///
/// ```rust
/// let cargo_toml = toml::toml! {
/// [package]
/// name = "toml"
-/// version = "0.4.5"
-/// authors = ["Alex Crichton <alex@alexcrichton.com>"]
-///
-/// [badges]
-/// travis-ci = { repository = "alexcrichton/toml-rs" }
///
/// [dependencies]
/// serde = "1.0"
@@ -32,7 +26,10 @@ macro_rules! toml {
let table = $crate::value::Table::new();
let mut root = $crate::Value::Table(table);
$crate::toml_internal!(@toplevel root [] $($toml)+);
- root
+ match root {
+ $crate::Value::Table(table) => table,
+ _ => unreachable!(),
+ }
}};
}
@@ -197,27 +194,27 @@ macro_rules! toml_internal {
}};
(@value (-nan)) => {
- $crate::Value::Float(-::std::f64::NAN)
+ $crate::Value::Float(::core::f64::NAN.copysign(-1.0))
};
(@value (nan)) => {
- $crate::Value::Float(::std::f64::NAN)
+ $crate::Value::Float(::core::f64::NAN.copysign(1.0))
};
(@value nan) => {
- $crate::Value::Float(::std::f64::NAN)
+ $crate::Value::Float(::core::f64::NAN.copysign(1.0))
};
(@value (-inf)) => {
- $crate::Value::Float(::std::f64::NEG_INFINITY)
+ $crate::Value::Float(::core::f64::NEG_INFINITY)
};
(@value (inf)) => {
- $crate::Value::Float(::std::f64::INFINITY)
+ $crate::Value::Float(::core::f64::INFINITY)
};
(@value inf) => {
- $crate::Value::Float(::std::f64::INFINITY)
+ $crate::Value::Float(::core::f64::INFINITY)
};
// Construct a Value from any other type, probably string or boolean or number.
diff --git a/third_party/rust/toml/src/map.rs b/third_party/rust/toml/src/map.rs
@@ -6,24 +6,21 @@
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-//! A map of String to toml::Value.
+//! A map of `String` to [Value][crate::Value].
//!
//! By default the map is backed by a [`BTreeMap`]. Enable the `preserve_order`
-//! feature of toml-rs to use [`LinkedHashMap`] instead.
+//! feature of toml-rs to use [`IndexMap`] instead.
//!
//! [`BTreeMap`]: https://doc.rust-lang.org/std/collections/struct.BTreeMap.html
-//! [`LinkedHashMap`]: https://docs.rs/linked-hash-map/*/linked_hash_map/struct.LinkedHashMap.html
-
-use crate::value::Value;
-use serde::{de, ser};
-use std::borrow::Borrow;
-use std::fmt::{self, Debug};
-use std::hash::Hash;
-use std::iter::FromIterator;
-use std::ops;
+//! [`IndexMap`]: https://docs.rs/indexmap
#[cfg(not(feature = "preserve_order"))]
-use std::collections::{btree_map, BTreeMap};
+use alloc::collections::{btree_map, BTreeMap};
+use core::borrow::Borrow;
+use core::fmt::{self, Debug};
+use core::hash::Hash;
+use core::iter::FromIterator;
+use core::ops;
#[cfg(feature = "preserve_order")]
use indexmap::{self, IndexMap};
@@ -31,19 +28,35 @@ use indexmap::{self, IndexMap};
/// Represents a TOML key/value type.
pub struct Map<K, V> {
map: MapImpl<K, V>,
+ dotted: bool,
+ implicit: bool,
+ inline: bool,
}
#[cfg(not(feature = "preserve_order"))]
type MapImpl<K, V> = BTreeMap<K, V>;
+#[cfg(all(feature = "preserve_order", not(feature = "fast_hash")))]
+type RandomState = std::collections::hash_map::RandomState;
+#[cfg(all(feature = "preserve_order", feature = "fast_hash"))]
+type RandomState = foldhash::fast::RandomState;
#[cfg(feature = "preserve_order")]
-type MapImpl<K, V> = IndexMap<K, V>;
+type MapImpl<K, V> = IndexMap<K, V, RandomState>;
-impl Map<String, Value> {
+impl<K, V> Map<K, V>
+where
+ K: Ord + Hash,
+{
/// Makes a new empty Map.
#[inline]
pub fn new() -> Self {
- Map {
+ Self {
+ #[cfg(feature = "preserve_order")]
+ map: MapImpl::with_hasher(RandomState::default()),
+ #[cfg(not(feature = "preserve_order"))]
map: MapImpl::new(),
+ dotted: false,
+ implicit: false,
+ inline: false,
}
}
@@ -53,24 +66,25 @@ impl Map<String, Value> {
pub fn with_capacity(capacity: usize) -> Self {
// does not support with_capacity
let _ = capacity;
- Map {
- map: BTreeMap::new(),
- }
+ Self::new()
}
#[cfg(feature = "preserve_order")]
/// Makes a new empty Map with the given initial capacity.
#[inline]
pub fn with_capacity(capacity: usize) -> Self {
- Map {
- map: IndexMap::with_capacity(capacity),
+ Self {
+ map: IndexMap::with_capacity_and_hasher(capacity, RandomState::default()),
+ dotted: false,
+ implicit: false,
+ inline: false,
}
}
/// Clears the map, removing all values.
#[inline]
pub fn clear(&mut self) {
- self.map.clear()
+ self.map.clear();
}
/// Returns a reference to the value corresponding to the key.
@@ -78,10 +92,10 @@ impl Map<String, Value> {
/// The key may be any borrowed form of the map's key type, but the ordering
/// on the borrowed form *must* match the ordering on the key type.
#[inline]
- pub fn get<Q: ?Sized>(&self, key: &Q) -> Option<&Value>
+ pub fn get<Q>(&self, key: &Q) -> Option<&V>
where
- String: Borrow<Q>,
- Q: Ord + Eq + Hash,
+ K: Borrow<Q>,
+ Q: Ord + Eq + Hash + ?Sized,
{
self.map.get(key)
}
@@ -91,10 +105,10 @@ impl Map<String, Value> {
/// The key may be any borrowed form of the map's key type, but the ordering
/// on the borrowed form *must* match the ordering on the key type.
#[inline]
- pub fn contains_key<Q: ?Sized>(&self, key: &Q) -> bool
+ pub fn contains_key<Q>(&self, key: &Q) -> bool
where
- String: Borrow<Q>,
- Q: Ord + Eq + Hash,
+ K: Borrow<Q>,
+ Q: Ord + Eq + Hash + ?Sized,
{
self.map.contains_key(key)
}
@@ -104,14 +118,27 @@ impl Map<String, Value> {
/// The key may be any borrowed form of the map's key type, but the ordering
/// on the borrowed form *must* match the ordering on the key type.
#[inline]
- pub fn get_mut<Q: ?Sized>(&mut self, key: &Q) -> Option<&mut Value>
+ pub fn get_mut<Q>(&mut self, key: &Q) -> Option<&mut V>
where
- String: Borrow<Q>,
- Q: Ord + Eq + Hash,
+ K: Borrow<Q>,
+ Q: Ord + Eq + Hash + ?Sized,
{
self.map.get_mut(key)
}
+ /// Returns the key-value pair matching the given key.
+ ///
+ /// The key may be any borrowed form of the map's key type, but the ordering
+ /// on the borrowed form *must* match the ordering on the key type.
+ #[inline]
+ pub fn get_key_value<Q>(&self, key: &Q) -> Option<(&K, &V)>
+ where
+ K: Borrow<Q>,
+ Q: ?Sized + Ord + Eq + Hash,
+ {
+ self.map.get_key_value(key)
+ }
+
/// Inserts a key-value pair into the map.
///
/// If the map did not have this key present, `None` is returned.
@@ -120,7 +147,7 @@ impl Map<String, Value> {
/// value is returned. The key is not updated, though; this matters for
/// types that can be `==` without being identical.
#[inline]
- pub fn insert(&mut self, k: String, v: Value) -> Option<Value> {
+ pub fn insert(&mut self, k: K, v: V) -> Option<V> {
self.map.insert(k, v)
}
@@ -130,24 +157,63 @@ impl Map<String, Value> {
/// The key may be any borrowed form of the map's key type, but the ordering
/// on the borrowed form *must* match the ordering on the key type.
#[inline]
- pub fn remove<Q: ?Sized>(&mut self, key: &Q) -> Option<Value>
+ pub fn remove<Q>(&mut self, key: &Q) -> Option<V>
where
- String: Borrow<Q>,
- Q: Ord + Eq + Hash,
+ K: Borrow<Q>,
+ Q: Ord + Eq + Hash + ?Sized,
{
- self.map.remove(key)
+ #[cfg(not(feature = "preserve_order"))]
+ {
+ self.map.remove(key)
+ }
+ #[cfg(feature = "preserve_order")]
+ {
+ self.map.shift_remove(key)
+ }
+ }
+
+ /// Removes a key from the map, returning the stored key and value if the key was previously in the map.
+ #[inline]
+ pub fn remove_entry<Q>(&mut self, key: &Q) -> Option<(K, V)>
+ where
+ K: Borrow<Q>,
+ Q: Ord + Eq + Hash + ?Sized,
+ {
+ #[cfg(not(feature = "preserve_order"))]
+ {
+ self.map.remove_entry(key)
+ }
+ #[cfg(feature = "preserve_order")]
+ {
+ self.map.shift_remove_entry(key)
+ }
+ }
+
+ /// Retains only the elements specified by the `keep` predicate.
+ ///
+ /// In other words, remove all pairs `(k, v)` for which `keep(&k, &mut v)`
+ /// returns `false`.
+ ///
+ /// The elements are visited in iteration order.
+ #[inline]
+ pub fn retain<F>(&mut self, mut keep: F)
+ where
+ K: AsRef<str>,
+ F: FnMut(&str, &mut V) -> bool,
+ {
+ self.map.retain(|key, value| keep(key.as_ref(), value));
}
/// Gets the given key's corresponding entry in the map for in-place
/// manipulation.
- pub fn entry<S>(&mut self, key: S) -> Entry<'_>
+ pub fn entry<S>(&mut self, key: S) -> Entry<'_, K, V>
where
- S: Into<String>,
+ S: Into<K>,
{
+ #[cfg(not(feature = "preserve_order"))]
+ use alloc::collections::btree_map::Entry as EntryImpl;
#[cfg(feature = "preserve_order")]
use indexmap::map::Entry as EntryImpl;
- #[cfg(not(feature = "preserve_order"))]
- use std::collections::btree_map::Entry as EntryImpl;
match self.map.entry(key.into()) {
EntryImpl::Vacant(vacant) => Entry::Vacant(VacantEntry { vacant }),
@@ -169,7 +235,7 @@ impl Map<String, Value> {
/// Gets an iterator over the entries of the map.
#[inline]
- pub fn iter(&self) -> Iter<'_> {
+ pub fn iter(&self) -> Iter<'_, K, V> {
Iter {
iter: self.map.iter(),
}
@@ -177,7 +243,7 @@ impl Map<String, Value> {
/// Gets a mutable iterator over the entries of the map.
#[inline]
- pub fn iter_mut(&mut self) -> IterMut<'_> {
+ pub fn iter_mut(&mut self) -> IterMut<'_, K, V> {
IterMut {
iter: self.map.iter_mut(),
}
@@ -185,7 +251,7 @@ impl Map<String, Value> {
/// Gets an iterator over the keys of the map.
#[inline]
- pub fn keys(&self) -> Keys<'_> {
+ pub fn keys(&self) -> Keys<'_, K, V> {
Keys {
iter: self.map.keys(),
}
@@ -193,32 +259,96 @@ impl Map<String, Value> {
/// Gets an iterator over the values of the map.
#[inline]
- pub fn values(&self) -> Values<'_> {
+ pub fn values(&self) -> Values<'_, K, V> {
Values {
iter: self.map.values(),
}
}
+
+ /// Scan through each key-value pair in the map and keep those where the
+ /// closure `keep` returns `true`.
+ ///
+ /// The elements are visited in order, and remaining elements keep their
+ /// order.
+ ///
+ /// Computes in **O(n)** time (average).
+ #[allow(unused_mut)]
+ pub(crate) fn mut_entries<F>(&mut self, mut op: F)
+ where
+ F: FnMut(&mut K, &mut V),
+ {
+ #[cfg(feature = "preserve_order")]
+ {
+ use indexmap::map::MutableKeys as _;
+ for (key, value) in self.map.iter_mut2() {
+ op(key, value);
+ }
+ }
+ #[cfg(not(feature = "preserve_order"))]
+ {
+ self.map = core::mem::take(&mut self.map)
+ .into_iter()
+ .map(move |(mut k, mut v)| {
+ op(&mut k, &mut v);
+ (k, v)
+ })
+ .collect();
+ }
+ }
+}
+
+impl<K, V> Map<K, V>
+where
+ K: Ord,
+{
+ pub(crate) fn is_dotted(&self) -> bool {
+ self.dotted
+ }
+
+ pub(crate) fn is_implicit(&self) -> bool {
+ self.implicit
+ }
+
+ pub(crate) fn is_inline(&self) -> bool {
+ self.inline
+ }
+
+ pub(crate) fn set_implicit(&mut self, yes: bool) {
+ self.implicit = yes;
+ }
+
+ pub(crate) fn set_dotted(&mut self, yes: bool) {
+ self.dotted = yes;
+ }
+
+ pub(crate) fn set_inline(&mut self, yes: bool) {
+ self.inline = yes;
+ }
}
-impl Default for Map<String, Value> {
+impl<K, V> Default for Map<K, V>
+where
+ K: Ord + Hash,
+{
#[inline]
fn default() -> Self {
- Map {
- map: MapImpl::new(),
- }
+ Self::new()
}
}
-impl Clone for Map<String, Value> {
+impl<K: Clone, V: Clone> Clone for Map<K, V> {
#[inline]
fn clone(&self) -> Self {
- Map {
+ Self {
map: self.map.clone(),
+ dotted: self.dotted,
+ implicit: self.implicit,
+ inline: self.inline,
}
}
}
-impl PartialEq for Map<String, Value> {
+impl<K: Eq + Hash, V: PartialEq> PartialEq for Map<K, V> {
#[inline]
fn eq(&self, other: &Self) -> bool {
self.map.eq(&other.map)
@@ -227,110 +357,55 @@ impl PartialEq for Map<String, Value> {
/// Access an element of this map. Panics if the given key is not present in the
/// map.
-impl<'a, Q: ?Sized> ops::Index<&'a Q> for Map<String, Value>
+impl<K, V, Q> ops::Index<&Q> for Map<K, V>
where
- String: Borrow<Q>,
- Q: Ord + Eq + Hash,
+ K: Borrow<Q> + Ord,
+ Q: Ord + Eq + Hash + ?Sized,
{
- type Output = Value;
+ type Output = V;
- fn index(&self, index: &Q) -> &Value {
+ fn index(&self, index: &Q) -> &V {
self.map.index(index)
}
}
/// Mutably access an element of this map. Panics if the given key is not
/// present in the map.
-impl<'a, Q: ?Sized> ops::IndexMut<&'a Q> for Map<String, Value>
+impl<K, V, Q> ops::IndexMut<&Q> for Map<K, V>
where
- String: Borrow<Q>,
- Q: Ord + Eq + Hash,
+ K: Borrow<Q> + Ord,
+ Q: Ord + Eq + Hash + ?Sized,
{
- fn index_mut(&mut self, index: &Q) -> &mut Value {
+ fn index_mut(&mut self, index: &Q) -> &mut V {
self.map.get_mut(index).expect("no entry found for key")
}
}
-impl Debug for Map<String, Value> {
+impl<K: Debug, V: Debug> Debug for Map<K, V> {
#[inline]
fn fmt(&self, formatter: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> {
self.map.fmt(formatter)
}
}
-impl ser::Serialize for Map<String, Value> {
- #[inline]
- fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
- where
- S: ser::Serializer,
- {
- use serde::ser::SerializeMap;
- let mut map = serializer.serialize_map(Some(self.len()))?;
- for (k, v) in self {
- map.serialize_key(k)?;
- map.serialize_value(v)?;
- }
- map.end()
- }
-}
-
-impl<'de> de::Deserialize<'de> for Map<String, Value> {
- #[inline]
- fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
- where
- D: de::Deserializer<'de>,
- {
- struct Visitor;
-
- impl<'de> de::Visitor<'de> for Visitor {
- type Value = Map<String, Value>;
-
- fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {
- formatter.write_str("a map")
- }
-
- #[inline]
- fn visit_unit<E>(self) -> Result<Self::Value, E>
- where
- E: de::Error,
- {
- Ok(Map::new())
- }
-
- #[inline]
- fn visit_map<V>(self, mut visitor: V) -> Result<Self::Value, V::Error>
- where
- V: de::MapAccess<'de>,
- {
- let mut values = Map::new();
-
- while let Some((key, value)) = visitor.next_entry()? {
- values.insert(key, value);
- }
-
- Ok(values)
- }
- }
-
- deserializer.deserialize_map(Visitor)
- }
-}
-
-impl FromIterator<(String, Value)> for Map<String, Value> {
+impl<K: Ord + Hash, V> FromIterator<(K, V)> for Map<K, V> {
fn from_iter<T>(iter: T) -> Self
where
- T: IntoIterator<Item = (String, Value)>,
+ T: IntoIterator<Item = (K, V)>,
{
- Map {
+ Self {
map: FromIterator::from_iter(iter),
+ dotted: false,
+ implicit: false,
+ inline: false,
}
}
}
-impl Extend<(String, Value)> for Map<String, Value> {
+impl<K: Ord + Hash, V> Extend<(K, V)> for Map<K, V> {
fn extend<T>(&mut self, iter: T)
where
- T: IntoIterator<Item = (String, Value)>,
+ T: IntoIterator<Item = (K, V)>,
{
self.map.extend(iter);
}
@@ -373,40 +448,40 @@ macro_rules! delegate_iterator {
///
/// [`entry`]: struct.Map.html#method.entry
/// [`Map`]: struct.Map.html
-pub enum Entry<'a> {
+pub enum Entry<'a, K, V> {
/// A vacant Entry.
- Vacant(VacantEntry<'a>),
+ Vacant(VacantEntry<'a, K, V>),
/// An occupied Entry.
- Occupied(OccupiedEntry<'a>),
+ Occupied(OccupiedEntry<'a, K, V>),
}
/// A vacant Entry. It is part of the [`Entry`] enum.
///
/// [`Entry`]: enum.Entry.html
-pub struct VacantEntry<'a> {
- vacant: VacantEntryImpl<'a>,
+pub struct VacantEntry<'a, K, V> {
+ vacant: VacantEntryImpl<'a, K, V>,
}
/// An occupied Entry. It is part of the [`Entry`] enum.
///
/// [`Entry`]: enum.Entry.html
-pub struct OccupiedEntry<'a> {
- occupied: OccupiedEntryImpl<'a>,
+pub struct OccupiedEntry<'a, K, V> {
+ occupied: OccupiedEntryImpl<'a, K, V>,
}
#[cfg(not(feature = "preserve_order"))]
-type VacantEntryImpl<'a> = btree_map::VacantEntry<'a, String, Value>;
+type VacantEntryImpl<'a, K, V> = btree_map::VacantEntry<'a, K, V>;
#[cfg(feature = "preserve_order")]
-type VacantEntryImpl<'a> = indexmap::map::VacantEntry<'a, String, Value>;
+type VacantEntryImpl<'a, K, V> = indexmap::map::VacantEntry<'a, K, V>;
#[cfg(not(feature = "preserve_order"))]
-type OccupiedEntryImpl<'a> = btree_map::OccupiedEntry<'a, String, Value>;
+type OccupiedEntryImpl<'a, K, V> = btree_map::OccupiedEntry<'a, K, V>;
#[cfg(feature = "preserve_order")]
-type OccupiedEntryImpl<'a> = indexmap::map::OccupiedEntry<'a, String, Value>;
+type OccupiedEntryImpl<'a, K, V> = indexmap::map::OccupiedEntry<'a, K, V>;
-impl<'a> Entry<'a> {
+impl<'a, K: Ord, V> Entry<'a, K, V> {
/// Returns a reference to this entry's key.
- pub fn key(&self) -> &String {
+ pub fn key(&self) -> &K {
match *self {
Entry::Vacant(ref e) => e.key(),
Entry::Occupied(ref e) => e.key(),
@@ -415,7 +490,7 @@ impl<'a> Entry<'a> {
/// Ensures a value is in the entry by inserting the default if empty, and
/// returns a mutable reference to the value in the entry.
- pub fn or_insert(self, default: Value) -> &'a mut Value {
+ pub fn or_insert(self, default: V) -> &'a mut V {
match self {
Entry::Vacant(entry) => entry.insert(default),
Entry::Occupied(entry) => entry.into_mut(),
@@ -425,9 +500,9 @@ impl<'a> Entry<'a> {
/// Ensures a value is in the entry by inserting the result of the default
/// function if empty, and returns a mutable reference to the value in the
/// entry.
- pub fn or_insert_with<F>(self, default: F) -> &'a mut Value
+ pub fn or_insert_with<F>(self, default: F) -> &'a mut V
where
- F: FnOnce() -> Value,
+ F: FnOnce() -> V,
{
match self {
Entry::Vacant(entry) => entry.insert(default()),
@@ -436,66 +511,73 @@ impl<'a> Entry<'a> {
}
}
-impl<'a> VacantEntry<'a> {
+impl<'a, K: Ord, V> VacantEntry<'a, K, V> {
/// Gets a reference to the key that would be used when inserting a value
- /// through the VacantEntry.
+ /// through the `VacantEntry`.
#[inline]
- pub fn key(&self) -> &String {
+ pub fn key(&self) -> &K {
self.vacant.key()
}
- /// Sets the value of the entry with the VacantEntry's key, and returns a
+ /// Sets the value of the entry with the `VacantEntry`'s key, and returns a
/// mutable reference to it.
#[inline]
- pub fn insert(self, value: Value) -> &'a mut Value {
+ pub fn insert(self, value: V) -> &'a mut V {
self.vacant.insert(value)
}
}
-impl<'a> OccupiedEntry<'a> {
+impl<'a, K: Ord, V> OccupiedEntry<'a, K, V> {
/// Gets a reference to the key in the entry.
#[inline]
- pub fn key(&self) -> &String {
+ pub fn key(&self) -> &K {
self.occupied.key()
}
/// Gets a reference to the value in the entry.
#[inline]
- pub fn get(&self) -> &Value {
+ pub fn get(&self) -> &V {
self.occupied.get()
}
/// Gets a mutable reference to the value in the entry.
#[inline]
- pub fn get_mut(&mut self) -> &mut Value {
+ pub fn get_mut(&mut self) -> &mut V {
self.occupied.get_mut()
}
/// Converts the entry into a mutable reference to its value.
#[inline]
- pub fn into_mut(self) -> &'a mut Value {
+ pub fn into_mut(self) -> &'a mut V {
self.occupied.into_mut()
}
/// Sets the value of the entry with the `OccupiedEntry`'s key, and returns
/// the entry's old value.
#[inline]
- pub fn insert(&mut self, value: Value) -> Value {
+ pub fn insert(&mut self, value: V) -> V {
self.occupied.insert(value)
}
/// Takes the value of the entry out of the map, and returns it.
#[inline]
- pub fn remove(self) -> Value {
- self.occupied.remove()
+ pub fn remove(self) -> V {
+ #[cfg(not(feature = "preserve_order"))]
+ {
+ self.occupied.remove()
+ }
+ #[cfg(feature = "preserve_order")]
+ {
+ self.occupied.shift_remove()
+ }
}
}
//////////////////////////////////////////////////////////////////////////////
-impl<'a> IntoIterator for &'a Map<String, Value> {
- type Item = (&'a String, &'a Value);
- type IntoIter = Iter<'a>;
+impl<'a, K, V> IntoIterator for &'a Map<K, V> {
+ type Item = (&'a K, &'a V);
+ type IntoIter = Iter<'a, K, V>;
#[inline]
fn into_iter(self) -> Self::IntoIter {
Iter {
@@ -504,23 +586,23 @@ impl<'a> IntoIterator for &'a Map<String, Value> {
}
}
-/// An iterator over a toml::Map's entries.
-pub struct Iter<'a> {
- iter: IterImpl<'a>,
+/// An iterator over a `toml::Map`'s entries.
+pub struct Iter<'a, K, V> {
+ iter: IterImpl<'a, K, V>,
}
#[cfg(not(feature = "preserve_order"))]
-type IterImpl<'a> = btree_map::Iter<'a, String, Value>;
+type IterImpl<'a, K, V> = btree_map::Iter<'a, K, V>;
#[cfg(feature = "preserve_order")]
-type IterImpl<'a> = indexmap::map::Iter<'a, String, Value>;
+type IterImpl<'a, K, V> = indexmap::map::Iter<'a, K, V>;
-delegate_iterator!((Iter<'a>) => (&'a String, &'a Value));
+delegate_iterator!((Iter<'a, K, V>) => (&'a K, &'a V));
//////////////////////////////////////////////////////////////////////////////
-impl<'a> IntoIterator for &'a mut Map<String, Value> {
- type Item = (&'a String, &'a mut Value);
- type IntoIter = IterMut<'a>;
+impl<'a, K, V> IntoIterator for &'a mut Map<K, V> {
+ type Item = (&'a K, &'a mut V);
+ type IntoIter = IterMut<'a, K, V>;
#[inline]
fn into_iter(self) -> Self::IntoIter {
IterMut {
@@ -529,23 +611,23 @@ impl<'a> IntoIterator for &'a mut Map<String, Value> {
}
}
-/// A mutable iterator over a toml::Map's entries.
-pub struct IterMut<'a> {
- iter: IterMutImpl<'a>,
+/// A mutable iterator over a `toml::Map`'s entries.
+pub struct IterMut<'a, K, V> {
+ iter: IterMutImpl<'a, K, V>,
}
#[cfg(not(feature = "preserve_order"))]
-type IterMutImpl<'a> = btree_map::IterMut<'a, String, Value>;
+type IterMutImpl<'a, K, V> = btree_map::IterMut<'a, K, V>;
#[cfg(feature = "preserve_order")]
-type IterMutImpl<'a> = indexmap::map::IterMut<'a, String, Value>;
+type IterMutImpl<'a, K, V> = indexmap::map::IterMut<'a, K, V>;
-delegate_iterator!((IterMut<'a>) => (&'a String, &'a mut Value));
+delegate_iterator!((IterMut<'a, K, V>) => (&'a K, &'a mut V));
//////////////////////////////////////////////////////////////////////////////
-impl IntoIterator for Map<String, Value> {
- type Item = (String, Value);
- type IntoIter = IntoIter;
+impl<K, V> IntoIterator for Map<K, V> {
+ type Item = (K, V);
+ type IntoIter = IntoIter<K, V>;
#[inline]
fn into_iter(self) -> Self::IntoIter {
IntoIter {
@@ -554,42 +636,42 @@ impl IntoIterator for Map<String, Value> {
}
}
-/// An owning iterator over a toml::Map's entries.
-pub struct IntoIter {
- iter: IntoIterImpl,
+/// An owning iterator over a `toml::Map`'s entries.
+pub struct IntoIter<K, V> {
+ iter: IntoIterImpl<K, V>,
}
#[cfg(not(feature = "preserve_order"))]
-type IntoIterImpl = btree_map::IntoIter<String, Value>;
+type IntoIterImpl<K, V> = btree_map::IntoIter<K, V>;
#[cfg(feature = "preserve_order")]
-type IntoIterImpl = indexmap::map::IntoIter<String, Value>;
+type IntoIterImpl<K, V> = indexmap::map::IntoIter<K, V>;
-delegate_iterator!((IntoIter) => (String, Value));
+delegate_iterator!((IntoIter<K,V>) => (K, V));
//////////////////////////////////////////////////////////////////////////////
-/// An iterator over a toml::Map's keys.
-pub struct Keys<'a> {
- iter: KeysImpl<'a>,
+/// An iterator over a `toml::Map`'s keys.
+pub struct Keys<'a, K, V> {
+ iter: KeysImpl<'a, K, V>,
}
#[cfg(not(feature = "preserve_order"))]
-type KeysImpl<'a> = btree_map::Keys<'a, String, Value>;
+type KeysImpl<'a, K, V> = btree_map::Keys<'a, K, V>;
#[cfg(feature = "preserve_order")]
-type KeysImpl<'a> = indexmap::map::Keys<'a, String, Value>;
+type KeysImpl<'a, K, V> = indexmap::map::Keys<'a, K, V>;
-delegate_iterator!((Keys<'a>) => &'a String);
+delegate_iterator!((Keys<'a, K, V>) => &'a K);
//////////////////////////////////////////////////////////////////////////////
-/// An iterator over a toml::Map's values.
-pub struct Values<'a> {
- iter: ValuesImpl<'a>,
+/// An iterator over a `toml::Map`'s values.
+pub struct Values<'a, K, V> {
+ iter: ValuesImpl<'a, K, V>,
}
#[cfg(not(feature = "preserve_order"))]
-type ValuesImpl<'a> = btree_map::Values<'a, String, Value>;
+type ValuesImpl<'a, K, V> = btree_map::Values<'a, K, V>;
#[cfg(feature = "preserve_order")]
-type ValuesImpl<'a> = indexmap::map::Values<'a, String, Value>;
+type ValuesImpl<'a, K, V> = indexmap::map::Values<'a, K, V>;
-delegate_iterator!((Values<'a>) => &'a Value);
+delegate_iterator!((Values<'a, K, V>) => &'a V);
diff --git a/third_party/rust/toml/src/ser.rs b/third_party/rust/toml/src/ser.rs
@@ -1,1853 +0,0 @@
-//! Serializing Rust structures into TOML.
-//!
-//! This module contains all the Serde support for serializing Rust structures
-//! into TOML documents (as strings). Note that some top-level functions here
-//! are also provided at the top of the crate.
-//!
-//! Note that the TOML format has a restriction that if a table itself contains
-//! tables, all keys with non-table values must be emitted first. This is
-//! typically easy to ensure happens when you're defining a `struct` as you can
-//! reorder the fields manually, but when working with maps (such as `BTreeMap`
-//! or `HashMap`) this can lead to serialization errors. In those situations you
-//! may use the `tables_last` function in this module like so:
-//!
-//! ```rust
-//! # use serde_derive::Serialize;
-//! # use std::collections::HashMap;
-//! #[derive(Serialize)]
-//! struct Manifest {
-//! package: Package,
-//! #[serde(serialize_with = "toml::ser::tables_last")]
-//! dependencies: HashMap<String, Dependency>,
-//! }
-//! # type Package = String;
-//! # type Dependency = String;
-//! # fn main() {}
-//! ```
-
-use std::cell::Cell;
-use std::error;
-use std::fmt::{self, Write};
-use std::marker;
-use std::rc::Rc;
-
-use crate::datetime;
-use serde::ser;
-
-/// Serialize the given data structure as a TOML byte vector.
-///
-/// Serialization can fail if `T`'s implementation of `Serialize` decides to
-/// fail, if `T` contains a map with non-string keys, or if `T` attempts to
-/// serialize an unsupported datatype such as an enum, tuple, or tuple struct.
-pub fn to_vec<T: ?Sized>(value: &T) -> Result<Vec<u8>, Error>
-where
- T: ser::Serialize,
-{
- to_string(value).map(|e| e.into_bytes())
-}
-
-/// Serialize the given data structure as a String of TOML.
-///
-/// Serialization can fail if `T`'s implementation of `Serialize` decides to
-/// fail, if `T` contains a map with non-string keys, or if `T` attempts to
-/// serialize an unsupported datatype such as an enum, tuple, or tuple struct.
-///
-/// # Examples
-///
-/// ```
-/// use serde_derive::Serialize;
-///
-/// #[derive(Serialize)]
-/// struct Config {
-/// database: Database,
-/// }
-///
-/// #[derive(Serialize)]
-/// struct Database {
-/// ip: String,
-/// port: Vec<u16>,
-/// connection_max: u32,
-/// enabled: bool,
-/// }
-///
-/// let config = Config {
-/// database: Database {
-/// ip: "192.168.1.1".to_string(),
-/// port: vec![8001, 8002, 8003],
-/// connection_max: 5000,
-/// enabled: false,
-/// },
-/// };
-///
-/// let toml = toml::to_string(&config).unwrap();
-/// println!("{}", toml)
-/// ```
-pub fn to_string<T: ?Sized>(value: &T) -> Result<String, Error>
-where
- T: ser::Serialize,
-{
- let mut dst = String::with_capacity(128);
- value.serialize(&mut Serializer::new(&mut dst))?;
- Ok(dst)
-}
-
-/// Serialize the given data structure as a "pretty" String of TOML.
-///
-/// This is identical to `to_string` except the output string has a more
-/// "pretty" output. See `Serializer::pretty` for more details.
-pub fn to_string_pretty<T: ?Sized>(value: &T) -> Result<String, Error>
-where
- T: ser::Serialize,
-{
- let mut dst = String::with_capacity(128);
- value.serialize(&mut Serializer::pretty(&mut dst))?;
- Ok(dst)
-}
-
-/// Errors that can occur when serializing a type.
-#[derive(Debug, PartialEq, Eq, Clone)]
-#[non_exhaustive]
-pub enum Error {
- /// Indicates that a Rust type was requested to be serialized but it was not
- /// supported.
- ///
- /// Currently the TOML format does not support serializing types such as
- /// enums, tuples and tuple structs.
- UnsupportedType,
-
- /// The key of all TOML maps must be strings, but serialization was
- /// attempted where the key of a map was not a string.
- KeyNotString,
-
- /// An error that we never omit but keep for backwards compatibility
- #[doc(hidden)]
- KeyNewline,
-
- /// An array had to be homogeneous, but now it is allowed to be heterogeneous.
- #[doc(hidden)]
- ArrayMixedType,
-
- /// All values in a TOML table must be emitted before further tables are
- /// emitted. If a value is emitted *after* a table then this error is
- /// generated.
- ValueAfterTable,
-
- /// A serialized date was invalid.
- DateInvalid,
-
- /// A serialized number was invalid.
- NumberInvalid,
-
- /// None was attempted to be serialized, but it's not supported.
- UnsupportedNone,
-
- /// A custom error which could be generated when serializing a particular
- /// type.
- Custom(String),
-}
-
-#[derive(Debug, Default, Clone)]
-/// Internal place for holding array settings
-struct ArraySettings {
- indent: usize,
- trailing_comma: bool,
-}
-
-impl ArraySettings {
- fn pretty() -> ArraySettings {
- ArraySettings {
- indent: 4,
- trailing_comma: true,
- }
- }
-}
-
-#[derive(Debug, Default, Clone)]
-/// String settings
-struct StringSettings {
- /// Whether to use literal strings when possible
- literal: bool,
-}
-
-impl StringSettings {
- fn pretty() -> StringSettings {
- StringSettings { literal: true }
- }
-}
-
-#[derive(Debug, Default, Clone)]
-/// Internal struct for holding serialization settings
-struct Settings {
- array: Option<ArraySettings>,
- string: Option<StringSettings>,
-}
-
-/// Serialization implementation for TOML.
-///
-/// This structure implements serialization support for TOML to serialize an
-/// arbitrary type to TOML. Note that the TOML format does not support all
-/// datatypes in Rust, such as enums, tuples, and tuple structs. These types
-/// will generate an error when serialized.
-///
-/// Currently a serializer always writes its output to an in-memory `String`,
-/// which is passed in when creating the serializer itself.
-pub struct Serializer<'a> {
- dst: &'a mut String,
- state: State<'a>,
- settings: Rc<Settings>,
-}
-
-#[derive(Debug, Copy, Clone)]
-enum ArrayState {
- Started,
- StartedAsATable,
-}
-
-#[derive(Debug, Clone)]
-enum State<'a> {
- Table {
- key: &'a str,
- parent: &'a State<'a>,
- first: &'a Cell<bool>,
- table_emitted: &'a Cell<bool>,
- },
- Array {
- parent: &'a State<'a>,
- first: &'a Cell<bool>,
- type_: &'a Cell<Option<ArrayState>>,
- len: Option<usize>,
- },
- End,
-}
-
-#[doc(hidden)]
-pub struct SerializeSeq<'a, 'b> {
- ser: &'b mut Serializer<'a>,
- first: Cell<bool>,
- type_: Cell<Option<ArrayState>>,
- len: Option<usize>,
-}
-
-#[doc(hidden)]
-pub enum SerializeTable<'a, 'b> {
- Datetime(&'b mut Serializer<'a>),
- Table {
- ser: &'b mut Serializer<'a>,
- key: String,
- first: Cell<bool>,
- table_emitted: Cell<bool>,
- },
-}
-
-impl<'a> Serializer<'a> {
- /// Creates a new serializer which will emit TOML into the buffer provided.
- ///
- /// The serializer can then be used to serialize a type after which the data
- /// will be present in `dst`.
- pub fn new(dst: &'a mut String) -> Serializer<'a> {
- Serializer {
- dst,
- state: State::End,
- settings: Rc::new(Settings::default()),
- }
- }
-
- /// Instantiate a "pretty" formatter
- ///
- /// By default this will use:
- ///
- /// - pretty strings: strings with newlines will use the `'''` syntax. See
- /// `Serializer::pretty_string`
- /// - pretty arrays: each item in arrays will be on a newline, have an indentation of 4 and
- /// have a trailing comma. See `Serializer::pretty_array`
- pub fn pretty(dst: &'a mut String) -> Serializer<'a> {
- Serializer {
- dst,
- state: State::End,
- settings: Rc::new(Settings {
- array: Some(ArraySettings::pretty()),
- string: Some(StringSettings::pretty()),
- }),
- }
- }
-
- /// Enable or Disable pretty strings
- ///
- /// If enabled, literal strings will be used when possible and strings with
- /// one or more newlines will use triple quotes (i.e.: `'''` or `"""`)
- ///
- /// # Examples
- ///
- /// Instead of:
- ///
- /// ```toml,ignore
- /// single = "no newlines"
- /// text = "\nfoo\nbar\n"
- /// ```
- ///
- /// You will have:
- ///
- /// ```toml,ignore
- /// single = 'no newlines'
- /// text = '''
- /// foo
- /// bar
- /// '''
- /// ```
- pub fn pretty_string(&mut self, value: bool) -> &mut Self {
- Rc::get_mut(&mut self.settings).unwrap().string = if value {
- Some(StringSettings::pretty())
- } else {
- None
- };
- self
- }
-
- /// Enable or Disable Literal strings for pretty strings
- ///
- /// If enabled, literal strings will be used when possible and strings with
- /// one or more newlines will use triple quotes (i.e.: `'''` or `"""`)
- ///
- /// If disabled, literal strings will NEVER be used and strings with one or
- /// more newlines will use `"""`
- ///
- /// # Examples
- ///
- /// Instead of:
- ///
- /// ```toml,ignore
- /// single = "no newlines"
- /// text = "\nfoo\nbar\n"
- /// ```
- ///
- /// You will have:
- ///
- /// ```toml,ignore
- /// single = "no newlines"
- /// text = """
- /// foo
- /// bar
- /// """
- /// ```
- pub fn pretty_string_literal(&mut self, value: bool) -> &mut Self {
- let use_default = if let Some(ref mut s) = Rc::get_mut(&mut self.settings).unwrap().string {
- s.literal = value;
- false
- } else {
- true
- };
-
- if use_default {
- let mut string = StringSettings::pretty();
- string.literal = value;
- Rc::get_mut(&mut self.settings).unwrap().string = Some(string);
- }
- self
- }
-
- /// Enable or Disable pretty arrays
- ///
- /// If enabled, arrays will always have each item on their own line.
- ///
- /// Some specific features can be controlled via other builder methods:
- ///
- /// - `Serializer::pretty_array_indent`: set the indent to a value other
- /// than 4.
- /// - `Serializer::pretty_array_trailing_comma`: enable/disable the trailing
- /// comma on the last item.
- ///
- /// # Examples
- ///
- /// Instead of:
- ///
- /// ```toml,ignore
- /// array = ["foo", "bar"]
- /// ```
- ///
- /// You will have:
- ///
- /// ```toml,ignore
- /// array = [
- /// "foo",
- /// "bar",
- /// ]
- /// ```
- pub fn pretty_array(&mut self, value: bool) -> &mut Self {
- Rc::get_mut(&mut self.settings).unwrap().array = if value {
- Some(ArraySettings::pretty())
- } else {
- None
- };
- self
- }
-
- /// Set the indent for pretty arrays
- ///
- /// See `Serializer::pretty_array` for more details.
- pub fn pretty_array_indent(&mut self, value: usize) -> &mut Self {
- let use_default = if let Some(ref mut a) = Rc::get_mut(&mut self.settings).unwrap().array {
- a.indent = value;
- false
- } else {
- true
- };
-
- if use_default {
- let mut array = ArraySettings::pretty();
- array.indent = value;
- Rc::get_mut(&mut self.settings).unwrap().array = Some(array);
- }
- self
- }
-
- /// Specify whether to use a trailing comma when serializing pretty arrays
- ///
- /// See `Serializer::pretty_array` for more details.
- pub fn pretty_array_trailing_comma(&mut self, value: bool) -> &mut Self {
- let use_default = if let Some(ref mut a) = Rc::get_mut(&mut self.settings).unwrap().array {
- a.trailing_comma = value;
- false
- } else {
- true
- };
-
- if use_default {
- let mut array = ArraySettings::pretty();
- array.trailing_comma = value;
- Rc::get_mut(&mut self.settings).unwrap().array = Some(array);
- }
- self
- }
-
- fn display<T: fmt::Display>(&mut self, t: T, type_: ArrayState) -> Result<(), Error> {
- self.emit_key(type_)?;
- write!(self.dst, "{}", t).map_err(ser::Error::custom)?;
- if let State::Table { .. } = self.state {
- self.dst.push('\n');
- }
- Ok(())
- }
-
- fn emit_key(&mut self, type_: ArrayState) -> Result<(), Error> {
- self.array_type(type_)?;
- let state = self.state.clone();
- self._emit_key(&state)
- }
-
- // recursive implementation of `emit_key` above
- fn _emit_key(&mut self, state: &State<'_>) -> Result<(), Error> {
- match *state {
- State::End => Ok(()),
- State::Array {
- parent,
- first,
- type_,
- len,
- } => {
- assert!(type_.get().is_some());
- if first.get() {
- self._emit_key(parent)?;
- }
- self.emit_array(first, len)
- }
- State::Table {
- parent,
- first,
- table_emitted,
- key,
- } => {
- if table_emitted.get() {
- return Err(Error::ValueAfterTable);
- }
- if first.get() {
- self.emit_table_header(parent)?;
- first.set(false);
- }
- self.escape_key(key)?;
- self.dst.push_str(" = ");
- Ok(())
- }
- }
- }
-
- fn emit_array(&mut self, first: &Cell<bool>, len: Option<usize>) -> Result<(), Error> {
- match (len, &self.settings.array) {
- (Some(0..=1), _) | (_, &None) => {
- if first.get() {
- self.dst.push('[')
- } else {
- self.dst.push_str(", ")
- }
- }
- (_, &Some(ref a)) => {
- if first.get() {
- self.dst.push_str("[\n")
- } else {
- self.dst.push_str(",\n")
- }
- for _ in 0..a.indent {
- self.dst.push(' ');
- }
- }
- }
- Ok(())
- }
-
- fn array_type(&mut self, type_: ArrayState) -> Result<(), Error> {
- let prev = match self.state {
- State::Array { type_, .. } => type_,
- _ => return Ok(()),
- };
- if prev.get().is_none() {
- prev.set(Some(type_));
- }
- Ok(())
- }
-
- fn escape_key(&mut self, key: &str) -> Result<(), Error> {
- let ok = !key.is_empty()
- && key
- .chars()
- .all(|c| matches!(c,'a'..='z' | 'A'..='Z' | '0'..='9' | '-' | '_'));
- if ok {
- write!(self.dst, "{}", key).map_err(ser::Error::custom)?;
- } else {
- self.emit_str(key, true)?;
- }
- Ok(())
- }
-
- fn emit_str(&mut self, value: &str, is_key: bool) -> Result<(), Error> {
- #[derive(PartialEq)]
- enum Type {
- NewlineTripple,
- OnelineTripple,
- OnelineSingle,
- }
-
- enum Repr {
- /// represent as a literal string (using '')
- Literal(String, Type),
- /// represent the std way (using "")
- Std(Type),
- }
-
- fn do_pretty(value: &str) -> Repr {
- // For doing pretty prints we store in a new String
- // because there are too many cases where pretty cannot
- // work. We need to determine:
- // - if we are a "multi-line" pretty (if there are \n)
- // - if ['''] appears if multi or ['] if single
- // - if there are any invalid control characters
- //
- // Doing it any other way would require multiple passes
- // to determine if a pretty string works or not.
- let mut out = String::with_capacity(value.len() * 2);
- let mut ty = Type::OnelineSingle;
- // found consecutive single quotes
- let mut max_found_singles = 0;
- let mut found_singles = 0;
- let mut can_be_pretty = true;
-
- for ch in value.chars() {
- if can_be_pretty {
- if ch == '\'' {
- found_singles += 1;
- if found_singles >= 3 {
- can_be_pretty = false;
- }
- } else {
- if found_singles > max_found_singles {
- max_found_singles = found_singles;
- }
- found_singles = 0
- }
- match ch {
- '\t' => {}
- '\n' => ty = Type::NewlineTripple,
- // Escape codes are needed if any ascii control
- // characters are present, including \b \f \r.
- c if c <= '\u{1f}' || c == '\u{7f}' => can_be_pretty = false,
- _ => {}
- }
- out.push(ch);
- } else {
- // the string cannot be represented as pretty,
- // still check if it should be multiline
- if ch == '\n' {
- ty = Type::NewlineTripple;
- }
- }
- }
- if can_be_pretty && found_singles > 0 && value.ends_with('\'') {
- // We cannot escape the ending quote so we must use """
- can_be_pretty = false;
- }
- if !can_be_pretty {
- debug_assert!(ty != Type::OnelineTripple);
- return Repr::Std(ty);
- }
- if found_singles > max_found_singles {
- max_found_singles = found_singles;
- }
- debug_assert!(max_found_singles < 3);
- if ty == Type::OnelineSingle && max_found_singles >= 1 {
- // no newlines, but must use ''' because it has ' in it
- ty = Type::OnelineTripple;
- }
- Repr::Literal(out, ty)
- }
-
- let repr = if !is_key && self.settings.string.is_some() {
- match (&self.settings.string, do_pretty(value)) {
- (&Some(StringSettings { literal: false, .. }), Repr::Literal(_, ty)) => {
- Repr::Std(ty)
- }
- (_, r) => r,
- }
- } else {
- Repr::Std(Type::OnelineSingle)
- };
- match repr {
- Repr::Literal(literal, ty) => {
- // A pretty string
- match ty {
- Type::NewlineTripple => self.dst.push_str("'''\n"),
- Type::OnelineTripple => self.dst.push_str("'''"),
- Type::OnelineSingle => self.dst.push('\''),
- }
- self.dst.push_str(&literal);
- match ty {
- Type::OnelineSingle => self.dst.push('\''),
- _ => self.dst.push_str("'''"),
- }
- }
- Repr::Std(ty) => {
- match ty {
- Type::NewlineTripple => self.dst.push_str("\"\"\"\n"),
- // note: OnelineTripple can happen if do_pretty wants to do
- // '''it's one line'''
- // but settings.string.literal == false
- Type::OnelineSingle | Type::OnelineTripple => self.dst.push('"'),
- }
- for ch in value.chars() {
- match ch {
- '\u{8}' => self.dst.push_str("\\b"),
- '\u{9}' => self.dst.push_str("\\t"),
- '\u{a}' => match ty {
- Type::NewlineTripple => self.dst.push('\n'),
- Type::OnelineSingle => self.dst.push_str("\\n"),
- _ => unreachable!(),
- },
- '\u{c}' => self.dst.push_str("\\f"),
- '\u{d}' => self.dst.push_str("\\r"),
- '\u{22}' => self.dst.push_str("\\\""),
- '\u{5c}' => self.dst.push_str("\\\\"),
- c if c <= '\u{1f}' || c == '\u{7f}' => {
- write!(self.dst, "\\u{:04X}", ch as u32).map_err(ser::Error::custom)?;
- }
- ch => self.dst.push(ch),
- }
- }
- match ty {
- Type::NewlineTripple => self.dst.push_str("\"\"\""),
- Type::OnelineSingle | Type::OnelineTripple => self.dst.push('"'),
- }
- }
- }
- Ok(())
- }
-
- fn emit_table_header(&mut self, state: &State<'_>) -> Result<(), Error> {
- let array_of_tables = match *state {
- State::End => return Ok(()),
- State::Array { .. } => true,
- _ => false,
- };
-
- // Unlike [..]s, we can't omit [[..]] ancestors, so be sure to emit table
- // headers for them.
- let mut p = state;
- if let State::Array { first, parent, .. } = *state {
- if first.get() {
- p = parent;
- }
- }
- while let State::Table { first, parent, .. } = *p {
- p = parent;
- if !first.get() {
- break;
- }
- if let State::Array {
- parent: &State::Table { .. },
- ..
- } = *parent
- {
- self.emit_table_header(parent)?;
- break;
- }
- }
-
- match *state {
- State::Table { first, .. } => {
- if !first.get() {
- // Newline if we are a table that is not the first
- // table in the document.
- self.dst.push('\n');
- }
- }
- State::Array { parent, first, .. } => {
- if !first.get() {
- // Always newline if we are not the first item in the
- // table-array
- self.dst.push('\n');
- } else if let State::Table { first, .. } = *parent {
- if !first.get() {
- // Newline if we are not the first item in the document
- self.dst.push('\n');
- }
- }
- }
- _ => {}
- }
- self.dst.push('[');
- if array_of_tables {
- self.dst.push('[');
- }
- self.emit_key_part(state)?;
- if array_of_tables {
- self.dst.push(']');
- }
- self.dst.push_str("]\n");
- Ok(())
- }
-
- fn emit_key_part(&mut self, key: &State<'_>) -> Result<bool, Error> {
- match *key {
- State::Array { parent, .. } => self.emit_key_part(parent),
- State::End => Ok(true),
- State::Table {
- key,
- parent,
- table_emitted,
- ..
- } => {
- table_emitted.set(true);
- let first = self.emit_key_part(parent)?;
- if !first {
- self.dst.push('.');
- }
- self.escape_key(key)?;
- Ok(false)
- }
- }
- }
-}
-
-macro_rules! serialize_float {
- ($this:expr, $v:expr) => {{
- $this.emit_key(ArrayState::Started)?;
- match ($v.is_sign_negative(), $v.is_nan(), $v == 0.0) {
- (true, true, _) => write!($this.dst, "-nan"),
- (false, true, _) => write!($this.dst, "nan"),
- (true, false, true) => write!($this.dst, "-0.0"),
- (false, false, true) => write!($this.dst, "0.0"),
- (_, false, false) => write!($this.dst, "{}", $v).and_then(|_| {
- if $v % 1.0 == 0.0 {
- write!($this.dst, ".0")
- } else {
- Ok(())
- }
- }),
- }
- .map_err(ser::Error::custom)?;
-
- if let State::Table { .. } = $this.state {
- $this.dst.push_str("\n");
- }
- return Ok(());
- }};
-}
-
-impl<'a, 'b> ser::Serializer for &'b mut Serializer<'a> {
- type Ok = ();
- type Error = Error;
- type SerializeSeq = SerializeSeq<'a, 'b>;
- type SerializeTuple = SerializeSeq<'a, 'b>;
- type SerializeTupleStruct = SerializeSeq<'a, 'b>;
- type SerializeTupleVariant = SerializeSeq<'a, 'b>;
- type SerializeMap = SerializeTable<'a, 'b>;
- type SerializeStruct = SerializeTable<'a, 'b>;
- type SerializeStructVariant = ser::Impossible<(), Error>;
-
- fn serialize_bool(self, v: bool) -> Result<(), Self::Error> {
- self.display(v, ArrayState::Started)
- }
-
- fn serialize_i8(self, v: i8) -> Result<(), Self::Error> {
- self.display(v, ArrayState::Started)
- }
-
- fn serialize_i16(self, v: i16) -> Result<(), Self::Error> {
- self.display(v, ArrayState::Started)
- }
-
- fn serialize_i32(self, v: i32) -> Result<(), Self::Error> {
- self.display(v, ArrayState::Started)
- }
-
- fn serialize_i64(self, v: i64) -> Result<(), Self::Error> {
- self.display(v, ArrayState::Started)
- }
-
- fn serialize_u8(self, v: u8) -> Result<(), Self::Error> {
- self.display(v, ArrayState::Started)
- }
-
- fn serialize_u16(self, v: u16) -> Result<(), Self::Error> {
- self.display(v, ArrayState::Started)
- }
-
- fn serialize_u32(self, v: u32) -> Result<(), Self::Error> {
- self.display(v, ArrayState::Started)
- }
-
- fn serialize_u64(self, v: u64) -> Result<(), Self::Error> {
- self.display(v, ArrayState::Started)
- }
-
- fn serialize_f32(self, v: f32) -> Result<(), Self::Error> {
- serialize_float!(self, v)
- }
-
- fn serialize_f64(self, v: f64) -> Result<(), Self::Error> {
- serialize_float!(self, v)
- }
-
- fn serialize_char(self, v: char) -> Result<(), Self::Error> {
- let mut buf = [0; 4];
- self.serialize_str(v.encode_utf8(&mut buf))
- }
-
- fn serialize_str(self, value: &str) -> Result<(), Self::Error> {
- self.emit_key(ArrayState::Started)?;
- self.emit_str(value, false)?;
- if let State::Table { .. } = self.state {
- self.dst.push('\n');
- }
- Ok(())
- }
-
- fn serialize_bytes(self, value: &[u8]) -> Result<(), Self::Error> {
- use serde::ser::Serialize;
- value.serialize(self)
- }
-
- fn serialize_none(self) -> Result<(), Self::Error> {
- Err(Error::UnsupportedNone)
- }
-
- fn serialize_some<T: ?Sized>(self, value: &T) -> Result<(), Self::Error>
- where
- T: ser::Serialize,
- {
- value.serialize(self)
- }
-
- fn serialize_unit(self) -> Result<(), Self::Error> {
- Err(Error::UnsupportedType)
- }
-
- fn serialize_unit_struct(self, _name: &'static str) -> Result<(), Self::Error> {
- Err(Error::UnsupportedType)
- }
-
- fn serialize_unit_variant(
- self,
- _name: &'static str,
- _variant_index: u32,
- variant: &'static str,
- ) -> Result<(), Self::Error> {
- self.serialize_str(variant)
- }
-
- fn serialize_newtype_struct<T: ?Sized>(
- self,
- _name: &'static str,
- value: &T,
- ) -> Result<(), Self::Error>
- where
- T: ser::Serialize,
- {
- value.serialize(self)
- }
-
- fn serialize_newtype_variant<T: ?Sized>(
- self,
- _name: &'static str,
- _variant_index: u32,
- _variant: &'static str,
- _value: &T,
- ) -> Result<(), Self::Error>
- where
- T: ser::Serialize,
- {
- Err(Error::UnsupportedType)
- }
-
- fn serialize_seq(self, len: Option<usize>) -> Result<Self::SerializeSeq, Self::Error> {
- self.array_type(ArrayState::Started)?;
- Ok(SerializeSeq {
- ser: self,
- first: Cell::new(true),
- type_: Cell::new(None),
- len,
- })
- }
-
- fn serialize_tuple(self, len: usize) -> Result<Self::SerializeTuple, Self::Error> {
- self.serialize_seq(Some(len))
- }
-
- fn serialize_tuple_struct(
- self,
- _name: &'static str,
- len: usize,
- ) -> Result<Self::SerializeTupleStruct, Self::Error> {
- self.serialize_seq(Some(len))
- }
-
- fn serialize_tuple_variant(
- self,
- _name: &'static str,
- _variant_index: u32,
- _variant: &'static str,
- len: usize,
- ) -> Result<Self::SerializeTupleVariant, Self::Error> {
- self.serialize_seq(Some(len))
- }
-
- fn serialize_map(self, _len: Option<usize>) -> Result<Self::SerializeMap, Self::Error> {
- self.array_type(ArrayState::StartedAsATable)?;
- Ok(SerializeTable::Table {
- ser: self,
- key: String::new(),
- first: Cell::new(true),
- table_emitted: Cell::new(false),
- })
- }
-
- fn serialize_struct(
- self,
- name: &'static str,
- _len: usize,
- ) -> Result<Self::SerializeStruct, Self::Error> {
- if name == datetime::NAME {
- self.array_type(ArrayState::Started)?;
- Ok(SerializeTable::Datetime(self))
- } else {
- self.array_type(ArrayState::StartedAsATable)?;
- Ok(SerializeTable::Table {
- ser: self,
- key: String::new(),
- first: Cell::new(true),
- table_emitted: Cell::new(false),
- })
- }
- }
-
- fn serialize_struct_variant(
- self,
- _name: &'static str,
- _variant_index: u32,
- _variant: &'static str,
- _len: usize,
- ) -> Result<Self::SerializeStructVariant, Self::Error> {
- Err(Error::UnsupportedType)
- }
-}
-
-impl<'a, 'b> ser::SerializeSeq for SerializeSeq<'a, 'b> {
- type Ok = ();
- type Error = Error;
-
- fn serialize_element<T: ?Sized>(&mut self, value: &T) -> Result<(), Error>
- where
- T: ser::Serialize,
- {
- value.serialize(&mut Serializer {
- dst: &mut *self.ser.dst,
- state: State::Array {
- parent: &self.ser.state,
- first: &self.first,
- type_: &self.type_,
- len: self.len,
- },
- settings: self.ser.settings.clone(),
- })?;
- self.first.set(false);
- Ok(())
- }
-
- fn end(self) -> Result<(), Error> {
- match self.type_.get() {
- Some(ArrayState::StartedAsATable) => return Ok(()),
- Some(ArrayState::Started) => match (self.len, &self.ser.settings.array) {
- (Some(0..=1), _) | (_, &None) => {
- self.ser.dst.push(']');
- }
- (_, &Some(ref a)) => {
- if a.trailing_comma {
- self.ser.dst.push(',');
- }
- self.ser.dst.push_str("\n]");
- }
- },
- None => {
- assert!(self.first.get());
- self.ser.emit_key(ArrayState::Started)?;
- self.ser.dst.push_str("[]")
- }
- }
- if let State::Table { .. } = self.ser.state {
- self.ser.dst.push('\n');
- }
- Ok(())
- }
-}
-
-impl<'a, 'b> ser::SerializeTuple for SerializeSeq<'a, 'b> {
- type Ok = ();
- type Error = Error;
-
- fn serialize_element<T: ?Sized>(&mut self, value: &T) -> Result<(), Error>
- where
- T: ser::Serialize,
- {
- ser::SerializeSeq::serialize_element(self, value)
- }
-
- fn end(self) -> Result<(), Error> {
- ser::SerializeSeq::end(self)
- }
-}
-
-impl<'a, 'b> ser::SerializeTupleVariant for SerializeSeq<'a, 'b> {
- type Ok = ();
- type Error = Error;
-
- fn serialize_field<T: ?Sized>(&mut self, value: &T) -> Result<(), Error>
- where
- T: ser::Serialize,
- {
- ser::SerializeSeq::serialize_element(self, value)
- }
-
- fn end(self) -> Result<(), Error> {
- ser::SerializeSeq::end(self)
- }
-}
-
-impl<'a, 'b> ser::SerializeTupleStruct for SerializeSeq<'a, 'b> {
- type Ok = ();
- type Error = Error;
-
- fn serialize_field<T: ?Sized>(&mut self, value: &T) -> Result<(), Error>
- where
- T: ser::Serialize,
- {
- ser::SerializeSeq::serialize_element(self, value)
- }
-
- fn end(self) -> Result<(), Error> {
- ser::SerializeSeq::end(self)
- }
-}
-
-impl<'a, 'b> ser::SerializeMap for SerializeTable<'a, 'b> {
- type Ok = ();
- type Error = Error;
-
- fn serialize_key<T: ?Sized>(&mut self, input: &T) -> Result<(), Error>
- where
- T: ser::Serialize,
- {
- match *self {
- SerializeTable::Datetime(_) => panic!(), // shouldn't be possible
- SerializeTable::Table { ref mut key, .. } => {
- key.truncate(0);
- *key = input.serialize(StringExtractor)?;
- }
- }
- Ok(())
- }
-
- fn serialize_value<T: ?Sized>(&mut self, value: &T) -> Result<(), Error>
- where
- T: ser::Serialize,
- {
- match *self {
- SerializeTable::Datetime(_) => panic!(), // shouldn't be possible
- SerializeTable::Table {
- ref mut ser,
- ref key,
- ref first,
- ref table_emitted,
- ..
- } => {
- let res = value.serialize(&mut Serializer {
- dst: &mut *ser.dst,
- state: State::Table {
- key,
- parent: &ser.state,
- first,
- table_emitted,
- },
- settings: ser.settings.clone(),
- });
- match res {
- Ok(()) => first.set(false),
- Err(Error::UnsupportedNone) => {}
- Err(e) => return Err(e),
- }
- }
- }
- Ok(())
- }
-
- fn end(self) -> Result<(), Error> {
- match self {
- SerializeTable::Datetime(_) => panic!(), // shouldn't be possible
- SerializeTable::Table { ser, first, .. } => {
- if first.get() {
- let state = ser.state.clone();
- ser.emit_table_header(&state)?;
- }
- }
- }
- Ok(())
- }
-}
-
-impl<'a, 'b> ser::SerializeStruct for SerializeTable<'a, 'b> {
- type Ok = ();
- type Error = Error;
-
- fn serialize_field<T: ?Sized>(&mut self, key: &'static str, value: &T) -> Result<(), Error>
- where
- T: ser::Serialize,
- {
- match *self {
- SerializeTable::Datetime(ref mut ser) => {
- if key == datetime::FIELD {
- value.serialize(DateStrEmitter(*ser))?;
- } else {
- return Err(Error::DateInvalid);
- }
- }
- SerializeTable::Table {
- ref mut ser,
- ref first,
- ref table_emitted,
- ..
- } => {
- let res = value.serialize(&mut Serializer {
- dst: &mut *ser.dst,
- state: State::Table {
- key,
- parent: &ser.state,
- first,
- table_emitted,
- },
- settings: ser.settings.clone(),
- });
- match res {
- Ok(()) => first.set(false),
- Err(Error::UnsupportedNone) => {}
- Err(e) => return Err(e),
- }
- }
- }
- Ok(())
- }
-
- fn end(self) -> Result<(), Error> {
- match self {
- SerializeTable::Datetime(_) => {}
- SerializeTable::Table { ser, first, .. } => {
- if first.get() {
- let state = ser.state.clone();
- ser.emit_table_header(&state)?;
- }
- }
- }
- Ok(())
- }
-}
-
-struct DateStrEmitter<'a, 'b>(&'b mut Serializer<'a>);
-
-impl<'a, 'b> ser::Serializer for DateStrEmitter<'a, 'b> {
- type Ok = ();
- type Error = Error;
- type SerializeSeq = ser::Impossible<(), Error>;
- type SerializeTuple = ser::Impossible<(), Error>;
- type SerializeTupleStruct = ser::Impossible<(), Error>;
- type SerializeTupleVariant = ser::Impossible<(), Error>;
- type SerializeMap = ser::Impossible<(), Error>;
- type SerializeStruct = ser::Impossible<(), Error>;
- type SerializeStructVariant = ser::Impossible<(), Error>;
-
- fn serialize_bool(self, _v: bool) -> Result<(), Self::Error> {
- Err(Error::DateInvalid)
- }
-
- fn serialize_i8(self, _v: i8) -> Result<(), Self::Error> {
- Err(Error::DateInvalid)
- }
-
- fn serialize_i16(self, _v: i16) -> Result<(), Self::Error> {
- Err(Error::DateInvalid)
- }
-
- fn serialize_i32(self, _v: i32) -> Result<(), Self::Error> {
- Err(Error::DateInvalid)
- }
-
- fn serialize_i64(self, _v: i64) -> Result<(), Self::Error> {
- Err(Error::DateInvalid)
- }
-
- fn serialize_u8(self, _v: u8) -> Result<(), Self::Error> {
- Err(Error::DateInvalid)
- }
-
- fn serialize_u16(self, _v: u16) -> Result<(), Self::Error> {
- Err(Error::DateInvalid)
- }
-
- fn serialize_u32(self, _v: u32) -> Result<(), Self::Error> {
- Err(Error::DateInvalid)
- }
-
- fn serialize_u64(self, _v: u64) -> Result<(), Self::Error> {
- Err(Error::DateInvalid)
- }
-
- fn serialize_f32(self, _v: f32) -> Result<(), Self::Error> {
- Err(Error::DateInvalid)
- }
-
- fn serialize_f64(self, _v: f64) -> Result<(), Self::Error> {
- Err(Error::DateInvalid)
- }
-
- fn serialize_char(self, _v: char) -> Result<(), Self::Error> {
- Err(Error::DateInvalid)
- }
-
- fn serialize_str(self, value: &str) -> Result<(), Self::Error> {
- self.0.display(value, ArrayState::Started)?;
- Ok(())
- }
-
- fn serialize_bytes(self, _value: &[u8]) -> Result<(), Self::Error> {
- Err(Error::DateInvalid)
- }
-
- fn serialize_none(self) -> Result<(), Self::Error> {
- Err(Error::DateInvalid)
- }
-
- fn serialize_some<T: ?Sized>(self, _value: &T) -> Result<(), Self::Error>
- where
- T: ser::Serialize,
- {
- Err(Error::DateInvalid)
- }
-
- fn serialize_unit(self) -> Result<(), Self::Error> {
- Err(Error::DateInvalid)
- }
-
- fn serialize_unit_struct(self, _name: &'static str) -> Result<(), Self::Error> {
- Err(Error::DateInvalid)
- }
-
- fn serialize_unit_variant(
- self,
- _name: &'static str,
- _variant_index: u32,
- _variant: &'static str,
- ) -> Result<(), Self::Error> {
- Err(Error::DateInvalid)
- }
-
- fn serialize_newtype_struct<T: ?Sized>(
- self,
- _name: &'static str,
- _value: &T,
- ) -> Result<(), Self::Error>
- where
- T: ser::Serialize,
- {
- Err(Error::DateInvalid)
- }
-
- fn serialize_newtype_variant<T: ?Sized>(
- self,
- _name: &'static str,
- _variant_index: u32,
- _variant: &'static str,
- _value: &T,
- ) -> Result<(), Self::Error>
- where
- T: ser::Serialize,
- {
- Err(Error::DateInvalid)
- }
-
- fn serialize_seq(self, _len: Option<usize>) -> Result<Self::SerializeSeq, Self::Error> {
- Err(Error::DateInvalid)
- }
-
- fn serialize_tuple(self, _len: usize) -> Result<Self::SerializeTuple, Self::Error> {
- Err(Error::DateInvalid)
- }
-
- fn serialize_tuple_struct(
- self,
- _name: &'static str,
- _len: usize,
- ) -> Result<Self::SerializeTupleStruct, Self::Error> {
- Err(Error::DateInvalid)
- }
-
- fn serialize_tuple_variant(
- self,
- _name: &'static str,
- _variant_index: u32,
- _variant: &'static str,
- _len: usize,
- ) -> Result<Self::SerializeTupleVariant, Self::Error> {
- Err(Error::DateInvalid)
- }
-
- fn serialize_map(self, _len: Option<usize>) -> Result<Self::SerializeMap, Self::Error> {
- Err(Error::DateInvalid)
- }
-
- fn serialize_struct(
- self,
- _name: &'static str,
- _len: usize,
- ) -> Result<Self::SerializeStruct, Self::Error> {
- Err(Error::DateInvalid)
- }
-
- fn serialize_struct_variant(
- self,
- _name: &'static str,
- _variant_index: u32,
- _variant: &'static str,
- _len: usize,
- ) -> Result<Self::SerializeStructVariant, Self::Error> {
- Err(Error::DateInvalid)
- }
-}
-
-struct StringExtractor;
-
-impl ser::Serializer for StringExtractor {
- type Ok = String;
- type Error = Error;
- type SerializeSeq = ser::Impossible<String, Error>;
- type SerializeTuple = ser::Impossible<String, Error>;
- type SerializeTupleStruct = ser::Impossible<String, Error>;
- type SerializeTupleVariant = ser::Impossible<String, Error>;
- type SerializeMap = ser::Impossible<String, Error>;
- type SerializeStruct = ser::Impossible<String, Error>;
- type SerializeStructVariant = ser::Impossible<String, Error>;
-
- fn serialize_bool(self, _v: bool) -> Result<String, Self::Error> {
- Err(Error::KeyNotString)
- }
-
- fn serialize_i8(self, _v: i8) -> Result<String, Self::Error> {
- Err(Error::KeyNotString)
- }
-
- fn serialize_i16(self, _v: i16) -> Result<String, Self::Error> {
- Err(Error::KeyNotString)
- }
-
- fn serialize_i32(self, _v: i32) -> Result<String, Self::Error> {
- Err(Error::KeyNotString)
- }
-
- fn serialize_i64(self, _v: i64) -> Result<String, Self::Error> {
- Err(Error::KeyNotString)
- }
-
- fn serialize_u8(self, _v: u8) -> Result<String, Self::Error> {
- Err(Error::KeyNotString)
- }
-
- fn serialize_u16(self, _v: u16) -> Result<String, Self::Error> {
- Err(Error::KeyNotString)
- }
-
- fn serialize_u32(self, _v: u32) -> Result<String, Self::Error> {
- Err(Error::KeyNotString)
- }
-
- fn serialize_u64(self, _v: u64) -> Result<String, Self::Error> {
- Err(Error::KeyNotString)
- }
-
- fn serialize_f32(self, _v: f32) -> Result<String, Self::Error> {
- Err(Error::KeyNotString)
- }
-
- fn serialize_f64(self, _v: f64) -> Result<String, Self::Error> {
- Err(Error::KeyNotString)
- }
-
- fn serialize_char(self, _v: char) -> Result<String, Self::Error> {
- Err(Error::KeyNotString)
- }
-
- fn serialize_str(self, value: &str) -> Result<String, Self::Error> {
- Ok(value.to_string())
- }
-
- fn serialize_bytes(self, _value: &[u8]) -> Result<String, Self::Error> {
- Err(Error::KeyNotString)
- }
-
- fn serialize_none(self) -> Result<String, Self::Error> {
- Err(Error::KeyNotString)
- }
-
- fn serialize_some<T: ?Sized>(self, _value: &T) -> Result<String, Self::Error>
- where
- T: ser::Serialize,
- {
- Err(Error::KeyNotString)
- }
-
- fn serialize_unit(self) -> Result<String, Self::Error> {
- Err(Error::KeyNotString)
- }
-
- fn serialize_unit_struct(self, _name: &'static str) -> Result<String, Self::Error> {
- Err(Error::KeyNotString)
- }
-
- fn serialize_unit_variant(
- self,
- _name: &'static str,
- _variant_index: u32,
- _variant: &'static str,
- ) -> Result<String, Self::Error> {
- Err(Error::KeyNotString)
- }
-
- fn serialize_newtype_struct<T: ?Sized>(
- self,
- _name: &'static str,
- value: &T,
- ) -> Result<String, Self::Error>
- where
- T: ser::Serialize,
- {
- value.serialize(self)
- }
-
- fn serialize_newtype_variant<T: ?Sized>(
- self,
- _name: &'static str,
- _variant_index: u32,
- _variant: &'static str,
- _value: &T,
- ) -> Result<String, Self::Error>
- where
- T: ser::Serialize,
- {
- Err(Error::KeyNotString)
- }
-
- fn serialize_seq(self, _len: Option<usize>) -> Result<Self::SerializeSeq, Self::Error> {
- Err(Error::KeyNotString)
- }
-
- fn serialize_tuple(self, _len: usize) -> Result<Self::SerializeTuple, Self::Error> {
- Err(Error::KeyNotString)
- }
-
- fn serialize_tuple_struct(
- self,
- _name: &'static str,
- _len: usize,
- ) -> Result<Self::SerializeTupleStruct, Self::Error> {
- Err(Error::KeyNotString)
- }
-
- fn serialize_tuple_variant(
- self,
- _name: &'static str,
- _variant_index: u32,
- _variant: &'static str,
- _len: usize,
- ) -> Result<Self::SerializeTupleVariant, Self::Error> {
- Err(Error::KeyNotString)
- }
-
- fn serialize_map(self, _len: Option<usize>) -> Result<Self::SerializeMap, Self::Error> {
- Err(Error::KeyNotString)
- }
-
- fn serialize_struct(
- self,
- _name: &'static str,
- _len: usize,
- ) -> Result<Self::SerializeStruct, Self::Error> {
- Err(Error::KeyNotString)
- }
-
- fn serialize_struct_variant(
- self,
- _name: &'static str,
- _variant_index: u32,
- _variant: &'static str,
- _len: usize,
- ) -> Result<Self::SerializeStructVariant, Self::Error> {
- Err(Error::KeyNotString)
- }
-}
-
-impl fmt::Display for Error {
- fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- match *self {
- Error::UnsupportedType => "unsupported Rust type".fmt(f),
- Error::KeyNotString => "map key was not a string".fmt(f),
- Error::ValueAfterTable => "values must be emitted before tables".fmt(f),
- Error::DateInvalid => "a serialized date was invalid".fmt(f),
- Error::NumberInvalid => "a serialized number was invalid".fmt(f),
- Error::UnsupportedNone => "unsupported None value".fmt(f),
- Error::Custom(ref s) => s.fmt(f),
- Error::KeyNewline => unreachable!(),
- Error::ArrayMixedType => unreachable!(),
- }
- }
-}
-
-impl error::Error for Error {}
-
-impl ser::Error for Error {
- fn custom<T: fmt::Display>(msg: T) -> Error {
- Error::Custom(msg.to_string())
- }
-}
-
-enum Category {
- Primitive,
- Array,
- Table,
-}
-
-/// Convenience function to serialize items in a map in an order valid with
-/// TOML.
-///
-/// TOML carries the restriction that keys in a table must be serialized last if
-/// their value is a table itself. This isn't always easy to guarantee, so this
-/// helper can be used like so:
-///
-/// ```rust
-/// # use serde_derive::Serialize;
-/// # use std::collections::HashMap;
-/// #[derive(Serialize)]
-/// struct Manifest {
-/// package: Package,
-/// #[serde(serialize_with = "toml::ser::tables_last")]
-/// dependencies: HashMap<String, Dependency>,
-/// }
-/// # type Package = String;
-/// # type Dependency = String;
-/// # fn main() {}
-/// ```
-pub fn tables_last<'a, I, K, V, S>(data: &'a I, serializer: S) -> Result<S::Ok, S::Error>
-where
- &'a I: IntoIterator<Item = (K, V)>,
- K: ser::Serialize,
- V: ser::Serialize,
- S: ser::Serializer,
-{
- use serde::ser::SerializeMap;
-
- let mut map = serializer.serialize_map(None)?;
- for (k, v) in data {
- if let Category::Primitive = v.serialize(Categorize::new())? {
- map.serialize_entry(&k, &v)?;
- }
- }
- for (k, v) in data {
- if let Category::Array = v.serialize(Categorize::new())? {
- map.serialize_entry(&k, &v)?;
- }
- }
- for (k, v) in data {
- if let Category::Table = v.serialize(Categorize::new())? {
- map.serialize_entry(&k, &v)?;
- }
- }
- map.end()
-}
-
-struct Categorize<E>(marker::PhantomData<E>);
-
-impl<E> Categorize<E> {
- fn new() -> Self {
- Categorize(marker::PhantomData)
- }
-}
-
-impl<E: ser::Error> ser::Serializer for Categorize<E> {
- type Ok = Category;
- type Error = E;
- type SerializeSeq = Self;
- type SerializeTuple = Self;
- type SerializeTupleStruct = Self;
- type SerializeTupleVariant = Self;
- type SerializeMap = Self;
- type SerializeStruct = Self;
- type SerializeStructVariant = ser::Impossible<Category, E>;
-
- fn serialize_bool(self, _: bool) -> Result<Self::Ok, Self::Error> {
- Ok(Category::Primitive)
- }
-
- fn serialize_i8(self, _: i8) -> Result<Self::Ok, Self::Error> {
- Ok(Category::Primitive)
- }
-
- fn serialize_i16(self, _: i16) -> Result<Self::Ok, Self::Error> {
- Ok(Category::Primitive)
- }
-
- fn serialize_i32(self, _: i32) -> Result<Self::Ok, Self::Error> {
- Ok(Category::Primitive)
- }
-
- fn serialize_i64(self, _: i64) -> Result<Self::Ok, Self::Error> {
- Ok(Category::Primitive)
- }
-
- fn serialize_u8(self, _: u8) -> Result<Self::Ok, Self::Error> {
- Ok(Category::Primitive)
- }
-
- fn serialize_u16(self, _: u16) -> Result<Self::Ok, Self::Error> {
- Ok(Category::Primitive)
- }
-
- fn serialize_u32(self, _: u32) -> Result<Self::Ok, Self::Error> {
- Ok(Category::Primitive)
- }
-
- fn serialize_u64(self, _: u64) -> Result<Self::Ok, Self::Error> {
- Ok(Category::Primitive)
- }
-
- fn serialize_f32(self, _: f32) -> Result<Self::Ok, Self::Error> {
- Ok(Category::Primitive)
- }
-
- fn serialize_f64(self, _: f64) -> Result<Self::Ok, Self::Error> {
- Ok(Category::Primitive)
- }
-
- fn serialize_char(self, _: char) -> Result<Self::Ok, Self::Error> {
- Ok(Category::Primitive)
- }
-
- fn serialize_str(self, _: &str) -> Result<Self::Ok, Self::Error> {
- Ok(Category::Primitive)
- }
-
- fn serialize_bytes(self, _: &[u8]) -> Result<Self::Ok, Self::Error> {
- Ok(Category::Array)
- }
-
- fn serialize_none(self) -> Result<Self::Ok, Self::Error> {
- Err(ser::Error::custom("unsupported"))
- }
-
- fn serialize_some<T: ?Sized + ser::Serialize>(self, v: &T) -> Result<Self::Ok, Self::Error> {
- v.serialize(self)
- }
-
- fn serialize_unit(self) -> Result<Self::Ok, Self::Error> {
- Err(ser::Error::custom("unsupported"))
- }
-
- fn serialize_unit_struct(self, _: &'static str) -> Result<Self::Ok, Self::Error> {
- Err(ser::Error::custom("unsupported"))
- }
-
- fn serialize_unit_variant(
- self,
- _: &'static str,
- _: u32,
- _: &'static str,
- ) -> Result<Self::Ok, Self::Error> {
- Err(ser::Error::custom("unsupported"))
- }
-
- fn serialize_newtype_struct<T: ?Sized + ser::Serialize>(
- self,
- _: &'static str,
- v: &T,
- ) -> Result<Self::Ok, Self::Error> {
- v.serialize(self)
- }
-
- fn serialize_newtype_variant<T: ?Sized + ser::Serialize>(
- self,
- _: &'static str,
- _: u32,
- _: &'static str,
- _: &T,
- ) -> Result<Self::Ok, Self::Error> {
- Err(ser::Error::custom("unsupported"))
- }
-
- fn serialize_seq(self, _: Option<usize>) -> Result<Self, Self::Error> {
- Ok(self)
- }
-
- fn serialize_tuple(self, _: usize) -> Result<Self::SerializeTuple, Self::Error> {
- Ok(self)
- }
-
- fn serialize_tuple_struct(
- self,
- _: &'static str,
- _: usize,
- ) -> Result<Self::SerializeTupleStruct, Self::Error> {
- Ok(self)
- }
-
- fn serialize_tuple_variant(
- self,
- _: &'static str,
- _: u32,
- _: &'static str,
- _: usize,
- ) -> Result<Self::SerializeTupleVariant, Self::Error> {
- Ok(self)
- }
-
- fn serialize_map(self, _: Option<usize>) -> Result<Self, Self::Error> {
- Ok(self)
- }
-
- fn serialize_struct(self, _: &'static str, _: usize) -> Result<Self, Self::Error> {
- Ok(self)
- }
-
- fn serialize_struct_variant(
- self,
- _: &'static str,
- _: u32,
- _: &'static str,
- _: usize,
- ) -> Result<Self::SerializeStructVariant, Self::Error> {
- Err(ser::Error::custom("unsupported"))
- }
-}
-
-impl<E: ser::Error> ser::SerializeSeq for Categorize<E> {
- type Ok = Category;
- type Error = E;
-
- fn serialize_element<T: ?Sized + ser::Serialize>(&mut self, _: &T) -> Result<(), Self::Error> {
- Ok(())
- }
-
- fn end(self) -> Result<Self::Ok, Self::Error> {
- Ok(Category::Array)
- }
-}
-
-impl<E: ser::Error> ser::SerializeTuple for Categorize<E> {
- type Ok = Category;
- type Error = E;
-
- fn serialize_element<T: ?Sized + ser::Serialize>(&mut self, _: &T) -> Result<(), Self::Error> {
- Ok(())
- }
-
- fn end(self) -> Result<Self::Ok, Self::Error> {
- Ok(Category::Array)
- }
-}
-
-impl<E: ser::Error> ser::SerializeTupleVariant for Categorize<E> {
- type Ok = Category;
- type Error = E;
-
- fn serialize_field<T: ?Sized + ser::Serialize>(&mut self, _: &T) -> Result<(), Self::Error> {
- Ok(())
- }
-
- fn end(self) -> Result<Self::Ok, Self::Error> {
- Ok(Category::Array)
- }
-}
-
-impl<E: ser::Error> ser::SerializeTupleStruct for Categorize<E> {
- type Ok = Category;
- type Error = E;
-
- fn serialize_field<T: ?Sized + ser::Serialize>(&mut self, _: &T) -> Result<(), Self::Error> {
- Ok(())
- }
-
- fn end(self) -> Result<Self::Ok, Self::Error> {
- Ok(Category::Array)
- }
-}
-
-impl<E: ser::Error> ser::SerializeMap for Categorize<E> {
- type Ok = Category;
- type Error = E;
-
- fn serialize_key<T: ?Sized + ser::Serialize>(&mut self, _: &T) -> Result<(), Self::Error> {
- Ok(())
- }
-
- fn serialize_value<T: ?Sized + ser::Serialize>(&mut self, _: &T) -> Result<(), Self::Error> {
- Ok(())
- }
-
- fn end(self) -> Result<Self::Ok, Self::Error> {
- Ok(Category::Table)
- }
-}
-
-impl<E: ser::Error> ser::SerializeStruct for Categorize<E> {
- type Ok = Category;
- type Error = E;
-
- fn serialize_field<T: ?Sized>(&mut self, _: &'static str, _: &T) -> Result<(), Self::Error>
- where
- T: ser::Serialize,
- {
- Ok(())
- }
-
- fn end(self) -> Result<Self::Ok, Self::Error> {
- Ok(Category::Table)
- }
-}
diff --git a/third_party/rust/toml/src/ser/document/array.rs b/third_party/rust/toml/src/ser/document/array.rs
@@ -0,0 +1,79 @@
+use core::fmt::Write as _;
+
+use toml_writer::TomlWrite as _;
+
+use super::style::Style;
+use super::value::ValueSerializer;
+use super::Buffer;
+use super::Error;
+use super::Table;
+
+#[doc(hidden)]
+pub struct SerializeDocumentTupleVariant<'d> {
+ buf: &'d mut Buffer,
+ table: Table,
+ seen_value: bool,
+ style: Style,
+}
+
+impl<'d> SerializeDocumentTupleVariant<'d> {
+ pub(crate) fn tuple(
+ buf: &'d mut Buffer,
+ mut table: Table,
+ variant: &'static str,
+ _len: usize,
+ style: Style,
+ ) -> Result<Self, Error> {
+ let dst = table.body_mut();
+ dst.key(variant)?;
+ dst.space()?;
+ dst.keyval_sep()?;
+ dst.space()?;
+ dst.open_array()?;
+ Ok(Self {
+ buf,
+ table,
+ seen_value: false,
+ style,
+ })
+ }
+}
+
+impl<'d> serde_core::ser::SerializeTupleVariant for SerializeDocumentTupleVariant<'d> {
+ type Ok = &'d mut Buffer;
+ type Error = Error;
+
+ fn serialize_field<T>(&mut self, value: &T) -> Result<(), Error>
+ where
+ T: serde_core::ser::Serialize + ?Sized,
+ {
+ let dst = self.table.body_mut();
+
+ if self.style.multiline_array {
+ dst.newline()?;
+ write!(dst, " ")?;
+ } else {
+ if self.seen_value {
+ dst.val_sep()?;
+ dst.space()?;
+ }
+ }
+ self.seen_value = true;
+ value.serialize(ValueSerializer::with_style(dst, self.style))?;
+ if self.style.multiline_array {
+ dst.val_sep()?;
+ }
+ Ok(())
+ }
+
+ fn end(mut self) -> Result<Self::Ok, Self::Error> {
+ let dst = self.table.body_mut();
+ if self.style.multiline_array && self.seen_value {
+ dst.newline()?;
+ }
+ dst.close_array()?;
+ dst.newline()?;
+ self.buf.push(self.table);
+ Ok(self.buf)
+ }
+}
diff --git a/third_party/rust/toml/src/ser/document/array_of_tables.rs b/third_party/rust/toml/src/ser/document/array_of_tables.rs
@@ -0,0 +1,275 @@
+use super::style::Style;
+use super::Buffer;
+use super::Error;
+use super::Serializer;
+use super::Table;
+use crate::alloc_prelude::*;
+
+pub(crate) struct ArrayOfTablesSerializer<'d> {
+ buf: &'d mut Buffer,
+ parent: Table,
+ key: String,
+ style: Style,
+}
+
+impl<'d> ArrayOfTablesSerializer<'d> {
+ /// Creates a new serializer which will emit TOML into the buffer provided.
+ ///
+ /// The serializer can then be used to serialize a type after which the data
+ /// will be present in `dst`.
+ pub(crate) fn new(buf: &'d mut Buffer, parent: Table, key: String, style: Style) -> Self {
+ Self {
+ buf,
+ parent,
+ key,
+ style,
+ }
+ }
+}
+
+impl<'d> serde_core::ser::Serializer for ArrayOfTablesSerializer<'d> {
+ type Ok = &'d mut Buffer;
+ type Error = Error;
+ type SerializeSeq = SerializeArrayOfTablesSerializer<'d>;
+ type SerializeTuple = SerializeArrayOfTablesSerializer<'d>;
+ type SerializeTupleStruct = SerializeArrayOfTablesSerializer<'d>;
+ type SerializeTupleVariant = serde_core::ser::Impossible<Self::Ok, Self::Error>;
+ type SerializeMap = serde_core::ser::Impossible<Self::Ok, Self::Error>;
+ type SerializeStruct = serde_core::ser::Impossible<Self::Ok, Self::Error>;
+ type SerializeStructVariant = serde_core::ser::Impossible<Self::Ok, Self::Error>;
+
+ fn serialize_bool(self, _v: bool) -> Result<Self::Ok, Self::Error> {
+ Err(Error::unsupported_type(Some("bool")))
+ }
+
+ fn serialize_i8(self, _v: i8) -> Result<Self::Ok, Self::Error> {
+ Err(Error::unsupported_type(Some("i8")))
+ }
+
+ fn serialize_i16(self, _v: i16) -> Result<Self::Ok, Self::Error> {
+ Err(Error::unsupported_type(Some("i16")))
+ }
+
+ fn serialize_i32(self, _v: i32) -> Result<Self::Ok, Self::Error> {
+ Err(Error::unsupported_type(Some("i32")))
+ }
+
+ fn serialize_i64(self, _v: i64) -> Result<Self::Ok, Self::Error> {
+ Err(Error::unsupported_type(Some("i64")))
+ }
+
+ fn serialize_u8(self, _v: u8) -> Result<Self::Ok, Self::Error> {
+ Err(Error::unsupported_type(Some("u8")))
+ }
+
+ fn serialize_u16(self, _v: u16) -> Result<Self::Ok, Self::Error> {
+ Err(Error::unsupported_type(Some("u16")))
+ }
+
+ fn serialize_u32(self, _v: u32) -> Result<Self::Ok, Self::Error> {
+ Err(Error::unsupported_type(Some("u32")))
+ }
+
+ fn serialize_u64(self, _v: u64) -> Result<Self::Ok, Self::Error> {
+ Err(Error::unsupported_type(Some("u64")))
+ }
+
+ fn serialize_f32(self, _v: f32) -> Result<Self::Ok, Self::Error> {
+ Err(Error::unsupported_type(Some("f32")))
+ }
+
+ fn serialize_f64(self, _v: f64) -> Result<Self::Ok, Self::Error> {
+ Err(Error::unsupported_type(Some("f64")))
+ }
+
+ fn serialize_char(self, _v: char) -> Result<Self::Ok, Self::Error> {
+ Err(Error::unsupported_type(Some("char")))
+ }
+
+ fn serialize_str(self, _v: &str) -> Result<Self::Ok, Self::Error> {
+ Err(Error::unsupported_type(Some("str")))
+ }
+
+ fn serialize_bytes(self, _v: &[u8]) -> Result<Self::Ok, Self::Error> {
+ Err(Error::unsupported_type(Some("bytes")))
+ }
+
+ fn serialize_none(self) -> Result<Self::Ok, Self::Error> {
+ Err(Error::unsupported_none())
+ }
+
+ fn serialize_some<T>(self, v: &T) -> Result<Self::Ok, Self::Error>
+ where
+ T: serde_core::ser::Serialize + ?Sized,
+ {
+ v.serialize(self)
+ }
+
+ fn serialize_unit(self) -> Result<Self::Ok, Self::Error> {
+ Err(Error::unsupported_type(Some("unit")))
+ }
+
+ fn serialize_unit_struct(self, name: &'static str) -> Result<Self::Ok, Self::Error> {
+ Err(Error::unsupported_type(Some(name)))
+ }
+
+ fn serialize_unit_variant(
+ self,
+ name: &'static str,
+ _variant_index: u32,
+ _variant: &'static str,
+ ) -> Result<Self::Ok, Self::Error> {
+ Err(Error::unsupported_type(Some(name)))
+ }
+
+ fn serialize_newtype_struct<T>(
+ self,
+ _name: &'static str,
+ v: &T,
+ ) -> Result<Self::Ok, Self::Error>
+ where
+ T: serde_core::ser::Serialize + ?Sized,
+ {
+ v.serialize(self)
+ }
+
+ fn serialize_newtype_variant<T>(
+ self,
+ _name: &'static str,
+ _variant_index: u32,
+ variant: &'static str,
+ _value: &T,
+ ) -> Result<Self::Ok, Self::Error>
+ where
+ T: serde_core::ser::Serialize + ?Sized,
+ {
+ Err(Error::unsupported_type(Some(variant)))
+ }
+
+ fn serialize_seq(self, _len: Option<usize>) -> Result<Self::SerializeSeq, Self::Error> {
+ Ok(SerializeArrayOfTablesSerializer::seq(
+ self.buf,
+ self.parent,
+ self.key,
+ self.style,
+ ))
+ }
+
+ fn serialize_tuple(self, len: usize) -> Result<Self::SerializeTuple, Self::Error> {
+ self.serialize_seq(Some(len))
+ }
+
+ fn serialize_tuple_struct(
+ self,
+ _name: &'static str,
+ len: usize,
+ ) -> Result<Self::SerializeTupleStruct, Self::Error> {
+ self.serialize_seq(Some(len))
+ }
+
+ fn serialize_tuple_variant(
+ self,
+ _name: &'static str,
+ _variant_index: u32,
+ variant: &'static str,
+ _len: usize,
+ ) -> Result<Self::SerializeTupleVariant, Self::Error> {
+ Err(Error::unsupported_type(Some(variant)))
+ }
+
+ fn serialize_map(self, _len: Option<usize>) -> Result<Self::SerializeMap, Self::Error> {
+ Err(Error::unsupported_type(Some("map")))
+ }
+
+ fn serialize_struct(
+ self,
+ name: &'static str,
+ _len: usize,
+ ) -> Result<Self::SerializeStruct, Self::Error> {
+ Err(Error::unsupported_type(Some(name)))
+ }
+
+ fn serialize_struct_variant(
+ self,
+ _name: &'static str,
+ _variant_index: u32,
+ variant: &'static str,
+ _len: usize,
+ ) -> Result<Self::SerializeStructVariant, Self::Error> {
+ Err(Error::unsupported_type(Some(variant)))
+ }
+}
+
+#[doc(hidden)]
+pub(crate) struct SerializeArrayOfTablesSerializer<'d> {
+ buf: &'d mut Buffer,
+ parent: Table,
+ key: String,
+ style: Style,
+}
+
+impl<'d> SerializeArrayOfTablesSerializer<'d> {
+ pub(crate) fn seq(buf: &'d mut Buffer, parent: Table, key: String, style: Style) -> Self {
+ Self {
+ buf,
+ parent,
+ key,
+ style,
+ }
+ }
+
+ fn end(self) -> Result<&'d mut Buffer, Error> {
+ Ok(self.buf)
+ }
+}
+
+impl<'d> serde_core::ser::SerializeSeq for SerializeArrayOfTablesSerializer<'d> {
+ type Ok = &'d mut Buffer;
+ type Error = Error;
+
+ fn serialize_element<T>(&mut self, value: &T) -> Result<(), Error>
+ where
+ T: serde_core::ser::Serialize + ?Sized,
+ {
+ let child = self.buf.element_table(&mut self.parent, self.key.clone());
+ let value_serializer = Serializer::with_table(self.buf, child, self.style);
+ value.serialize(value_serializer)?;
+ Ok(())
+ }
+
+ fn end(self) -> Result<Self::Ok, Self::Error> {
+ self.end()
+ }
+}
+
+impl<'d> serde_core::ser::SerializeTuple for SerializeArrayOfTablesSerializer<'d> {
+ type Ok = &'d mut Buffer;
+ type Error = Error;
+
+ fn serialize_element<T>(&mut self, value: &T) -> Result<(), Error>
+ where
+ T: serde_core::ser::Serialize + ?Sized,
+ {
+ serde_core::ser::SerializeSeq::serialize_element(self, value)
+ }
+
+ fn end(self) -> Result<Self::Ok, Self::Error> {
+ serde_core::ser::SerializeSeq::end(self)
+ }
+}
+
+impl<'d> serde_core::ser::SerializeTupleStruct for SerializeArrayOfTablesSerializer<'d> {
+ type Ok = &'d mut Buffer;
+ type Error = Error;
+
+ fn serialize_field<T>(&mut self, value: &T) -> Result<(), Error>
+ where
+ T: serde_core::ser::Serialize + ?Sized,
+ {
+ serde_core::ser::SerializeSeq::serialize_element(self, value)
+ }
+
+ fn end(self) -> Result<Self::Ok, Self::Error> {
+ serde_core::ser::SerializeSeq::end(self)
+ }
+}
diff --git a/third_party/rust/toml/src/ser/document/buffer.rs b/third_party/rust/toml/src/ser/document/buffer.rs
@@ -0,0 +1,131 @@
+use toml_writer::TomlWrite as _;
+
+use crate::alloc_prelude::*;
+
+/// TOML Document serialization buffer
+#[derive(Debug, Default)]
+pub struct Buffer {
+ tables: Vec<Option<Table>>,
+}
+
+impl Buffer {
+ /// Initialize a new serialization buffer
+ pub fn new() -> Self {
+ Default::default()
+ }
+
+ /// Reset the buffer for serializing another document
+ pub fn clear(&mut self) {
+ self.tables.clear();
+ }
+
+ pub(crate) fn root_table(&mut self) -> Table {
+ self.new_table(None)
+ }
+
+ pub(crate) fn child_table(&mut self, parent: &mut Table, key: String) -> Table {
+ parent.has_children = true;
+ let mut key_path = parent.key.clone();
+ key_path.get_or_insert_with(Vec::new).push(key);
+ self.new_table(key_path)
+ }
+
+ pub(crate) fn element_table(&mut self, parent: &mut Table, key: String) -> Table {
+ let mut table = self.child_table(parent, key);
+ table.array = true;
+ table
+ }
+
+ pub(crate) fn new_table(&mut self, key: Option<Vec<String>>) -> Table {
+ let pos = self.tables.len();
+ let table = Table {
+ key,
+ body: String::new(),
+ has_children: false,
+ pos,
+ array: false,
+ };
+ self.tables.push(None);
+ table
+ }
+
+ pub(crate) fn push(&mut self, table: Table) {
+ let pos = table.pos;
+ self.tables[pos] = Some(table);
+ }
+}
+
+impl core::fmt::Display for Buffer {
+ fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
+ let mut tables = self
+ .tables
+ .iter()
+ .filter_map(|t| t.as_ref())
+ .filter(|t| required_table(t));
+ if let Some(table) = tables.next() {
+ table.fmt(f)?;
+ }
+ for table in tables {
+ f.newline()?;
+ table.fmt(f)?;
+ }
+ Ok(())
+ }
+}
+
+fn required_table(table: &Table) -> bool {
+ if table.key.is_none() {
+ !table.body.is_empty()
+ } else {
+ table.array || !table.body.is_empty() || !table.has_children
+ }
+}
+
+#[derive(Clone, Debug)]
+pub(crate) struct Table {
+ key: Option<Vec<String>>,
+ body: String,
+ has_children: bool,
+ array: bool,
+ pos: usize,
+}
+
+impl Table {
+ pub(crate) fn body_mut(&mut self) -> &mut String {
+ &mut self.body
+ }
+
+ pub(crate) fn has_children(&mut self, yes: bool) {
+ self.has_children = yes;
+ }
+}
+
+impl core::fmt::Display for Table {
+ fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
+ if let Some(key) = &self.key {
+ if self.array {
+ f.open_array_of_tables_header()?;
+ } else {
+ f.open_table_header()?;
+ }
+ let mut key = key.iter();
+ if let Some(key) = key.next() {
+ write!(f, "{key}")?;
+ }
+ for key in key {
+ f.key_sep()?;
+ write!(f, "{key}")?;
+ }
+ if self.array {
+ f.close_array_of_tables_header()?;
+ } else {
+ f.close_table_header()?;
+ }
+ f.newline()?;
+ }
+
+ self.body.fmt(f)?;
+
+ Ok(())
+ }
+}
diff --git a/third_party/rust/toml/src/ser/document/map.rs b/third_party/rust/toml/src/ser/document/map.rs
@@ -0,0 +1,167 @@
+use core::fmt::Write as _;
+
+use toml_writer::TomlWrite as _;
+
+use super::array_of_tables::ArrayOfTablesSerializer;
+use super::style::Style;
+use super::value::KeySerializer;
+use super::value::ValueSerializer;
+use super::Buffer;
+use super::Error;
+use super::SerializationStrategy;
+use super::Serializer;
+use super::Table;
+use crate::alloc_prelude::*;
+
+#[doc(hidden)]
+pub struct SerializeDocumentTable<'d> {
+ buf: &'d mut Buffer,
+ table: Table,
+ key: Option<String>,
+ style: Style,
+}
+
+impl<'d> SerializeDocumentTable<'d> {
+ pub(crate) fn map(buf: &'d mut Buffer, table: Table, style: Style) -> Result<Self, Error> {
+ Ok(Self {
+ buf,
+ table,
+ key: None,
+ style,
+ })
+ }
+
+ fn end(self) -> Result<&'d mut Buffer, Error> {
+ self.buf.push(self.table);
+ Ok(self.buf)
+ }
+}
+
+impl<'d> serde_core::ser::SerializeMap for SerializeDocumentTable<'d> {
+ type Ok = &'d mut Buffer;
+ type Error = Error;
+
+ fn serialize_key<T>(&mut self, input: &T) -> Result<(), Self::Error>
+ where
+ T: serde_core::ser::Serialize + ?Sized,
+ {
+ let mut encoded_key = String::new();
+ input.serialize(KeySerializer {
+ dst: &mut encoded_key,
+ })?;
+ self.key = Some(encoded_key);
+ Ok(())
+ }
+
+ fn serialize_value<T>(&mut self, value: &T) -> Result<(), Self::Error>
+ where
+ T: serde_core::ser::Serialize + ?Sized,
+ {
+ let encoded_key = self
+ .key
+ .take()
+ .expect("always called after `serialize_key`");
+ match SerializationStrategy::from(value) {
+ SerializationStrategy::Value => {
+ let dst = self.table.body_mut();
+
+ write!(dst, "{encoded_key}")?;
+ dst.space()?;
+ dst.keyval_sep()?;
+ dst.space()?;
+ let value_serializer = ValueSerializer::with_style(dst, self.style);
+ let dst = value.serialize(value_serializer)?;
+ dst.newline()?;
+ }
+ SerializationStrategy::ArrayOfTables => {
+ self.table.has_children(true);
+ let value_serializer = ArrayOfTablesSerializer::new(
+ self.buf,
+ self.table.clone(),
+ encoded_key,
+ self.style,
+ );
+ value.serialize(value_serializer)?;
+ }
+ SerializationStrategy::Table | SerializationStrategy::Unknown => {
+ let child = self.buf.child_table(&mut self.table, encoded_key);
+ let value_serializer = Serializer::with_table(self.buf, child, self.style);
+ value.serialize(value_serializer)?;
+ }
+ SerializationStrategy::Skip => {
+ // silently drop these key-value pairs
+ }
+ }
+ Ok(())
+ }
+
+ fn end(self) -> Result<Self::Ok, Self::Error> {
+ self.end()
+ }
+}
+
+impl<'d> serde_core::ser::SerializeStruct for SerializeDocumentTable<'d> {
+ type Ok = &'d mut Buffer;
+ type Error = Error;
+
+ fn serialize_field<T>(&mut self, key: &'static str, value: &T) -> Result<(), Self::Error>
+ where
+ T: serde_core::ser::Serialize + ?Sized,
+ {
+ match SerializationStrategy::from(value) {
+ SerializationStrategy::Value => {
+ let dst = self.table.body_mut();
+
+ dst.key(key)?;
+ dst.space()?;
+ dst.keyval_sep()?;
+ dst.space()?;
+ let value_serializer = ValueSerializer::with_style(dst, self.style);
+ let dst = value.serialize(value_serializer)?;
+ dst.newline()?;
+ }
+ SerializationStrategy::ArrayOfTables => {
+ self.table.has_children(true);
+ let value_serializer = ArrayOfTablesSerializer::new(
+ self.buf,
+ self.table.clone(),
+ key.to_owned(),
+ self.style,
+ );
+ value.serialize(value_serializer)?;
+ }
+ SerializationStrategy::Table | SerializationStrategy::Unknown => {
+ let child = self.buf.child_table(&mut self.table, key.to_owned());
+ let value_serializer = Serializer::with_table(self.buf, child, self.style);
+ value.serialize(value_serializer)?;
+ }
+ SerializationStrategy::Skip => {
+ // silently drop these key-value pairs
+ }
+ }
+
+ Ok(())
+ }
+
+ fn end(self) -> Result<Self::Ok, Self::Error> {
+ self.end()
+ }
+}
+
+impl<'d> serde_core::ser::SerializeStructVariant for SerializeDocumentTable<'d> {
+ type Ok = &'d mut Buffer;
+ type Error = Error;
+
+ #[inline]
+ fn serialize_field<T>(&mut self, key: &'static str, value: &T) -> Result<(), Self::Error>
+ where
+ T: serde_core::ser::Serialize + ?Sized,
+ {
+ serde_core::ser::SerializeStruct::serialize_field(self, key, value)
+ }
+
+ #[inline]
+ fn end(self) -> Result<Self::Ok, Self::Error> {
+ self.end()
+ }
+}
diff --git a/third_party/rust/toml/src/ser/document/mod.rs b/third_party/rust/toml/src/ser/document/mod.rs
@@ -0,0 +1,266 @@
+//! Serializing Rust structures into TOML.
+//!
+//! This module contains all the Serde support for serializing Rust structures
+//! into TOML documents (as strings). Note that some top-level functions here
+//! are also provided at the top of the crate.
+
+mod array;
+mod array_of_tables;
+mod buffer;
+mod map;
+mod strategy;
+
+use toml_writer::TomlWrite as _;
+
+use super::style;
+use super::value;
+use super::Error;
+use crate::alloc_prelude::*;
+use buffer::Table;
+use strategy::SerializationStrategy;
+
+pub use buffer::Buffer;
+
+/// Serialization for TOML documents.
+///
+/// This structure implements serialization support for TOML to serialize an
+/// arbitrary type to TOML. Note that the TOML format does not support all
+/// datatypes in Rust, such as enums, tuples, and tuple structs. These types
+/// will generate an error when serialized.
+///
+/// Currently a serializer always writes its output to an in-memory `String`,
+/// which is passed in when creating the serializer itself.
+///
+/// To serialize TOML values, instead of documents, see
+/// [`ValueSerializer`][super::value::ValueSerializer].
+pub struct Serializer<'d> {
+ // Output buffer shared by this serializer and all nested table serializers.
+ buf: &'d mut Buffer,
+ // Formatting options (e.g. multiline arrays for "pretty" output).
+ style: style::Style,
+ // The table currently being written; pushed into `buf` by `end()`.
+ table: Table,
+}
+
+impl<'d> Serializer<'d> {
+ /// Creates a new serializer which will emit TOML into the buffer provided.
+ ///
+ /// The serializer can then be used to serialize a type after which the data
+ /// will be present in `buf`.
+ pub fn new(buf: &'d mut Buffer) -> Self {
+ let table = buf.root_table();
+ Self {
+ buf,
+ style: Default::default(),
+ table,
+ }
+ }
+
+ /// Apply a default "pretty" policy to the document
+ ///
+ /// For greater customization, instead serialize to a
+ /// [`toml_edit::DocumentMut`](https://docs.rs/toml_edit/latest/toml_edit/struct.DocumentMut.html).
+ pub fn pretty(buf: &'d mut Buffer) -> Self {
+ let mut ser = Serializer::new(buf);
+ ser.style.multiline_array = true;
+ ser
+ }
+
+ // Internal constructor: serialize into an existing (nested) table rather
+ // than the document root.
+ pub(crate) fn with_table(buf: &'d mut Buffer, table: Table, style: style::Style) -> Self {
+ Self { buf, style, table }
+ }
+
+ // Flush the current table into the buffer and hand the buffer back.
+ fn end(self) -> Result<&'d mut Buffer, Error> {
+ self.buf.push(self.table);
+ Ok(self.buf)
+ }
+}
+
+// Top-level document serializer: a TOML document must be a table, so every
+// scalar/sequence entry point below is rejected with `unsupported_type`.
+impl<'d> serde_core::ser::Serializer for Serializer<'d> {
+ type Ok = &'d mut Buffer;
+ type Error = Error;
+ type SerializeSeq = serde_core::ser::Impossible<Self::Ok, Self::Error>;
+ type SerializeTuple = serde_core::ser::Impossible<Self::Ok, Self::Error>;
+ type SerializeTupleStruct = serde_core::ser::Impossible<Self::Ok, Self::Error>;
+ type SerializeTupleVariant = array::SerializeDocumentTupleVariant<'d>;
+ type SerializeMap = map::SerializeDocumentTable<'d>;
+ type SerializeStruct = map::SerializeDocumentTable<'d>;
+ type SerializeStructVariant = map::SerializeDocumentTable<'d>;
+
+ fn serialize_bool(self, _v: bool) -> Result<Self::Ok, Self::Error> {
+ Err(Error::unsupported_type(Some("bool")))
+ }
+
+ fn serialize_i8(self, _v: i8) -> Result<Self::Ok, Self::Error> {
+ Err(Error::unsupported_type(Some("i8")))
+ }
+
+ fn serialize_i16(self, _v: i16) -> Result<Self::Ok, Self::Error> {
+ Err(Error::unsupported_type(Some("i16")))
+ }
+
+ fn serialize_i32(self, _v: i32) -> Result<Self::Ok, Self::Error> {
+ Err(Error::unsupported_type(Some("i32")))
+ }
+
+ fn serialize_i64(self, _v: i64) -> Result<Self::Ok, Self::Error> {
+ Err(Error::unsupported_type(Some("i64")))
+ }
+
+ fn serialize_u8(self, _v: u8) -> Result<Self::Ok, Self::Error> {
+ Err(Error::unsupported_type(Some("u8")))
+ }
+
+ fn serialize_u16(self, _v: u16) -> Result<Self::Ok, Self::Error> {
+ Err(Error::unsupported_type(Some("u16")))
+ }
+
+ fn serialize_u32(self, _v: u32) -> Result<Self::Ok, Self::Error> {
+ Err(Error::unsupported_type(Some("u32")))
+ }
+
+ fn serialize_u64(self, _v: u64) -> Result<Self::Ok, Self::Error> {
+ Err(Error::unsupported_type(Some("u64")))
+ }
+
+ fn serialize_f32(self, _v: f32) -> Result<Self::Ok, Self::Error> {
+ Err(Error::unsupported_type(Some("f32")))
+ }
+
+ fn serialize_f64(self, _v: f64) -> Result<Self::Ok, Self::Error> {
+ Err(Error::unsupported_type(Some("f64")))
+ }
+
+ fn serialize_char(self, _v: char) -> Result<Self::Ok, Self::Error> {
+ Err(Error::unsupported_type(Some("char")))
+ }
+
+ fn serialize_str(self, _v: &str) -> Result<Self::Ok, Self::Error> {
+ Err(Error::unsupported_type(Some("str")))
+ }
+
+ fn serialize_bytes(self, _v: &[u8]) -> Result<Self::Ok, Self::Error> {
+ Err(Error::unsupported_type(Some("bytes")))
+ }
+
+ fn serialize_none(self) -> Result<Self::Ok, Self::Error> {
+ Err(Error::unsupported_none())
+ }
+
+ // `Some(v)` is transparent: serialize the inner value as the document.
+ fn serialize_some<T>(self, v: &T) -> Result<Self::Ok, Self::Error>
+ where
+ T: serde_core::ser::Serialize + ?Sized,
+ {
+ v.serialize(self)
+ }
+
+ fn serialize_unit(self) -> Result<Self::Ok, Self::Error> {
+ Err(Error::unsupported_type(Some("unit")))
+ }
+
+ fn serialize_unit_struct(self, name: &'static str) -> Result<Self::Ok, Self::Error> {
+ Err(Error::unsupported_type(Some(name)))
+ }
+
+ fn serialize_unit_variant(
+ self,
+ name: &'static str,
+ _variant_index: u32,
+ _variant: &'static str,
+ ) -> Result<Self::Ok, Self::Error> {
+ Err(Error::unsupported_type(Some(name)))
+ }
+
+ // Newtype structs are transparent wrappers.
+ fn serialize_newtype_struct<T>(
+ self,
+ _name: &'static str,
+ v: &T,
+ ) -> Result<Self::Ok, Self::Error>
+ where
+ T: serde_core::ser::Serialize + ?Sized,
+ {
+ v.serialize(self)
+ }
+
+ // `E::Variant(value)` becomes a single `variant = value` pair or a
+ // `[variant]` child table, depending on the probed strategy.
+ fn serialize_newtype_variant<T>(
+ mut self,
+ _name: &'static str,
+ _variant_index: u32,
+ variant: &'static str,
+ value: &T,
+ ) -> Result<Self::Ok, Self::Error>
+ where
+ T: serde_core::ser::Serialize + ?Sized,
+ {
+ match SerializationStrategy::from(value) {
+ // NOTE(review): ArrayOfTables is grouped with Value here, so the
+ // payload is emitted inline (`variant = [...]`) rather than as
+ // `[[variant]]` headers as in SerializeStruct — confirm intended.
+ SerializationStrategy::Value | SerializationStrategy::ArrayOfTables => {
+ let dst = self.table.body_mut();
+
+ dst.key(variant)?;
+ dst.space()?;
+ dst.keyval_sep()?;
+ dst.space()?;
+ let value_serializer = value::ValueSerializer::with_style(dst, self.style);
+ let dst = value.serialize(value_serializer)?;
+ dst.newline()?;
+ }
+ SerializationStrategy::Table | SerializationStrategy::Unknown => {
+ let child = self.buf.child_table(&mut self.table, variant.to_owned());
+ let value_serializer = Serializer::with_table(self.buf, child, self.style);
+ value.serialize(value_serializer)?;
+ }
+ SerializationStrategy::Skip => {
+ // silently drop these key-value pairs
+ }
+ }
+
+ self.end()
+ }
+
+ fn serialize_seq(self, _len: Option<usize>) -> Result<Self::SerializeSeq, Self::Error> {
+ Err(Error::unsupported_type(Some("array")))
+ }
+
+ fn serialize_tuple(self, len: usize) -> Result<Self::SerializeTuple, Self::Error> {
+ self.serialize_seq(Some(len))
+ }
+
+ fn serialize_tuple_struct(
+ self,
+ _name: &'static str,
+ len: usize,
+ ) -> Result<Self::SerializeTupleStruct, Self::Error> {
+ self.serialize_seq(Some(len))
+ }
+
+ fn serialize_tuple_variant(
+ self,
+ _name: &'static str,
+ _variant_index: u32,
+ variant: &'static str,
+ len: usize,
+ ) -> Result<Self::SerializeTupleVariant, Self::Error> {
+ array::SerializeDocumentTupleVariant::tuple(self.buf, self.table, variant, len, self.style)
+ }
+
+ fn serialize_map(self, _len: Option<usize>) -> Result<Self::SerializeMap, Self::Error> {
+ map::SerializeDocumentTable::map(self.buf, self.table, self.style)
+ }
+
+ fn serialize_struct(
+ self,
+ _name: &'static str,
+ len: usize,
+ ) -> Result<Self::SerializeStruct, Self::Error> {
+ self.serialize_map(Some(len))
+ }
+
+ // `E::Variant { .. }` becomes a `[variant]` child table; the parent table
+ // is flushed first so headers appear in document order.
+ fn serialize_struct_variant(
+ mut self,
+ _name: &'static str,
+ _variant_index: u32,
+ variant: &'static str,
+ _len: usize,
+ ) -> Result<Self::SerializeStructVariant, Self::Error> {
+ let child = self.buf.child_table(&mut self.table, variant.to_owned());
+ self.buf.push(self.table);
+ map::SerializeDocumentTable::map(self.buf, child, self.style)
+ }
+}
diff --git a/third_party/rust/toml/src/ser/document/strategy.rs b/third_party/rust/toml/src/ser/document/strategy.rs
@@ -0,0 +1,343 @@
+// How a value must be rendered in a TOML document.
+#[derive(Copy, Clone, PartialEq, Eq, Debug)]
+pub(crate) enum SerializationStrategy {
+ // Inline `key = value` pair.
+ Value,
+ // A `[table]` header with its own body.
+ Table,
+ // Repeated `[[table]]` headers (non-empty sequence of tables).
+ ArrayOfTables,
+ // Omit the key-value pair entirely (e.g. `None`).
+ Skip,
+ // Could not be classified (e.g. the value's Serialize impl errored).
+ Unknown,
+}
+
+impl<T> From<&T> for SerializationStrategy
+where
+ T: serde_core::ser::Serialize + ?Sized,
+{
+ fn from(value: &T) -> Self {
+ // Probe the value with `WalkValue`, whose every path "fails" with the
+ // strategy as the error. Since `WalkValue::Ok` is `Infallible`, the
+ // result can only ever be `Err`, so `unwrap_err` cannot panic.
+ value.serialize(WalkValue).unwrap_err()
+ }
+}
+
+// `SerializationStrategy` doubles as WalkValue's error type, so it must
+// implement serde's `Error` trait; custom errors map to `Unknown`.
+impl serde_core::ser::Error for SerializationStrategy {
+ fn custom<T>(_msg: T) -> Self
+ where
+ T: core::fmt::Display,
+ {
+ Self::Unknown
+ }
+}
+
+impl core::fmt::Display for SerializationStrategy {
+ fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
+ // Never user-visible; only needed to satisfy the `Error` bound.
+ "error".fmt(f)
+ }
+}
+
+#[cfg(feature = "std")]
+impl std::error::Error for SerializationStrategy {}
+#[cfg(not(feature = "std"))]
+impl serde_core::de::StdError for SerializationStrategy {}
+
+// Zero-sized probe serializer; see `From<&T> for SerializationStrategy`.
+struct WalkValue;
+
+// Probe serializer: produces no output. Every entry point "fails" with the
+// `SerializationStrategy` describing how the value should be rendered.
+impl serde_core::ser::Serializer for WalkValue {
+ type Ok = core::convert::Infallible;
+ type Error = SerializationStrategy;
+ type SerializeSeq = ArrayWalkValue;
+ type SerializeTuple = ArrayWalkValue;
+ type SerializeTupleStruct = ArrayWalkValue;
+ type SerializeTupleVariant = serde_core::ser::Impossible<Self::Ok, Self::Error>;
+ type SerializeMap = serde_core::ser::Impossible<Self::Ok, Self::Error>;
+ type SerializeStruct = StructWalkValue;
+ type SerializeStructVariant = serde_core::ser::Impossible<Self::Ok, Self::Error>;
+
+ // All scalars render inline as values.
+ fn serialize_bool(self, _v: bool) -> Result<Self::Ok, Self::Error> {
+ Err(SerializationStrategy::Value)
+ }
+
+ fn serialize_i8(self, _v: i8) -> Result<Self::Ok, Self::Error> {
+ Err(SerializationStrategy::Value)
+ }
+
+ fn serialize_i16(self, _v: i16) -> Result<Self::Ok, Self::Error> {
+ Err(SerializationStrategy::Value)
+ }
+
+ fn serialize_i32(self, _v: i32) -> Result<Self::Ok, Self::Error> {
+ Err(SerializationStrategy::Value)
+ }
+
+ fn serialize_i64(self, _v: i64) -> Result<Self::Ok, Self::Error> {
+ Err(SerializationStrategy::Value)
+ }
+
+ fn serialize_i128(self, _v: i128) -> Result<Self::Ok, Self::Error> {
+ Err(SerializationStrategy::Value)
+ }
+
+ fn serialize_u8(self, _v: u8) -> Result<Self::Ok, Self::Error> {
+ Err(SerializationStrategy::Value)
+ }
+
+ fn serialize_u16(self, _v: u16) -> Result<Self::Ok, Self::Error> {
+ Err(SerializationStrategy::Value)
+ }
+
+ fn serialize_u32(self, _v: u32) -> Result<Self::Ok, Self::Error> {
+ Err(SerializationStrategy::Value)
+ }
+
+ fn serialize_u64(self, _v: u64) -> Result<Self::Ok, Self::Error> {
+ Err(SerializationStrategy::Value)
+ }
+
+ fn serialize_u128(self, _v: u128) -> Result<Self::Ok, Self::Error> {
+ Err(SerializationStrategy::Value)
+ }
+
+ fn serialize_f32(self, _v: f32) -> Result<Self::Ok, Self::Error> {
+ Err(SerializationStrategy::Value)
+ }
+
+ fn serialize_f64(self, _v: f64) -> Result<Self::Ok, Self::Error> {
+ Err(SerializationStrategy::Value)
+ }
+
+ fn serialize_char(self, _v: char) -> Result<Self::Ok, Self::Error> {
+ Err(SerializationStrategy::Value)
+ }
+
+ fn serialize_str(self, _v: &str) -> Result<Self::Ok, Self::Error> {
+ Err(SerializationStrategy::Value)
+ }
+
+ fn serialize_bytes(self, _v: &[u8]) -> Result<Self::Ok, Self::Error> {
+ Err(SerializationStrategy::Value)
+ }
+
+ // `None` has no TOML representation: the key-value pair is dropped.
+ fn serialize_none(self) -> Result<Self::Ok, Self::Error> {
+ Err(SerializationStrategy::Skip)
+ }
+
+ // `Some(v)` is transparent: classify the inner value.
+ fn serialize_some<T>(self, v: &T) -> Result<Self::Ok, Self::Error>
+ where
+ T: serde_core::ser::Serialize + ?Sized,
+ {
+ v.serialize(self)
+ }
+
+ fn serialize_unit(self) -> Result<Self::Ok, Self::Error> {
+ Err(SerializationStrategy::Value)
+ }
+
+ fn serialize_unit_struct(self, _name: &'static str) -> Result<Self::Ok, Self::Error> {
+ Err(SerializationStrategy::Value)
+ }
+
+ fn serialize_unit_variant(
+ self,
+ _name: &'static str,
+ _variant_index: u32,
+ _variant: &'static str,
+ ) -> Result<Self::Ok, Self::Error> {
+ Err(SerializationStrategy::Value)
+ }
+
+ fn serialize_newtype_struct<T>(
+ self,
+ _name: &'static str,
+ v: &T,
+ ) -> Result<Self::Ok, Self::Error>
+ where
+ T: serde_core::ser::Serialize + ?Sized,
+ {
+ v.serialize(self)
+ }
+
+ // Variants with payloads render as `variant = …` inside a table.
+ fn serialize_newtype_variant<T>(
+ self,
+ _name: &'static str,
+ _variant_index: u32,
+ _variant: &'static str,
+ _value: &T,
+ ) -> Result<Self::Ok, Self::Error>
+ where
+ T: serde_core::ser::Serialize + ?Sized,
+ {
+ Err(SerializationStrategy::Table)
+ }
+
+ // Sequences need a second pass over elements to decide between an inline
+ // array and an array of tables; `ArrayWalkValue` performs that walk.
+ fn serialize_seq(self, _len: Option<usize>) -> Result<Self::SerializeSeq, Self::Error> {
+ Ok(ArrayWalkValue::new())
+ }
+
+ fn serialize_tuple(self, len: usize) -> Result<Self::SerializeTuple, Self::Error> {
+ self.serialize_seq(Some(len))
+ }
+
+ fn serialize_tuple_struct(
+ self,
+ _name: &'static str,
+ len: usize,
+ ) -> Result<Self::SerializeTupleStruct, Self::Error> {
+ self.serialize_seq(Some(len))
+ }
+
+ fn serialize_tuple_variant(
+ self,
+ _name: &'static str,
+ _variant_index: u32,
+ _variant: &'static str,
+ _len: usize,
+ ) -> Result<Self::SerializeTupleVariant, Self::Error> {
+ Err(SerializationStrategy::Table)
+ }
+
+ fn serialize_map(self, _len: Option<usize>) -> Result<Self::SerializeMap, Self::Error> {
+ Err(SerializationStrategy::Table)
+ }
+
+ fn serialize_struct(
+ self,
+ name: &'static str,
+ _len: usize,
+ ) -> Result<Self::SerializeStruct, Self::Error> {
+ // Datetimes are encoded as specially-named structs; they render as
+ // values, not tables — `StructWalkValue` confirms that.
+ if toml_datetime::ser::is_datetime(name) {
+ Ok(StructWalkValue)
+ } else {
+ Err(SerializationStrategy::Table)
+ }
+ }
+
+ fn serialize_struct_variant(
+ self,
+ _name: &'static str,
+ _variant_index: u32,
+ _variant: &'static str,
+ _len: usize,
+ ) -> Result<Self::SerializeStructVariant, Self::Error> {
+ Err(SerializationStrategy::Table)
+ }
+}
+
+// Probe for sequences: decides between an inline array and an array of
+// tables. Any non-table element (or an empty sequence) classifies the whole
+// sequence as an inline `Value`; otherwise it is an `ArrayOfTables`.
+#[doc(hidden)]
+pub(crate) struct ArrayWalkValue {
+ is_empty: bool,
+}
+
+impl ArrayWalkValue {
+ fn new() -> Self {
+ Self { is_empty: true }
+ }
+
+ fn serialize_element<T>(&mut self, value: &T) -> Result<(), SerializationStrategy>
+ where
+ T: serde_core::ser::Serialize + ?Sized,
+ {
+ self.is_empty = false;
+ match SerializationStrategy::from(value) {
+ // A single non-table element forces the inline-array form; the
+ // `Err` short-circuits the walk with the final classification.
+ SerializationStrategy::Value
+ | SerializationStrategy::ArrayOfTables
+ | SerializationStrategy::Unknown
+ | SerializationStrategy::Skip => Err(SerializationStrategy::Value),
+ SerializationStrategy::Table => Ok(()),
+ }
+ }
+
+ fn end(self) -> Result<core::convert::Infallible, SerializationStrategy> {
+ // Reaching `end` means every element was a table; an empty sequence
+ // still renders as an (empty) inline array.
+ if self.is_empty {
+ Err(SerializationStrategy::Value)
+ } else {
+ Err(SerializationStrategy::ArrayOfTables)
+ }
+ }
+}
+
+impl serde_core::ser::SerializeSeq for ArrayWalkValue {
+ type Ok = core::convert::Infallible;
+ type Error = SerializationStrategy;
+
+ fn serialize_element<T>(&mut self, value: &T) -> Result<(), Self::Error>
+ where
+ T: serde_core::ser::Serialize + ?Sized,
+ {
+ // Delegates to the inherent helper above.
+ self.serialize_element(value)
+ }
+
+ fn end(self) -> Result<Self::Ok, Self::Error> {
+ self.end()
+ }
+}
+
+impl serde_core::ser::SerializeTuple for ArrayWalkValue {
+ type Ok = core::convert::Infallible;
+ type Error = SerializationStrategy;
+
+ fn serialize_element<T>(&mut self, value: &T) -> Result<(), Self::Error>
+ where
+ T: serde_core::ser::Serialize + ?Sized,
+ {
+ self.serialize_element(value)
+ }
+
+ fn end(self) -> Result<Self::Ok, Self::Error> {
+ self.end()
+ }
+}
+
+impl serde_core::ser::SerializeTupleStruct for ArrayWalkValue {
+ type Ok = core::convert::Infallible;
+ type Error = SerializationStrategy;
+
+ fn serialize_field<T>(&mut self, value: &T) -> Result<(), Self::Error>
+ where
+ T: serde_core::ser::Serialize + ?Sized,
+ {
+ self.serialize_element(value)
+ }
+
+ fn end(self) -> Result<Self::Ok, Self::Error> {
+ self.end()
+ }
+}
+
+// Probe for datetime structs (see `WalkValue::serialize_struct`): fields are
+// ignored and the struct always classifies as an inline `Value`.
+pub(crate) struct StructWalkValue;
+
+impl serde_core::ser::SerializeMap for StructWalkValue {
+ type Ok = core::convert::Infallible;
+ type Error = SerializationStrategy;
+
+ fn serialize_key<T>(&mut self, _input: &T) -> Result<(), Self::Error>
+ where
+ T: serde_core::ser::Serialize + ?Sized,
+ {
+ Ok(())
+ }
+
+ fn serialize_value<T>(&mut self, _value: &T) -> Result<(), Self::Error>
+ where
+ T: serde_core::ser::Serialize + ?Sized,
+ {
+ Ok(())
+ }
+
+ fn end(self) -> Result<Self::Ok, Self::Error> {
+ // is date time
+ Err(SerializationStrategy::Value)
+ }
+}
+
+impl serde_core::ser::SerializeStruct for StructWalkValue {
+ type Ok = core::convert::Infallible;
+ type Error = SerializationStrategy;
+
+ fn serialize_field<T>(&mut self, _key: &'static str, _value: &T) -> Result<(), Self::Error>
+ where
+ T: serde_core::ser::Serialize + ?Sized,
+ {
+ Ok(())
+ }
+
+ fn end(self) -> Result<Self::Ok, Self::Error> {
+ // is date time
+ Err(SerializationStrategy::Value)
+ }
+}
diff --git a/third_party/rust/toml/src/ser/error.rs b/third_party/rust/toml/src/ser/error.rs
@@ -0,0 +1,111 @@
+use crate::alloc_prelude::*;
+
+/// Errors that can occur when serializing a type.
+#[derive(Clone, PartialEq, Eq)]
+pub struct Error {
+ // The concrete error kind; kept private so the set can grow.
+ pub(crate) inner: ErrorInner,
+}
+
+// Internal constructors, one per `ErrorInner` variant.
+impl Error {
+ // Wrap any displayable message as a custom error.
+ pub(crate) fn new(inner: impl core::fmt::Display) -> Self {
+ Self {
+ inner: ErrorInner::Custom(inner.to_string()),
+ }
+ }
+
+ pub(crate) fn unsupported_type(t: Option<&'static str>) -> Self {
+ Self {
+ inner: ErrorInner::UnsupportedType(t),
+ }
+ }
+
+ pub(crate) fn out_of_range(t: Option<&'static str>) -> Self {
+ Self {
+ inner: ErrorInner::OutOfRange(t),
+ }
+ }
+
+ pub(crate) fn unsupported_none() -> Self {
+ Self {
+ inner: ErrorInner::UnsupportedNone,
+ }
+ }
+
+ pub(crate) fn key_not_string() -> Self {
+ Self {
+ inner: ErrorInner::KeyNotString,
+ }
+ }
+
+ // Only the "display" serializer can produce invalid dates.
+ #[cfg(feature = "display")]
+ pub(crate) fn date_invalid() -> Self {
+ Self {
+ inner: ErrorInner::DateInvalid,
+ }
+ }
+}
+
+// Formatting failures while writing TOML surface as a generic write error.
+impl From<core::fmt::Error> for Error {
+ fn from(_: core::fmt::Error) -> Self {
+ Self::new("an error occurred when writing a value")
+ }
+}
+
+impl serde_core::ser::Error for Error {
+ fn custom<T>(msg: T) -> Self
+ where
+ T: core::fmt::Display,
+ {
+ Self::new(msg)
+ }
+}
+
+impl core::fmt::Display for Error {
+ fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
+ self.inner.fmt(f)
+ }
+}
+
+// Debug intentionally matches Display (both defer to `ErrorInner`).
+impl core::fmt::Debug for Error {
+ fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
+ self.inner.fmt(f)
+ }
+}
+
+#[cfg(feature = "std")]
+impl std::error::Error for Error {}
+#[cfg(not(feature = "std"))]
+impl serde_core::de::StdError for Error {}
+
+/// Errors that can occur when serializing a type.
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+#[non_exhaustive]
+pub(crate) enum ErrorInner {
+ /// Type could not be serialized to TOML
+ UnsupportedType(Option<&'static str>),
+ /// Value was out of range for the given type
+ OutOfRange(Option<&'static str>),
+ /// `None` could not be serialized to TOML
+ UnsupportedNone,
+ /// Key was not convertible to `String` for serializing to TOML
+ KeyNotString,
+ /// A serialized date was invalid
+ DateInvalid,
+ /// Other serialization error
+ Custom(String),
+}
+
+impl core::fmt::Display for ErrorInner {
+ fn fmt(&self, formatter: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
+ match self {
+ Self::UnsupportedType(Some(t)) => write!(formatter, "unsupported {t} type"),
+ Self::UnsupportedType(None) => write!(formatter, "unsupported rust type"),
+ Self::OutOfRange(Some(t)) => write!(formatter, "out-of-range value for {t} type"),
+ Self::OutOfRange(None) => write!(formatter, "out-of-range value"),
+ Self::UnsupportedNone => "unsupported None value".fmt(formatter),
+ Self::KeyNotString => "map key was not a string".fmt(formatter),
+ Self::DateInvalid => "a serialized date was invalid".fmt(formatter),
+ Self::Custom(s) => s.fmt(formatter),
+ }
+ }
+}
diff --git a/third_party/rust/toml/src/ser/mod.rs b/third_party/rust/toml/src/ser/mod.rs
@@ -0,0 +1,93 @@
+//! Serializing Rust structures into TOML.
+//!
+//! This module contains all the Serde support for serializing Rust structures
+//! into TOML documents (as strings). Note that some top-level functions here
+//! are also provided at the top of the crate.
+
+#[cfg(feature = "display")]
+mod document;
+mod error;
+#[cfg(feature = "display")]
+mod style;
+#[cfg(feature = "display")]
+mod value;
+
+use crate::alloc_prelude::*;
+
+#[cfg(feature = "display")]
+pub use document::Buffer;
+#[cfg(feature = "display")]
+pub use document::Serializer;
+pub use error::Error;
+pub(crate) use error::ErrorInner;
+#[cfg(feature = "display")]
+pub use value::ValueSerializer;
+
+/// Serialize the given data structure as a String of TOML.
+///
+/// Serialization can fail if `T`'s implementation of `Serialize` decides to
+/// fail, if `T` contains a map with non-string keys, or if `T` attempts to
+/// serialize an unsupported datatype such as an enum, tuple, or tuple struct.
+///
+/// To serialize TOML values, instead of documents, see [`ValueSerializer`].
+///
+/// # Examples
+///
+/// ```
+/// use serde::Serialize;
+///
+/// #[derive(Serialize)]
+/// struct Config {
+/// database: Database,
+/// }
+///
+/// #[derive(Serialize)]
+/// struct Database {
+/// ip: String,
+/// port: Vec<u16>,
+/// connection_max: u32,
+/// enabled: bool,
+/// }
+///
+/// let config = Config {
+/// database: Database {
+/// ip: "192.168.1.1".to_string(),
+/// port: vec![8001, 8002, 8003],
+/// connection_max: 5000,
+/// enabled: false,
+/// },
+/// };
+///
+/// let toml = toml::to_string(&config).unwrap();
+/// println!("{}", toml)
+/// ```
+#[cfg(feature = "display")]
+pub fn to_string<T>(value: &T) -> Result<String, Error>
+where
+ T: serde_core::ser::Serialize + ?Sized,
+{
+ // Serialize into an intermediate document buffer, then render it.
+ let mut output = Buffer::new();
+ let serializer = Serializer::new(&mut output);
+ value.serialize(serializer)?;
+ Ok(output.to_string())
+}
+
+/// Serialize the given data structure as a "pretty" String of TOML.
+///
+/// This is identical to `to_string` except the output string has a more
+/// "pretty" output. See `Serializer::pretty` for more details.
+///
+/// To serialize TOML values, instead of documents, see [`ValueSerializer`].
+///
+/// For greater customization, instead serialize to a
+/// [`toml_edit::DocumentMut`](https://docs.rs/toml_edit/latest/toml_edit/struct.DocumentMut.html).
+#[cfg(feature = "display")]
+pub fn to_string_pretty<T>(value: &T) -> Result<String, Error>
+where
+ T: serde_core::ser::Serialize + ?Sized,
+{
+ // Same pipeline as `to_string`, but with the pretty style enabled.
+ let mut output = Buffer::new();
+ let serializer = Serializer::pretty(&mut output);
+ value.serialize(serializer)?;
+ Ok(output.to_string())
+}
diff --git a/third_party/rust/toml/src/ser/style.rs b/third_party/rust/toml/src/ser/style.rs
@@ -0,0 +1,4 @@
+// Formatting options shared by the document and value serializers.
+#[derive(Copy, Clone, Default)]
+pub(crate) struct Style {
+ // When true, arrays with 2+ elements are written one element per line.
+ pub(crate) multiline_array: bool,
+}
diff --git a/third_party/rust/toml/src/ser/value/array.rs b/third_party/rust/toml/src/ser/value/array.rs
@@ -0,0 +1,147 @@
+use core::fmt::Write as _;
+
+use toml_writer::TomlWrite as _;
+
+use super::Error;
+use super::Style;
+use crate::alloc_prelude::*;
+
+// Serializes a sequence as an inline TOML array, e.g. `[1, 2, 3]`.
+#[doc(hidden)]
+pub struct SerializeValueArray<'d> {
+ // Output string the array is written into.
+ dst: &'d mut String,
+ // Whether at least one element has been written (controls separators).
+ seen_value: bool,
+ style: Style,
+ // Element count hint from serde, if known; drives the multiline decision.
+ len: Option<usize>,
+}
+
+impl<'d> SerializeValueArray<'d> {
+ // Open the array and return a serializer for its elements.
+ pub(crate) fn seq(
+ dst: &'d mut String,
+ style: Style,
+ len: Option<usize>,
+ ) -> Result<Self, Error> {
+ dst.open_array()?;
+ Ok(Self {
+ dst,
+ seen_value: false,
+ style,
+ len,
+ })
+ }
+
+ fn end(self) -> Result<&'d mut String, Error> {
+ // In multiline form the closing bracket goes on its own line.
+ if self.multiline_array() && self.seen_value {
+ self.dst.newline()?;
+ }
+ self.dst.close_array()?;
+ Ok(self.dst)
+ }
+
+ // Multiline only when requested by style AND the array has 2+ elements
+ // (unknown length is treated as "large enough").
+ fn multiline_array(&self) -> bool {
+ self.style.multiline_array && 2 <= self.len.unwrap_or(usize::MAX)
+ }
+}
+
+impl<'d> serde_core::ser::SerializeSeq for SerializeValueArray<'d> {
+ type Ok = &'d mut String;
+ type Error = Error;
+
+ fn serialize_element<T>(&mut self, value: &T) -> Result<(), Error>
+ where
+ T: serde_core::ser::Serialize + ?Sized,
+ {
+ if self.multiline_array() {
+ // One element per line, indented four spaces.
+ self.dst.newline()?;
+ write!(self.dst, " ")?;
+ } else {
+ // Inline form: comma-space between elements.
+ if self.seen_value {
+ self.dst.val_sep()?;
+ self.dst.space()?;
+ }
+ }
+ self.seen_value = true;
+ value.serialize(super::ValueSerializer::with_style(self.dst, self.style))?;
+ if self.multiline_array() {
+ // Trailing comma after every element in multiline form.
+ self.dst.val_sep()?;
+ }
+ Ok(())
+ }
+
+ fn end(self) -> Result<Self::Ok, Self::Error> {
+ self.end()
+ }
+}
+
+// Tuples serialize exactly like sequences.
+impl<'d> serde_core::ser::SerializeTuple for SerializeValueArray<'d> {
+ type Ok = &'d mut String;
+ type Error = Error;
+
+ fn serialize_element<T>(&mut self, value: &T) -> Result<(), Error>
+ where
+ T: serde_core::ser::Serialize + ?Sized,
+ {
+ serde_core::ser::SerializeSeq::serialize_element(self, value)
+ }
+
+ fn end(self) -> Result<Self::Ok, Self::Error> {
+ serde_core::ser::SerializeSeq::end(self)
+ }
+}
+
+// Tuple structs serialize exactly like sequences.
+impl<'d> serde_core::ser::SerializeTupleStruct for SerializeValueArray<'d> {
+ type Ok = &'d mut String;
+ type Error = Error;
+
+ fn serialize_field<T>(&mut self, value: &T) -> Result<(), Error>
+ where
+ T: serde_core::ser::Serialize + ?Sized,
+ {
+ serde_core::ser::SerializeSeq::serialize_element(self, value)
+ }
+
+ fn end(self) -> Result<Self::Ok, Self::Error> {
+ serde_core::ser::SerializeSeq::end(self)
+ }
+}
+
+// Serializes `E::Variant(a, b, ...)` as `{ variant = [a, b, ...] }` —
+// an inline table wrapping an inline array.
+pub struct SerializeTupleVariant<'d> {
+ inner: SerializeValueArray<'d>,
+}
+
+impl<'d> SerializeTupleVariant<'d> {
+ pub(crate) fn tuple(
+ dst: &'d mut String,
+ variant: &'static str,
+ len: usize,
+ style: Style,
+ ) -> Result<Self, Error> {
+ // Write `{ variant = ` and leave the array open for the elements.
+ dst.open_inline_table()?;
+ dst.space()?;
+ dst.key(variant)?;
+ dst.space()?;
+ dst.keyval_sep()?;
+ dst.space()?;
+ Ok(Self {
+ inner: SerializeValueArray::seq(dst, style, Some(len))?,
+ })
+ }
+}
+
+impl<'d> serde_core::ser::SerializeTupleVariant for SerializeTupleVariant<'d> {
+ type Ok = &'d mut String;
+ type Error = Error;
+
+ fn serialize_field<T>(&mut self, value: &T) -> Result<(), Error>
+ where
+ T: serde_core::ser::Serialize + ?Sized,
+ {
+ serde_core::ser::SerializeSeq::serialize_element(&mut self.inner, value)
+ }
+
+ fn end(self) -> Result<Self::Ok, Self::Error> {
+ // Close the array, then the wrapping inline table: ` }`.
+ let dst = self.inner.end()?;
+ dst.space()?;
+ dst.close_inline_table()?;
+ Ok(dst)
+ }
+}
diff --git a/third_party/rust/toml/src/ser/value/key.rs b/third_party/rust/toml/src/ser/value/key.rs
@@ -0,0 +1,200 @@
+use toml_writer::TomlWrite as _;
+
+use super::Error;
+use crate::alloc_prelude::*;
+
+// Serializes a map key directly into the output string. TOML keys must be
+// string-like, so integers, bools, chars, strings, and unit variants are
+// accepted (stringified); everything else is a `key_not_string` error.
+pub(crate) struct KeySerializer<'d> {
+ pub(crate) dst: &'d mut String,
+}
+
+impl serde_core::ser::Serializer for KeySerializer<'_> {
+ type Ok = ();
+ type Error = Error;
+ type SerializeSeq = serde_core::ser::Impossible<Self::Ok, Error>;
+ type SerializeTuple = serde_core::ser::Impossible<Self::Ok, Error>;
+ type SerializeTupleStruct = serde_core::ser::Impossible<Self::Ok, Error>;
+ type SerializeTupleVariant = serde_core::ser::Impossible<Self::Ok, Error>;
+ type SerializeMap = serde_core::ser::Impossible<Self::Ok, Error>;
+ type SerializeStruct = serde_core::ser::Impossible<Self::Ok, Error>;
+ type SerializeStructVariant = serde_core::ser::Impossible<Self::Ok, Error>;
+
+ // Scalars that have an unambiguous string form become keys.
+ fn serialize_bool(self, v: bool) -> Result<Self::Ok, Self::Error> {
+ self.dst.key(v.to_string())?;
+ Ok(())
+ }
+
+ fn serialize_i8(self, v: i8) -> Result<Self::Ok, Self::Error> {
+ self.dst.key(v.to_string())?;
+ Ok(())
+ }
+
+ fn serialize_i16(self, v: i16) -> Result<Self::Ok, Self::Error> {
+ self.dst.key(v.to_string())?;
+ Ok(())
+ }
+
+ fn serialize_i32(self, v: i32) -> Result<Self::Ok, Self::Error> {
+ self.dst.key(v.to_string())?;
+ Ok(())
+ }
+
+ fn serialize_i64(self, v: i64) -> Result<Self::Ok, Self::Error> {
+ self.dst.key(v.to_string())?;
+ Ok(())
+ }
+
+ fn serialize_i128(self, v: i128) -> Result<Self::Ok, Self::Error> {
+ self.dst.key(v.to_string())?;
+ Ok(())
+ }
+
+ fn serialize_u8(self, v: u8) -> Result<Self::Ok, Self::Error> {
+ self.dst.key(v.to_string())?;
+ Ok(())
+ }
+
+ fn serialize_u16(self, v: u16) -> Result<Self::Ok, Self::Error> {
+ self.dst.key(v.to_string())?;
+ Ok(())
+ }
+
+ fn serialize_u32(self, v: u32) -> Result<Self::Ok, Self::Error> {
+ self.dst.key(v.to_string())?;
+ Ok(())
+ }
+
+ fn serialize_u64(self, v: u64) -> Result<Self::Ok, Self::Error> {
+ self.dst.key(v.to_string())?;
+ Ok(())
+ }
+
+ fn serialize_u128(self, v: u128) -> Result<Self::Ok, Self::Error> {
+ self.dst.key(v.to_string())?;
+ Ok(())
+ }
+
+ // Floats have no canonical key form; reject them.
+ fn serialize_f32(self, _v: f32) -> Result<Self::Ok, Self::Error> {
+ Err(Error::key_not_string())
+ }
+
+ fn serialize_f64(self, _v: f64) -> Result<Self::Ok, Self::Error> {
+ Err(Error::key_not_string())
+ }
+
+ fn serialize_char(self, v: char) -> Result<Self::Ok, Self::Error> {
+ // Encode the char on the stack to avoid a String allocation.
+ let mut b = [0; 4];
+ let result = v.encode_utf8(&mut b);
+ self.dst.key(&*result)?;
+ Ok(())
+ }
+
+ fn serialize_str(self, value: &str) -> Result<Self::Ok, Self::Error> {
+ self.dst.key(value)?;
+ Ok(())
+ }
+
+ fn serialize_bytes(self, _value: &[u8]) -> Result<Self::Ok, Self::Error> {
+ Err(Error::key_not_string())
+ }
+
+ fn serialize_none(self) -> Result<Self::Ok, Self::Error> {
+ Err(Error::key_not_string())
+ }
+
+ fn serialize_some<T>(self, _value: &T) -> Result<Self::Ok, Self::Error>
+ where
+ T: serde_core::ser::Serialize + ?Sized,
+ {
+ Err(Error::key_not_string())
+ }
+
+ fn serialize_unit(self) -> Result<Self::Ok, Self::Error> {
+ Err(Error::key_not_string())
+ }
+
+ fn serialize_unit_struct(self, _name: &'static str) -> Result<Self::Ok, Self::Error> {
+ Err(Error::key_not_string())
+ }
+
+ // A unit variant's name serves as the key.
+ fn serialize_unit_variant(
+ self,
+ _name: &'static str,
+ _variant_index: u32,
+ variant: &'static str,
+ ) -> Result<Self::Ok, Self::Error> {
+ self.dst.key(variant)?;
+ Ok(())
+ }
+
+ // Newtype structs are transparent wrappers around their inner key.
+ fn serialize_newtype_struct<T>(
+ self,
+ _name: &'static str,
+ value: &T,
+ ) -> Result<Self::Ok, Self::Error>
+ where
+ T: serde_core::ser::Serialize + ?Sized,
+ {
+ value.serialize(self)
+ }
+
+ fn serialize_newtype_variant<T>(
+ self,
+ _name: &'static str,
+ _variant_index: u32,
+ _variant: &'static str,
+ _value: &T,
+ ) -> Result<Self::Ok, Self::Error>
+ where
+ T: serde_core::ser::Serialize + ?Sized,
+ {
+ Err(Error::key_not_string())
+ }
+
+ fn serialize_seq(self, _len: Option<usize>) -> Result<Self::SerializeSeq, Self::Error> {
+ Err(Error::key_not_string())
+ }
+
+ fn serialize_tuple(self, _len: usize) -> Result<Self::SerializeTuple, Self::Error> {
+ Err(Error::key_not_string())
+ }
+
+ fn serialize_tuple_struct(
+ self,
+ _name: &'static str,
+ _len: usize,
+ ) -> Result<Self::SerializeTupleStruct, Self::Error> {
+ Err(Error::key_not_string())
+ }
+
+ fn serialize_tuple_variant(
+ self,
+ _name: &'static str,
+ _variant_index: u32,
+ _variant: &'static str,
+ _len: usize,
+ ) -> Result<Self::SerializeTupleVariant, Self::Error> {
+ Err(Error::key_not_string())
+ }
+
+ fn serialize_map(self, _len: Option<usize>) -> Result<Self::SerializeMap, Self::Error> {
+ Err(Error::key_not_string())
+ }
+
+ fn serialize_struct(
+ self,
+ _name: &'static str,
+ _len: usize,
+ ) -> Result<Self::SerializeStruct, Self::Error> {
+ Err(Error::key_not_string())
+ }
+
+ fn serialize_struct_variant(
+ self,
+ _name: &'static str,
+ _variant_index: u32,
+ _variant: &'static str,
+ _len: usize,
+ ) -> Result<Self::SerializeStructVariant, Self::Error> {
+ Err(Error::key_not_string())
+ }
+}
diff --git a/third_party/rust/toml/src/ser/value/map.rs b/third_party/rust/toml/src/ser/value/map.rs
@@ -0,0 +1,532 @@
+use core::fmt::Write as _;
+
+use toml_writer::TomlWrite as _;
+
+use super::array::SerializeTupleVariant;
+use super::array::SerializeValueArray;
+use super::key::KeySerializer;
+use super::Error;
+use super::Style;
+use super::ValueSerializer;
+use crate::alloc_prelude::*;
+
+#[doc(hidden)]
+#[allow(clippy::large_enum_variant)]
+pub enum SerializeMap<'d> {
+ Datetime(SerializeDatetime<'d>),
+ Table(SerializeTable<'d>),
+}
+
+impl<'d> SerializeMap<'d> {
+ pub(crate) fn map(dst: &'d mut String, style: Style) -> Result<Self, Error> {
+ Ok(Self::Table(SerializeTable::map(dst, style)?))
+ }
+
+ pub(crate) fn struct_(
+ name: &'static str,
+ dst: &'d mut String,
+ style: Style,
+ ) -> Result<Self, Error> {
+ if toml_datetime::ser::is_datetime(name) {
+ Ok(Self::Datetime(SerializeDatetime::new(dst)))
+ } else {
+ Ok(Self::map(dst, style)?)
+ }
+ }
+}
+
+impl<'d> serde_core::ser::SerializeMap for SerializeMap<'d> {
+ type Ok = &'d mut String;
+ type Error = Error;
+
+ fn serialize_key<T>(&mut self, input: &T) -> Result<(), Self::Error>
+ where
+ T: serde_core::ser::Serialize + ?Sized,
+ {
+ match self {
+ Self::Datetime(s) => s.serialize_key(input),
+ Self::Table(s) => s.serialize_key(input),
+ }
+ }
+
+ fn serialize_value<T>(&mut self, value: &T) -> Result<(), Self::Error>
+ where
+ T: serde_core::ser::Serialize + ?Sized,
+ {
+ match self {
+ Self::Datetime(s) => s.serialize_value(value),
+ Self::Table(s) => s.serialize_value(value),
+ }
+ }
+
+ fn end(self) -> Result<Self::Ok, Self::Error> {
+ match self {
+ Self::Datetime(s) => s.end(),
+ Self::Table(s) => s.end(),
+ }
+ }
+}
+
+impl<'d> serde_core::ser::SerializeStruct for SerializeMap<'d> {
+ type Ok = &'d mut String;
+ type Error = Error;
+
+ fn serialize_field<T>(&mut self, key: &'static str, value: &T) -> Result<(), Self::Error>
+ where
+ T: serde_core::ser::Serialize + ?Sized,
+ {
+ match self {
+ Self::Datetime(s) => s.serialize_field(key, value),
+ Self::Table(s) => s.serialize_field(key, value),
+ }
+ }
+
+ fn end(self) -> Result<Self::Ok, Self::Error> {
+ match self {
+ Self::Datetime(s) => s.end(),
+ Self::Table(s) => s.end(),
+ }
+ }
+}
+
+#[doc(hidden)]
+pub struct SerializeDatetime<'d> {
+ dst: &'d mut String,
+ inner: toml_datetime::ser::DatetimeSerializer,
+}
+
+impl<'d> SerializeDatetime<'d> {
+ pub(crate) fn new(dst: &'d mut String) -> Self {
+ Self {
+ dst,
+ inner: toml_datetime::ser::DatetimeSerializer::new(),
+ }
+ }
+}
+
+impl<'d> serde_core::ser::SerializeMap for SerializeDatetime<'d> {
+ type Ok = &'d mut String;
+ type Error = Error;
+
+ fn serialize_key<T>(&mut self, _input: &T) -> Result<(), Self::Error>
+ where
+ T: serde_core::ser::Serialize + ?Sized,
+ {
+ unreachable!("datetimes should only be serialized as structs, not maps")
+ }
+
+ fn serialize_value<T>(&mut self, _value: &T) -> Result<(), Self::Error>
+ where
+ T: serde_core::ser::Serialize + ?Sized,
+ {
+ unreachable!("datetimes should only be serialized as structs, not maps")
+ }
+
+ fn end(self) -> Result<Self::Ok, Self::Error> {
+ unreachable!("datetimes should only be serialized as structs, not maps")
+ }
+}
+
+impl<'d> serde_core::ser::SerializeStruct for SerializeDatetime<'d> {
+ type Ok = &'d mut String;
+ type Error = Error;
+
+ fn serialize_field<T>(&mut self, key: &'static str, value: &T) -> Result<(), Self::Error>
+ where
+ T: serde_core::ser::Serialize + ?Sized,
+ {
+ self.inner.serialize_field(key, value).map_err(dt_err)?;
+
+ Ok(())
+ }
+
+ fn end(self) -> Result<Self::Ok, Self::Error> {
+ let value = self.inner.end().map_err(dt_err)?;
+ write!(self.dst, "{value}")?;
+ Ok(self.dst)
+ }
+}
+
+fn dt_err(err: toml_datetime::ser::SerializerError) -> Error {
+ match err {
+ toml_datetime::ser::SerializerError::InvalidFormat(err) => Error::new(err),
+ _ => Error::date_invalid(),
+ }
+}
+
+#[doc(hidden)]
+pub struct SerializeTable<'d> {
+ dst: &'d mut String,
+ seen_value: bool,
+ key: Option<String>,
+ style: Style,
+}
+
+impl<'d> SerializeTable<'d> {
+ pub(crate) fn map(dst: &'d mut String, style: Style) -> Result<Self, Error> {
+ dst.open_inline_table()?;
+ Ok(Self {
+ dst,
+ seen_value: false,
+ key: None,
+ style,
+ })
+ }
+
+ pub(crate) fn end(self) -> Result<&'d mut String, Error> {
+ if self.seen_value {
+ self.dst.space()?;
+ }
+ self.dst.close_inline_table()?;
+ Ok(self.dst)
+ }
+}
+
+impl<'d> serde_core::ser::SerializeMap for SerializeTable<'d> {
+ type Ok = &'d mut String;
+ type Error = Error;
+
+ fn serialize_key<T>(&mut self, input: &T) -> Result<(), Self::Error>
+ where
+ T: serde_core::ser::Serialize + ?Sized,
+ {
+ let mut encoded_key = String::new();
+ input.serialize(KeySerializer {
+ dst: &mut encoded_key,
+ })?;
+ self.key = Some(encoded_key);
+ Ok(())
+ }
+
+ fn serialize_value<T>(&mut self, value: &T) -> Result<(), Self::Error>
+ where
+ T: serde_core::ser::Serialize + ?Sized,
+ {
+ let encoded_key = self
+ .key
+ .take()
+ .expect("always called after `serialize_key`");
+ let mut encoded_value = String::new();
+ let mut is_none = false;
+ let value_serializer =
+ MapValueSerializer::new(&mut encoded_value, &mut is_none, self.style);
+ let res = value.serialize(value_serializer);
+ match res {
+ Ok(_) => {
+ use core::fmt::Write as _;
+
+ if self.seen_value {
+ self.dst.val_sep()?;
+ }
+ self.seen_value = true;
+ self.dst.space()?;
+ write!(self.dst, "{encoded_key}")?;
+ self.dst.space()?;
+ self.dst.keyval_sep()?;
+ self.dst.space()?;
+ write!(self.dst, "{encoded_value}")?;
+ }
+ Err(e) => {
+ if !(e == Error::unsupported_none() && is_none) {
+ return Err(e);
+ }
+ }
+ }
+ Ok(())
+ }
+
+ fn end(self) -> Result<Self::Ok, Self::Error> {
+ self.end()
+ }
+}
+
+impl<'d> serde_core::ser::SerializeStruct for SerializeTable<'d> {
+ type Ok = &'d mut String;
+ type Error = Error;
+
+ fn serialize_field<T>(&mut self, key: &'static str, value: &T) -> Result<(), Self::Error>
+ where
+ T: serde_core::ser::Serialize + ?Sized,
+ {
+ let mut encoded_value = String::new();
+ let mut is_none = false;
+ let value_serializer =
+ MapValueSerializer::new(&mut encoded_value, &mut is_none, self.style);
+ let res = value.serialize(value_serializer);
+ match res {
+ Ok(_) => {
+ use core::fmt::Write as _;
+
+ if self.seen_value {
+ self.dst.val_sep()?;
+ }
+ self.seen_value = true;
+ self.dst.space()?;
+ self.dst.key(key)?;
+ self.dst.space()?;
+ self.dst.keyval_sep()?;
+ self.dst.space()?;
+ write!(self.dst, "{encoded_value}")?;
+ }
+ Err(e) => {
+ if !(e == Error::unsupported_none() && is_none) {
+ return Err(e);
+ }
+ }
+ }
+
+ Ok(())
+ }
+
+ fn end(self) -> Result<Self::Ok, Self::Error> {
+ self.end()
+ }
+}
+
+pub(crate) struct MapValueSerializer<'d> {
+ dst: &'d mut String,
+ is_none: &'d mut bool,
+ style: Style,
+}
+
+impl<'d> MapValueSerializer<'d> {
+ pub(crate) fn new(dst: &'d mut String, is_none: &'d mut bool, style: Style) -> Self {
+ Self {
+ dst,
+ is_none,
+ style,
+ }
+ }
+}
+
+impl<'d> serde_core::ser::Serializer for MapValueSerializer<'d> {
+ type Ok = &'d mut String;
+ type Error = Error;
+ type SerializeSeq = SerializeValueArray<'d>;
+ type SerializeTuple = SerializeValueArray<'d>;
+ type SerializeTupleStruct = SerializeValueArray<'d>;
+ type SerializeTupleVariant = SerializeTupleVariant<'d>;
+ type SerializeMap = SerializeMap<'d>;
+ type SerializeStruct = SerializeMap<'d>;
+ type SerializeStructVariant = SerializeStructVariant<'d>;
+
+ fn serialize_bool(self, v: bool) -> Result<Self::Ok, Self::Error> {
+ ValueSerializer::with_style(self.dst, self.style).serialize_bool(v)
+ }
+
+ fn serialize_i8(self, v: i8) -> Result<Self::Ok, Self::Error> {
+ ValueSerializer::with_style(self.dst, self.style).serialize_i8(v)
+ }
+
+ fn serialize_i16(self, v: i16) -> Result<Self::Ok, Self::Error> {
+ ValueSerializer::with_style(self.dst, self.style).serialize_i16(v)
+ }
+
+ fn serialize_i32(self, v: i32) -> Result<Self::Ok, Self::Error> {
+ ValueSerializer::with_style(self.dst, self.style).serialize_i32(v)
+ }
+
+ fn serialize_i64(self, v: i64) -> Result<Self::Ok, Self::Error> {
+ ValueSerializer::with_style(self.dst, self.style).serialize_i64(v)
+ }
+
+ fn serialize_u8(self, v: u8) -> Result<Self::Ok, Self::Error> {
+ ValueSerializer::with_style(self.dst, self.style).serialize_u8(v)
+ }
+
+ fn serialize_u16(self, v: u16) -> Result<Self::Ok, Self::Error> {
+ ValueSerializer::with_style(self.dst, self.style).serialize_u16(v)
+ }
+
+ fn serialize_u32(self, v: u32) -> Result<Self::Ok, Self::Error> {
+ ValueSerializer::with_style(self.dst, self.style).serialize_u32(v)
+ }
+
+ fn serialize_u64(self, v: u64) -> Result<Self::Ok, Self::Error> {
+ ValueSerializer::with_style(self.dst, self.style).serialize_u64(v)
+ }
+
+ fn serialize_f32(self, v: f32) -> Result<Self::Ok, Self::Error> {
+ ValueSerializer::with_style(self.dst, self.style).serialize_f32(v)
+ }
+
+ fn serialize_f64(self, v: f64) -> Result<Self::Ok, Self::Error> {
+ ValueSerializer::with_style(self.dst, self.style).serialize_f64(v)
+ }
+
+ fn serialize_char(self, v: char) -> Result<Self::Ok, Self::Error> {
+ ValueSerializer::with_style(self.dst, self.style).serialize_char(v)
+ }
+
+ fn serialize_str(self, v: &str) -> Result<Self::Ok, Self::Error> {
+ ValueSerializer::with_style(self.dst, self.style).serialize_str(v)
+ }
+
+ fn serialize_bytes(self, value: &[u8]) -> Result<Self::Ok, Self::Error> {
+ ValueSerializer::with_style(self.dst, self.style).serialize_bytes(value)
+ }
+
+ fn serialize_none(self) -> Result<Self::Ok, Self::Error> {
+ *self.is_none = true;
+ Err(Error::unsupported_none())
+ }
+
+ fn serialize_some<T>(self, value: &T) -> Result<Self::Ok, Self::Error>
+ where
+ T: serde_core::ser::Serialize + ?Sized,
+ {
+ ValueSerializer::with_style(self.dst, self.style).serialize_some(value)
+ }
+
+ fn serialize_unit(self) -> Result<Self::Ok, Self::Error> {
+ ValueSerializer::with_style(self.dst, self.style).serialize_unit()
+ }
+
+ fn serialize_unit_struct(self, name: &'static str) -> Result<Self::Ok, Self::Error> {
+ ValueSerializer::with_style(self.dst, self.style).serialize_unit_struct(name)
+ }
+
+ fn serialize_unit_variant(
+ self,
+ name: &'static str,
+ variant_index: u32,
+ variant: &'static str,
+ ) -> Result<Self::Ok, Self::Error> {
+ ValueSerializer::with_style(self.dst, self.style).serialize_unit_variant(
+ name,
+ variant_index,
+ variant,
+ )
+ }
+
+ fn serialize_newtype_struct<T>(
+ self,
+ _name: &'static str,
+ value: &T,
+ ) -> Result<Self::Ok, Self::Error>
+ where
+ T: serde_core::ser::Serialize + ?Sized,
+ {
+ value.serialize(self)
+ }
+
+ fn serialize_newtype_variant<T>(
+ self,
+ name: &'static str,
+ variant_index: u32,
+ variant: &'static str,
+ value: &T,
+ ) -> Result<Self::Ok, Self::Error>
+ where
+ T: serde_core::ser::Serialize + ?Sized,
+ {
+ ValueSerializer::with_style(self.dst, self.style).serialize_newtype_variant(
+ name,
+ variant_index,
+ variant,
+ value,
+ )
+ }
+
+ fn serialize_seq(self, len: Option<usize>) -> Result<Self::SerializeSeq, Self::Error> {
+ ValueSerializer::with_style(self.dst, self.style).serialize_seq(len)
+ }
+
+ fn serialize_tuple(self, len: usize) -> Result<Self::SerializeTuple, Self::Error> {
+ ValueSerializer::with_style(self.dst, self.style).serialize_tuple(len)
+ }
+
+ fn serialize_tuple_struct(
+ self,
+ name: &'static str,
+ len: usize,
+ ) -> Result<Self::SerializeTupleStruct, Self::Error> {
+ ValueSerializer::with_style(self.dst, self.style).serialize_tuple_struct(name, len)
+ }
+
+ fn serialize_tuple_variant(
+ self,
+ name: &'static str,
+ variant_index: u32,
+ variant: &'static str,
+ len: usize,
+ ) -> Result<Self::SerializeTupleVariant, Self::Error> {
+ ValueSerializer::with_style(self.dst, self.style).serialize_tuple_variant(
+ name,
+ variant_index,
+ variant,
+ len,
+ )
+ }
+
+ fn serialize_map(self, len: Option<usize>) -> Result<Self::SerializeMap, Self::Error> {
+ ValueSerializer::with_style(self.dst, self.style).serialize_map(len)
+ }
+
+ fn serialize_struct(
+ self,
+ name: &'static str,
+ len: usize,
+ ) -> Result<Self::SerializeStruct, Self::Error> {
+ ValueSerializer::with_style(self.dst, self.style).serialize_struct(name, len)
+ }
+
+ fn serialize_struct_variant(
+ self,
+ name: &'static str,
+ variant_index: u32,
+ variant: &'static str,
+ len: usize,
+ ) -> Result<Self::SerializeStructVariant, Self::Error> {
+ ValueSerializer::with_style(self.dst, self.style).serialize_struct_variant(
+ name,
+ variant_index,
+ variant,
+ len,
+ )
+ }
+}
+
+pub struct SerializeStructVariant<'d> {
+ inner: SerializeTable<'d>,
+}
+
+impl<'d> SerializeStructVariant<'d> {
+ pub(crate) fn struct_(
+ dst: &'d mut String,
+ variant: &'static str,
+ _len: usize,
+ style: Style,
+ ) -> Result<Self, Error> {
+ dst.open_inline_table()?;
+ dst.space()?;
+ dst.key(variant)?;
+ dst.space()?;
+ dst.keyval_sep()?;
+ dst.space()?;
+ Ok(Self {
+ inner: SerializeTable::map(dst, style)?,
+ })
+ }
+}
+
+impl<'d> serde_core::ser::SerializeStructVariant for SerializeStructVariant<'d> {
+ type Ok = &'d mut String;
+ type Error = Error;
+
+ #[inline]
+ fn serialize_field<T>(&mut self, key: &'static str, value: &T) -> Result<(), Self::Error>
+ where
+ T: serde_core::ser::Serialize + ?Sized,
+ {
+ serde_core::ser::SerializeStruct::serialize_field(&mut self.inner, key, value)
+ }
+
+ #[inline]
+ fn end(self) -> Result<Self::Ok, Self::Error> {
+ let dst = serde_core::ser::SerializeStruct::end(self.inner)?;
+ dst.space()?;
+ dst.close_inline_table()?;
+ Ok(dst)
+ }
+}
diff --git a/third_party/rust/toml/src/ser/value/mod.rs b/third_party/rust/toml/src/ser/value/mod.rs
@@ -0,0 +1,294 @@
+mod array;
+mod key;
+mod map;
+
+use toml_writer::TomlWrite as _;
+
+use super::style::Style;
+use super::Error;
+use crate::alloc_prelude::*;
+#[allow(clippy::wildcard_imports)]
+pub(crate) use array::*;
+#[allow(clippy::wildcard_imports)]
+pub(crate) use key::*;
+#[allow(clippy::wildcard_imports)]
+pub(crate) use map::*;
+
+/// Serialization for TOML [values][crate::Value].
+///
+/// This structure implements serialization support for TOML to serialize an
+/// arbitrary type to TOML. Note that the TOML format does not support all
+/// datatypes in Rust, such as enums, tuples, and tuple structs. These types
+/// will generate an error when serialized.
+///
+/// Currently a serializer always writes its output to an in-memory `String`,
+/// which is passed in when creating the serializer itself.
+///
+/// # Examples
+///
+/// ```
+/// use serde::Serialize;
+///
+/// #[derive(Serialize)]
+/// struct Config {
+/// database: Database,
+/// }
+///
+/// #[derive(Serialize)]
+/// struct Database {
+/// ip: String,
+/// port: Vec<u16>,
+/// connection_max: u32,
+/// enabled: bool,
+/// }
+///
+/// let config = Config {
+/// database: Database {
+/// ip: "192.168.1.1".to_string(),
+/// port: vec![8001, 8002, 8003],
+/// connection_max: 5000,
+/// enabled: false,
+/// },
+/// };
+///
+/// let mut value = String::new();
+/// serde::Serialize::serialize(
+/// &config,
+/// toml::ser::ValueSerializer::new(&mut value)
+/// ).unwrap();
+/// println!("{}", value)
+/// ```
+pub struct ValueSerializer<'d> {
+ dst: &'d mut String,
+ style: Style,
+}
+
+impl<'d> ValueSerializer<'d> {
+ /// Creates a new serializer which will emit TOML into the buffer provided.
+ ///
+ /// The serializer can then be used to serialize a type after which the data
+ /// will be present in `dst`.
+ pub fn new(dst: &'d mut String) -> Self {
+ Self {
+ dst,
+ style: Default::default(),
+ }
+ }
+
+ pub(crate) fn with_style(dst: &'d mut String, style: Style) -> Self {
+ Self { dst, style }
+ }
+}
+
+impl<'d> serde_core::ser::Serializer for ValueSerializer<'d> {
+ type Ok = &'d mut String;
+ type Error = Error;
+ type SerializeSeq = SerializeValueArray<'d>;
+ type SerializeTuple = SerializeValueArray<'d>;
+ type SerializeTupleStruct = SerializeValueArray<'d>;
+ type SerializeTupleVariant = SerializeTupleVariant<'d>;
+ type SerializeMap = SerializeMap<'d>;
+ type SerializeStruct = SerializeMap<'d>;
+ type SerializeStructVariant = SerializeStructVariant<'d>;
+
+ fn serialize_bool(self, v: bool) -> Result<Self::Ok, Self::Error> {
+ self.dst.value(v)?;
+ Ok(self.dst)
+ }
+
+ fn serialize_i8(self, v: i8) -> Result<Self::Ok, Self::Error> {
+ self.dst.value(v)?;
+ Ok(self.dst)
+ }
+
+ fn serialize_i16(self, v: i16) -> Result<Self::Ok, Self::Error> {
+ self.dst.value(v)?;
+ Ok(self.dst)
+ }
+
+ fn serialize_i32(self, v: i32) -> Result<Self::Ok, Self::Error> {
+ self.dst.value(v)?;
+ Ok(self.dst)
+ }
+
+ fn serialize_i64(self, v: i64) -> Result<Self::Ok, Self::Error> {
+ self.dst.value(v)?;
+ Ok(self.dst)
+ }
+
+ fn serialize_u8(self, v: u8) -> Result<Self::Ok, Self::Error> {
+ self.dst.value(v)?;
+ Ok(self.dst)
+ }
+
+ fn serialize_u16(self, v: u16) -> Result<Self::Ok, Self::Error> {
+ self.dst.value(v)?;
+ Ok(self.dst)
+ }
+
+ fn serialize_u32(self, v: u32) -> Result<Self::Ok, Self::Error> {
+ self.dst.value(v)?;
+ Ok(self.dst)
+ }
+
+ fn serialize_u64(self, v: u64) -> Result<Self::Ok, Self::Error> {
+ let v: i64 = v
+ .try_into()
+ .map_err(|_err| Error::out_of_range(Some("u64")))?;
+ self.serialize_i64(v)
+ }
+
+ fn serialize_f32(self, mut v: f32) -> Result<Self::Ok, Self::Error> {
+ // Discard sign of NaN when serialized using Serde.
+ //
+ // In all likelihood the sign of NaNs is not meaningful in the user's
+ // program. Ending up with `-nan` in the TOML document would usually be
+ // surprising and undesirable, when the sign of the NaN was not
+ // intentionally controlled by the caller, or may even be
+ // nondeterministic if it comes from arithmetic operations or a cast.
+ if v.is_nan() {
+ v = v.copysign(1.0);
+ }
+ self.dst.value(v)?;
+ Ok(self.dst)
+ }
+
+ fn serialize_f64(self, mut v: f64) -> Result<Self::Ok, Self::Error> {
+ // Discard sign of NaN when serialized using Serde.
+ //
+ // In all likelihood the sign of NaNs is not meaningful in the user's
+ // program. Ending up with `-nan` in the TOML document would usually be
+ // surprising and undesirable, when the sign of the NaN was not
+ // intentionally controlled by the caller, or may even be
+ // nondeterministic if it comes from arithmetic operations or a cast.
+ if v.is_nan() {
+ v = v.copysign(1.0);
+ }
+ self.dst.value(v)?;
+ Ok(self.dst)
+ }
+
+ fn serialize_char(self, v: char) -> Result<Self::Ok, Self::Error> {
+ self.dst.value(v)?;
+ Ok(self.dst)
+ }
+
+ fn serialize_str(self, v: &str) -> Result<Self::Ok, Self::Error> {
+ self.dst.value(v)?;
+ Ok(self.dst)
+ }
+
+ fn serialize_bytes(self, value: &[u8]) -> Result<Self::Ok, Self::Error> {
+ use serde_core::ser::Serialize;
+ value.serialize(self)
+ }
+
+ fn serialize_none(self) -> Result<Self::Ok, Self::Error> {
+ Err(Error::unsupported_none())
+ }
+
+ fn serialize_some<T>(self, value: &T) -> Result<Self::Ok, Self::Error>
+ where
+ T: serde_core::ser::Serialize + ?Sized,
+ {
+ value.serialize(self)
+ }
+
+ fn serialize_unit(self) -> Result<Self::Ok, Self::Error> {
+ Err(Error::unsupported_type(Some("unit")))
+ }
+
+ fn serialize_unit_struct(self, name: &'static str) -> Result<Self::Ok, Self::Error> {
+ Err(Error::unsupported_type(Some(name)))
+ }
+
+ fn serialize_unit_variant(
+ self,
+ _name: &'static str,
+ _variant_index: u32,
+ variant: &'static str,
+ ) -> Result<Self::Ok, Self::Error> {
+ self.serialize_str(variant)
+ }
+
+ fn serialize_newtype_struct<T>(
+ self,
+ _name: &'static str,
+ value: &T,
+ ) -> Result<Self::Ok, Self::Error>
+ where
+ T: serde_core::ser::Serialize + ?Sized,
+ {
+ value.serialize(self)
+ }
+
+ fn serialize_newtype_variant<T>(
+ self,
+ _name: &'static str,
+ _variant_index: u32,
+ variant: &'static str,
+ value: &T,
+ ) -> Result<Self::Ok, Self::Error>
+ where
+ T: serde_core::ser::Serialize + ?Sized,
+ {
+ self.dst.open_inline_table()?;
+ self.dst.space()?;
+ self.dst.key(variant)?;
+ self.dst.space()?;
+ self.dst.keyval_sep()?;
+ self.dst.space()?;
+ value.serialize(ValueSerializer::with_style(self.dst, self.style))?;
+ self.dst.space()?;
+ self.dst.close_inline_table()?;
+ Ok(self.dst)
+ }
+
+ fn serialize_seq(self, len: Option<usize>) -> Result<Self::SerializeSeq, Self::Error> {
+ SerializeValueArray::seq(self.dst, self.style, len)
+ }
+
+ fn serialize_tuple(self, len: usize) -> Result<Self::SerializeTuple, Self::Error> {
+ self.serialize_seq(Some(len))
+ }
+
+ fn serialize_tuple_struct(
+ self,
+ _name: &'static str,
+ len: usize,
+ ) -> Result<Self::SerializeTupleStruct, Self::Error> {
+ self.serialize_seq(Some(len))
+ }
+
+ fn serialize_tuple_variant(
+ self,
+ _name: &'static str,
+ _variant_index: u32,
+ variant: &'static str,
+ len: usize,
+ ) -> Result<Self::SerializeTupleVariant, Self::Error> {
+ SerializeTupleVariant::tuple(self.dst, variant, len, self.style)
+ }
+
+ fn serialize_map(self, _len: Option<usize>) -> Result<Self::SerializeMap, Self::Error> {
+ SerializeMap::map(self.dst, self.style)
+ }
+
+ fn serialize_struct(
+ self,
+ name: &'static str,
+ _len: usize,
+ ) -> Result<Self::SerializeStruct, Self::Error> {
+ SerializeMap::struct_(name, self.dst, self.style)
+ }
+
+ fn serialize_struct_variant(
+ self,
+ _name: &'static str,
+ _variant_index: u32,
+ variant: &'static str,
+ len: usize,
+ ) -> Result<Self::SerializeStructVariant, Self::Error> {
+ SerializeStructVariant::struct_(self.dst, variant, len, self.style)
+ }
+}
diff --git a/third_party/rust/toml/src/spanned.rs b/third_party/rust/toml/src/spanned.rs
@@ -1,166 +0,0 @@
-use serde::{de, ser};
-use std::borrow::Borrow;
-use std::cmp::Ordering;
-use std::fmt;
-use std::hash::{Hash, Hasher};
-
-pub(crate) const NAME: &str = "$__toml_private_Spanned";
-pub(crate) const START: &str = "$__toml_private_start";
-pub(crate) const END: &str = "$__toml_private_end";
-pub(crate) const VALUE: &str = "$__toml_private_value";
-
-/// A spanned value, indicating the range at which it is defined in the source.
-///
-/// ```
-/// use serde_derive::Deserialize;
-/// use toml::Spanned;
-///
-/// #[derive(Deserialize)]
-/// struct Value {
-/// s: Spanned<String>,
-/// }
-///
-/// let t = "s = \"value\"\n";
-///
-/// let u: Value = toml::from_str(t).unwrap();
-///
-/// assert_eq!(u.s.start(), 4);
-/// assert_eq!(u.s.end(), 11);
-/// assert_eq!(u.s.get_ref(), "value");
-/// assert_eq!(u.s.into_inner(), String::from("value"));
-/// ```
-#[derive(Clone, Debug)]
-pub struct Spanned<T> {
- /// The start range.
- start: usize,
- /// The end range (exclusive).
- end: usize,
- /// The spanned value.
- value: T,
-}
-
-impl<T> Spanned<T> {
- /// Access the start of the span of the contained value.
- pub fn start(&self) -> usize {
- self.start
- }
-
- /// Access the end of the span of the contained value.
- pub fn end(&self) -> usize {
- self.end
- }
-
- /// Get the span of the contained value.
- pub fn span(&self) -> (usize, usize) {
- (self.start, self.end)
- }
-
- /// Consumes the spanned value and returns the contained value.
- pub fn into_inner(self) -> T {
- self.value
- }
-
- /// Returns a reference to the contained value.
- pub fn get_ref(&self) -> &T {
- &self.value
- }
-
- /// Returns a mutable reference to the contained value.
- pub fn get_mut(&mut self) -> &mut T {
- &mut self.value
- }
-}
-
-impl Borrow<str> for Spanned<String> {
- fn borrow(&self) -> &str {
- self.get_ref()
- }
-}
-
-impl<T: PartialEq> PartialEq for Spanned<T> {
- fn eq(&self, other: &Self) -> bool {
- self.value.eq(&other.value)
- }
-}
-
-impl<T: Eq> Eq for Spanned<T> {}
-
-impl<T: Hash> Hash for Spanned<T> {
- fn hash<H: Hasher>(&self, state: &mut H) {
- self.value.hash(state);
- }
-}
-
-impl<T: PartialOrd> PartialOrd for Spanned<T> {
- fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
- self.value.partial_cmp(&other.value)
- }
-}
-
-impl<T: Ord> Ord for Spanned<T> {
- fn cmp(&self, other: &Self) -> Ordering {
- self.value.cmp(&other.value)
- }
-}
-
-impl<'de, T> de::Deserialize<'de> for Spanned<T>
-where
- T: de::Deserialize<'de>,
-{
- fn deserialize<D>(deserializer: D) -> Result<Spanned<T>, D::Error>
- where
- D: de::Deserializer<'de>,
- {
- struct SpannedVisitor<T>(::std::marker::PhantomData<T>);
-
- impl<'de, T> de::Visitor<'de> for SpannedVisitor<T>
- where
- T: de::Deserialize<'de>,
- {
- type Value = Spanned<T>;
-
- fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {
- formatter.write_str("a TOML spanned")
- }
-
- fn visit_map<V>(self, mut visitor: V) -> Result<Spanned<T>, V::Error>
- where
- V: de::MapAccess<'de>,
- {
- if visitor.next_key()? != Some(START) {
- return Err(de::Error::custom("spanned start key not found"));
- }
-
- let start: usize = visitor.next_value()?;
-
- if visitor.next_key()? != Some(END) {
- return Err(de::Error::custom("spanned end key not found"));
- }
-
- let end: usize = visitor.next_value()?;
-
- if visitor.next_key()? != Some(VALUE) {
- return Err(de::Error::custom("spanned value key not found"));
- }
-
- let value: T = visitor.next_value()?;
-
- Ok(Spanned { start, end, value })
- }
- }
-
- let visitor = SpannedVisitor(::std::marker::PhantomData);
-
- static FIELDS: [&str; 3] = [START, END, VALUE];
- deserializer.deserialize_struct(NAME, &FIELDS, visitor)
- }
-}
-
-impl<T: ser::Serialize> ser::Serialize for Spanned<T> {
- fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
- where
- S: ser::Serializer,
- {
- self.value.serialize(serializer)
- }
-}
diff --git a/third_party/rust/toml/src/table.rs b/third_party/rust/toml/src/table.rs
@@ -0,0 +1,423 @@
+use serde_core::de;
+use serde_core::ser;
+
+use crate::alloc_prelude::*;
+use crate::map::Map;
+use crate::Value;
+
+/// Type representing a TOML table, payload of the `Value::Table` variant.
+///
+/// By default its entries are stored in
+/// [lexicographic order](https://doc.rust-lang.org/std/primitive.str.html#impl-Ord-for-str)
+/// of the keys. Enable the `preserve_order` feature to store entries in the order they appear in
+/// the source file.
+pub type Table = Map<String, Value>;
+
+impl Table {
+ /// Convert a `T` into `toml::Table`.
+ ///
+ /// This conversion can fail if `T`'s implementation of `Serialize` decides to
+ /// fail, or if `T` contains a map with non-string keys.
+ pub fn try_from<T>(value: T) -> Result<Self, crate::ser::Error>
+ where
+ T: ser::Serialize,
+ {
+ value.serialize(TableSerializer)
+ }
+
+ /// Interpret a `toml::Table` as an instance of type `T`.
+ ///
+ /// This conversion can fail if the structure of the `Table` does not match the structure
+ /// expected by `T`, for example if `T` is a bool which can't be mapped to a `Table`. It can
+ /// also fail if the structure is correct but `T`'s implementation of `Deserialize` decides
+ /// that something is wrong with the data, for example required struct fields are missing from
+ /// the TOML map or some number is too big to fit in the expected primitive type.
+ pub fn try_into<'de, T>(self) -> Result<T, crate::de::Error>
+ where
+ T: de::Deserialize<'de>,
+ {
+ de::Deserialize::deserialize(self)
+ }
+}
+
+#[cfg(feature = "display")]
+impl core::fmt::Display for Table {
+ fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
+ crate::ser::to_string(self)
+ .expect("Unable to represent value as string")
+ .fmt(f)
+ }
+}
+
+#[cfg(feature = "parse")]
+impl core::str::FromStr for Table {
+ type Err = crate::de::Error;
+ fn from_str(s: &str) -> Result<Self, Self::Err> {
+ crate::from_str(s)
+ }
+}
+impl ser::Serialize for Table {
+ #[inline]
+ fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
+ where
+ S: ser::Serializer,
+ {
+ use serde_core::ser::SerializeMap;
+ let mut map = serializer.serialize_map(Some(self.len()))?;
+ for (k, v) in self {
+ map.serialize_key(k)?;
+ map.serialize_value(v)?;
+ }
+ map.end()
+ }
+}
+
+impl<'de> de::Deserialize<'de> for Table {
+ #[inline]
+ fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
+ where
+ D: de::Deserializer<'de>,
+ {
+ struct Visitor;
+
+ impl<'de> de::Visitor<'de> for Visitor {
+ type Value = Map<String, Value>;
+
+ fn expecting(&self, formatter: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
+ formatter.write_str("a map")
+ }
+
+ #[inline]
+ fn visit_unit<E>(self) -> Result<Self::Value, E>
+ where
+ E: de::Error,
+ {
+ Ok(Map::new())
+ }
+
+ #[inline]
+ fn visit_map<V>(self, mut visitor: V) -> Result<Self::Value, V::Error>
+ where
+ V: de::MapAccess<'de>,
+ {
+ let mut values = Map::new();
+
+ while let Some((key, value)) = visitor.next_entry()? {
+ values.insert(key, value);
+ }
+
+ Ok(values)
+ }
+ }
+
+ deserializer.deserialize_map(Visitor)
+ }
+}
+
+impl<'de> de::Deserializer<'de> for Table {
+ type Error = crate::de::Error;
+
+ fn deserialize_any<V>(self, visitor: V) -> Result<V::Value, crate::de::Error>
+ where
+ V: de::Visitor<'de>,
+ {
+ Value::Table(self).deserialize_any(visitor)
+ }
+
+ #[inline]
+ fn deserialize_enum<V>(
+ self,
+ name: &'static str,
+ variants: &'static [&'static str],
+ visitor: V,
+ ) -> Result<V::Value, crate::de::Error>
+ where
+ V: de::Visitor<'de>,
+ {
+ Value::Table(self).deserialize_enum(name, variants, visitor)
+ }
+
+ // `None` is interpreted as a missing field so be sure to implement `Some`
+ // as a present field.
+ fn deserialize_option<V>(self, visitor: V) -> Result<V::Value, crate::de::Error>
+ where
+ V: de::Visitor<'de>,
+ {
+ Value::Table(self).deserialize_option(visitor)
+ }
+
+ fn deserialize_newtype_struct<V>(
+ self,
+ name: &'static str,
+ visitor: V,
+ ) -> Result<V::Value, crate::de::Error>
+ where
+ V: de::Visitor<'de>,
+ {
+ Value::Table(self).deserialize_newtype_struct(name, visitor)
+ }
+
+ serde_core::forward_to_deserialize_any! {
+ bool u8 u16 u32 u64 i8 i16 i32 i64 f32 f64 char str string unit seq
+ bytes byte_buf map unit_struct tuple_struct struct
+ tuple ignored_any identifier
+ }
+}
+
+impl de::IntoDeserializer<'_, crate::de::Error> for Table {
+ type Deserializer = Self;
+
+ fn into_deserializer(self) -> Self {
+ self
+ }
+}
+
+pub(crate) struct TableSerializer;
+
+impl ser::Serializer for TableSerializer {
+ type Ok = Table;
+ type Error = crate::ser::Error;
+
+ type SerializeSeq = ser::Impossible<Self::Ok, Self::Error>;
+ type SerializeTuple = ser::Impossible<Self::Ok, Self::Error>;
+ type SerializeTupleStruct = ser::Impossible<Self::Ok, Self::Error>;
+ type SerializeTupleVariant = ser::Impossible<Self::Ok, Self::Error>;
+ type SerializeMap = SerializeMap;
+ type SerializeStruct = SerializeMap;
+ type SerializeStructVariant = ser::Impossible<Self::Ok, Self::Error>;
+
+ fn serialize_bool(self, _value: bool) -> Result<Table, crate::ser::Error> {
+ Err(crate::ser::Error::unsupported_type(None))
+ }
+
+ fn serialize_i8(self, _value: i8) -> Result<Table, crate::ser::Error> {
+ Err(crate::ser::Error::unsupported_type(None))
+ }
+
+ fn serialize_i16(self, _value: i16) -> Result<Table, crate::ser::Error> {
+ Err(crate::ser::Error::unsupported_type(None))
+ }
+
+ fn serialize_i32(self, _value: i32) -> Result<Table, crate::ser::Error> {
+ Err(crate::ser::Error::unsupported_type(None))
+ }
+
+ fn serialize_i64(self, _value: i64) -> Result<Table, crate::ser::Error> {
+ Err(crate::ser::Error::unsupported_type(None))
+ }
+
+ fn serialize_u8(self, _value: u8) -> Result<Table, crate::ser::Error> {
+ Err(crate::ser::Error::unsupported_type(None))
+ }
+
+ fn serialize_u16(self, _value: u16) -> Result<Table, crate::ser::Error> {
+ Err(crate::ser::Error::unsupported_type(None))
+ }
+
+ fn serialize_u32(self, _value: u32) -> Result<Table, crate::ser::Error> {
+ Err(crate::ser::Error::unsupported_type(None))
+ }
+
+ fn serialize_u64(self, _value: u64) -> Result<Table, crate::ser::Error> {
+ Err(crate::ser::Error::unsupported_type(None))
+ }
+
+ fn serialize_f32(self, _value: f32) -> Result<Table, crate::ser::Error> {
+ Err(crate::ser::Error::unsupported_type(None))
+ }
+
+ fn serialize_f64(self, _value: f64) -> Result<Table, crate::ser::Error> {
+ Err(crate::ser::Error::unsupported_type(None))
+ }
+
+ fn serialize_char(self, _value: char) -> Result<Table, crate::ser::Error> {
+ Err(crate::ser::Error::unsupported_type(None))
+ }
+
+ fn serialize_str(self, _value: &str) -> Result<Table, crate::ser::Error> {
+ Err(crate::ser::Error::unsupported_type(None))
+ }
+
+ fn serialize_bytes(self, _value: &[u8]) -> Result<Table, crate::ser::Error> {
+ Err(crate::ser::Error::unsupported_type(None))
+ }
+
+ fn serialize_unit(self) -> Result<Table, crate::ser::Error> {
+ Err(crate::ser::Error::unsupported_type(None))
+ }
+
+ fn serialize_unit_struct(self, _name: &'static str) -> Result<Table, crate::ser::Error> {
+ Err(crate::ser::Error::unsupported_type(None))
+ }
+
+ fn serialize_unit_variant(
+ self,
+ name: &'static str,
+ _variant_index: u32,
+ _variant: &'static str,
+ ) -> Result<Table, crate::ser::Error> {
+ Err(crate::ser::Error::unsupported_type(Some(name)))
+ }
+
+ fn serialize_newtype_struct<T>(
+ self,
+ _name: &'static str,
+ value: &T,
+ ) -> Result<Table, crate::ser::Error>
+ where
+ T: ser::Serialize + ?Sized,
+ {
+ value.serialize(self)
+ }
+
+ fn serialize_newtype_variant<T>(
+ self,
+ _name: &'static str,
+ _variant_index: u32,
+ variant: &'static str,
+ value: &T,
+ ) -> Result<Table, crate::ser::Error>
+ where
+ T: ser::Serialize + ?Sized,
+ {
+ let value = value.serialize(crate::value::ValueSerializer)?;
+ let mut table = Table::new();
+ table.insert(variant.to_owned(), value);
+ Ok(table)
+ }
+
+ fn serialize_none(self) -> Result<Table, crate::ser::Error> {
+ Err(crate::ser::Error::unsupported_none())
+ }
+
+ fn serialize_some<T>(self, value: &T) -> Result<Table, crate::ser::Error>
+ where
+ T: ser::Serialize + ?Sized,
+ {
+ value.serialize(self)
+ }
+
+ fn serialize_seq(self, _len: Option<usize>) -> Result<Self::SerializeSeq, crate::ser::Error> {
+ Err(crate::ser::Error::unsupported_type(None))
+ }
+
+ fn serialize_tuple(self, _len: usize) -> Result<Self::SerializeTuple, crate::ser::Error> {
+ Err(crate::ser::Error::unsupported_type(None))
+ }
+
+ fn serialize_tuple_struct(
+ self,
+ name: &'static str,
+ _len: usize,
+ ) -> Result<Self::SerializeTupleStruct, crate::ser::Error> {
+ Err(crate::ser::Error::unsupported_type(Some(name)))
+ }
+
+ fn serialize_tuple_variant(
+ self,
+ name: &'static str,
+ _variant_index: u32,
+ _variant: &'static str,
+ _len: usize,
+ ) -> Result<Self::SerializeTupleVariant, crate::ser::Error> {
+ Err(crate::ser::Error::unsupported_type(Some(name)))
+ }
+
+ fn serialize_map(self, _len: Option<usize>) -> Result<Self::SerializeMap, crate::ser::Error> {
+ Ok(SerializeMap::new())
+ }
+
+ fn serialize_struct(
+ self,
+ _name: &'static str,
+ len: usize,
+ ) -> Result<Self::SerializeStruct, crate::ser::Error> {
+ self.serialize_map(Some(len))
+ }
+
+ fn serialize_struct_variant(
+ self,
+ name: &'static str,
+ _variant_index: u32,
+ _variant: &'static str,
+ _len: usize,
+ ) -> Result<Self::SerializeStructVariant, crate::ser::Error> {
+ Err(crate::ser::Error::unsupported_type(Some(name)))
+ }
+}
+
+pub(crate) struct SerializeMap {
+ map: Table,
+ next_key: Option<String>,
+}
+
+impl SerializeMap {
+ pub(crate) fn new() -> Self {
+ Self {
+ map: Table::new(),
+ next_key: None,
+ }
+ }
+
+ pub(crate) fn with_capacity(capacity: usize) -> Self {
+ Self {
+ map: Table::with_capacity(capacity),
+ next_key: None,
+ }
+ }
+}
+
+impl ser::SerializeMap for SerializeMap {
+ type Ok = Table;
+ type Error = crate::ser::Error;
+
+ fn serialize_key<T>(&mut self, key: &T) -> Result<(), crate::ser::Error>
+ where
+ T: ser::Serialize + ?Sized,
+ {
+ match Value::try_from(key)? {
+ Value::String(s) => self.next_key = Some(s),
+ _ => return Err(crate::ser::Error::key_not_string()),
+ };
+ Ok(())
+ }
+
+ fn serialize_value<T>(&mut self, value: &T) -> Result<(), crate::ser::Error>
+ where
+ T: ser::Serialize + ?Sized,
+ {
+ let key = self.next_key.take();
+ let key = key.expect("serialize_value called before serialize_key");
+ match Value::try_from(value) {
+ Ok(value) => {
+ self.map.insert(key, value);
+ }
+ Err(crate::ser::Error {
+ inner: crate::ser::ErrorInner::UnsupportedNone,
+ }) => {}
+ Err(e) => return Err(e),
+ }
+ Ok(())
+ }
+
+ fn end(self) -> Result<Table, crate::ser::Error> {
+ Ok(self.map)
+ }
+}
+
+impl ser::SerializeStruct for SerializeMap {
+ type Ok = Table;
+ type Error = crate::ser::Error;
+
+ fn serialize_field<T>(&mut self, key: &'static str, value: &T) -> Result<(), crate::ser::Error>
+ where
+ T: ser::Serialize + ?Sized,
+ {
+ ser::SerializeMap::serialize_key(self, key)?;
+ ser::SerializeMap::serialize_value(self, value)
+ }
+
+ fn end(self) -> Result<Table, crate::ser::Error> {
+ ser::SerializeMap::end(self)
+ }
+}
diff --git a/third_party/rust/toml/src/tokens.rs b/third_party/rust/toml/src/tokens.rs
@@ -1,742 +0,0 @@
-use std::borrow::Cow;
-use std::char;
-use std::str;
-use std::string;
-use std::string::String as StdString;
-
-use self::Token::*;
-
-/// A span, designating a range of bytes where a token is located.
-#[derive(Eq, PartialEq, Debug, Clone, Copy)]
-pub struct Span {
- /// The start of the range.
- pub start: usize,
- /// The end of the range (exclusive).
- pub end: usize,
-}
-
-impl From<Span> for (usize, usize) {
- fn from(Span { start, end }: Span) -> (usize, usize) {
- (start, end)
- }
-}
-
-#[derive(Eq, PartialEq, Debug)]
-pub enum Token<'a> {
- Whitespace(&'a str),
- Newline,
- Comment(&'a str),
-
- Equals,
- Period,
- Comma,
- Colon,
- Plus,
- LeftBrace,
- RightBrace,
- LeftBracket,
- RightBracket,
-
- Keylike(&'a str),
- String {
- src: &'a str,
- val: Cow<'a, str>,
- multiline: bool,
- },
-}
-
-#[derive(Eq, PartialEq, Debug)]
-pub enum Error {
- InvalidCharInString(usize, char),
- InvalidEscape(usize, char),
- InvalidHexEscape(usize, char),
- InvalidEscapeValue(usize, u32),
- NewlineInString(usize),
- Unexpected(usize, char),
- UnterminatedString(usize),
- NewlineInTableKey(usize),
- MultilineStringKey(usize),
- Wanted {
- at: usize,
- expected: &'static str,
- found: &'static str,
- },
-}
-
-#[derive(Clone)]
-pub struct Tokenizer<'a> {
- input: &'a str,
- chars: CrlfFold<'a>,
-}
-
-#[derive(Clone)]
-struct CrlfFold<'a> {
- chars: str::CharIndices<'a>,
-}
-
-#[derive(Debug)]
-enum MaybeString {
- NotEscaped(usize),
- Owned(string::String),
-}
-
-impl<'a> Tokenizer<'a> {
- pub fn new(input: &'a str) -> Tokenizer<'a> {
- let mut t = Tokenizer {
- input,
- chars: CrlfFold {
- chars: input.char_indices(),
- },
- };
- // Eat utf-8 BOM
- t.eatc('\u{feff}');
- t
- }
-
- pub fn next(&mut self) -> Result<Option<(Span, Token<'a>)>, Error> {
- let (start, token) = match self.one() {
- Some((start, '\n')) => (start, Newline),
- Some((start, ' ')) => (start, self.whitespace_token(start)),
- Some((start, '\t')) => (start, self.whitespace_token(start)),
- Some((start, '#')) => (start, self.comment_token(start)),
- Some((start, '=')) => (start, Equals),
- Some((start, '.')) => (start, Period),
- Some((start, ',')) => (start, Comma),
- Some((start, ':')) => (start, Colon),
- Some((start, '+')) => (start, Plus),
- Some((start, '{')) => (start, LeftBrace),
- Some((start, '}')) => (start, RightBrace),
- Some((start, '[')) => (start, LeftBracket),
- Some((start, ']')) => (start, RightBracket),
- Some((start, '\'')) => {
- return self
- .literal_string(start)
- .map(|t| Some((self.step_span(start), t)))
- }
- Some((start, '"')) => {
- return self
- .basic_string(start)
- .map(|t| Some((self.step_span(start), t)))
- }
- Some((start, ch)) if is_keylike(ch) => (start, self.keylike(start)),
-
- Some((start, ch)) => return Err(Error::Unexpected(start, ch)),
- None => return Ok(None),
- };
-
- let span = self.step_span(start);
- Ok(Some((span, token)))
- }
-
- pub fn peek(&mut self) -> Result<Option<(Span, Token<'a>)>, Error> {
- self.clone().next()
- }
-
- pub fn eat(&mut self, expected: Token<'a>) -> Result<bool, Error> {
- self.eat_spanned(expected).map(|s| s.is_some())
- }
-
- /// Eat a value, returning it's span if it was consumed.
- pub fn eat_spanned(&mut self, expected: Token<'a>) -> Result<Option<Span>, Error> {
- let span = match self.peek()? {
- Some((span, ref found)) if expected == *found => span,
- Some(_) => return Ok(None),
- None => return Ok(None),
- };
-
- drop(self.next());
- Ok(Some(span))
- }
-
- pub fn expect(&mut self, expected: Token<'a>) -> Result<(), Error> {
- // ignore span
- let _ = self.expect_spanned(expected)?;
- Ok(())
- }
-
- /// Expect the given token returning its span.
- pub fn expect_spanned(&mut self, expected: Token<'a>) -> Result<Span, Error> {
- let current = self.current();
- match self.next()? {
- Some((span, found)) => {
- if expected == found {
- Ok(span)
- } else {
- Err(Error::Wanted {
- at: current,
- expected: expected.describe(),
- found: found.describe(),
- })
- }
- }
- None => Err(Error::Wanted {
- at: self.input.len(),
- expected: expected.describe(),
- found: "eof",
- }),
- }
- }
-
- pub fn table_key(&mut self) -> Result<(Span, Cow<'a, str>), Error> {
- let current = self.current();
- match self.next()? {
- Some((span, Token::Keylike(k))) => Ok((span, k.into())),
- Some((
- span,
- Token::String {
- src,
- val,
- multiline,
- },
- )) => {
- let offset = self.substr_offset(src);
- if multiline {
- return Err(Error::MultilineStringKey(offset));
- }
- match src.find('\n') {
- None => Ok((span, val)),
- Some(i) => Err(Error::NewlineInTableKey(offset + i)),
- }
- }
- Some((_, other)) => Err(Error::Wanted {
- at: current,
- expected: "a table key",
- found: other.describe(),
- }),
- None => Err(Error::Wanted {
- at: self.input.len(),
- expected: "a table key",
- found: "eof",
- }),
- }
- }
-
- pub fn eat_whitespace(&mut self) -> Result<(), Error> {
- while self.eatc(' ') || self.eatc('\t') {
- // ...
- }
- Ok(())
- }
-
- pub fn eat_comment(&mut self) -> Result<bool, Error> {
- if !self.eatc('#') {
- return Ok(false);
- }
- drop(self.comment_token(0));
- self.eat_newline_or_eof().map(|()| true)
- }
-
- pub fn eat_newline_or_eof(&mut self) -> Result<(), Error> {
- let current = self.current();
- match self.next()? {
- None | Some((_, Token::Newline)) => Ok(()),
- Some((_, other)) => Err(Error::Wanted {
- at: current,
- expected: "newline",
- found: other.describe(),
- }),
- }
- }
-
- pub fn skip_to_newline(&mut self) {
- loop {
- match self.one() {
- Some((_, '\n')) | None => break,
- _ => {}
- }
- }
- }
-
- fn eatc(&mut self, ch: char) -> bool {
- match self.chars.clone().next() {
- Some((_, ch2)) if ch == ch2 => {
- self.one();
- true
- }
- _ => false,
- }
- }
-
- pub fn current(&mut self) -> usize {
- self.chars
- .clone()
- .next()
- .map(|i| i.0)
- .unwrap_or_else(|| self.input.len())
- }
-
- pub fn input(&self) -> &'a str {
- self.input
- }
-
- fn whitespace_token(&mut self, start: usize) -> Token<'a> {
- while self.eatc(' ') || self.eatc('\t') {
- // ...
- }
- Whitespace(&self.input[start..self.current()])
- }
-
- fn comment_token(&mut self, start: usize) -> Token<'a> {
- while let Some((_, ch)) = self.chars.clone().next() {
- if ch != '\t' && !('\u{20}'..='\u{10ffff}').contains(&ch) {
- break;
- }
- self.one();
- }
- Comment(&self.input[start..self.current()])
- }
-
- #[allow(clippy::type_complexity)]
- fn read_string(
- &mut self,
- delim: char,
- start: usize,
- new_ch: &mut dyn FnMut(
- &mut Tokenizer<'_>,
- &mut MaybeString,
- bool,
- usize,
- char,
- ) -> Result<(), Error>,
- ) -> Result<Token<'a>, Error> {
- let mut multiline = false;
- if self.eatc(delim) {
- if self.eatc(delim) {
- multiline = true;
- } else {
- return Ok(String {
- src: &self.input[start..start + 2],
- val: Cow::Borrowed(""),
- multiline: false,
- });
- }
- }
- let mut val = MaybeString::NotEscaped(self.current());
- let mut n = 0;
- 'outer: loop {
- n += 1;
- match self.one() {
- Some((i, '\n')) => {
- if multiline {
- if self.input.as_bytes()[i] == b'\r' {
- val.to_owned(&self.input[..i]);
- }
- if n == 1 {
- val = MaybeString::NotEscaped(self.current());
- } else {
- val.push('\n');
- }
- continue;
- } else {
- return Err(Error::NewlineInString(i));
- }
- }
- Some((mut i, ch)) if ch == delim => {
- if multiline {
- if !self.eatc(delim) {
- val.push(delim);
- continue 'outer;
- }
- if !self.eatc(delim) {
- val.push(delim);
- val.push(delim);
- continue 'outer;
- }
- if self.eatc(delim) {
- val.push(delim);
- i += 1;
- }
- if self.eatc(delim) {
- val.push(delim);
- i += 1;
- }
- }
- return Ok(String {
- src: &self.input[start..self.current()],
- val: val.into_cow(&self.input[..i]),
- multiline,
- });
- }
- Some((i, c)) => new_ch(self, &mut val, multiline, i, c)?,
- None => return Err(Error::UnterminatedString(start)),
- }
- }
- }
-
- fn literal_string(&mut self, start: usize) -> Result<Token<'a>, Error> {
- self.read_string('\'', start, &mut |_me, val, _multi, i, ch| {
- if ch == '\u{09}' || (('\u{20}'..='\u{10ffff}').contains(&ch) && ch != '\u{7f}') {
- val.push(ch);
- Ok(())
- } else {
- Err(Error::InvalidCharInString(i, ch))
- }
- })
- }
-
- fn basic_string(&mut self, start: usize) -> Result<Token<'a>, Error> {
- self.read_string('"', start, &mut |me, val, multi, i, ch| match ch {
- '\\' => {
- val.to_owned(&me.input[..i]);
- match me.chars.next() {
- Some((_, '"')) => val.push('"'),
- Some((_, '\\')) => val.push('\\'),
- Some((_, 'b')) => val.push('\u{8}'),
- Some((_, 'f')) => val.push('\u{c}'),
- Some((_, 'n')) => val.push('\n'),
- Some((_, 'r')) => val.push('\r'),
- Some((_, 't')) => val.push('\t'),
- Some((i, c @ 'u')) | Some((i, c @ 'U')) => {
- let len = if c == 'u' { 4 } else { 8 };
- val.push(me.hex(start, i, len)?);
- }
- Some((i, c @ ' ')) | Some((i, c @ '\t')) | Some((i, c @ '\n')) if multi => {
- if c != '\n' {
- while let Some((_, ch)) = me.chars.clone().next() {
- match ch {
- ' ' | '\t' => {
- me.chars.next();
- continue;
- }
- '\n' => {
- me.chars.next();
- break;
- }
- _ => return Err(Error::InvalidEscape(i, c)),
- }
- }
- }
- while let Some((_, ch)) = me.chars.clone().next() {
- match ch {
- ' ' | '\t' | '\n' => {
- me.chars.next();
- }
- _ => break,
- }
- }
- }
- Some((i, c)) => return Err(Error::InvalidEscape(i, c)),
- None => return Err(Error::UnterminatedString(start)),
- }
- Ok(())
- }
- ch if ch == '\u{09}' || (('\u{20}'..='\u{10ffff}').contains(&ch) && ch != '\u{7f}') => {
- val.push(ch);
- Ok(())
- }
- _ => Err(Error::InvalidCharInString(i, ch)),
- })
- }
-
- fn hex(&mut self, start: usize, i: usize, len: usize) -> Result<char, Error> {
- let mut buf = StdString::with_capacity(len);
- for _ in 0..len {
- match self.one() {
- Some((_, ch)) if ch as u32 <= 0x7F && ch.is_ascii_hexdigit() => buf.push(ch),
- Some((i, ch)) => return Err(Error::InvalidHexEscape(i, ch)),
- None => return Err(Error::UnterminatedString(start)),
- }
- }
- let val = u32::from_str_radix(&buf, 16).unwrap();
- match char::from_u32(val) {
- Some(ch) => Ok(ch),
- None => Err(Error::InvalidEscapeValue(i, val)),
- }
- }
-
- fn keylike(&mut self, start: usize) -> Token<'a> {
- while let Some((_, ch)) = self.peek_one() {
- if !is_keylike(ch) {
- break;
- }
- self.one();
- }
- Keylike(&self.input[start..self.current()])
- }
-
- pub fn substr_offset(&self, s: &'a str) -> usize {
- assert!(s.len() <= self.input.len());
- let a = self.input.as_ptr() as usize;
- let b = s.as_ptr() as usize;
- assert!(a <= b);
- b - a
- }
-
- /// Calculate the span of a single character.
- fn step_span(&mut self, start: usize) -> Span {
- let end = self
- .peek_one()
- .map(|t| t.0)
- .unwrap_or_else(|| self.input.len());
- Span { start, end }
- }
-
- /// Peek one char without consuming it.
- fn peek_one(&mut self) -> Option<(usize, char)> {
- self.chars.clone().next()
- }
-
- /// Take one char.
- pub fn one(&mut self) -> Option<(usize, char)> {
- self.chars.next()
- }
-}
-
-impl<'a> Iterator for CrlfFold<'a> {
- type Item = (usize, char);
-
- fn next(&mut self) -> Option<(usize, char)> {
- self.chars.next().map(|(i, c)| {
- if c == '\r' {
- let mut attempt = self.chars.clone();
- if let Some((_, '\n')) = attempt.next() {
- self.chars = attempt;
- return (i, '\n');
- }
- }
- (i, c)
- })
- }
-}
-
-impl MaybeString {
- fn push(&mut self, ch: char) {
- match *self {
- MaybeString::NotEscaped(..) => {}
- MaybeString::Owned(ref mut s) => s.push(ch),
- }
- }
-
- #[allow(clippy::wrong_self_convention)]
- fn to_owned(&mut self, input: &str) {
- match *self {
- MaybeString::NotEscaped(start) => {
- *self = MaybeString::Owned(input[start..].to_owned());
- }
- MaybeString::Owned(..) => {}
- }
- }
-
- fn into_cow(self, input: &str) -> Cow<'_, str> {
- match self {
- MaybeString::NotEscaped(start) => Cow::Borrowed(&input[start..]),
- MaybeString::Owned(s) => Cow::Owned(s),
- }
- }
-}
-
-fn is_keylike(ch: char) -> bool {
- ('A'..='Z').contains(&ch)
- || ('a'..='z').contains(&ch)
- || ('0'..='9').contains(&ch)
- || ch == '-'
- || ch == '_'
-}
-
-impl<'a> Token<'a> {
- pub fn describe(&self) -> &'static str {
- match *self {
- Token::Keylike(_) => "an identifier",
- Token::Equals => "an equals",
- Token::Period => "a period",
- Token::Comment(_) => "a comment",
- Token::Newline => "a newline",
- Token::Whitespace(_) => "whitespace",
- Token::Comma => "a comma",
- Token::RightBrace => "a right brace",
- Token::LeftBrace => "a left brace",
- Token::RightBracket => "a right bracket",
- Token::LeftBracket => "a left bracket",
- Token::String { multiline, .. } => {
- if multiline {
- "a multiline string"
- } else {
- "a string"
- }
- }
- Token::Colon => "a colon",
- Token::Plus => "a plus",
- }
- }
-}
-
-#[cfg(test)]
-mod tests {
- use super::{Error, Token, Tokenizer};
- use std::borrow::Cow;
-
- fn err(input: &str, err: Error) {
- let mut t = Tokenizer::new(input);
- let token = t.next().unwrap_err();
- assert_eq!(token, err);
- assert!(t.next().unwrap().is_none());
- }
-
- #[test]
- fn literal_strings() {
- fn t(input: &str, val: &str, multiline: bool) {
- let mut t = Tokenizer::new(input);
- let (_, token) = t.next().unwrap().unwrap();
- assert_eq!(
- token,
- Token::String {
- src: input,
- val: Cow::Borrowed(val),
- multiline,
- }
- );
- assert!(t.next().unwrap().is_none());
- }
-
- t("''", "", false);
- t("''''''", "", true);
- t("'''\n'''", "", true);
- t("'a'", "a", false);
- t("'\"a'", "\"a", false);
- t("''''a'''", "'a", true);
- t("'''\n'a\n'''", "'a\n", true);
- t("'''a\n'a\r\n'''", "a\n'a\n", true);
- }
-
- #[test]
- fn basic_strings() {
- fn t(input: &str, val: &str, multiline: bool) {
- let mut t = Tokenizer::new(input);
- let (_, token) = t.next().unwrap().unwrap();
- assert_eq!(
- token,
- Token::String {
- src: input,
- val: Cow::Borrowed(val),
- multiline,
- }
- );
- assert!(t.next().unwrap().is_none());
- }
-
- t(r#""""#, "", false);
- t(r#""""""""#, "", true);
- t(r#""a""#, "a", false);
- t(r#""""a""""#, "a", true);
- t(r#""\t""#, "\t", false);
- t(r#""\u0000""#, "\0", false);
- t(r#""\U00000000""#, "\0", false);
- t(r#""\U000A0000""#, "\u{A0000}", false);
- t(r#""\\t""#, "\\t", false);
- t("\"\t\"", "\t", false);
- t("\"\"\"\n\t\"\"\"", "\t", true);
- t("\"\"\"\\\n\"\"\"", "", true);
- t(
- "\"\"\"\\\n \t \t \\\r\n \t \n \t \r\n\"\"\"",
- "",
- true,
- );
- t(r#""\r""#, "\r", false);
- t(r#""\n""#, "\n", false);
- t(r#""\b""#, "\u{8}", false);
- t(r#""a\fa""#, "a\u{c}a", false);
- t(r#""\"a""#, "\"a", false);
- t("\"\"\"\na\"\"\"", "a", true);
- t("\"\"\"\n\"\"\"", "", true);
- t(r#""""a\"""b""""#, "a\"\"\"b", true);
- err(r#""\a"#, Error::InvalidEscape(2, 'a'));
- err("\"\\\n", Error::InvalidEscape(2, '\n'));
- err("\"\\\r\n", Error::InvalidEscape(2, '\n'));
- err("\"\\", Error::UnterminatedString(0));
- err("\"\u{0}", Error::InvalidCharInString(1, '\u{0}'));
- err(r#""\U00""#, Error::InvalidHexEscape(5, '"'));
- err(r#""\U00"#, Error::UnterminatedString(0));
- err(r#""\uD800"#, Error::InvalidEscapeValue(2, 0xd800));
- err(r#""\UFFFFFFFF"#, Error::InvalidEscapeValue(2, 0xffff_ffff));
- }
-
- #[test]
- fn keylike() {
- fn t(input: &str) {
- let mut t = Tokenizer::new(input);
- let (_, token) = t.next().unwrap().unwrap();
- assert_eq!(token, Token::Keylike(input));
- assert!(t.next().unwrap().is_none());
- }
- t("foo");
- t("0bar");
- t("bar0");
- t("1234");
- t("a-b");
- t("a_B");
- t("-_-");
- t("___");
- }
-
- #[test]
- fn all() {
- fn t(input: &str, expected: &[((usize, usize), Token<'_>, &str)]) {
- let mut tokens = Tokenizer::new(input);
- let mut actual: Vec<((usize, usize), Token<'_>, &str)> = Vec::new();
- while let Some((span, token)) = tokens.next().unwrap() {
- actual.push((span.into(), token, &input[span.start..span.end]));
- }
- for (a, b) in actual.iter().zip(expected) {
- assert_eq!(a, b);
- }
- assert_eq!(actual.len(), expected.len());
- }
-
- t(
- " a ",
- &[
- ((0, 1), Token::Whitespace(" "), " "),
- ((1, 2), Token::Keylike("a"), "a"),
- ((2, 3), Token::Whitespace(" "), " "),
- ],
- );
-
- t(
- " a\t [[]] \t [] {} , . =\n# foo \r\n#foo \n ",
- &[
- ((0, 1), Token::Whitespace(" "), " "),
- ((1, 2), Token::Keylike("a"), "a"),
- ((2, 4), Token::Whitespace("\t "), "\t "),
- ((4, 5), Token::LeftBracket, "["),
- ((5, 6), Token::LeftBracket, "["),
- ((6, 7), Token::RightBracket, "]"),
- ((7, 8), Token::RightBracket, "]"),
- ((8, 11), Token::Whitespace(" \t "), " \t "),
- ((11, 12), Token::LeftBracket, "["),
- ((12, 13), Token::RightBracket, "]"),
- ((13, 14), Token::Whitespace(" "), " "),
- ((14, 15), Token::LeftBrace, "{"),
- ((15, 16), Token::RightBrace, "}"),
- ((16, 17), Token::Whitespace(" "), " "),
- ((17, 18), Token::Comma, ","),
- ((18, 19), Token::Whitespace(" "), " "),
- ((19, 20), Token::Period, "."),
- ((20, 21), Token::Whitespace(" "), " "),
- ((21, 22), Token::Equals, "="),
- ((22, 23), Token::Newline, "\n"),
- ((23, 29), Token::Comment("# foo "), "# foo "),
- ((29, 31), Token::Newline, "\r\n"),
- ((31, 36), Token::Comment("#foo "), "#foo "),
- ((36, 37), Token::Newline, "\n"),
- ((37, 38), Token::Whitespace(" "), " "),
- ],
- );
- }
-
- #[test]
- fn bare_cr_bad() {
- err("\r", Error::Unexpected(0, '\r'));
- err("'\n", Error::NewlineInString(1));
- err("'\u{0}", Error::InvalidCharInString(1, '\u{0}'));
- err("'", Error::UnterminatedString(0));
- err("\u{0}", Error::Unexpected(0, '\u{0}'));
- }
-
- #[test]
- fn bad_comment() {
- let mut t = Tokenizer::new("#\u{0}");
- t.next().unwrap().unwrap();
- assert_eq!(t.next(), Err(Error::Unexpected(1, '\u{0}')));
- assert!(t.next().unwrap().is_none());
- }
-}
diff --git a/third_party/rust/toml/src/value.rs b/third_party/rust/toml/src/value.rs
@@ -1,21 +1,27 @@
-//! Definition of a TOML value
+//! Definition of a TOML [value][Value]
-use std::collections::{BTreeMap, HashMap};
-use std::fmt;
-use std::hash::Hash;
-use std::mem::discriminant;
-use std::ops;
-use std::str::FromStr;
-use std::vec;
+use alloc::collections::BTreeMap;
+use alloc::vec;
+use core::fmt;
+use core::hash::Hash;
+use core::mem::discriminant;
+use core::ops;
+#[cfg(feature = "std")]
+use std::collections::HashMap;
-use serde::de;
-use serde::de::IntoDeserializer;
-use serde::ser;
+use serde_core::de;
+use serde_core::de::IntoDeserializer;
+use serde_core::ser;
-use crate::datetime::{self, DatetimeFromString};
-pub use crate::datetime::{Date, Datetime, DatetimeParseError, Offset, Time};
+use crate::alloc_prelude::*;
-pub use crate::map::{Entry, Map};
+pub use toml_datetime::{Date, Datetime, DatetimeParseError, Offset, Time};
+
+/// Type representing a TOML array, payload of the `Value::Array` variant
+pub type Array = Vec<Value>;
+
+#[doc(no_inline)]
+pub use crate::Table;
/// Representation of a TOML value.
#[derive(PartialEq, Clone, Debug)]
@@ -36,25 +42,17 @@ pub enum Value {
Table(Table),
}
-/// Type representing a TOML array, payload of the `Value::Array` variant
-pub type Array = Vec<Value>;
-
-/// Type representing a TOML table, payload of the `Value::Table` variant.
-/// By default it is backed by a BTreeMap, enable the `preserve_order` feature
-/// to use a LinkedHashMap instead.
-pub type Table = Map<String, Value>;
-
impl Value {
/// Convert a `T` into `toml::Value` which is an enum that can represent
/// any valid TOML data.
///
/// This conversion can fail if `T`'s implementation of `Serialize` decides to
/// fail, or if `T` contains a map with non-string keys.
- pub fn try_from<T>(value: T) -> Result<Value, crate::ser::Error>
+ pub fn try_from<T>(value: T) -> Result<Self, crate::ser::Error>
where
T: ser::Serialize,
{
- value.serialize(Serializer)
+ value.serialize(ValueSerializer)
}
/// Interpret a `toml::Value` as an instance of type `T`.
@@ -81,7 +79,7 @@ impl Value {
/// index, for example if the index is a string and `self` is an array or a
/// number. Also returns `None` if the given key does not exist in the map
/// or the given index is not within the bounds of the array.
- pub fn get<I: Index>(&self, index: I) -> Option<&Value> {
+ pub fn get<I: Index>(&self, index: I) -> Option<&Self> {
index.index(self)
}
@@ -93,14 +91,14 @@ impl Value {
/// index, for example if the index is a string and `self` is an array or a
/// number. Also returns `None` if the given key does not exist in the map
/// or the given index is not within the bounds of the array.
- pub fn get_mut<I: Index>(&mut self, index: I) -> Option<&mut Value> {
+ pub fn get_mut<I: Index>(&mut self, index: I) -> Option<&mut Self> {
index.index_mut(self)
}
/// Extracts the integer value if it is an integer.
pub fn as_integer(&self) -> Option<i64> {
match *self {
- Value::Integer(i) => Some(i),
+ Self::Integer(i) => Some(i),
_ => None,
}
}
@@ -113,7 +111,7 @@ impl Value {
/// Extracts the float value if it is a float.
pub fn as_float(&self) -> Option<f64> {
match *self {
- Value::Float(f) => Some(f),
+ Self::Float(f) => Some(f),
_ => None,
}
}
@@ -126,7 +124,7 @@ impl Value {
/// Extracts the boolean value if it is a boolean.
pub fn as_bool(&self) -> Option<bool> {
match *self {
- Value::Boolean(b) => Some(b),
+ Self::Boolean(b) => Some(b),
_ => None,
}
}
@@ -139,7 +137,7 @@ impl Value {
/// Extracts the string of this value if it is a string.
pub fn as_str(&self) -> Option<&str> {
match *self {
- Value::String(ref s) => Some(&**s),
+ Self::String(ref s) => Some(&**s),
_ => None,
}
}
@@ -159,7 +157,7 @@ impl Value {
/// ```
pub fn as_datetime(&self) -> Option<&Datetime> {
match *self {
- Value::Datetime(ref s) => Some(s),
+ Self::Datetime(ref s) => Some(s),
_ => None,
}
}
@@ -170,17 +168,17 @@ impl Value {
}
/// Extracts the array value if it is an array.
- pub fn as_array(&self) -> Option<&Vec<Value>> {
+ pub fn as_array(&self) -> Option<&Vec<Self>> {
match *self {
- Value::Array(ref s) => Some(s),
+ Self::Array(ref s) => Some(s),
_ => None,
}
}
/// Extracts the array value if it is an array.
- pub fn as_array_mut(&mut self) -> Option<&mut Vec<Value>> {
+ pub fn as_array_mut(&mut self) -> Option<&mut Vec<Self>> {
match *self {
- Value::Array(ref mut s) => Some(s),
+ Self::Array(ref mut s) => Some(s),
_ => None,
}
}
@@ -193,7 +191,7 @@ impl Value {
/// Extracts the table value if it is a table.
pub fn as_table(&self) -> Option<&Table> {
match *self {
- Value::Table(ref s) => Some(s),
+ Self::Table(ref s) => Some(s),
_ => None,
}
}
@@ -201,7 +199,7 @@ impl Value {
/// Extracts the table value if it is a table.
pub fn as_table_mut(&mut self) -> Option<&mut Table> {
match *self {
- Value::Table(ref mut s) => Some(s),
+ Self::Table(ref mut s) => Some(s),
_ => None,
}
}
@@ -212,20 +210,20 @@ impl Value {
}
/// Tests whether this and another value have the same type.
- pub fn same_type(&self, other: &Value) -> bool {
+ pub fn same_type(&self, other: &Self) -> bool {
discriminant(self) == discriminant(other)
}
/// Returns a human-readable representation of the type of this value.
pub fn type_str(&self) -> &'static str {
match *self {
- Value::String(..) => "string",
- Value::Integer(..) => "integer",
- Value::Float(..) => "float",
- Value::Boolean(..) => "boolean",
- Value::Datetime(..) => "datetime",
- Value::Array(..) => "array",
- Value::Table(..) => "table",
+ Self::String(..) => "string",
+ Self::Integer(..) => "integer",
+ Self::Float(..) => "float",
+ Self::Boolean(..) => "boolean",
+ Self::Datetime(..) => "datetime",
+ Self::Array(..) => "array",
+ Self::Table(..) => "table",
}
}
}
@@ -234,9 +232,9 @@ impl<I> ops::Index<I> for Value
where
I: Index,
{
- type Output = Value;
+ type Output = Self;
- fn index(&self, index: I) -> &Value {
+ fn index(&self, index: I) -> &Self {
self.get(index).expect("index not found")
}
}
@@ -245,37 +243,38 @@ impl<I> ops::IndexMut<I> for Value
where
I: Index,
{
- fn index_mut(&mut self, index: I) -> &mut Value {
+ fn index_mut(&mut self, index: I) -> &mut Self {
self.get_mut(index).expect("index not found")
}
}
impl<'a> From<&'a str> for Value {
#[inline]
- fn from(val: &'a str) -> Value {
- Value::String(val.to_string())
+ fn from(val: &'a str) -> Self {
+ Self::String(val.to_owned())
}
}
-impl<V: Into<Value>> From<Vec<V>> for Value {
- fn from(val: Vec<V>) -> Value {
- Value::Array(val.into_iter().map(|v| v.into()).collect())
+impl<V: Into<Self>> From<Vec<V>> for Value {
+ fn from(val: Vec<V>) -> Self {
+ Self::Array(val.into_iter().map(|v| v.into()).collect())
}
}
-impl<S: Into<String>, V: Into<Value>> From<BTreeMap<S, V>> for Value {
- fn from(val: BTreeMap<S, V>) -> Value {
+impl<S: Into<String>, V: Into<Self>> From<BTreeMap<S, V>> for Value {
+ fn from(val: BTreeMap<S, V>) -> Self {
let table = val.into_iter().map(|(s, v)| (s.into(), v.into())).collect();
- Value::Table(table)
+ Self::Table(table)
}
}
-impl<S: Into<String> + Hash + Eq, V: Into<Value>> From<HashMap<S, V>> for Value {
- fn from(val: HashMap<S, V>) -> Value {
+#[cfg(feature = "std")]
+impl<S: Into<String> + Hash + Eq, V: Into<Self>> From<HashMap<S, V>> for Value {
+ fn from(val: HashMap<S, V>) -> Self {
let table = val.into_iter().map(|(s, v)| (s.into(), v.into())).collect();
- Value::Table(table)
+ Self::Table(table)
}
}
@@ -323,7 +322,7 @@ pub trait Sealed {}
impl Sealed for usize {}
impl Sealed for str {}
impl Sealed for String {}
-impl<'a, T: Sealed + ?Sized> Sealed for &'a T {}
+impl<T: Sealed + ?Sized> Sealed for &T {}
impl Index for usize {
fn index<'a>(&self, val: &'a Value) -> Option<&'a Value> {
@@ -367,9 +366,9 @@ impl Index for String {
}
}
-impl<'s, T: ?Sized> Index for &'s T
+impl<T> Index for &T
where
- T: Index,
+ T: Index + ?Sized,
{
fn index<'a>(&self, val: &'a Value) -> Option<&'a Value> {
(**self).index(val)
@@ -380,18 +379,24 @@ where
}
}
+#[cfg(feature = "display")]
impl fmt::Display for Value {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- crate::ser::to_string(self)
- .expect("Unable to represent value as string")
- .fmt(f)
+ use serde_core::Serialize as _;
+
+ let mut output = String::new();
+ let serializer = crate::ser::ValueSerializer::new(&mut output);
+ self.serialize(serializer).unwrap();
+ output.fmt(f)
}
}
-impl FromStr for Value {
+#[cfg(feature = "parse")]
+impl core::str::FromStr for Value {
type Err = crate::de::Error;
- fn from_str(s: &str) -> Result<Value, Self::Err> {
- crate::from_str(s)
+ fn from_str(s: &str) -> Result<Self, Self::Err> {
+ use serde_core::Deserialize as _;
+ Self::deserialize(crate::de::ValueDeserializer::parse(s)?)
}
}
@@ -400,50 +405,20 @@ impl ser::Serialize for Value {
where
S: ser::Serializer,
{
- use serde::ser::SerializeMap;
-
match *self {
- Value::String(ref s) => serializer.serialize_str(s),
- Value::Integer(i) => serializer.serialize_i64(i),
- Value::Float(f) => serializer.serialize_f64(f),
- Value::Boolean(b) => serializer.serialize_bool(b),
- Value::Datetime(ref s) => s.serialize(serializer),
- Value::Array(ref a) => a.serialize(serializer),
- Value::Table(ref t) => {
- let mut map = serializer.serialize_map(Some(t.len()))?;
- // Be sure to visit non-tables first (and also non
- // array-of-tables) as all keys must be emitted first.
- for (k, v) in t {
- if !v.is_table() && !v.is_array()
- || (v
- .as_array()
- .map(|a| !a.iter().any(|v| v.is_table()))
- .unwrap_or(false))
- {
- map.serialize_entry(k, v)?;
- }
- }
- for (k, v) in t {
- if v.as_array()
- .map(|a| a.iter().any(|v| v.is_table()))
- .unwrap_or(false)
- {
- map.serialize_entry(k, v)?;
- }
- }
- for (k, v) in t {
- if v.is_table() {
- map.serialize_entry(k, v)?;
- }
- }
- map.end()
- }
+ Self::String(ref s) => serializer.serialize_str(s),
+ Self::Integer(i) => serializer.serialize_i64(i),
+ Self::Float(f) => serializer.serialize_f64(f),
+ Self::Boolean(b) => serializer.serialize_bool(b),
+ Self::Datetime(ref s) => s.serialize(serializer),
+ Self::Array(ref a) => a.serialize(serializer),
+ Self::Table(ref t) => t.serialize(serializer),
}
}
}
impl<'de> de::Deserialize<'de> for Value {
- fn deserialize<D>(deserializer: D) -> Result<Value, D::Error>
+ fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: de::Deserializer<'de>,
{
@@ -465,7 +440,7 @@ impl<'de> de::Deserialize<'de> for Value {
}
fn visit_u64<E: de::Error>(self, value: u64) -> Result<Value, E> {
- if value <= i64::max_value() as u64 {
+ if i64::try_from(value).is_ok() {
Ok(Value::Integer(value as i64))
} else {
Err(de::Error::custom("u64 value was too large"))
@@ -514,23 +489,20 @@ impl<'de> de::Deserialize<'de> for Value {
where
V: de::MapAccess<'de>,
{
- let mut key = String::new();
- let datetime = visitor.next_key_seed(DatetimeOrTable { key: &mut key })?;
- match datetime {
- Some(true) => {
- let date: DatetimeFromString = visitor.next_value()?;
- return Ok(Value::Datetime(date.value));
+ let key = match toml_datetime::de::VisitMap::next_key_seed(&mut visitor)? {
+ Some(toml_datetime::de::VisitMap::Datetime(datetime)) => {
+ return Ok(Value::Datetime(datetime));
}
- None => return Ok(Value::Table(Map::new())),
- Some(false) => {}
- }
- let mut map = Map::new();
- map.insert(key, visitor.next_value()?);
+ None => return Ok(Value::Table(Table::new())),
+ Some(toml_datetime::de::VisitMap::Key(key)) => key,
+ };
+ let mut map = Table::new();
+ map.insert(key.into_owned(), visitor.next_value()?);
while let Some(key) = visitor.next_key::<String>()? {
- if let Entry::Vacant(vacant) = map.entry(&key) {
+ if let crate::map::Entry::Vacant(vacant) = map.entry(&key) {
vacant.insert(visitor.next_value()?);
} else {
- let msg = format!("duplicate key: `{}`", key);
+ let msg = format!("duplicate key: `{key}`");
return Err(de::Error::custom(msg));
}
}
@@ -542,6 +514,7 @@ impl<'de> de::Deserialize<'de> for Value {
}
}
+// This is wrapped by `Table` and any trait methods implemented here need to be wrapped there.
impl<'de> de::Deserializer<'de> for Value {
type Error = crate::de::Error;
@@ -550,12 +523,12 @@ impl<'de> de::Deserializer<'de> for Value {
V: de::Visitor<'de>,
{
match self {
- Value::Boolean(v) => visitor.visit_bool(v),
- Value::Integer(n) => visitor.visit_i64(n),
- Value::Float(n) => visitor.visit_f64(n),
- Value::String(v) => visitor.visit_string(v),
- Value::Datetime(v) => visitor.visit_string(v.to_string()),
- Value::Array(v) => {
+ Self::Boolean(v) => visitor.visit_bool(v),
+ Self::Integer(n) => visitor.visit_i64(n),
+ Self::Float(n) => visitor.visit_f64(n),
+ Self::String(v) => visitor.visit_string(v),
+ Self::Datetime(v) => visitor.visit_string(v.to_string()),
+ Self::Array(v) => {
let len = v.len();
let mut deserializer = SeqDeserializer::new(v);
let seq = visitor.visit_seq(&mut deserializer)?;
@@ -566,7 +539,7 @@ impl<'de> de::Deserializer<'de> for Value {
Err(de::Error::invalid_length(len, &"fewer elements in array"))
}
}
- Value::Table(v) => {
+ Self::Table(v) => {
let len = v.len();
let mut deserializer = MapDeserializer::new(v);
let map = visitor.visit_map(&mut deserializer)?;
@@ -583,7 +556,7 @@ impl<'de> de::Deserializer<'de> for Value {
#[inline]
fn deserialize_enum<V>(
self,
- _name: &str,
+ _name: &'static str,
_variants: &'static [&'static str],
visitor: V,
) -> Result<V::Value, crate::de::Error>
@@ -591,7 +564,23 @@ impl<'de> de::Deserializer<'de> for Value {
V: de::Visitor<'de>,
{
match self {
- Value::String(variant) => visitor.visit_enum(variant.into_deserializer()),
+ Self::String(variant) => visitor.visit_enum(variant.into_deserializer()),
+ Self::Table(variant) => {
+ if variant.is_empty() {
+ Err(crate::de::Error::custom(
+ "wanted exactly 1 element, found 0 elements",
+ None,
+ ))
+ } else if variant.len() != 1 {
+ Err(crate::de::Error::custom(
+ "wanted exactly 1 element, more than 1 element",
+ None,
+ ))
+ } else {
+ let deserializer = MapDeserializer::new(variant);
+ visitor.visit_enum(deserializer)
+ }
+ }
_ => Err(de::Error::invalid_type(
de::Unexpected::UnitVariant,
&"string only",
@@ -619,20 +608,20 @@ impl<'de> de::Deserializer<'de> for Value {
visitor.visit_newtype_struct(self)
}
- serde::forward_to_deserialize_any! {
+ serde_core::forward_to_deserialize_any! {
bool u8 u16 u32 u64 i8 i16 i32 i64 f32 f64 char str string unit seq
bytes byte_buf map unit_struct tuple_struct struct
tuple ignored_any identifier
}
}
-struct SeqDeserializer {
+pub(crate) struct SeqDeserializer {
iter: vec::IntoIter<Value>,
}
impl SeqDeserializer {
fn new(vec: Vec<Value>) -> Self {
- SeqDeserializer {
+ Self {
iter: vec.into_iter(),
}
}
@@ -659,14 +648,14 @@ impl<'de> de::SeqAccess<'de> for SeqDeserializer {
}
}
-struct MapDeserializer {
- iter: <Map<String, Value> as IntoIterator>::IntoIter,
+pub(crate) struct MapDeserializer {
+ iter: <Table as IntoIterator>::IntoIter,
value: Option<(String, Value)>,
}
impl MapDeserializer {
- fn new(map: Map<String, Value>) -> Self {
- MapDeserializer {
+ fn new(map: Table) -> Self {
+ Self {
iter: map.into_iter(),
value: None,
}
@@ -698,7 +687,7 @@ impl<'de> de::MapAccess<'de> for MapDeserializer {
None => return Err(de::Error::custom("value is missing")),
};
res.map_err(|mut error| {
- error.add_key_context(&key);
+ error.add_key(key);
error
})
}
@@ -711,7 +700,134 @@ impl<'de> de::MapAccess<'de> for MapDeserializer {
}
}
-impl<'de> de::IntoDeserializer<'de, crate::de::Error> for Value {
+impl<'de> de::EnumAccess<'de> for MapDeserializer {
+ type Error = crate::de::Error;
+ type Variant = MapEnumDeserializer;
+
+ fn variant_seed<V>(mut self, seed: V) -> Result<(V::Value, Self::Variant), Self::Error>
+ where
+ V: de::DeserializeSeed<'de>,
+ {
+ use de::Error;
+ let (key, value) = match self.iter.next() {
+ Some(pair) => pair,
+ None => {
+ return Err(Error::custom(
+ "expected table with exactly 1 entry, found empty table",
+ ));
+ }
+ };
+
+ let val = seed.deserialize(key.into_deserializer())?;
+
+ let variant = MapEnumDeserializer::new(value);
+
+ Ok((val, variant))
+ }
+}
+
+/// Deserializes table values into enum variants.
+pub(crate) struct MapEnumDeserializer {
+ value: Value,
+}
+
+impl MapEnumDeserializer {
+ pub(crate) fn new(value: Value) -> Self {
+ Self { value }
+ }
+}
+
+impl<'de> de::VariantAccess<'de> for MapEnumDeserializer {
+ type Error = crate::de::Error;
+
+ fn unit_variant(self) -> Result<(), Self::Error> {
+ use de::Error;
+ match self.value {
+ Value::Array(values) => {
+ if values.is_empty() {
+ Ok(())
+ } else {
+ Err(Error::custom("expected empty array"))
+ }
+ }
+ Value::Table(values) => {
+ if values.is_empty() {
+ Ok(())
+ } else {
+ Err(Error::custom("expected empty table"))
+ }
+ }
+ e => Err(Error::custom(format!(
+ "expected table, found {}",
+ e.type_str()
+ ))),
+ }
+ }
+
+ fn newtype_variant_seed<T>(self, seed: T) -> Result<T::Value, Self::Error>
+ where
+ T: de::DeserializeSeed<'de>,
+ {
+ seed.deserialize(self.value.into_deserializer())
+ }
+
+ fn tuple_variant<V>(self, len: usize, visitor: V) -> Result<V::Value, Self::Error>
+ where
+ V: de::Visitor<'de>,
+ {
+ use de::Error;
+ match self.value {
+ Value::Array(values) => {
+ if values.len() == len {
+ de::Deserializer::deserialize_seq(values.into_deserializer(), visitor)
+ } else {
+ Err(Error::custom(format!("expected tuple with length {len}")))
+ }
+ }
+ Value::Table(values) => {
+ let tuple_values: Result<Vec<_>, _> = values
+ .into_iter()
+ .enumerate()
+ .map(|(index, (key, value))| match key.parse::<usize>() {
+ Ok(key_index) if key_index == index => Ok(value),
+ Ok(_) | Err(_) => Err(Error::custom(format!(
+ "expected table key `{index}`, but was `{key}`"
+ ))),
+ })
+ .collect();
+ let tuple_values = tuple_values?;
+
+ if tuple_values.len() == len {
+ de::Deserializer::deserialize_seq(tuple_values.into_deserializer(), visitor)
+ } else {
+ Err(Error::custom(format!("expected tuple with length {len}")))
+ }
+ }
+ e => Err(Error::custom(format!(
+ "expected table, found {}",
+ e.type_str()
+ ))),
+ }
+ }
+
+ fn struct_variant<V>(
+ self,
+ fields: &'static [&'static str],
+ visitor: V,
+ ) -> Result<V::Value, Self::Error>
+ where
+ V: de::Visitor<'de>,
+ {
+ de::Deserializer::deserialize_struct(
+ self.value.into_deserializer(),
+ "", // TODO: this should be the variant name
+ fields,
+ visitor,
+ )
+ }
+}
+
+impl IntoDeserializer<'_, crate::de::Error> for Value {
type Deserializer = Self;
fn into_deserializer(self) -> Self {
@@ -719,19 +835,19 @@ impl<'de> de::IntoDeserializer<'de, crate::de::Error> for Value {
}
}
-struct Serializer;
+pub(crate) struct ValueSerializer;
-impl ser::Serializer for Serializer {
+impl ser::Serializer for ValueSerializer {
type Ok = Value;
type Error = crate::ser::Error;
- type SerializeSeq = SerializeVec;
- type SerializeTuple = SerializeVec;
- type SerializeTupleStruct = SerializeVec;
- type SerializeTupleVariant = SerializeVec;
- type SerializeMap = SerializeMap;
- type SerializeStruct = SerializeMap;
- type SerializeStructVariant = ser::Impossible<Value, crate::ser::Error>;
+ type SerializeSeq = ValueSerializeVec;
+ type SerializeTuple = ValueSerializeVec;
+ type SerializeTupleStruct = ValueSerializeVec;
+ type SerializeTupleVariant = ValueSerializeTupleVariant;
+ type SerializeMap = ValueSerializeMap;
+ type SerializeStruct = ValueSerializeMap;
+ type SerializeStructVariant = ValueSerializeStructVariant;
fn serialize_bool(self, value: bool) -> Result<Value, crate::ser::Error> {
Ok(Value::Boolean(value))
@@ -766,7 +882,7 @@ impl ser::Serializer for Serializer {
}
fn serialize_u64(self, value: u64) -> Result<Value, crate::ser::Error> {
- if value <= i64::max_value() as u64 {
+ if i64::try_from(value).is_ok() {
self.serialize_i64(value as i64)
} else {
Err(ser::Error::custom("u64 value was too large"))
@@ -774,10 +890,14 @@ impl ser::Serializer for Serializer {
}
fn serialize_f32(self, value: f32) -> Result<Value, crate::ser::Error> {
- self.serialize_f64(value.into())
+ self.serialize_f64(value as f64)
}
- fn serialize_f64(self, value: f64) -> Result<Value, crate::ser::Error> {
+ fn serialize_f64(self, mut value: f64) -> Result<Value, crate::ser::Error> {
+ // Discard sign of NaN. See ValueSerializer::serialize_f64.
+ if value.is_nan() {
+ value = value.copysign(1.0);
+ }
Ok(Value::Float(value))
}
@@ -797,11 +917,11 @@ impl ser::Serializer for Serializer {
}
fn serialize_unit(self) -> Result<Value, crate::ser::Error> {
- Err(crate::ser::Error::UnsupportedType)
+ Err(crate::ser::Error::unsupported_type(Some("unit")))
}
- fn serialize_unit_struct(self, _name: &'static str) -> Result<Value, crate::ser::Error> {
- Err(crate::ser::Error::UnsupportedType)
+ fn serialize_unit_struct(self, name: &'static str) -> Result<Value, crate::ser::Error> {
+ Err(crate::ser::Error::unsupported_type(Some(name)))
}
fn serialize_unit_variant(
@@ -813,43 +933,46 @@ impl ser::Serializer for Serializer {
self.serialize_str(_variant)
}
- fn serialize_newtype_struct<T: ?Sized>(
+ fn serialize_newtype_struct<T>(
self,
_name: &'static str,
value: &T,
) -> Result<Value, crate::ser::Error>
where
- T: ser::Serialize,
+ T: ser::Serialize + ?Sized,
{
value.serialize(self)
}
- fn serialize_newtype_variant<T: ?Sized>(
+ fn serialize_newtype_variant<T>(
self,
_name: &'static str,
_variant_index: u32,
- _variant: &'static str,
- _value: &T,
+ variant: &'static str,
+ value: &T,
) -> Result<Value, crate::ser::Error>
where
- T: ser::Serialize,
+ T: ser::Serialize + ?Sized,
{
- Err(crate::ser::Error::UnsupportedType)
+ let value = value.serialize(Self)?;
+ let mut table = Table::new();
+ table.insert(variant.to_owned(), value);
+ Ok(table.into())
}
fn serialize_none(self) -> Result<Value, crate::ser::Error> {
- Err(crate::ser::Error::UnsupportedNone)
+ Err(crate::ser::Error::unsupported_none())
}
- fn serialize_some<T: ?Sized>(self, value: &T) -> Result<Value, crate::ser::Error>
+ fn serialize_some<T>(self, value: &T) -> Result<Value, crate::ser::Error>
where
- T: ser::Serialize,
+ T: ser::Serialize + ?Sized,
{
value.serialize(self)
}
fn serialize_seq(self, len: Option<usize>) -> Result<Self::SerializeSeq, crate::ser::Error> {
- Ok(SerializeVec {
+ Ok(ValueSerializeVec {
vec: Vec::with_capacity(len.unwrap_or(0)),
})
}
@@ -870,16 +993,15 @@ impl ser::Serializer for Serializer {
self,
_name: &'static str,
_variant_index: u32,
- _variant: &'static str,
+ variant: &'static str,
len: usize,
) -> Result<Self::SerializeTupleVariant, crate::ser::Error> {
- self.serialize_seq(Some(len))
+ Ok(ValueSerializeTupleVariant::tuple(variant, len))
}
fn serialize_map(self, _len: Option<usize>) -> Result<Self::SerializeMap, crate::ser::Error> {
- Ok(SerializeMap {
- map: Map::new(),
- next_key: None,
+ Ok(ValueSerializeMap {
+ ser: crate::table::SerializeMap::new(),
})
}
@@ -895,29 +1017,24 @@ impl ser::Serializer for Serializer {
self,
_name: &'static str,
_variant_index: u32,
- _variant: &'static str,
- _len: usize,
+ variant: &'static str,
+ len: usize,
) -> Result<Self::SerializeStructVariant, crate::ser::Error> {
- Err(crate::ser::Error::UnsupportedType)
+ Ok(ValueSerializeStructVariant::struct_(variant, len))
}
}
-struct SerializeVec {
+pub(crate) struct ValueSerializeVec {
vec: Vec<Value>,
}
-struct SerializeMap {
- map: Map<String, Value>,
- next_key: Option<String>,
-}
-
-impl ser::SerializeSeq for SerializeVec {
+impl ser::SerializeSeq for ValueSerializeVec {
type Ok = Value;
type Error = crate::ser::Error;
- fn serialize_element<T: ?Sized>(&mut self, value: &T) -> Result<(), crate::ser::Error>
+ fn serialize_element<T>(&mut self, value: &T) -> Result<(), crate::ser::Error>
where
- T: ser::Serialize,
+ T: ser::Serialize + ?Sized,
{
self.vec.push(Value::try_from(value)?);
Ok(())
@@ -928,13 +1045,13 @@ impl ser::SerializeSeq for SerializeVec {
}
}
-impl ser::SerializeTuple for SerializeVec {
+impl ser::SerializeTuple for ValueSerializeVec {
type Ok = Value;
type Error = crate::ser::Error;
- fn serialize_element<T: ?Sized>(&mut self, value: &T) -> Result<(), crate::ser::Error>
+ fn serialize_element<T>(&mut self, value: &T) -> Result<(), crate::ser::Error>
where
- T: ser::Serialize,
+ T: ser::Serialize + ?Sized,
{
ser::SerializeSeq::serialize_element(self, value)
}
@@ -944,13 +1061,13 @@ impl ser::SerializeTuple for SerializeVec {
}
}
-impl ser::SerializeTupleStruct for SerializeVec {
+impl ser::SerializeTupleStruct for ValueSerializeVec {
type Ok = Value;
type Error = crate::ser::Error;
- fn serialize_field<T: ?Sized>(&mut self, value: &T) -> Result<(), crate::ser::Error>
+ fn serialize_field<T>(&mut self, value: &T) -> Result<(), crate::ser::Error>
where
- T: ser::Serialize,
+ T: ser::Serialize + ?Sized,
{
ser::SerializeSeq::serialize_element(self, value)
}
@@ -960,13 +1077,13 @@ impl ser::SerializeTupleStruct for SerializeVec {
}
}
-impl ser::SerializeTupleVariant for SerializeVec {
+impl ser::SerializeTupleVariant for ValueSerializeVec {
type Ok = Value;
type Error = crate::ser::Error;
- fn serialize_field<T: ?Sized>(&mut self, value: &T) -> Result<(), crate::ser::Error>
+ fn serialize_field<T>(&mut self, value: &T) -> Result<(), crate::ser::Error>
where
- T: ser::Serialize,
+ T: ser::Serialize + ?Sized,
{
ser::SerializeSeq::serialize_element(self, value)
}
@@ -976,53 +1093,40 @@ impl ser::SerializeTupleVariant for SerializeVec {
}
}
-impl ser::SerializeMap for SerializeMap {
+pub(crate) struct ValueSerializeMap {
+ ser: crate::table::SerializeMap,
+}
+
+impl ser::SerializeMap for ValueSerializeMap {
type Ok = Value;
type Error = crate::ser::Error;
- fn serialize_key<T: ?Sized>(&mut self, key: &T) -> Result<(), crate::ser::Error>
+ fn serialize_key<T>(&mut self, key: &T) -> Result<(), crate::ser::Error>
where
- T: ser::Serialize,
+ T: ser::Serialize + ?Sized,
{
- match Value::try_from(key)? {
- Value::String(s) => self.next_key = Some(s),
- _ => return Err(crate::ser::Error::KeyNotString),
- };
- Ok(())
+ self.ser.serialize_key(key)
}
- fn serialize_value<T: ?Sized>(&mut self, value: &T) -> Result<(), crate::ser::Error>
+ fn serialize_value<T>(&mut self, value: &T) -> Result<(), crate::ser::Error>
where
- T: ser::Serialize,
+ T: ser::Serialize + ?Sized,
{
- let key = self.next_key.take();
- let key = key.expect("serialize_value called before serialize_key");
- match Value::try_from(value) {
- Ok(value) => {
- self.map.insert(key, value);
- }
- Err(crate::ser::Error::UnsupportedNone) => {}
- Err(e) => return Err(e),
- }
- Ok(())
+ self.ser.serialize_value(value)
}
fn end(self) -> Result<Value, crate::ser::Error> {
- Ok(Value::Table(self.map))
+ self.ser.end().map(Value::Table)
}
}
-impl ser::SerializeStruct for SerializeMap {
+impl ser::SerializeStruct for ValueSerializeMap {
type Ok = Value;
type Error = crate::ser::Error;
- fn serialize_field<T: ?Sized>(
- &mut self,
- key: &'static str,
- value: &T,
- ) -> Result<(), crate::ser::Error>
+ fn serialize_field<T>(&mut self, key: &'static str, value: &T) -> Result<(), crate::ser::Error>
where
- T: ser::Serialize,
+ T: ser::Serialize + ?Sized,
{
ser::SerializeMap::serialize_key(self, key)?;
ser::SerializeMap::serialize_value(self, value)
@@ -1033,49 +1137,72 @@ impl ser::SerializeStruct for SerializeMap {
}
}
-struct DatetimeOrTable<'a> {
- key: &'a mut String,
-}
+type ValueSerializeTupleVariant = ValueSerializeVariant<ValueSerializeVec>;
+type ValueSerializeStructVariant = ValueSerializeVariant<ValueSerializeMap>;
-impl<'a, 'de> de::DeserializeSeed<'de> for DatetimeOrTable<'a> {
- type Value = bool;
+pub(crate) struct ValueSerializeVariant<T> {
+ variant: &'static str,
+ inner: T,
+}
- fn deserialize<D>(self, deserializer: D) -> Result<Self::Value, D::Error>
- where
- D: de::Deserializer<'de>,
- {
- deserializer.deserialize_any(self)
+impl ValueSerializeVariant<ValueSerializeVec> {
+ pub(crate) fn tuple(variant: &'static str, len: usize) -> Self {
+ Self {
+ variant,
+ inner: ValueSerializeVec {
+ vec: Vec::with_capacity(len),
+ },
+ }
}
}
-impl<'a, 'de> de::Visitor<'de> for DatetimeOrTable<'a> {
- type Value = bool;
-
- fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {
- formatter.write_str("a string key")
+impl ValueSerializeVariant<ValueSerializeMap> {
+ pub(crate) fn struct_(variant: &'static str, len: usize) -> Self {
+ Self {
+ variant,
+ inner: ValueSerializeMap {
+ ser: crate::table::SerializeMap::with_capacity(len),
+ },
+ }
}
+}
+
+impl ser::SerializeTupleVariant for ValueSerializeVariant<ValueSerializeVec> {
+ type Ok = Value;
+ type Error = crate::ser::Error;
- fn visit_str<E>(self, s: &str) -> Result<bool, E>
+ fn serialize_field<T>(&mut self, value: &T) -> Result<(), Self::Error>
where
- E: de::Error,
+ T: ser::Serialize + ?Sized,
{
- if s == datetime::FIELD {
- Ok(true)
- } else {
- self.key.push_str(s);
- Ok(false)
- }
+ ser::SerializeSeq::serialize_element(&mut self.inner, value)
}
- fn visit_string<E>(self, s: String) -> Result<bool, E>
+ fn end(self) -> Result<Self::Ok, Self::Error> {
+ let inner = ser::SerializeSeq::end(self.inner)?;
+ let mut table = Table::new();
+ table.insert(self.variant.to_owned(), inner);
+ Ok(Value::Table(table))
+ }
+}
+
+impl ser::SerializeStructVariant for ValueSerializeVariant<ValueSerializeMap> {
+ type Ok = Value;
+ type Error = crate::ser::Error;
+
+ #[inline]
+ fn serialize_field<T>(&mut self, key: &'static str, value: &T) -> Result<(), Self::Error>
where
- E: de::Error,
+ T: ser::Serialize + ?Sized,
{
- if s == datetime::FIELD {
- Ok(true)
- } else {
- *self.key = s;
- Ok(false)
- }
+ ser::SerializeStruct::serialize_field(&mut self.inner, key, value)
+ }
+
+ #[inline]
+ fn end(self) -> Result<Self::Ok, Self::Error> {
+ let inner = ser::SerializeStruct::end(self.inner)?;
+ let mut table = Table::new();
+ table.insert(self.variant.to_owned(), inner);
+ Ok(Value::Table(table))
}
}
diff --git a/third_party/rust/toml/tests/enum_external_deserialize.rs b/third_party/rust/toml/tests/enum_external_deserialize.rs
@@ -1,258 +0,0 @@
-#[macro_use]
-extern crate serde_derive;
-extern crate toml;
-
-#[derive(Debug, Deserialize, PartialEq)]
-struct OuterStruct {
- inner: TheEnum,
-}
-
-#[derive(Debug, Deserialize, PartialEq)]
-enum TheEnum {
- Plain,
- Tuple(i64, bool),
- NewType(String),
- Struct { value: i64 },
-}
-
-#[derive(Debug, Deserialize, PartialEq)]
-struct Val {
- val: TheEnum,
-}
-
-#[derive(Debug, Deserialize, PartialEq)]
-struct Multi {
- enums: Vec<TheEnum>,
-}
-
-#[test]
-fn invalid_variant_returns_error_with_good_message_string() {
- let error = toml::from_str::<TheEnum>("\"NonExistent\"").unwrap_err();
-
- assert_eq!(
- error.to_string(),
- "unknown variant `NonExistent`, expected one of `Plain`, `Tuple`, `NewType`, `Struct`"
- );
-}
-
-#[test]
-fn invalid_variant_returns_error_with_good_message_inline_table() {
- let error = toml::from_str::<TheEnum>("{ NonExistent = {} }").unwrap_err();
- assert_eq!(
- error.to_string(),
- "unknown variant `NonExistent`, expected one of `Plain`, `Tuple`, `NewType`, `Struct`"
- );
-}
-
-#[test]
-fn extra_field_returns_expected_empty_table_error() {
- let error = toml::from_str::<TheEnum>("{ Plain = { extra_field = 404 } }").unwrap_err();
-
- assert_eq!(error.to_string(), "expected empty table");
-}
-
-#[test]
-fn extra_field_returns_expected_empty_table_error_struct_variant() {
- let error = toml::from_str::<TheEnum>("{ Struct = { value = 123, extra_0 = 0, extra_1 = 1 } }")
- .unwrap_err();
-
- assert_eq!(
- error.to_string(),
- r#"unexpected keys in table: `["extra_0", "extra_1"]`, available keys: `["value"]`"#
- );
-}
-
-mod enum_unit {
- use super::*;
-
- #[test]
- fn from_str() {
- assert_eq!(TheEnum::Plain, toml::from_str("\"Plain\"").unwrap());
- }
-
- #[test]
- fn from_inline_table() {
- assert_eq!(TheEnum::Plain, toml::from_str("{ Plain = {} }").unwrap());
- assert_eq!(
- Val {
- val: TheEnum::Plain
- },
- toml::from_str("val = { Plain = {} }").unwrap()
- );
- }
-
- #[test]
- fn from_dotted_table() {
- assert_eq!(TheEnum::Plain, toml::from_str("[Plain]\n").unwrap());
- }
-}
-
-mod enum_tuple {
- use super::*;
-
- #[test]
- fn from_inline_table() {
- assert_eq!(
- TheEnum::Tuple(-123, true),
- toml::from_str("{ Tuple = { 0 = -123, 1 = true } }").unwrap()
- );
- assert_eq!(
- Val {
- val: TheEnum::Tuple(-123, true)
- },
- toml::from_str("val = { Tuple = { 0 = -123, 1 = true } }").unwrap()
- );
- }
-
- #[test]
- fn from_dotted_table() {
- assert_eq!(
- TheEnum::Tuple(-123, true),
- toml::from_str(
- r#"[Tuple]
- 0 = -123
- 1 = true
- "#
- )
- .unwrap()
- );
- }
-}
-
-mod enum_newtype {
- use super::*;
-
- #[test]
- fn from_inline_table() {
- assert_eq!(
- TheEnum::NewType("value".to_string()),
- toml::from_str(r#"{ NewType = "value" }"#).unwrap()
- );
- assert_eq!(
- Val {
- val: TheEnum::NewType("value".to_string()),
- },
- toml::from_str(r#"val = { NewType = "value" }"#).unwrap()
- );
- }
-
- #[test]
- #[ignore = "Unimplemented: https://github.com/alexcrichton/toml-rs/pull/264#issuecomment-431707209"]
- fn from_dotted_table() {
- assert_eq!(
- TheEnum::NewType("value".to_string()),
- toml::from_str(r#"NewType = "value""#).unwrap()
- );
- assert_eq!(
- Val {
- val: TheEnum::NewType("value".to_string()),
- },
- toml::from_str(
- r#"[val]
- NewType = "value"
- "#
- )
- .unwrap()
- );
- }
-}
-
-mod enum_struct {
- use super::*;
-
- #[test]
- fn from_inline_table() {
- assert_eq!(
- TheEnum::Struct { value: -123 },
- toml::from_str("{ Struct = { value = -123 } }").unwrap()
- );
- assert_eq!(
- Val {
- val: TheEnum::Struct { value: -123 }
- },
- toml::from_str("val = { Struct = { value = -123 } }").unwrap()
- );
- }
-
- #[test]
- fn from_dotted_table() {
- assert_eq!(
- TheEnum::Struct { value: -123 },
- toml::from_str(
- r#"[Struct]
- value = -123
- "#
- )
- .unwrap()
- );
- }
-
- #[test]
- fn from_nested_dotted_table() {
- assert_eq!(
- OuterStruct {
- inner: TheEnum::Struct { value: -123 }
- },
- toml::from_str(
- r#"[inner.Struct]
- value = -123
- "#
- )
- .unwrap()
- );
- }
-}
-
-mod enum_array {
- use super::*;
-
- #[test]
- fn from_inline_tables() {
- let toml_str = r#"
- enums = [
- { Plain = {} },
- { Tuple = { 0 = -123, 1 = true } },
- { NewType = "value" },
- { Struct = { value = -123 } }
- ]"#;
- assert_eq!(
- Multi {
- enums: vec![
- TheEnum::Plain,
- TheEnum::Tuple(-123, true),
- TheEnum::NewType("value".to_string()),
- TheEnum::Struct { value: -123 },
- ]
- },
- toml::from_str(toml_str).unwrap()
- );
- }
-
- #[test]
- #[ignore = "Unimplemented: https://github.com/alexcrichton/toml-rs/pull/264#issuecomment-431707209"]
- fn from_dotted_table() {
- let toml_str = r#"[[enums]]
- Plain = {}
-
- [[enums]]
- Tuple = { 0 = -123, 1 = true }
-
- [[enums]]
- NewType = "value"
-
- [[enums]]
- Struct = { value = -123 }
- "#;
- assert_eq!(
- Multi {
- enums: vec![
- TheEnum::Plain,
- TheEnum::Tuple(-123, true),
- TheEnum::NewType("value".to_string()),
- TheEnum::Struct { value: -123 },
- ]
- },
- toml::from_str(toml_str).unwrap()
- );
- }
-}
diff --git a/third_party/rust/toml_datetime/.cargo-checksum.json b/third_party/rust/toml_datetime/.cargo-checksum.json
@@ -0,0 +1 @@
+{"files":{"Cargo.lock":"3d5b5ce9526a741ceeae2406927282054a13c46b4724ab023f476e0f9820f1d8","Cargo.toml":"07414af0ba5a043a6b0dcdef69913ad90f8ca9b2b3bb29121aacff93c531d168","LICENSE-APACHE":"c6596eb7be8581c18be736c846fb9173b69eccf6ef94c5135893ec56bd92ba08","LICENSE-MIT":"6efb0476a1cc085077ed49357026d8c173bf33017278ef440f222fb9cbcb66e6","README.md":"052409149adfc4473429e3fdc3f45913186d3d37bc46bc645cb7f72c5a31efbb","src/datetime.rs":"066dc7b5362332e71dde4c8c2300795c9f8161c28240134d31d6a5cd63c509de","src/de.rs":"6388e9460d15c2a3b5818298396cfa13af4376c7e578a95aedd1eef431411cba","src/lib.rs":"bd75aa36e53f5a870d0605c0850d16e6a6ebda0bee1006a484c4e7fdd949eee3","src/ser.rs":"77e03075bf1c2cf1b31a58277e3308b8f00d2eb4268b0c6b538bfbb7ddeed4ba"},"package":"f2cdb639ebbc97961c51720f858597f7f24c4fc295327923af55b74c3c724533"}
+\ No newline at end of file
diff --git a/third_party/rust/toml_datetime/Cargo.lock b/third_party/rust/toml_datetime/Cargo.lock
@@ -0,0 +1,254 @@
+# This file is automatically @generated by Cargo.
+# It is not intended for manual editing.
+version = 3
+
+[[package]]
+name = "anstream"
+version = "0.6.20"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3ae563653d1938f79b1ab1b5e668c87c76a9930414574a6583a7b7e11a8e6192"
+dependencies = [
+ "anstyle",
+ "anstyle-parse",
+ "anstyle-query",
+ "anstyle-wincon",
+ "colorchoice",
+ "is_terminal_polyfill",
+ "utf8parse",
+]
+
+[[package]]
+name = "anstyle"
+version = "1.0.11"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "862ed96ca487e809f1c8e5a8447f6ee2cf102f846893800b20cebdf541fc6bbd"
+
+[[package]]
+name = "anstyle-parse"
+version = "0.2.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4e7644824f0aa2c7b9384579234ef10eb7efb6a0deb83f9630a49594dd9c15c2"
+dependencies = [
+ "utf8parse",
+]
+
+[[package]]
+name = "anstyle-query"
+version = "1.1.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9e231f6134f61b71076a3eab506c379d4f36122f2af15a9ff04415ea4c3339e2"
+dependencies = [
+ "windows-sys",
+]
+
+[[package]]
+name = "anstyle-wincon"
+version = "3.0.10"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3e0633414522a32ffaac8ac6cc8f748e090c5717661fddeea04219e2344f5f2a"
+dependencies = [
+ "anstyle",
+ "once_cell_polyfill",
+ "windows-sys",
+]
+
+[[package]]
+name = "colorchoice"
+version = "1.0.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b05b61dc5112cbb17e4b6cd61790d9845d13888356391624cbe7e41efeac1e75"
+
+[[package]]
+name = "is_terminal_polyfill"
+version = "1.70.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7943c866cc5cd64cbc25b2e01621d07fa8eb2a1a23160ee81ce38704e97b8ecf"
+
+[[package]]
+name = "normalize-line-endings"
+version = "0.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "61807f77802ff30975e01f4f071c8ba10c022052f98b3294119f3e615d13e5be"
+
+[[package]]
+name = "once_cell_polyfill"
+version = "1.70.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a4895175b425cb1f87721b59f0f286c2092bd4af812243672510e1ac53e2e0ad"
+
+[[package]]
+name = "proc-macro2"
+version = "1.0.101"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "89ae43fd86e4158d6db51ad8e2b80f313af9cc74f5c0e03ccb87de09998732de"
+dependencies = [
+ "unicode-ident",
+]
+
+[[package]]
+name = "quote"
+version = "1.0.40"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1885c039570dc00dcb4ff087a89e185fd56bae234ddc7f056a945bf36467248d"
+dependencies = [
+ "proc-macro2",
+]
+
+[[package]]
+name = "serde_core"
+version = "1.0.225"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "659356f9a0cb1e529b24c01e43ad2bdf520ec4ceaf83047b83ddcc2251f96383"
+dependencies = [
+ "serde_derive",
+]
+
+[[package]]
+name = "serde_derive"
+version = "1.0.225"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0ea936adf78b1f766949a4977b91d2f5595825bd6ec079aa9543ad2685fc4516"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "similar"
+version = "2.7.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bbbb5d9659141646ae647b42fe094daf6c6192d1620870b449d9557f748b2daa"
+
+[[package]]
+name = "snapbox"
+version = "0.6.21"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "96dcfc4581e3355d70ac2ee14cfdf81dce3d85c85f1ed9e2c1d3013f53b3436b"
+dependencies = [
+ "anstream",
+ "anstyle",
+ "normalize-line-endings",
+ "similar",
+ "snapbox-macros",
+]
+
+[[package]]
+name = "snapbox-macros"
+version = "0.3.10"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "16569f53ca23a41bb6f62e0a5084aa1661f4814a67fa33696a79073e03a664af"
+dependencies = [
+ "anstream",
+]
+
+[[package]]
+name = "syn"
+version = "2.0.106"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ede7c438028d4436d71104916910f5bb611972c5cfd7f89b8300a8186e6fada6"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "unicode-ident",
+]
+
+[[package]]
+name = "toml_datetime"
+version = "0.7.3"
+dependencies = [
+ "serde_core",
+ "snapbox",
+]
+
+[[package]]
+name = "unicode-ident"
+version = "1.0.19"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f63a545481291138910575129486daeaf8ac54aee4387fe7906919f7830c7d9d"
+
+[[package]]
+name = "utf8parse"
+version = "0.2.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821"
+
+[[package]]
+name = "windows-link"
+version = "0.1.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5e6ad25900d524eaabdbbb96d20b4311e1e7ae1699af4fb28c17ae66c80d798a"
+
+[[package]]
+name = "windows-sys"
+version = "0.60.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f2f500e4d28234f72040990ec9d39e3a6b950f9f22d3dba18416c35882612bcb"
+dependencies = [
+ "windows-targets",
+]
+
+[[package]]
+name = "windows-targets"
+version = "0.53.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d5fe6031c4041849d7c496a8ded650796e7b6ecc19df1a431c1a363342e5dc91"
+dependencies = [
+ "windows-link",
+ "windows_aarch64_gnullvm",
+ "windows_aarch64_msvc",
+ "windows_i686_gnu",
+ "windows_i686_gnullvm",
+ "windows_i686_msvc",
+ "windows_x86_64_gnu",
+ "windows_x86_64_gnullvm",
+ "windows_x86_64_msvc",
+]
+
+[[package]]
+name = "windows_aarch64_gnullvm"
+version = "0.53.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "86b8d5f90ddd19cb4a147a5fa63ca848db3df085e25fee3cc10b39b6eebae764"
+
+[[package]]
+name = "windows_aarch64_msvc"
+version = "0.53.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c7651a1f62a11b8cbd5e0d42526e55f2c99886c77e007179efff86c2b137e66c"
+
+[[package]]
+name = "windows_i686_gnu"
+version = "0.53.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c1dc67659d35f387f5f6c479dc4e28f1d4bb90ddd1a5d3da2e5d97b42d6272c3"
+
+[[package]]
+name = "windows_i686_gnullvm"
+version = "0.53.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9ce6ccbdedbf6d6354471319e781c0dfef054c81fbc7cf83f338a4296c0cae11"
+
+[[package]]
+name = "windows_i686_msvc"
+version = "0.53.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "581fee95406bb13382d2f65cd4a908ca7b1e4c2f1917f143ba16efe98a589b5d"
+
+[[package]]
+name = "windows_x86_64_gnu"
+version = "0.53.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2e55b5ac9ea33f2fc1716d1742db15574fd6fc8dadc51caab1c16a3d3b4190ba"
+
+[[package]]
+name = "windows_x86_64_gnullvm"
+version = "0.53.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0a6e035dd0599267ce1ee132e51c27dd29437f63325753051e71dd9e42406c57"
+
+[[package]]
+name = "windows_x86_64_msvc"
+version = "0.53.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "271414315aff87387382ec3d271b52d7ae78726f5d44ac98b4f4030c91880486"
diff --git a/third_party/rust/toml_datetime/Cargo.toml b/third_party/rust/toml_datetime/Cargo.toml
@@ -0,0 +1,182 @@
+# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
+#
+# When uploading crates to the registry Cargo will automatically
+# "normalize" Cargo.toml files for maximal compatibility
+# with all versions of Cargo and also rewrite `path` dependencies
+# to registry (e.g., crates.io) dependencies.
+#
+# If you are reading this file be aware that the original Cargo.toml
+# will likely look very different (and much more reasonable).
+# See Cargo.toml.orig for the original contents.
+
+[package]
+edition = "2021"
+rust-version = "1.76"
+name = "toml_datetime"
+version = "0.7.3"
+build = false
+include = [
+ "build.rs",
+ "src/**/*",
+ "Cargo.toml",
+ "Cargo.lock",
+ "LICENSE*",
+ "README.md",
+ "examples/**/*",
+]
+autolib = false
+autobins = false
+autoexamples = false
+autotests = false
+autobenches = false
+description = "A TOML-compatible datetime type"
+readme = "README.md"
+keywords = [
+ "encoding",
+ "toml",
+ "no_std",
+]
+categories = [
+ "encoding",
+ "parser-implementations",
+ "parsing",
+ "config",
+]
+license = "MIT OR Apache-2.0"
+repository = "https://github.com/toml-rs/toml"
+
+[package.metadata.docs.rs]
+all-features = true
+rustdoc-args = ["--generate-link-to-definition"]
+
+[[package.metadata.release.pre-release-replacements]]
+file = "CHANGELOG.md"
+search = "Unreleased"
+replace = "{{version}}"
+min = 1
+
+[[package.metadata.release.pre-release-replacements]]
+file = "CHANGELOG.md"
+search = '\.\.\.HEAD'
+replace = "...{{tag_name}}"
+exactly = 1
+
+[[package.metadata.release.pre-release-replacements]]
+file = "CHANGELOG.md"
+search = "ReleaseDate"
+replace = "{{date}}"
+min = 1
+
+[[package.metadata.release.pre-release-replacements]]
+file = "CHANGELOG.md"
+search = "<!-- next-header -->"
+replace = """
+<!-- next-header -->
+## [Unreleased] - ReleaseDate
+"""
+exactly = 1
+
+[[package.metadata.release.pre-release-replacements]]
+file = "CHANGELOG.md"
+search = "<!-- next-url -->"
+replace = """
+<!-- next-url -->
+[Unreleased]: https://github.com/toml-rs/toml/compare/{{tag_name}}...HEAD"""
+exactly = 1
+
+[features]
+alloc = ["serde_core?/alloc"]
+default = ["std"]
+serde = ["dep:serde_core"]
+std = [
+ "alloc",
+ "serde_core?/std",
+]
+
+[lib]
+name = "toml_datetime"
+path = "src/lib.rs"
+
+[dependencies.serde_core]
+version = "1.0.225"
+optional = true
+default-features = false
+
+[dev-dependencies.snapbox]
+version = "0.6.21"
+
+[lints.clippy]
+bool_assert_comparison = "allow"
+branches_sharing_code = "allow"
+checked_conversions = "warn"
+collapsible_else_if = "allow"
+create_dir = "warn"
+dbg_macro = "warn"
+debug_assert_with_mut_call = "warn"
+doc_markdown = "warn"
+empty_enum = "warn"
+enum_glob_use = "warn"
+expl_impl_clone_on_copy = "warn"
+explicit_deref_methods = "warn"
+explicit_into_iter_loop = "warn"
+fallible_impl_from = "warn"
+filter_map_next = "warn"
+flat_map_option = "warn"
+float_cmp_const = "warn"
+fn_params_excessive_bools = "warn"
+from_iter_instead_of_collect = "warn"
+get_first = "allow"
+if_same_then_else = "allow"
+implicit_clone = "warn"
+imprecise_flops = "warn"
+inconsistent_struct_constructor = "warn"
+inefficient_to_string = "warn"
+infinite_loop = "warn"
+invalid_upcast_comparisons = "warn"
+large_digit_groups = "warn"
+large_stack_arrays = "warn"
+large_types_passed_by_value = "warn"
+let_and_return = "allow"
+linkedlist = "warn"
+lossy_float_literal = "warn"
+macro_use_imports = "warn"
+mem_forget = "warn"
+mutex_integer = "warn"
+needless_bool = "allow"
+needless_continue = "allow"
+needless_for_each = "warn"
+negative_feature_names = "warn"
+path_buf_push_overwrite = "warn"
+ptr_as_ptr = "warn"
+rc_mutex = "warn"
+redundant_feature_names = "warn"
+ref_option_ref = "warn"
+rest_pat_in_fully_bound_structs = "warn"
+result_large_err = "allow"
+same_functions_in_if_condition = "warn"
+self_named_module_files = "warn"
+semicolon_if_nothing_returned = "warn"
+str_to_string = "warn"
+string_add = "warn"
+string_add_assign = "warn"
+string_lit_as_bytes = "warn"
+string_to_string = "warn"
+todo = "warn"
+trait_duplication_in_bounds = "warn"
+uninlined_format_args = "warn"
+use_self = "warn"
+verbose_file_reads = "warn"
+wildcard_imports = "warn"
+zero_sized_map_values = "warn"
+
+[lints.rust]
+unnameable_types = "allow"
+unreachable_pub = "warn"
+unsafe_op_in_unsafe_fn = "warn"
+unused_lifetimes = "warn"
+unused_macro_rules = "warn"
+unused_qualifications = "warn"
+
+[lints.rust.rust_2018_idioms]
+level = "warn"
+priority = -1
diff --git a/third_party/rust/toml_datetime/LICENSE-APACHE b/third_party/rust/toml_datetime/LICENSE-APACHE
@@ -0,0 +1,202 @@
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "{}"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright {yyyy} {name of copyright owner}
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+
diff --git a/third_party/rust/toml_datetime/LICENSE-MIT b/third_party/rust/toml_datetime/LICENSE-MIT
@@ -0,0 +1,19 @@
+Copyright (c) Individual contributors
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/third_party/rust/toml_datetime/README.md b/third_party/rust/toml_datetime/README.md
@@ -0,0 +1,20 @@
+# toml_datetime
+
+[](https://crates.io/crates/toml_datetime)
+[](https://docs.rs/toml_datetime)
+
+## License
+
+Licensed under either of
+
+* Apache License, Version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or <https://www.apache.org/licenses/LICENSE-2.0>)
+* MIT license ([LICENSE-MIT](LICENSE-MIT) or <https://opensource.org/license/mit>)
+
+at your option.
+
+### Contribution
+
+Unless you explicitly state otherwise, any contribution intentionally
+submitted for inclusion in the work by you, as defined in the Apache-2.0
+license, shall be dual-licensed as above, without any additional terms or
+conditions.
diff --git a/third_party/rust/toml_datetime/src/datetime.rs b/third_party/rust/toml_datetime/src/datetime.rs
@@ -0,0 +1,951 @@
+use core::fmt;
+use core::str::{self, FromStr};
+
+/// A parsed TOML datetime value
+///
+/// This structure is intended to represent the datetime primitive type that can
+/// be encoded into TOML documents. This type is a parsed version that contains
+/// all metadata internally.
+///
+/// Currently this type is intentionally conservative and only supports
+/// `to_string` as an accessor. Over time though it's intended that it'll grow
+/// more support!
+///
+/// Note that if you're using `Deserialize` to deserialize a TOML document, you
+/// can use this as a placeholder for where you're expecting a datetime to be
+/// specified.
+///
+/// Also note though that while this type implements `Serialize` and
+/// `Deserialize` it's only recommended to use this type with the TOML format,
+/// otherwise encoded in other formats it may look a little odd.
+///
+/// Depending on how the option values are used, this struct will correspond
+/// with one of the following four datetimes from the [TOML v1.0.0 spec]:
+///
+/// | `date` | `time` | `offset` | TOML type |
+/// | --------- | --------- | --------- | ------------------ |
+/// | `Some(_)` | `Some(_)` | `Some(_)` | [Offset Date-Time] |
+/// | `Some(_)` | `Some(_)` | `None` | [Local Date-Time] |
+/// | `Some(_)` | `None` | `None` | [Local Date] |
+/// | `None` | `Some(_)` | `None` | [Local Time] |
+///
+/// **1. Offset Date-Time**: If all the optional values are used, `Datetime`
+/// corresponds to an [Offset Date-Time]. From the TOML v1.0.0 spec:
+///
+/// > To unambiguously represent a specific instant in time, you may use an
+/// > RFC 3339 formatted date-time with offset.
+/// >
+/// > ```toml
+/// > odt1 = 1979-05-27T07:32:00Z
+/// > odt2 = 1979-05-27T00:32:00-07:00
+/// > odt3 = 1979-05-27T00:32:00.999999-07:00
+/// > ```
+/// >
+/// > For the sake of readability, you may replace the T delimiter between date
+/// > and time with a space character (as permitted by RFC 3339 section 5.6).
+/// >
+/// > ```toml
+/// > odt4 = 1979-05-27 07:32:00Z
+/// > ```
+///
+/// **2. Local Date-Time**: If `date` and `time` are given but `offset` is
+/// `None`, `Datetime` corresponds to a [Local Date-Time]. From the spec:
+///
+/// > If you omit the offset from an RFC 3339 formatted date-time, it will
+/// > represent the given date-time without any relation to an offset or
+/// > timezone. It cannot be converted to an instant in time without additional
+/// > information. Conversion to an instant, if required, is implementation-
+/// > specific.
+/// >
+/// > ```toml
+/// > ldt1 = 1979-05-27T07:32:00
+/// > ldt2 = 1979-05-27T00:32:00.999999
+/// > ```
+///
+/// **3. Local Date**: If only `date` is given, `Datetime` corresponds to a
+/// [Local Date]; see the docs for [`Date`].
+///
+/// **4. Local Time**: If only `time` is given, `Datetime` corresponds to a
+/// [Local Time]; see the docs for [`Time`].
+///
+/// [TOML v1.0.0 spec]: https://toml.io/en/v1.0.0
+/// [Offset Date-Time]: https://toml.io/en/v1.0.0#offset-date-time
+/// [Local Date-Time]: https://toml.io/en/v1.0.0#local-date-time
+/// [Local Date]: https://toml.io/en/v1.0.0#local-date
+/// [Local Time]: https://toml.io/en/v1.0.0#local-time
+// NOTE(review): the derived `Ord`/`PartialOrd` compare fields in declaration
+// order (date, then time, then offset); this is a total order over values but
+// not a chronological ordering across the different TOML datetime kinds.
+#[derive(PartialEq, Eq, PartialOrd, Ord, Copy, Clone, Debug)]
+pub struct Datetime {
+    /// Optional date.
+    /// Required for: *Offset Date-Time*, *Local Date-Time*, *Local Date*.
+    pub date: Option<Date>,
+
+    /// Optional time.
+    /// Required for: *Offset Date-Time*, *Local Date-Time*, *Local Time*.
+    pub time: Option<Time>,
+
+    /// Optional offset.
+    /// Required for: *Offset Date-Time*.
+    pub offset: Option<Offset>,
+}
+
+// Currently serde itself doesn't have a datetime type, so we map our `Datetime`
+// to a special value in the serde data model. Namely one with these special
+// fields/struct names.
+//
+// In general the TOML encoder/decoder will catch this and not literally emit
+// these strings but rather emit datetimes as they're intended.
+#[cfg(feature = "serde")]
+pub(crate) const FIELD: &str = "$__toml_private_datetime";
+#[cfg(feature = "serde")]
+pub(crate) const NAME: &str = "$__toml_private_Datetime";
+/// Returns `true` if `name` is the private sentinel struct name ([`NAME`])
+/// used to smuggle datetimes through the serde data model.
+#[cfg(feature = "serde")]
+pub(crate) fn is_datetime(name: &'static str) -> bool {
+    name == NAME
+}
+
+/// A parsed TOML date value
+///
+/// May be part of a [`Datetime`]. Alone, `Date` corresponds to a [Local Date].
+/// From the TOML v1.0.0 spec:
+///
+/// > If you include only the date portion of an RFC 3339 formatted date-time,
+/// > it will represent that entire day without any relation to an offset or
+/// > timezone.
+/// >
+/// > ```toml
+/// > ld1 = 1979-05-27
+/// > ```
+///
+/// [Local Date]: https://toml.io/en/v1.0.0#local-date
+// NOTE(review): all fields are public, so the documented ranges are not
+// enforced at construction time; they are only checked when parsing text
+// via the `FromStr` implementation below.
+#[derive(PartialEq, Eq, PartialOrd, Ord, Copy, Clone, Debug)]
+pub struct Date {
+    /// Year: four digits
+    pub year: u16,
+    /// Month: 1 to 12
+    pub month: u8,
+    /// Day: 1 to {28, 29, 30, 31} (based on month/year)
+    pub day: u8,
+}
+
+/// A parsed TOML time value
+///
+/// May be part of a [`Datetime`]. Alone, `Time` corresponds to a [Local Time].
+/// From the TOML v1.0.0 spec:
+///
+/// > If you include only the time portion of an RFC 3339 formatted date-time,
+/// > it will represent that time of day without any relation to a specific
+/// > day or any offset or timezone.
+/// >
+/// > ```toml
+/// > lt1 = 07:32:00
+/// > lt2 = 00:32:00.999999
+/// > ```
+/// >
+/// > Millisecond precision is required. Further precision of fractional
+/// > seconds is implementation-specific. If the value contains greater
+/// > precision than the implementation can support, the additional precision
+/// > must be truncated, not rounded.
+///
+/// [Local Time]: https://toml.io/en/v1.0.0#local-time
+// NOTE(review): as with `Date`, the public fields mean the documented ranges
+// are not enforced on construction — only the `FromStr` parser validates them.
+#[derive(PartialEq, Eq, PartialOrd, Ord, Copy, Clone, Debug)]
+pub struct Time {
+    /// Hour: 0 to 23
+    pub hour: u8,
+    /// Minute: 0 to 59
+    pub minute: u8,
+    /// Second: 0 to {58, 59, 60} (based on leap second rules)
+    pub second: u8,
+    /// Nanosecond: 0 to `999_999_999`
+    pub nanosecond: u32,
+}
+
+/// A parsed TOML time offset
+///
+// NOTE(review): `Z` and `Custom { minutes: 0 }` denote the same UTC offset
+// but are distinct values under the derived `PartialEq`/`Ord`.
+#[derive(PartialEq, Eq, PartialOrd, Ord, Copy, Clone, Debug)]
+pub enum Offset {
+    /// > A suffix which, when applied to a time, denotes a UTC offset of 00:00;
+    /// > often spoken "Zulu" from the ICAO phonetic alphabet representation of
+    /// > the letter "Z". --- [RFC 3339 section 2]
+    ///
+    /// [RFC 3339 section 2]: https://datatracker.ietf.org/doc/html/rfc3339#section-2
+    Z,
+
+    /// Offset between local time and UTC
+    Custom {
+        /// Minutes: -`1_440..1_440`
+        minutes: i16,
+    },
+}
+
+impl Datetime {
+    /// Human-readable name of the TOML type this value represents, chosen by
+    /// which of the three optional fields are populated.
+    #[cfg(feature = "serde")]
+    fn type_name(&self) -> &'static str {
+        match (
+            self.date.is_some(),
+            self.time.is_some(),
+            self.offset.is_some(),
+        ) {
+            (true, true, true) => "offset datetime",
+            (true, true, false) => "local datetime",
+            (true, false, false) => Date::type_name(),
+            (false, true, false) => Time::type_name(),
+            // All other combinations (e.g. offset without a time) are invalid
+            // per the table in the `Datetime` type docs.
+            _ => unreachable!("unsupported datetime combination"),
+        }
+    }
+}
+
+impl Date {
+    /// Type name used when describing this value (e.g. in messages built
+    /// via `Datetime::type_name`).
+    #[cfg(feature = "serde")]
+    fn type_name() -> &'static str {
+        "local date"
+    }
+}
+
+impl Time {
+    /// Type name used when describing this value (e.g. in messages built
+    /// via `Datetime::type_name`).
+    #[cfg(feature = "serde")]
+    fn type_name() -> &'static str {
+        "local time"
+    }
+}
+
+// Converting a bare `Date` yields a *Local Date*: no time, no offset.
+impl From<Date> for Datetime {
+    fn from(other: Date) -> Self {
+        Self {
+            date: Some(other),
+            time: None,
+            offset: None,
+        }
+    }
+}
+
+// Converting a bare `Time` yields a *Local Time*: no date, no offset.
+impl From<Time> for Datetime {
+    fn from(other: Time) -> Self {
+        Self {
+            date: None,
+            time: Some(other),
+            offset: None,
+        }
+    }
+}
+
+// Gated on `alloc` because this delegates to `Time`'s `Display`, which
+// allocates when formatting fractional seconds.
+#[cfg(feature = "alloc")]
+impl fmt::Display for Datetime {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        if let Some(ref date) = self.date {
+            write!(f, "{date}")?;
+        }
+        if let Some(ref time) = self.time {
+            // Emit the `T` delimiter only for full date-times; a lone time
+            // prints with no delimiter.
+            if self.date.is_some() {
+                write!(f, "T")?;
+            }
+            write!(f, "{time}")?;
+        }
+        if let Some(ref offset) = self.offset {
+            write!(f, "{offset}")?;
+        }
+        Ok(())
+    }
+}
+
+// Zero-padded `YYYY-MM-DD`, matching the TOML/RFC 3339 full-date format.
+impl fmt::Display for Date {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        write!(f, "{:04}-{:02}-{:02}", self.year, self.month, self.day)
+    }
+}
+
+// Gated on `alloc` because the fractional-second path uses `alloc::format!`.
+#[cfg(feature = "alloc")]
+impl fmt::Display for Time {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        write!(f, "{:02}:{:02}:{:02}", self.hour, self.minute, self.second)?;
+        // Fractional seconds are omitted entirely when zero; otherwise they
+        // are rendered at nanosecond precision with trailing zeros trimmed.
+        if self.nanosecond != 0 {
+            let s = alloc::format!("{:09}", self.nanosecond);
+            write!(f, ".{}", s.trim_end_matches('0'))?;
+        }
+        Ok(())
+    }
+}
+
+// Renders either the literal `Z` or a signed `±HH:MM` offset.
+impl fmt::Display for Offset {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        match *self {
+            Self::Z => write!(f, "Z"),
+            Self::Custom { mut minutes } => {
+                // Normalize to a sign character plus a non-negative magnitude.
+                // NOTE(review): assumes `minutes` stays within the documented
+                // -1_440..1_440 range, so negation cannot overflow `i16`.
+                let mut sign = '+';
+                if minutes < 0 {
+                    minutes *= -1;
+                    sign = '-';
+                }
+                let hours = minutes / 60;
+                let minutes = minutes % 60;
+                write!(f, "{sign}{hours:02}:{minutes:02}")
+            }
+        }
+    }
+}
+
+impl FromStr for Datetime {
+ type Err = DatetimeParseError;
+
+ fn from_str(date: &str) -> Result<Self, DatetimeParseError> {
+ // Accepted formats:
+ //
+ // 0000-00-00T00:00:00.00Z
+ // 0000-00-00T00:00:00.00
+ // 0000-00-00
+ // 00:00:00.00
+ //
+ // ```abnf
+ // ;; Date and Time (as defined in RFC 3339)
+ //
+ // date-time = offset-date-time / local-date-time / local-date / local-time
+ //
+ // date-fullyear = 4DIGIT
+ // date-month = 2DIGIT ; 01-12
+ // date-mday = 2DIGIT ; 01-28, 01-29, 01-30, 01-31 based on month/year
+ // time-delim = "T" / %x20 ; T, t, or space
+ // time-hour = 2DIGIT ; 00-23
+ // time-minute = 2DIGIT ; 00-59
+ // time-second = 2DIGIT ; 00-58, 00-59, 00-60 based on leap second rules
+ // time-secfrac = "." 1*DIGIT
+ // time-numoffset = ( "+" / "-" ) time-hour ":" time-minute
+ // time-offset = "Z" / time-numoffset
+ //
+ // partial-time = time-hour ":" time-minute ":" time-second [ time-secfrac ]
+ // full-date = date-fullyear "-" date-month "-" date-mday
+ // full-time = partial-time time-offset
+ //
+ // ;; Offset Date-Time
+ //
+ // offset-date-time = full-date time-delim full-time
+ //
+ // ;; Local Date-Time
+ //
+ // local-date-time = full-date time-delim partial-time
+ //
+ // ;; Local Date
+ //
+ // local-date = full-date
+ //
+ // ;; Local Time
+ //
+ // local-time = partial-time
+ // ```
+ let mut result = Self {
+ date: None,
+ time: None,
+ offset: None,
+ };
+
+ let mut lexer = Lexer::new(date);
+
+ let digits = lexer
+ .next()
+ .ok_or(DatetimeParseError::new().expected("year or hour"))?;
+ digits
+ .is(TokenKind::Digits)
+ .map_err(|err| err.expected("year or hour"))?;
+ let sep = lexer
+ .next()
+ .ok_or(DatetimeParseError::new().expected("`-` (YYYY-MM) or `:` (HH:MM)"))?;
+ match sep.kind {
+ TokenKind::Dash => {
+ let year = digits;
+ let month = lexer
+ .next()
+ .ok_or_else(|| DatetimeParseError::new().what("date").expected("month"))?;
+ month
+ .is(TokenKind::Digits)
+ .map_err(|err| err.what("date").expected("month"))?;
+ let sep = lexer.next().ok_or(
+ DatetimeParseError::new()
+ .what("date")
+ .expected("`-` (MM-DD)"),
+ )?;
+ sep.is(TokenKind::Dash)
+ .map_err(|err| err.what("date").expected("`-` (MM-DD)"))?;
+ let day = lexer
+ .next()
+ .ok_or(DatetimeParseError::new().what("date").expected("day"))?;
+ day.is(TokenKind::Digits)
+ .map_err(|err| err.what("date").expected("day"))?;
+
+ if year.raw.len() != 4 {
+ return Err(DatetimeParseError::new()
+ .what("date")
+ .expected("a four-digit year (YYYY)"));
+ }
+ if month.raw.len() != 2 {
+ return Err(DatetimeParseError::new()
+ .what("date")
+ .expected("a two-digit month (MM)"));
+ }
+ if day.raw.len() != 2 {
+ return Err(DatetimeParseError::new()
+ .what("date")
+ .expected("a two-digit day (DD)"));
+ }
+ let date = Date {
+ year: year.raw.parse().map_err(|_err| DatetimeParseError::new())?,
+ month: month
+ .raw
+ .parse()
+ .map_err(|_err| DatetimeParseError::new())?,
+ day: day.raw.parse().map_err(|_err| DatetimeParseError::new())?,
+ };
+ if date.month < 1 || date.month > 12 {
+ return Err(DatetimeParseError::new()
+ .what("date")
+ .expected("month between 01 and 12"));
+ }
+ let is_leap_year =
+ (date.year % 4 == 0) && ((date.year % 100 != 0) || (date.year % 400 == 0));
+ let (max_days_in_month, expected_day) = match date.month {
+ 2 if is_leap_year => (29, "day between 01 and 29"),
+ 2 => (28, "day between 01 and 28"),
+ 4 | 6 | 9 | 11 => (30, "day between 01 and 30"),
+ _ => (31, "day between 01 and 31"),
+ };
+ if date.day < 1 || date.day > max_days_in_month {
+ return Err(DatetimeParseError::new()
+ .what("date")
+ .expected(expected_day));
+ }
+
+ result.date = Some(date);
+ }
+ TokenKind::Colon => lexer = Lexer::new(date),
+ _ => {
+ return Err(DatetimeParseError::new().expected("`-` (YYYY-MM) or `:` (HH:MM)"));
+ }
+ }
+
+ // Next parse the "partial-time" if available
+ let partial_time = if result.date.is_some() {
+ let sep = lexer.next();
+ match sep {
+ Some(token) if matches!(token.kind, TokenKind::T | TokenKind::Space) => true,
+ Some(_token) => {
+ return Err(DatetimeParseError::new()
+ .what("date-time")
+ .expected("`T` between date and time"));
+ }
+ None => false,
+ }
+ } else {
+ result.date.is_none()
+ };
+
+ if partial_time {
+ let hour = lexer
+ .next()
+ .ok_or_else(|| DatetimeParseError::new().what("time").expected("hour"))?;
+ hour.is(TokenKind::Digits)
+ .map_err(|err| err.what("time").expected("hour"))?;
+ let sep = lexer.next().ok_or(
+ DatetimeParseError::new()
+ .what("time")
+ .expected("`:` (HH:MM)"),
+ )?;
+ sep.is(TokenKind::Colon)
+ .map_err(|err| err.what("time").expected("`:` (HH:MM)"))?;
+ let minute = lexer
+ .next()
+ .ok_or(DatetimeParseError::new().what("time").expected("minute"))?;
+ minute
+ .is(TokenKind::Digits)
+ .map_err(|err| err.what("time").expected("minute"))?;
+ let sep = lexer.next().ok_or(
+ DatetimeParseError::new()
+ .what("time")
+ .expected("`:` (MM:SS)"),
+ )?;
+ sep.is(TokenKind::Colon)
+ .map_err(|err| err.what("time").expected("`:` (MM:SS)"))?;
+ let second = lexer
+ .next()
+ .ok_or(DatetimeParseError::new().what("time").expected("second"))?;
+ second
+ .is(TokenKind::Digits)
+ .map_err(|err| err.what("time").expected("second"))?;
+
+ let nanosecond = if lexer.clone().next().map(|t| t.kind) == Some(TokenKind::Dot) {
+ let sep = lexer.next().ok_or(DatetimeParseError::new())?;
+ sep.is(TokenKind::Dot)?;
+ let nanosecond = lexer.next().ok_or(
+ DatetimeParseError::new()
+ .what("time")
+ .expected("nanosecond"),
+ )?;
+ nanosecond
+ .is(TokenKind::Digits)
+ .map_err(|err| err.what("time").expected("nanosecond"))?;
+ Some(nanosecond)
+ } else {
+ None
+ };
+
+ if hour.raw.len() != 2 {
+ return Err(DatetimeParseError::new()
+ .what("time")
+ .expected("a two-digit hour (HH)"));
+ }
+ if minute.raw.len() != 2 {
+ return Err(DatetimeParseError::new()
+ .what("time")
+ .expected("a two-digit minute (MM)"));
+ }
+ if second.raw.len() != 2 {
+ return Err(DatetimeParseError::new()
+ .what("time")
+ .expected("a two-digit second (SS)"));
+ }
+
+ let time = Time {
+ hour: hour.raw.parse().map_err(|_err| DatetimeParseError::new())?,
+ minute: minute
+ .raw
+ .parse()
+ .map_err(|_err| DatetimeParseError::new())?,
+ second: second
+ .raw
+ .parse()
+ .map_err(|_err| DatetimeParseError::new())?,
+ nanosecond: nanosecond.map(|t| s_to_nanoseconds(t.raw)).unwrap_or(0),
+ };
+
+ if time.hour > 23 {
+ return Err(DatetimeParseError::new()
+ .what("time")
+ .expected("hour between 00 and 23"));
+ }
+ if time.minute > 59 {
+ return Err(DatetimeParseError::new()
+ .what("time")
+ .expected("minute between 00 and 59"));
+ }
+ // 00-58, 00-59, 00-60 based on leap second rules
+ if time.second > 60 {
+ return Err(DatetimeParseError::new()
+ .what("time")
+ .expected("second between 00 and 60"));
+ }
+ if time.nanosecond > 999_999_999 {
+ return Err(DatetimeParseError::new()
+ .what("time")
+ .expected("nanoseconds overflowed"));
+ }
+
+ result.time = Some(time);
+ }
+
+ // And finally, parse the offset
+ if result.date.is_some() && result.time.is_some() {
+ match lexer.next() {
+ Some(token) if token.kind == TokenKind::Z => {
+ result.offset = Some(Offset::Z);
+ }
+ Some(token) if matches!(token.kind, TokenKind::Plus | TokenKind::Dash) => {
+ let sign = if token.kind == TokenKind::Plus { 1 } else { -1 };
+ let hours = lexer
+ .next()
+ .ok_or(DatetimeParseError::new().what("offset").expected("hour"))?;
+ hours
+ .is(TokenKind::Digits)
+ .map_err(|err| err.what("offset").expected("hour"))?;
+ let sep = lexer.next().ok_or(
+ DatetimeParseError::new()
+ .what("offset")
+ .expected("`:` (HH:MM)"),
+ )?;
+ sep.is(TokenKind::Colon)
+ .map_err(|err| err.what("offset").expected("`:` (HH:MM)"))?;
+ let minutes = lexer
+ .next()
+ .ok_or(DatetimeParseError::new().what("offset").expected("minute"))?;
+ minutes
+ .is(TokenKind::Digits)
+ .map_err(|err| err.what("offset").expected("minute"))?;
+
+ if hours.raw.len() != 2 {
+ return Err(DatetimeParseError::new()
+ .what("offset")
+ .expected("a two-digit hour (HH)"));
+ }
+ if minutes.raw.len() != 2 {
+ return Err(DatetimeParseError::new()
+ .what("offset")
+ .expected("a two-digit minute (MM)"));
+ }
+
+ let hours = hours
+ .raw
+ .parse::<u8>()
+ .map_err(|_err| DatetimeParseError::new())?;
+ let minutes = minutes
+ .raw
+ .parse::<u8>()
+ .map_err(|_err| DatetimeParseError::new())?;
+
+ if hours > 23 {
+ return Err(DatetimeParseError::new()
+ .what("offset")
+ .expected("hours between 00 and 23"));
+ }
+ if minutes > 59 {
+ return Err(DatetimeParseError::new()
+ .what("offset")
+ .expected("minutes between 00 and 59"));
+ }
+
+ let total_minutes = sign * (hours as i16 * 60 + minutes as i16);
+
+ if !((-24 * 60)..=(24 * 60)).contains(&total_minutes) {
+ return Err(DatetimeParseError::new().what("offset"));
+ }
+
+ result.offset = Some(Offset::Custom {
+ minutes: total_minutes,
+ });
+ }
+ Some(_token) => {
+ return Err(DatetimeParseError::new()
+ .what("offset")
+ .expected("`Z`, +OFFSET, -OFFSET"));
+ }
+ None => {}
+ }
+ }
+
+ // Return an error if we didn't hit eof, otherwise return our parsed
+ // date
+ if lexer.unknown().is_some() {
+ return Err(DatetimeParseError::new());
+ }
+
+ Ok(result)
+ }
+}
+
+/// Convert the fractional-second digit string of a datetime into nanoseconds.
+///
+/// The first digit is worth 1e8 ns (0.1s), the second 1e7, and so on; digits
+/// beyond the ninth exceed nanosecond precision and are ignored (truncation,
+/// not rounding).
+///
+/// Panics if `input` contains a non-ASCII-digit byte. Callers lex the input
+/// as `TokenKind::Digits` before calling this, so a panic here is a bug in
+/// the caller, not bad user input.
+fn s_to_nanoseconds(input: &str) -> u32 {
+    let mut nanosecond = 0;
+    for (i, byte) in input.bytes().enumerate() {
+        if byte.is_ascii_digit() {
+            // Only the first 9 digits fit in nanosecond resolution.
+            if i < 9 {
+                let p = 10_u32.pow(8 - i as u32);
+                nanosecond += p * u32::from(byte - b'0');
+            }
+        } else {
+            panic!("invalid nanoseconds {input:?}");
+        }
+    }
+    nanosecond
+}
+
+/// A single lexed unit of a datetime string: its classification plus the raw
+/// slice it was lexed from (kept so callers can validate digit counts and
+/// parse the text, e.g. `hour.raw.len() != 2`, `hour.raw.parse()`).
+#[derive(Copy, Clone)]
+struct Token<'s> {
+    kind: TokenKind,
+    raw: &'s str,
+}
+
+impl Token<'_> {
+    /// Assert that this token has the expected `kind`.
+    ///
+    /// Returns a bare [`DatetimeParseError`]; callers attach context via
+    /// `.what(..)` / `.expected(..)` in a `map_err`.
+    fn is(&self, kind: TokenKind) -> Result<(), DatetimeParseError> {
+        if self.kind == kind {
+            Ok(())
+        } else {
+            Err(DatetimeParseError::new())
+        }
+    }
+}
+
+/// Classification of a lexed datetime token. See the `Iterator` impl on
+/// [`Lexer`] for how input bytes map onto these kinds.
+#[derive(Copy, Clone, PartialEq, Eq)]
+enum TokenKind {
+    /// One or more consecutive ASCII digits, grouped into a single token.
+    Digits,
+    /// `-` (date separator or negative offset sign).
+    Dash,
+    /// `:` (time component separator).
+    Colon,
+    /// `.` (fractional-second separator).
+    Dot,
+    /// `T` or `t` (date/time separator).
+    T,
+    /// A single space (alternate date/time separator).
+    Space,
+    /// `Z` or `z` (UTC offset).
+    Z,
+    /// `+` (positive offset sign).
+    Plus,
+    /// Any other byte; consumes the remainder of the stream.
+    Unknown,
+}
+
+/// Tokenizer over a datetime string.
+///
+/// `Copy`/`Clone` is deliberate: parsers peek ahead cheaply by cloning the
+/// lexer and calling `next()` on the clone (e.g. `lexer.clone().next()`).
+#[derive(Copy, Clone)]
+struct Lexer<'s> {
+    // Unconsumed remainder of the input; each `next()` advances this slice.
+    stream: &'s str,
+}
+
+impl<'s> Lexer<'s> {
+    fn new(input: &'s str) -> Self {
+        Self { stream: input }
+    }
+
+    /// Drain whatever input remains as a single `Unknown` token, or `None`
+    /// if the stream is already exhausted.
+    ///
+    /// Used after parsing completes: any `Some` here means trailing garbage,
+    /// which the caller turns into a parse error.
+    fn unknown(&mut self) -> Option<Token<'s>> {
+        let remaining = self.stream.len();
+        if remaining == 0 {
+            return None;
+        }
+        let raw = self.stream;
+        // Leave an empty slice behind so subsequent calls return `None`.
+        self.stream = &self.stream[remaining..remaining];
+        Some(Token {
+            kind: TokenKind::Unknown,
+            raw,
+        })
+    }
+}
+
+impl<'s> Iterator for Lexer<'s> {
+    type Item = Token<'s>;
+
+    /// Lex the next token, or `None` at end of input.
+    ///
+    /// A run of consecutive ASCII digits becomes one `Digits` token; the
+    /// recognized punctuation bytes become single-byte tokens (lowercase
+    /// `t`/`z` are accepted alongside uppercase, as RFC 3339 permits); any
+    /// other byte swallows the rest of the stream as `Unknown`.
+    fn next(&mut self) -> Option<Self::Item> {
+        let (kind, end) = match self.stream.as_bytes().first()? {
+            b'0'..=b'9' => {
+                // Extend through the digit run; if the whole remaining
+                // stream is digits, take all of it.
+                let end = self
+                    .stream
+                    .as_bytes()
+                    .iter()
+                    .position(|b| !b.is_ascii_digit())
+                    .unwrap_or(self.stream.len());
+                (TokenKind::Digits, end)
+            }
+            b'-' => (TokenKind::Dash, 1),
+            b':' => (TokenKind::Colon, 1),
+            b'T' | b't' => (TokenKind::T, 1),
+            b' ' => (TokenKind::Space, 1),
+            b'Z' | b'z' => (TokenKind::Z, 1),
+            b'+' => (TokenKind::Plus, 1),
+            b'.' => (TokenKind::Dot, 1),
+            _ => (TokenKind::Unknown, self.stream.len()),
+        };
+        // `end` is a byte index; every recognized token boundary is ASCII,
+        // and the Unknown arm uses the full length, so the split is always
+        // on a char boundary.
+        let (raw, rest) = self.stream.split_at(end);
+        self.stream = rest;
+        Some(Token { kind, raw })
+    }
+}
+
+/// Error returned from parsing a `Datetime` in the `FromStr` implementation.
+#[derive(Debug, Clone)]
+#[non_exhaustive]
+pub struct DatetimeParseError {
+    // Which component failed ("date", "time", "offset"), if known.
+    what: Option<&'static str>,
+    // What the parser was looking for when it failed, if known.
+    expected: Option<&'static str>,
+}
+
+impl DatetimeParseError {
+    /// A bare error with no context; callers layer on `what`/`expected`.
+    fn new() -> Self {
+        Self {
+            what: None,
+            expected: None,
+        }
+    }
+    /// Builder: record which component ("date", "time", "offset") failed.
+    fn what(mut self, what: &'static str) -> Self {
+        self.what = Some(what);
+        self
+    }
+    /// Builder: record what the parser expected at the failure point.
+    fn expected(mut self, expected: &'static str) -> Self {
+        self.expected = Some(expected);
+        self
+    }
+}
+
+impl fmt::Display for DatetimeParseError {
+    /// Render as `invalid <what>[, expected <expected>]`, falling back to
+    /// the generic "invalid datetime" when no component was recorded.
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        if let Some(what) = self.what {
+            write!(f, "invalid {what}")?;
+        } else {
+            "invalid datetime".fmt(f)?;
+        }
+        if let Some(expected) = self.expected {
+            write!(f, ", expected {expected}")?;
+        }
+        Ok(())
+    }
+}
+
+// With `std` available, implement the real `Error` trait; in `no_std`
+// builds that still enable serde, implement serde_core's `StdError` shim
+// instead so the error can flow through serde APIs.
+#[cfg(feature = "std")]
+impl std::error::Error for DatetimeParseError {}
+#[cfg(all(not(feature = "std"), feature = "serde"))]
+impl serde_core::de::StdError for DatetimeParseError {}
+
+#[cfg(feature = "serde")]
+#[cfg(feature = "alloc")]
+impl serde_core::ser::Serialize for Datetime {
+    /// Serialize as a single-field struct whose marker name/field (`NAME` /
+    /// `FIELD`) datetime-aware formats can recognize (see `is_datetime`),
+    /// with the value carried as the `Display` string rendering.
+    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
+    where
+        S: serde_core::ser::Serializer,
+    {
+        use crate::alloc::string::ToString as _;
+        use serde_core::ser::SerializeStruct;
+
+        let mut s = serializer.serialize_struct(NAME, 1)?;
+        s.serialize_field(FIELD, &self.to_string())?;
+        s.end()
+    }
+}
+
+#[cfg(feature = "serde")]
+#[cfg(feature = "alloc")]
+impl serde_core::ser::Serialize for Date {
+    /// Serialize by widening to a [`Datetime`] (date component only) and
+    /// reusing its struct-with-marker encoding.
+    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
+    where
+        S: serde_core::ser::Serializer,
+    {
+        Datetime::from(*self).serialize(serializer)
+    }
+}
+
+#[cfg(feature = "serde")]
+#[cfg(feature = "alloc")]
+impl serde_core::ser::Serialize for Time {
+    /// Serialize by widening to a [`Datetime`] (time component only) and
+    /// reusing its struct-with-marker encoding.
+    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
+    where
+        S: serde_core::ser::Serializer,
+    {
+        Datetime::from(*self).serialize(serializer)
+    }
+}
+
+#[cfg(feature = "serde")]
+impl<'de> serde_core::de::Deserialize<'de> for Datetime {
+    /// Deserialize the struct-with-marker encoding produced by `Serialize`:
+    /// a map whose single key is the crate's marker field and whose value is
+    /// the datetime's string form.
+    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
+    where
+        D: serde_core::de::Deserializer<'de>,
+    {
+        struct DatetimeVisitor;
+
+        impl<'de> serde_core::de::Visitor<'de> for DatetimeVisitor {
+            type Value = Datetime;
+
+            fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {
+                formatter.write_str("a TOML datetime")
+            }
+
+            fn visit_map<V>(self, mut visitor: V) -> Result<Datetime, V::Error>
+            where
+                V: serde_core::de::MapAccess<'de>,
+            {
+                // The key must be the marker field; `DatetimeKey` rejects
+                // anything else, and an empty map is an error here.
+                let value = visitor.next_key::<DatetimeKey>()?;
+                if value.is_none() {
+                    return Err(serde_core::de::Error::custom("datetime key not found"));
+                }
+                // The value is the string rendering; parse it back.
+                let v: DatetimeFromString = visitor.next_value()?;
+                Ok(v.value)
+            }
+        }
+
+        static FIELDS: [&str; 1] = [FIELD];
+        deserializer.deserialize_struct(NAME, &FIELDS, DatetimeVisitor)
+    }
+}
+
+#[cfg(feature = "serde")]
+impl<'de> serde_core::de::Deserialize<'de> for Date {
+    /// Deserialize a full [`Datetime`] and require it to be date-only
+    /// (no time, no offset); anything else is an `invalid_type` error
+    /// describing what was actually found.
+    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
+    where
+        D: serde_core::de::Deserializer<'de>,
+    {
+        match Datetime::deserialize(deserializer)? {
+            Datetime {
+                date: Some(date),
+                time: None,
+                offset: None,
+            } => Ok(date),
+            datetime => Err(serde_core::de::Error::invalid_type(
+                serde_core::de::Unexpected::Other(datetime.type_name()),
+                &Self::type_name(),
+            )),
+        }
+    }
+}
+
+#[cfg(feature = "serde")]
+impl<'de> serde_core::de::Deserialize<'de> for Time {
+    /// Deserialize a full [`Datetime`] and require it to be time-only
+    /// (no date, no offset); anything else is an `invalid_type` error
+    /// describing what was actually found.
+    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
+    where
+        D: serde_core::de::Deserializer<'de>,
+    {
+        match Datetime::deserialize(deserializer)? {
+            Datetime {
+                date: None,
+                time: Some(time),
+                offset: None,
+            } => Ok(time),
+            datetime => Err(serde_core::de::Error::invalid_type(
+                serde_core::de::Unexpected::Other(datetime.type_name()),
+                &Self::type_name(),
+            )),
+        }
+    }
+}
+
+/// Map-key deserializer that accepts only the crate's datetime marker field.
+///
+/// Used by `Datetime::deserialize` to verify the incoming map is the
+/// struct-with-marker encoding before reading the value.
+#[cfg(feature = "serde")]
+struct DatetimeKey;
+
+#[cfg(feature = "serde")]
+impl<'de> serde_core::de::Deserialize<'de> for DatetimeKey {
+    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
+    where
+        D: serde_core::de::Deserializer<'de>,
+    {
+        struct FieldVisitor;
+
+        impl serde_core::de::Visitor<'_> for FieldVisitor {
+            type Value = ();
+
+            fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {
+                formatter.write_str("a valid datetime field")
+            }
+
+            fn visit_str<E>(self, s: &str) -> Result<(), E>
+            where
+                E: serde_core::de::Error,
+            {
+                // Any key other than the marker field means this map is not
+                // the datetime encoding.
+                if s == FIELD {
+                    Ok(())
+                } else {
+                    Err(serde_core::de::Error::custom(
+                        "expected field with custom name",
+                    ))
+                }
+            }
+        }
+
+        deserializer.deserialize_identifier(FieldVisitor)?;
+        Ok(Self)
+    }
+}
+
+/// Newtype that deserializes a [`Datetime`] from its string rendering via
+/// `FromStr`, surfacing the parse error as a serde custom error.
+#[cfg(feature = "serde")]
+pub(crate) struct DatetimeFromString {
+    pub(crate) value: Datetime,
+}
+
+#[cfg(feature = "serde")]
+impl<'de> serde_core::de::Deserialize<'de> for DatetimeFromString {
+    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
+    where
+        D: serde_core::de::Deserializer<'de>,
+    {
+        struct Visitor;
+
+        impl serde_core::de::Visitor<'_> for Visitor {
+            type Value = DatetimeFromString;
+
+            fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {
+                formatter.write_str("string containing a datetime")
+            }
+
+            fn visit_str<E>(self, s: &str) -> Result<DatetimeFromString, E>
+            where
+                E: serde_core::de::Error,
+            {
+                // Delegates to `Datetime`'s `FromStr`; parse failures become
+                // serde custom errors carrying the Display message.
+                match s.parse() {
+                    Ok(date) => Ok(DatetimeFromString { value: date }),
+                    Err(e) => Err(serde_core::de::Error::custom(e)),
+                }
+            }
+        }
+
+        deserializer.deserialize_str(Visitor)
+    }
+}
diff --git a/third_party/rust/toml_datetime/src/de.rs b/third_party/rust/toml_datetime/src/de.rs
@@ -0,0 +1,156 @@
+//! Deserialization support for [`Datetime`][crate::Datetime]
+
+use alloc::string::ToString;
+
+use serde_core::de::value::BorrowedStrDeserializer;
+use serde_core::de::IntoDeserializer;
+
+/// Check if deserializing a [`Datetime`][crate::Datetime]
+///
+/// `name` is the struct name a `Deserializer` was handed; this forwards to
+/// the crate-internal check (presumably against the datetime marker name
+/// used by the Serialize/Deserialize impls — see `crate::datetime`).
+pub fn is_datetime(name: &'static str) -> bool {
+    crate::datetime::is_datetime(name)
+}
+
+/// Deserializer / format support for emitting [`Datetime`][crate::Datetime]
+///
+/// Acts as a `MapAccess` that yields exactly one entry: the crate's marker
+/// field as the key, then the datetime's string rendering as the value.
+pub struct DatetimeDeserializer<E> {
+    // `Some` until the single key/value pair has been emitted; doubles as
+    // the "are we done" flag for `next_key_seed`.
+    date: Option<crate::Datetime>,
+    // Pins the caller-chosen serde error type without storing one.
+    _error: core::marker::PhantomData<E>,
+}
+
+impl<E> DatetimeDeserializer<E> {
+    /// Create a deserializer to emit [`Datetime`][crate::Datetime]
+    pub fn new(date: crate::Datetime) -> Self {
+        Self {
+            date: Some(date),
+            _error: Default::default(),
+        }
+    }
+}
+
+impl<'de, E> serde_core::de::MapAccess<'de> for DatetimeDeserializer<E>
+where
+    E: serde_core::de::Error,
+{
+    type Error = E;
+
+    fn next_key_seed<K>(&mut self, seed: K) -> Result<Option<K::Value>, Self::Error>
+    where
+        K: serde_core::de::DeserializeSeed<'de>,
+    {
+        // Emit the marker field name once; afterwards report end-of-map.
+        if self.date.is_some() {
+            seed.deserialize(BorrowedStrDeserializer::new(crate::datetime::FIELD))
+                .map(Some)
+        } else {
+            Ok(None)
+        }
+    }
+
+    fn next_value_seed<V>(&mut self, seed: V) -> Result<V::Value, Self::Error>
+    where
+        V: serde_core::de::DeserializeSeed<'de>,
+    {
+        // `take()` consumes the datetime so the next `next_key_seed` ends
+        // the map; the value is its string rendering.
+        if let Some(date) = self.date.take() {
+            seed.deserialize(date.to_string().into_deserializer())
+        } else {
+            // MapAccess contract violation by the caller, not user input.
+            panic!("next_value_seed called before next_key_seed")
+        }
+    }
+}
+
+/// Integrate [`Datetime`][crate::Datetime] into an untagged deserialize
+///
+/// Lets a visitor peek at a map's first key: if it is the crate's datetime
+/// marker, the map *is* a datetime and its value is consumed here; otherwise
+/// the ordinary key is handed back so the caller can keep deserializing.
+#[cfg(feature = "alloc")]
+pub enum VisitMap<'de> {
+    /// The map was deserialized as a [Datetime][crate::Datetime] value
+    Datetime(crate::Datetime),
+    /// The map is of an unknown format and needs further deserialization
+    Key(alloc::borrow::Cow<'de, str>),
+}
+
+impl<'de> VisitMap<'de> {
+    /// Determine the type of the map by deserializing it
+    pub fn next_key_seed<V: serde_core::de::MapAccess<'de>>(
+        visitor: &mut V,
+    ) -> Result<Option<Self>, V::Error> {
+        // `DatetimeOrTable` writes the key into `key` — `Some(key)` for an
+        // ordinary key, `None` when it matched the datetime marker.
+        let mut key = None;
+        let Some(()) = visitor.next_key_seed(DatetimeOrTable::new(&mut key))? else {
+            // Empty map: nothing to classify.
+            return Ok(None);
+        };
+        let result = if let Some(key) = key {
+            VisitMap::Key(key)
+        } else {
+            // Marker key seen: the value must be the datetime string.
+            let date: crate::datetime::DatetimeFromString = visitor.next_value()?;
+            VisitMap::Datetime(date.value)
+        };
+        Ok(Some(result))
+    }
+}
+
+/// Seed used by [`VisitMap::next_key_seed`] to classify a map's first key.
+///
+/// After deserializing: `*key` is `None` if the key was the crate's datetime
+/// marker field, or `Some(key_text)` for an ordinary table key.
+struct DatetimeOrTable<'m, 'de> {
+    key: &'m mut Option<alloc::borrow::Cow<'de, str>>,
+}
+
+impl<'m, 'de> DatetimeOrTable<'m, 'de> {
+    fn new(key: &'m mut Option<alloc::borrow::Cow<'de, str>>) -> Self {
+        // Reset the slot so a stale value can't be mistaken for this key.
+        *key = None;
+        Self { key }
+    }
+}
+
+impl<'de> serde_core::de::DeserializeSeed<'de> for DatetimeOrTable<'_, 'de> {
+    type Value = ();
+
+    fn deserialize<D>(self, deserializer: D) -> Result<Self::Value, D::Error>
+    where
+        D: serde_core::de::Deserializer<'de>,
+    {
+        deserializer.deserialize_any(self)
+    }
+}
+
+impl<'de> serde_core::de::Visitor<'de> for DatetimeOrTable<'_, 'de> {
+    type Value = ();
+
+    fn expecting(&self, formatter: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
+        formatter.write_str("a string key")
+    }
+
+    // Transient key: must copy the text to own it.
+    fn visit_str<E>(self, s: &str) -> Result<Self::Value, E>
+    where
+        E: serde_core::de::Error,
+    {
+        if s == crate::datetime::FIELD {
+            *self.key = None;
+            Ok(())
+        } else {
+            use crate::alloc::borrow::ToOwned as _;
+            *self.key = Some(alloc::borrow::Cow::Owned(s.to_owned()));
+            Ok(())
+        }
+    }
+
+    // Key borrowed from the input ('de): keep it zero-copy.
+    fn visit_borrowed_str<E>(self, s: &'de str) -> Result<Self::Value, E>
+    where
+        E: serde_core::de::Error,
+    {
+        if s == crate::datetime::FIELD {
+            *self.key = None;
+            Ok(())
+        } else {
+            *self.key = Some(alloc::borrow::Cow::Borrowed(s));
+            Ok(())
+        }
+    }
+
+    // Already-owned key: reuse the allocation instead of copying.
+    #[allow(unused_qualifications)]
+    fn visit_string<E>(self, s: alloc::string::String) -> Result<Self::Value, E>
+    where
+        E: serde_core::de::Error,
+    {
+        if s == crate::datetime::FIELD {
+            *self.key = None;
+            Ok(())
+        } else {
+            *self.key = Some(alloc::borrow::Cow::Owned(s));
+            Ok(())
+        }
+    }
+}
diff --git a/third_party/rust/toml_datetime/src/lib.rs b/third_party/rust/toml_datetime/src/lib.rs
@@ -0,0 +1,39 @@
+//! A [TOML]-compatible datetime type
+//!
+//! [TOML]: https://github.com/toml-lang/toml
+
+#![cfg_attr(docsrs, feature(doc_cfg))]
+#![cfg_attr(all(not(feature = "std"), not(test)), no_std)]
+#![warn(missing_docs)]
+#![warn(clippy::std_instead_of_core)]
+#![warn(clippy::std_instead_of_alloc)]
+// Makes rustc abort compilation if there are any unsafe blocks in the crate.
+// Presence of this annotation is picked up by tools such as cargo-geiger
+// and lets them ensure that there is indeed no unsafe code as opposed to
+// something they couldn't detect (e.g. unsafe added via macro expansion, etc).
+#![forbid(unsafe_code)]
+#![warn(clippy::print_stderr)]
+#![warn(clippy::print_stdout)]
+
+#[cfg(feature = "alloc")]
+#[allow(unused_extern_crates)]
+extern crate alloc;
+
+mod datetime;
+
+#[cfg(feature = "serde")]
+#[cfg(feature = "alloc")]
+pub mod de;
+#[cfg(feature = "serde")]
+#[cfg(feature = "alloc")]
+pub mod ser;
+
+pub use crate::datetime::Date;
+pub use crate::datetime::Datetime;
+pub use crate::datetime::DatetimeParseError;
+pub use crate::datetime::Offset;
+pub use crate::datetime::Time;
+
+// Compile the README's code samples as doctests; `cfg(doctest)` keeps this
+// marker type out of normal builds.
+#[doc = include_str!("../README.md")]
+#[cfg(doctest)]
+pub struct ReadmeDoctests;
diff --git a/third_party/rust/toml_datetime/src/ser.rs b/third_party/rust/toml_datetime/src/ser.rs
@@ -0,0 +1,245 @@
+//! Serialization support for [`Datetime`][crate::Datetime]
+
+/// Check if serializing a [`Datetime`][crate::Datetime]
+///
+/// `name` is the struct name handed to a `Serializer`; this forwards to the
+/// same crate-internal marker-name check used on the deserialize side.
+pub fn is_datetime(name: &'static str) -> bool {
+    crate::datetime::is_datetime(name)
+}
+
+/// See [`DatetimeSerializer`]
+#[derive(Debug)]
+#[non_exhaustive]
+pub enum SerializerError {
+    /// Unsupported datetime format
+    InvalidFormat(crate::DatetimeParseError),
+    /// Unsupported serialization protocol
+    InvalidProtocol,
+}
+
+impl serde_core::ser::Error for SerializerError {
+    /// Any ad-hoc serde error maps to `InvalidProtocol`; the message is
+    /// intentionally discarded since this serializer only ever accepts the
+    /// datetime string protocol.
+    fn custom<T>(_msg: T) -> Self
+    where
+        T: core::fmt::Display,
+    {
+        Self::InvalidProtocol
+    }
+}
+
+impl core::fmt::Display for SerializerError {
+    fn fmt(&self, formatter: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
+        match self {
+            // Defer to the underlying parse error's message.
+            Self::InvalidFormat(e) => e.fmt(formatter),
+            Self::InvalidProtocol => "invalid serialization protocol".fmt(formatter),
+        }
+    }
+}
+
+// Real `Error` trait with `std`; serde_core's `StdError` shim in `no_std`
+// builds that enable serde.
+#[cfg(feature = "std")]
+impl std::error::Error for SerializerError {}
+#[cfg(all(not(feature = "std"), feature = "serde"))]
+impl serde_core::de::StdError for SerializerError {}
+
+/// Serializer / format support for emitting [`Datetime`][crate::Datetime]
+///
+/// A `SerializeStruct`-shaped sink: feed it fields via [`serialize_field`]
+/// and finish with [`end`]. Only the crate's datetime marker field is
+/// captured; all other fields are silently ignored.
+///
+/// [`serialize_field`]: Self::serialize_field
+/// [`end`]: Self::end
+#[derive(Default)]
+pub struct DatetimeSerializer {
+    // Set once the marker field has been seen and parsed.
+    value: Option<crate::Datetime>,
+}
+
+impl DatetimeSerializer {
+    /// Create a serializer to emit [`Datetime`][crate::Datetime]
+    pub fn new() -> Self {
+        Self { value: None }
+    }
+
+    /// See [`serde_core::ser::SerializeStruct::serialize_field`]
+    pub fn serialize_field<T>(
+        &mut self,
+        key: &'static str,
+        value: &T,
+    ) -> Result<(), SerializerError>
+    where
+        T: serde_core::ser::Serialize + ?Sized,
+    {
+        // Only the marker field carries the datetime; its value must
+        // serialize as a string parseable into `Datetime`.
+        if key == crate::datetime::FIELD {
+            self.value = Some(value.serialize(DatetimeFieldSerializer::default())?);
+        }
+
+        Ok(())
+    }
+
+    /// See [`serde_core::ser::SerializeStruct::end`]
+    ///
+    /// Errors with `InvalidProtocol` if the marker field was never provided.
+    pub fn end(self) -> Result<crate::Datetime, SerializerError> {
+        self.value.ok_or(SerializerError::InvalidProtocol)
+    }
+}
+
+/// Value serializer that accepts exactly one shape: a string, which is
+/// parsed into a [`crate::Datetime`] via `FromStr`.
+///
+/// Every other primitive rejects with `InvalidProtocol`, and all compound
+/// protocols are ruled out statically via `Impossible` associated types.
+#[derive(Default)]
+struct DatetimeFieldSerializer {}
+
+impl serde_core::ser::Serializer for DatetimeFieldSerializer {
+    type Ok = crate::Datetime;
+    type Error = SerializerError;
+    type SerializeSeq = serde_core::ser::Impossible<Self::Ok, Self::Error>;
+    type SerializeTuple = serde_core::ser::Impossible<Self::Ok, Self::Error>;
+    type SerializeTupleStruct = serde_core::ser::Impossible<Self::Ok, Self::Error>;
+    type SerializeTupleVariant = serde_core::ser::Impossible<Self::Ok, Self::Error>;
+    type SerializeMap = serde_core::ser::Impossible<Self::Ok, Self::Error>;
+    type SerializeStruct = serde_core::ser::Impossible<Self::Ok, Self::Error>;
+    type SerializeStructVariant = serde_core::ser::Impossible<Self::Ok, Self::Error>;
+
+    fn serialize_bool(self, _value: bool) -> Result<Self::Ok, Self::Error> {
+        Err(SerializerError::InvalidProtocol)
+    }
+
+    fn serialize_i8(self, _value: i8) -> Result<Self::Ok, Self::Error> {
+        Err(SerializerError::InvalidProtocol)
+    }
+
+    fn serialize_i16(self, _value: i16) -> Result<Self::Ok, Self::Error> {
+        Err(SerializerError::InvalidProtocol)
+    }
+
+    fn serialize_i32(self, _value: i32) -> Result<Self::Ok, Self::Error> {
+        Err(SerializerError::InvalidProtocol)
+    }
+
+    fn serialize_i64(self, _value: i64) -> Result<Self::Ok, Self::Error> {
+        Err(SerializerError::InvalidProtocol)
+    }
+
+    fn serialize_u8(self, _value: u8) -> Result<Self::Ok, Self::Error> {
+        Err(SerializerError::InvalidProtocol)
+    }
+
+    fn serialize_u16(self, _value: u16) -> Result<Self::Ok, Self::Error> {
+        Err(SerializerError::InvalidProtocol)
+    }
+
+    fn serialize_u32(self, _value: u32) -> Result<Self::Ok, Self::Error> {
+        Err(SerializerError::InvalidProtocol)
+    }
+
+    fn serialize_u64(self, _value: u64) -> Result<Self::Ok, Self::Error> {
+        Err(SerializerError::InvalidProtocol)
+    }
+
+    fn serialize_f32(self, _value: f32) -> Result<Self::Ok, Self::Error> {
+        Err(SerializerError::InvalidProtocol)
+    }
+
+    fn serialize_f64(self, _value: f64) -> Result<Self::Ok, Self::Error> {
+        Err(SerializerError::InvalidProtocol)
+    }
+
+    fn serialize_char(self, _value: char) -> Result<Self::Ok, Self::Error> {
+        Err(SerializerError::InvalidProtocol)
+    }
+
+    // The only accepted input: parse the string as a TOML datetime.
+    fn serialize_str(self, v: &str) -> Result<Self::Ok, Self::Error> {
+        v.parse::<crate::Datetime>()
+            .map_err(SerializerError::InvalidFormat)
+    }
+
+    fn serialize_bytes(self, _value: &[u8]) -> Result<Self::Ok, Self::Error> {
+        Err(SerializerError::InvalidProtocol)
+    }
+
+    fn serialize_none(self) -> Result<Self::Ok, Self::Error> {
+        Err(SerializerError::InvalidProtocol)
+    }
+
+    fn serialize_some<T>(self, _value: &T) -> Result<Self::Ok, Self::Error>
+    where
+        T: serde_core::ser::Serialize + ?Sized,
+    {
+        Err(SerializerError::InvalidProtocol)
+    }
+
+    fn serialize_unit(self) -> Result<Self::Ok, Self::Error> {
+        Err(SerializerError::InvalidProtocol)
+    }
+
+    fn serialize_unit_struct(self, _name: &'static str) -> Result<Self::Ok, Self::Error> {
+        Err(SerializerError::InvalidProtocol)
+    }
+
+    fn serialize_unit_variant(
+        self,
+        _name: &'static str,
+        _variant_index: u32,
+        _variant: &'static str,
+    ) -> Result<Self::Ok, Self::Error> {
+        Err(SerializerError::InvalidProtocol)
+    }
+
+    fn serialize_newtype_struct<T>(
+        self,
+        _name: &'static str,
+        _value: &T,
+    ) -> Result<Self::Ok, Self::Error>
+    where
+        T: serde_core::ser::Serialize + ?Sized,
+    {
+        Err(SerializerError::InvalidProtocol)
+    }
+
+    fn serialize_newtype_variant<T>(
+        self,
+        _name: &'static str,
+        _variant_index: u32,
+        _variant: &'static str,
+        _value: &T,
+    ) -> Result<Self::Ok, Self::Error>
+    where
+        T: serde_core::ser::Serialize + ?Sized,
+    {
+        Err(SerializerError::InvalidProtocol)
+    }
+
+    fn serialize_seq(self, _len: Option<usize>) -> Result<Self::SerializeSeq, Self::Error> {
+        Err(SerializerError::InvalidProtocol)
+    }
+
+    fn serialize_tuple(self, _len: usize) -> Result<Self::SerializeTuple, Self::Error> {
+        Err(SerializerError::InvalidProtocol)
+    }
+
+    fn serialize_tuple_struct(
+        self,
+        _name: &'static str,
+        _len: usize,
+    ) -> Result<Self::SerializeTupleStruct, Self::Error> {
+        Err(SerializerError::InvalidProtocol)
+    }
+
+    fn serialize_tuple_variant(
+        self,
+        _name: &'static str,
+        _variant_index: u32,
+        _variant: &'static str,
+        _len: usize,
+    ) -> Result<Self::SerializeTupleVariant, Self::Error> {
+        Err(SerializerError::InvalidProtocol)
+    }
+
+    fn serialize_map(self, _len: Option<usize>) -> Result<Self::SerializeMap, Self::Error> {
+        Err(SerializerError::InvalidProtocol)
+    }
+
+    fn serialize_struct(
+        self,
+        _name: &'static str,
+        _len: usize,
+    ) -> Result<Self::SerializeStruct, Self::Error> {
+        Err(SerializerError::InvalidProtocol)
+    }
+
+    fn serialize_struct_variant(
+        self,
+        _name: &'static str,
+        _variant_index: u32,
+        _variant: &'static str,
+        _len: usize,
+    ) -> Result<Self::SerializeStructVariant, Self::Error> {
+        Err(SerializerError::InvalidProtocol)
+    }
+}
diff --git a/third_party/rust/toml_parser/.cargo-checksum.json b/third_party/rust/toml_parser/.cargo-checksum.json
@@ -0,0 +1 @@
+{"files":{"Cargo.lock":"5d3b67250c7d1f51e2aa44e7a8974ea058f5429d7a3c05a74eca6e615c51c299","Cargo.toml":"db95a7817a874ffca0ff946140eb2e75db191b5e49044b69fdc921a1b915c4d2","LICENSE-APACHE":"c6596eb7be8581c18be736c846fb9173b69eccf6ef94c5135893ec56bd92ba08","LICENSE-MIT":"6efb0476a1cc085077ed49357026d8c173bf33017278ef440f222fb9cbcb66e6","README.md":"d570aaa6d3b9e23a91915eaf382a4bfc7b1ac07b6eef5204903f065da755f4d5","src/debug.rs":"925ce2dcd3936244cb5f0316bd51edff5fd0f12ab95932a8b3e668705d257f08","src/decoder/mod.rs":"b8407b1fa8157495c9501d875befaa07d2a01179e6d9db2fda9eb242a34a4059","src/decoder/scalar.rs":"aa8c4a55d41a7578c09c450579a8723097bf43c933003c2bbf4167949904fbf2","src/decoder/string.rs":"7b9ea1bfb15a6e072a39f4e06f3ca9f24ac304caebd92763f5220d57b45ece37","src/decoder/ws.rs":"b0f53c265bef89b2dc1beca8c3f1d83d261468a6f1a25684991b191954cb221d","src/error.rs":"458abec1fc2bc5b8c5edfcfe488783646dbcda1f6a9451c7db4bcb9521c5a113","src/lexer/mod.rs":"729c8d5a2c680d039dbabee4ec0279a4f91ddfcb87214c71728ab41f56b633c9","src/lexer/test.rs":"dd39a1b6e1aef4b73c40e8ade87d9fee0b065c19aca036960c51837e8b31be10","src/lexer/token.rs":"92e72b3b58140d2dd581e8b4dc8cd329c5f5f195c9f6c72be5a08fa0d7b4e251","src/lib.rs":"d26e6723317857205b3c84bfc9c3e79c4f7c7ba0bd69d48b237263a4bebb9674","src/macros.rs":"01ba4719c80b6fe911b091a7c05124b64eeece964e09c058ef8f9805daca546b","src/parser/document.rs":"161bd75d9d7f17b8574fdc1f5d9207d95f3c976dcbe5f5357eb5855a6dcfd73f","src/parser/event.rs":"e3a9a2f377b782b78bdd437c2980a7a01c102c57d29bf5dc586ceed1d7a319d8","src/parser/mod.rs":"7f12b2fd441f9483c226881be8e09835a05951638ee6128ea6fddf5e58fd8921","src/source.rs":"50730ce8a6bf67639cb370f5ec1f091bd90174366ecb92f2f60bdddeba5b0450"},"package":"c0cbe268d35bdb4bb5a56a2de88d0ad0eb70af5384a99d648cd4b3d04039800e"}
+\ No newline at end of file
diff --git a/third_party/rust/toml_parser/Cargo.lock b/third_party/rust/toml_parser/Cargo.lock
@@ -0,0 +1,216 @@
+# This file is automatically @generated by Cargo.
+# It is not intended for manual editing.
+version = 3
+
+[[package]]
+name = "anstream"
+version = "0.6.20"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3ae563653d1938f79b1ab1b5e668c87c76a9930414574a6583a7b7e11a8e6192"
+dependencies = [
+ "anstyle",
+ "anstyle-parse",
+ "anstyle-query",
+ "anstyle-wincon",
+ "colorchoice",
+ "is_terminal_polyfill",
+ "utf8parse",
+]
+
+[[package]]
+name = "anstyle"
+version = "1.0.11"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "862ed96ca487e809f1c8e5a8447f6ee2cf102f846893800b20cebdf541fc6bbd"
+
+[[package]]
+name = "anstyle-parse"
+version = "0.2.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4e7644824f0aa2c7b9384579234ef10eb7efb6a0deb83f9630a49594dd9c15c2"
+dependencies = [
+ "utf8parse",
+]
+
+[[package]]
+name = "anstyle-query"
+version = "1.1.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9e231f6134f61b71076a3eab506c379d4f36122f2af15a9ff04415ea4c3339e2"
+dependencies = [
+ "windows-sys",
+]
+
+[[package]]
+name = "anstyle-wincon"
+version = "3.0.10"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3e0633414522a32ffaac8ac6cc8f748e090c5717661fddeea04219e2344f5f2a"
+dependencies = [
+ "anstyle",
+ "once_cell_polyfill",
+ "windows-sys",
+]
+
+[[package]]
+name = "colorchoice"
+version = "1.0.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b05b61dc5112cbb17e4b6cd61790d9845d13888356391624cbe7e41efeac1e75"
+
+[[package]]
+name = "is_terminal_polyfill"
+version = "1.70.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7943c866cc5cd64cbc25b2e01621d07fa8eb2a1a23160ee81ce38704e97b8ecf"
+
+[[package]]
+name = "memchr"
+version = "2.7.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "32a282da65faaf38286cf3be983213fcf1d2e2a58700e808f83f4ea9a4804bc0"
+
+[[package]]
+name = "normalize-line-endings"
+version = "0.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "61807f77802ff30975e01f4f071c8ba10c022052f98b3294119f3e615d13e5be"
+
+[[package]]
+name = "once_cell_polyfill"
+version = "1.70.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a4895175b425cb1f87721b59f0f286c2092bd4af812243672510e1ac53e2e0ad"
+
+[[package]]
+name = "similar"
+version = "2.7.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bbbb5d9659141646ae647b42fe094daf6c6192d1620870b449d9557f748b2daa"
+
+[[package]]
+name = "snapbox"
+version = "0.6.21"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "96dcfc4581e3355d70ac2ee14cfdf81dce3d85c85f1ed9e2c1d3013f53b3436b"
+dependencies = [
+ "anstream",
+ "anstyle",
+ "normalize-line-endings",
+ "similar",
+ "snapbox-macros",
+]
+
+[[package]]
+name = "snapbox-macros"
+version = "0.3.10"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "16569f53ca23a41bb6f62e0a5084aa1661f4814a67fa33696a79073e03a664af"
+dependencies = [
+ "anstream",
+]
+
+[[package]]
+name = "toml_parser"
+version = "1.0.4"
+dependencies = [
+ "anstream",
+ "anstyle",
+ "snapbox",
+ "winnow",
+]
+
+[[package]]
+name = "utf8parse"
+version = "0.2.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821"
+
+[[package]]
+name = "windows-link"
+version = "0.1.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5e6ad25900d524eaabdbbb96d20b4311e1e7ae1699af4fb28c17ae66c80d798a"
+
+[[package]]
+name = "windows-sys"
+version = "0.60.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f2f500e4d28234f72040990ec9d39e3a6b950f9f22d3dba18416c35882612bcb"
+dependencies = [
+ "windows-targets",
+]
+
+[[package]]
+name = "windows-targets"
+version = "0.53.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d5fe6031c4041849d7c496a8ded650796e7b6ecc19df1a431c1a363342e5dc91"
+dependencies = [
+ "windows-link",
+ "windows_aarch64_gnullvm",
+ "windows_aarch64_msvc",
+ "windows_i686_gnu",
+ "windows_i686_gnullvm",
+ "windows_i686_msvc",
+ "windows_x86_64_gnu",
+ "windows_x86_64_gnullvm",
+ "windows_x86_64_msvc",
+]
+
+[[package]]
+name = "windows_aarch64_gnullvm"
+version = "0.53.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "86b8d5f90ddd19cb4a147a5fa63ca848db3df085e25fee3cc10b39b6eebae764"
+
+[[package]]
+name = "windows_aarch64_msvc"
+version = "0.53.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c7651a1f62a11b8cbd5e0d42526e55f2c99886c77e007179efff86c2b137e66c"
+
+[[package]]
+name = "windows_i686_gnu"
+version = "0.53.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c1dc67659d35f387f5f6c479dc4e28f1d4bb90ddd1a5d3da2e5d97b42d6272c3"
+
+[[package]]
+name = "windows_i686_gnullvm"
+version = "0.53.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9ce6ccbdedbf6d6354471319e781c0dfef054c81fbc7cf83f338a4296c0cae11"
+
+[[package]]
+name = "windows_i686_msvc"
+version = "0.53.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "581fee95406bb13382d2f65cd4a908ca7b1e4c2f1917f143ba16efe98a589b5d"
+
+[[package]]
+name = "windows_x86_64_gnu"
+version = "0.53.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2e55b5ac9ea33f2fc1716d1742db15574fd6fc8dadc51caab1c16a3d3b4190ba"
+
+[[package]]
+name = "windows_x86_64_gnullvm"
+version = "0.53.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0a6e035dd0599267ce1ee132e51c27dd29437f63325753051e71dd9e42406c57"
+
+[[package]]
+name = "windows_x86_64_msvc"
+version = "0.53.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "271414315aff87387382ec3d271b52d7ae78726f5d44ac98b4f4030c91880486"
+
+[[package]]
+name = "winnow"
+version = "0.7.13"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "21a0236b59786fed61e2a80582dd500fe61f18b5dca67a4a067d0bc9039339cf"
+dependencies = [
+ "memchr",
+]
diff --git a/third_party/rust/toml_parser/Cargo.toml b/third_party/rust/toml_parser/Cargo.toml
@@ -0,0 +1,196 @@
+# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
+#
+# When uploading crates to the registry Cargo will automatically
+# "normalize" Cargo.toml files for maximal compatibility
+# with all versions of Cargo and also rewrite `path` dependencies
+# to registry (e.g., crates.io) dependencies.
+#
+# If you are reading this file be aware that the original Cargo.toml
+# will likely look very different (and much more reasonable).
+# See Cargo.toml.orig for the original contents.
+
+[package]
+edition = "2021"
+rust-version = "1.76"
+name = "toml_parser"
+version = "1.0.4"
+build = false
+include = [
+ "build.rs",
+ "src/**/*",
+ "Cargo.toml",
+ "Cargo.lock",
+ "LICENSE*",
+ "README.md",
+ "examples/**/*",
+]
+autolib = false
+autobins = false
+autoexamples = false
+autotests = false
+autobenches = false
+description = "Yet another format-preserving TOML parser."
+readme = "README.md"
+keywords = [
+ "encoding",
+ "toml",
+ "no_std",
+]
+categories = [
+ "encoding",
+ "parser-implementations",
+ "parsing",
+ "config",
+]
+license = "MIT OR Apache-2.0"
+repository = "https://github.com/toml-rs/toml"
+
+[package.metadata.docs.rs]
+all-features = true
+rustdoc-args = ["--generate-link-to-definition"]
+
+[[package.metadata.release.pre-release-replacements]]
+file = "CHANGELOG.md"
+search = "Unreleased"
+replace = "{{version}}"
+min = 1
+
+[[package.metadata.release.pre-release-replacements]]
+file = "CHANGELOG.md"
+search = '\.\.\.HEAD'
+replace = "...{{tag_name}}"
+exactly = 1
+
+[[package.metadata.release.pre-release-replacements]]
+file = "CHANGELOG.md"
+search = "ReleaseDate"
+replace = "{{date}}"
+min = 1
+
+[[package.metadata.release.pre-release-replacements]]
+file = "CHANGELOG.md"
+search = "<!-- next-header -->"
+replace = """
+<!-- next-header -->
+## [Unreleased] - ReleaseDate
+"""
+exactly = 1
+
+[[package.metadata.release.pre-release-replacements]]
+file = "CHANGELOG.md"
+search = "<!-- next-url -->"
+replace = """
+<!-- next-url -->
+[Unreleased]: https://github.com/toml-rs/toml/compare/{{tag_name}}...HEAD"""
+exactly = 1
+
+[features]
+alloc = []
+debug = [
+ "std",
+ "dep:anstream",
+ "dep:anstyle",
+]
+default = ["std"]
+simd = ["winnow/simd"]
+std = ["alloc"]
+unsafe = []
+
+[lib]
+name = "toml_parser"
+path = "src/lib.rs"
+
+[dependencies.anstream]
+version = "0.6.20"
+optional = true
+
+[dependencies.anstyle]
+version = "1.0.11"
+optional = true
+
+[dependencies.winnow]
+version = "0.7.13"
+default-features = false
+
+[dev-dependencies.anstream]
+version = "0.6.20"
+features = ["test"]
+
+[dev-dependencies.snapbox]
+version = "0.6.21"
+
+[lints.clippy]
+bool_assert_comparison = "allow"
+branches_sharing_code = "allow"
+checked_conversions = "warn"
+collapsible_else_if = "allow"
+create_dir = "warn"
+dbg_macro = "warn"
+debug_assert_with_mut_call = "warn"
+doc_markdown = "warn"
+empty_enum = "warn"
+enum_glob_use = "warn"
+expl_impl_clone_on_copy = "warn"
+explicit_deref_methods = "warn"
+explicit_into_iter_loop = "warn"
+fallible_impl_from = "warn"
+filter_map_next = "warn"
+flat_map_option = "warn"
+float_cmp_const = "warn"
+fn_params_excessive_bools = "warn"
+from_iter_instead_of_collect = "warn"
+get_first = "allow"
+if_same_then_else = "allow"
+implicit_clone = "warn"
+imprecise_flops = "warn"
+inconsistent_struct_constructor = "warn"
+inefficient_to_string = "warn"
+infinite_loop = "warn"
+invalid_upcast_comparisons = "warn"
+large_digit_groups = "warn"
+large_stack_arrays = "warn"
+large_types_passed_by_value = "warn"
+let_and_return = "allow"
+linkedlist = "warn"
+lossy_float_literal = "warn"
+macro_use_imports = "warn"
+mem_forget = "warn"
+mutex_integer = "warn"
+needless_bool = "allow"
+needless_continue = "allow"
+needless_for_each = "warn"
+negative_feature_names = "warn"
+path_buf_push_overwrite = "warn"
+ptr_as_ptr = "warn"
+rc_mutex = "warn"
+redundant_feature_names = "warn"
+ref_option_ref = "warn"
+rest_pat_in_fully_bound_structs = "warn"
+result_large_err = "allow"
+same_functions_in_if_condition = "warn"
+self_named_module_files = "warn"
+semicolon_if_nothing_returned = "warn"
+str_to_string = "warn"
+string_add = "warn"
+string_add_assign = "warn"
+string_lit_as_bytes = "warn"
+string_to_string = "warn"
+todo = "warn"
+trait_duplication_in_bounds = "warn"
+uninlined_format_args = "warn"
+use_self = "warn"
+verbose_file_reads = "warn"
+wildcard_imports = "warn"
+zero_sized_map_values = "warn"
+
+[lints.rust]
+unnameable_types = "allow"
+unreachable_pub = "warn"
+unsafe_op_in_unsafe_fn = "warn"
+unused_lifetimes = "warn"
+unused_macro_rules = "warn"
+unused_qualifications = "warn"
+
+[lints.rust.rust_2018_idioms]
+level = "warn"
+priority = -1
diff --git a/third_party/rust/toml_parser/LICENSE-APACHE b/third_party/rust/toml_parser/LICENSE-APACHE
@@ -0,0 +1,202 @@
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "{}"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright {yyyy} {name of copyright owner}
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+
diff --git a/third_party/rust/toml_parser/LICENSE-MIT b/third_party/rust/toml_parser/LICENSE-MIT
@@ -0,0 +1,19 @@
+Copyright (c) Individual contributors
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/third_party/rust/toml_parser/README.md b/third_party/rust/toml_parser/README.md
@@ -0,0 +1,23 @@
+# toml_parser
+
+[](https://github.com/toml-rs/toml/actions)
+[](https://codecov.io/gh/toml-rs/toml)
+[](https://crates.io/crates/toml_parser)
+[](https://docs.rs/toml_parser)
+[](https://gitter.im/toml_parser/Lobby)
+
+
+Zero-copy parsing of TOML
+
+## License
+
+Licensed under either of
+
+* Apache License, Version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or <https://www.apache.org/licenses/LICENSE-2.0>)
+* MIT license ([LICENSE-MIT](LICENSE-MIT) or <https://opensource.org/license/mit>)
+
+at your option.
+
+### Contribution
+
+Unless you explicitly state otherwise, any contribution intentionally submitted for inclusion in the work by you, as defined in the Apache-2.0 license, shall be dual licensed as above, without any additional terms or conditions.
diff --git a/third_party/rust/toml_parser/src/debug.rs b/third_party/rust/toml_parser/src/debug.rs
@@ -0,0 +1,140 @@
+use crate::decoder::Encoding;
+use crate::ErrorSink;
+use crate::Span;
+
+pub(crate) struct DebugDepth(core::sync::atomic::AtomicUsize);
+
+impl DebugDepth {
+ pub(crate) fn enter_unchecked(&self) -> usize {
+ self.0.fetch_add(1, core::sync::atomic::Ordering::SeqCst)
+ }
+
+ pub(crate) fn exit_unchecked(&self) {
+ let _ = self.0.fetch_sub(1, core::sync::atomic::Ordering::SeqCst);
+ }
+
+ pub(crate) fn depth(&self) -> usize {
+ self.0.load(core::sync::atomic::Ordering::SeqCst)
+ }
+}
+
+static DEBUG_DEPTH: DebugDepth = DebugDepth(core::sync::atomic::AtomicUsize::new(0));
+
+fn render_event(span: impl Into<Option<Span>>, text: &str, style: anstyle::Style) {
+ #![allow(unexpected_cfgs)] // HACK: fixed in newer versions
+ let span = span.into();
+ let depth = DEBUG_DEPTH.depth().min(20);
+ anstream::eprintln!("{:depth$}{style}{text}: {span:?}{style:#}", "");
+}
+
+pub(crate) struct DebugErrorSink<'s> {
+ sink: &'s mut dyn ErrorSink,
+}
+
+impl<'s> DebugErrorSink<'s> {
+ pub(crate) fn new(sink: &'s mut dyn ErrorSink) -> Self {
+ Self { sink }
+ }
+}
+
+impl ErrorSink for DebugErrorSink<'_> {
+ fn report_error(&mut self, error: crate::ParseError) {
+ render_event(
+ error.unexpected(),
+ &format!("{error:?}"),
+ anstyle::AnsiColor::Red.on_default(),
+ );
+ self.sink.report_error(error);
+ }
+}
+
+pub(crate) struct DebugEventReceiver<'r> {
+ receiver: &'r mut dyn crate::parser::EventReceiver,
+}
+
+impl<'r> DebugEventReceiver<'r> {
+ pub(crate) fn new(receiver: &'r mut dyn crate::parser::EventReceiver) -> Self {
+ Self { receiver }
+ }
+}
+
+impl crate::parser::EventReceiver for DebugEventReceiver<'_> {
+ fn std_table_open(&mut self, span: Span, error: &mut dyn ErrorSink) {
+ self.receiver.std_table_open(span, error);
+ render_event(span, "[", anstyle::Style::new() | anstyle::Effects::DIMMED);
+ DEBUG_DEPTH.enter_unchecked();
+ }
+ fn std_table_close(&mut self, span: Span, error: &mut dyn ErrorSink) {
+ self.receiver.std_table_close(span, error);
+ DEBUG_DEPTH.exit_unchecked();
+ render_event(span, "]", anstyle::Style::new() | anstyle::Effects::DIMMED);
+ }
+ fn array_table_open(&mut self, span: Span, error: &mut dyn ErrorSink) {
+ self.receiver.array_table_open(span, error);
+ render_event(span, "[[", anstyle::Style::new() | anstyle::Effects::DIMMED);
+ DEBUG_DEPTH.enter_unchecked();
+ }
+ fn array_table_close(&mut self, span: Span, error: &mut dyn ErrorSink) {
+ self.receiver.array_table_close(span, error);
+ DEBUG_DEPTH.exit_unchecked();
+ render_event(span, "]]", anstyle::Style::new() | anstyle::Effects::DIMMED);
+ }
+ fn inline_table_open(&mut self, span: Span, error: &mut dyn ErrorSink) -> bool {
+ let allowed = self.receiver.inline_table_open(span, error);
+ render_event(span, "{", anstyle::Style::new() | anstyle::Effects::DIMMED);
+ DEBUG_DEPTH.enter_unchecked();
+ allowed
+ }
+ fn inline_table_close(&mut self, span: Span, error: &mut dyn ErrorSink) {
+ self.receiver.inline_table_close(span, error);
+ DEBUG_DEPTH.exit_unchecked();
+ render_event(span, "}", anstyle::Style::new() | anstyle::Effects::DIMMED);
+ }
+ fn array_open(&mut self, span: Span, error: &mut dyn ErrorSink) -> bool {
+ let allowed = self.receiver.array_open(span, error);
+ render_event(span, "[", anstyle::Style::new() | anstyle::Effects::DIMMED);
+ DEBUG_DEPTH.enter_unchecked();
+ allowed
+ }
+ fn array_close(&mut self, span: Span, error: &mut dyn ErrorSink) {
+ self.receiver.array_close(span, error);
+ DEBUG_DEPTH.exit_unchecked();
+ render_event(span, "]", anstyle::Style::new() | anstyle::Effects::DIMMED);
+ }
+ fn simple_key(&mut self, span: Span, encoding: Option<Encoding>, error: &mut dyn ErrorSink) {
+ self.receiver.simple_key(span, encoding, error);
+ render_event(span, "<key>", anstyle::AnsiColor::Magenta.on_default());
+ }
+ fn key_sep(&mut self, span: Span, error: &mut dyn ErrorSink) {
+ self.receiver.key_sep(span, error);
+ render_event(span, ".", anstyle::Style::new() | anstyle::Effects::DIMMED);
+ }
+ fn key_val_sep(&mut self, span: Span, error: &mut dyn ErrorSink) {
+ self.receiver.key_val_sep(span, error);
+ render_event(span, "=", anstyle::Style::new() | anstyle::Effects::DIMMED);
+ }
+ fn scalar(&mut self, span: Span, encoding: Option<Encoding>, error: &mut dyn ErrorSink) {
+ self.receiver.scalar(span, encoding, error);
+ render_event(span, "<scalar>", anstyle::AnsiColor::Green.on_default());
+ }
+ fn value_sep(&mut self, span: Span, error: &mut dyn ErrorSink) {
+ self.receiver.value_sep(span, error);
+ render_event(span, ",", anstyle::Style::new() | anstyle::Effects::DIMMED);
+ }
+ fn whitespace(&mut self, span: Span, error: &mut dyn ErrorSink) {
+ self.receiver.whitespace(span, error);
+ render_event(span, "<whitespace>", anstyle::AnsiColor::Cyan.on_default());
+ }
+ fn comment(&mut self, span: Span, error: &mut dyn ErrorSink) {
+ self.receiver.comment(span, error);
+ render_event(span, "<comment>", anstyle::AnsiColor::Cyan.on_default());
+ }
+ fn newline(&mut self, span: Span, error: &mut dyn ErrorSink) {
+ self.receiver.newline(span, error);
+ render_event(span, "<newline>", anstyle::AnsiColor::Cyan.on_default());
+ }
+ fn error(&mut self, span: Span, error: &mut dyn ErrorSink) {
+ self.receiver.error(span, error);
+ render_event(span, "<error>", anstyle::AnsiColor::Red.on_default());
+ }
+}
diff --git a/third_party/rust/toml_parser/src/decoder/mod.rs b/third_party/rust/toml_parser/src/decoder/mod.rs
@@ -0,0 +1,117 @@
+//! Decode [raw][crate::Raw] TOML values into Rust native types
+//!
+//! See
+//! - [`Raw::decode_key`][crate::Raw::decode_key]
+//! - [`Raw::decode_scalar`][crate::Raw::decode_scalar]
+//! - [`Raw::decode_whitespace`][crate::Raw::decode_whitespace]
+//! - [`Raw::decode_comment`][crate::Raw::decode_comment]
+//! - [`Raw::decode_newline`][crate::Raw::decode_newline]
+
+#[cfg(feature = "alloc")]
+use alloc::borrow::Cow;
+#[cfg(feature = "alloc")]
+use alloc::string::String;
+
+pub(crate) mod scalar;
+pub(crate) mod string;
+pub(crate) mod ws;
+
+pub use scalar::IntegerRadix;
+pub use scalar::ScalarKind;
+
+#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)]
+#[repr(u8)]
+pub enum Encoding {
+ LiteralString = crate::lexer::APOSTROPHE,
+ BasicString = crate::lexer::QUOTATION_MARK,
+ MlLiteralString = 1,
+ MlBasicString,
+}
+
+impl Encoding {
+ pub const fn description(&self) -> &'static str {
+ match self {
+ Self::LiteralString => crate::lexer::TokenKind::LiteralString.description(),
+ Self::BasicString => crate::lexer::TokenKind::BasicString.description(),
+ Self::MlLiteralString => crate::lexer::TokenKind::MlLiteralString.description(),
+ Self::MlBasicString => crate::lexer::TokenKind::MlBasicString.description(),
+ }
+ }
+}
+
+pub trait StringBuilder<'s> {
+ fn clear(&mut self);
+ #[must_use]
+ fn push_str(&mut self, append: &'s str) -> bool;
+ #[must_use]
+ fn push_char(&mut self, append: char) -> bool;
+}
+
+impl<'s> StringBuilder<'s> for () {
+ fn clear(&mut self) {}
+ fn push_str(&mut self, _append: &'s str) -> bool {
+ true
+ }
+ fn push_char(&mut self, _append: char) -> bool {
+ true
+ }
+}
+
+impl<'s> StringBuilder<'s> for &'s str {
+ fn clear(&mut self) {
+ *self = &self[0..0];
+ }
+ fn push_str(&mut self, append: &'s str) -> bool {
+ if self.is_empty() {
+ *self = append;
+ true
+ } else {
+ false
+ }
+ }
+ fn push_char(&mut self, _append: char) -> bool {
+ false
+ }
+}
+
+#[cfg(feature = "alloc")]
+impl<'s> StringBuilder<'s> for Cow<'s, str> {
+ fn clear(&mut self) {
+ match self {
+ Cow::Borrowed(s) => {
+ s.clear();
+ }
+ Cow::Owned(s) => s.clear(),
+ }
+ }
+ fn push_str(&mut self, append: &'s str) -> bool {
+ match self {
+ Cow::Borrowed(s) => {
+ if !s.push_str(append) {
+ self.to_mut().push_str(append);
+ }
+ }
+ Cow::Owned(s) => s.push_str(append),
+ }
+ true
+ }
+ fn push_char(&mut self, append: char) -> bool {
+ self.to_mut().push(append);
+ true
+ }
+}
+
+#[cfg(feature = "alloc")]
+impl<'s> StringBuilder<'s> for String {
+ fn clear(&mut self) {
+ self.clear();
+ }
+ fn push_str(&mut self, append: &'s str) -> bool {
+ self.push_str(append);
+ true
+ }
+ fn push_char(&mut self, append: char) -> bool {
+ self.push(append);
+ true
+ }
+}
diff --git a/third_party/rust/toml_parser/src/decoder/scalar.rs b/third_party/rust/toml_parser/src/decoder/scalar.rs
@@ -0,0 +1,725 @@
+use winnow::stream::ContainsToken as _;
+use winnow::stream::FindSlice as _;
+use winnow::stream::Offset as _;
+use winnow::stream::Stream as _;
+
+use crate::decoder::StringBuilder;
+use crate::ErrorSink;
+use crate::Expected;
+use crate::ParseError;
+use crate::Raw;
+use crate::Span;
+
/// Reported when a `StringBuilder` refuses content (e.g. a borrowed builder
/// that would need to allocate).
const ALLOCATION_ERROR: &str = "could not allocate for string";
+
/// Category a decoded (unquoted) TOML scalar was classified as.
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)]
pub enum ScalarKind {
    String,
    /// Carries the parsed value, since it is already known at classification time.
    Boolean(bool),
    DateTime,
    Float,
    /// Carries the radix implied by a `0x`/`0o`/`0b` prefix (`Dec` otherwise).
    Integer(IntegerRadix),
}
+
+impl ScalarKind {
+ pub fn description(&self) -> &'static str {
+ match self {
+ Self::String => "string",
+ Self::Boolean(_) => "boolean",
+ Self::DateTime => "date-time",
+ Self::Float => "float",
+ Self::Integer(radix) => radix.description(),
+ }
+ }
+
+ pub fn invalid_description(&self) -> &'static str {
+ match self {
+ Self::String => "invalid string",
+ Self::Boolean(_) => "invalid boolean",
+ Self::DateTime => "invalid date-time",
+ Self::Float => "invalid float",
+ Self::Integer(radix) => radix.invalid_description(),
+ }
+ }
+}
+
/// Numeric base of a TOML integer, derived from its `0x`/`0o`/`0b` prefix.
#[derive(Copy, Clone, Default, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)]
pub enum IntegerRadix {
    /// No prefix: plain decimal.
    #[default]
    Dec,
    Hex,
    Oct,
    Bin,
}
+
+impl IntegerRadix {
+ pub fn description(&self) -> &'static str {
+ match self {
+ Self::Dec => "integer",
+ Self::Hex => "hexadecimal",
+ Self::Oct => "octal",
+ Self::Bin => "binary",
+ }
+ }
+
+ pub fn value(&self) -> u32 {
+ match self {
+ Self::Dec => 10,
+ Self::Hex => 16,
+ Self::Oct => 8,
+ Self::Bin => 2,
+ }
+ }
+
+ pub fn invalid_description(&self) -> &'static str {
+ match self {
+ Self::Dec => "invalid integer number",
+ Self::Hex => "invalid hexadecimal number",
+ Self::Oct => "invalid octal number",
+ Self::Bin => "invalid binary number",
+ }
+ }
+
+ fn validator(&self) -> fn(char) -> bool {
+ match self {
+ Self::Dec => |c| c.is_ascii_digit(),
+ Self::Hex => |c| c.is_ascii_hexdigit(),
+ Self::Oct => |c| matches!(c, '0'..='7'),
+ Self::Bin => |c| matches!(c, '0'..='1'),
+ }
+ }
+}
+
/// Classify and decode a value that appeared without quotes.
///
/// Dispatches on the first byte of `raw`: signs and digits go down the
/// date-time/float/integer paths, `t`/`f`/`i`/`n` are tried as the keywords
/// `true`/`false`/`inf`/`nan`, and anything else is reported as an unquoted
/// string. Decoded text is written into `output`; problems are reported
/// through `error` rather than aborting, so decoding always produces a kind.
pub(crate) fn decode_unquoted_scalar<'i>(
    raw: Raw<'i>,
    output: &mut dyn StringBuilder<'i>,
    error: &mut dyn ErrorSink,
) -> ScalarKind {
    let s = raw.as_str();
    let Some(first) = s.as_bytes().first() else {
        // Empty input: report it as an unquoted string.
        return decode_invalid(raw, output, error);
    };
    match first {
        // number starts
        b'+' | b'-' => {
            let value = &raw.as_str()[1..];
            decode_sign_prefix(raw, value, output, error)
        }
        // Report as if they were numbers because it's most likely a typo
        b'_' => decode_datetime_or_float_or_integer(raw.as_str(), raw, output, error),
        // Date/number starts
        b'0' => decode_zero_prefix(raw.as_str(), false, raw, output, error),
        b'1'..=b'9' => decode_datetime_or_float_or_integer(raw.as_str(), raw, output, error),
        // Report as if they were numbers because it's most likely a typo
        b'.' => {
            let kind = ScalarKind::Float;
            let stream = raw.as_str();
            ensure_float(stream, raw, error);
            decode_float_or_integer(stream, raw, kind, output, error)
        }
        b't' | b'T' => {
            const SYMBOL: &str = "true";
            let kind = ScalarKind::Boolean(true);
            let expected = &[Expected::Literal(SYMBOL)];
            decode_symbol(raw, SYMBOL, kind, expected, output, error)
        }
        b'f' | b'F' => {
            const SYMBOL: &str = "false";
            let kind = ScalarKind::Boolean(false);
            let expected = &[Expected::Literal(SYMBOL)];
            decode_symbol(raw, SYMBOL, kind, expected, output, error)
        }
        b'i' | b'I' => {
            const SYMBOL: &str = "inf";
            let kind = ScalarKind::Float;
            let expected = &[Expected::Literal(SYMBOL)];
            decode_symbol(raw, SYMBOL, kind, expected, output, error)
        }
        b'n' | b'N' => {
            const SYMBOL: &str = "nan";
            let kind = ScalarKind::Float;
            let expected = &[Expected::Literal(SYMBOL)];
            decode_symbol(raw, SYMBOL, kind, expected, output, error)
        }
        _ => decode_invalid(raw, output, error),
    }
}
+
/// Decode a scalar after a leading `+`/`-` sign has been stripped.
///
/// `value` is `raw` with the sign(s) removed; error spans are always
/// computed relative to `raw`. Additional redundant signs are reported and
/// skipped by recursing. Only floats, integers, and the signed keywords
/// `inf`/`nan` may follow a sign.
pub(crate) fn decode_sign_prefix<'i>(
    raw: Raw<'i>,
    value: &'i str,
    output: &mut dyn StringBuilder<'i>,
    error: &mut dyn ErrorSink,
) -> ScalarKind {
    let Some(first) = value.as_bytes().first() else {
        // A bare sign with nothing after it.
        return decode_invalid(raw, output, error);
    };
    match first {
        // number starts
        b'+' | b'-' => {
            let start = value.offset_from(&raw.as_str());
            let end = start + 1;
            error.report_error(
                ParseError::new("redundant numeric sign")
                    .with_context(Span::new_unchecked(0, raw.len()))
                    .with_expected(&[])
                    .with_unexpected(Span::new_unchecked(start, end)),
            );

            // Skip the extra sign and keep going.
            let value = &value[1..];
            decode_sign_prefix(raw, value, output, error)
        }
        // Report as if they were numbers because it's most likely a typo
        b'_' => decode_datetime_or_float_or_integer(value, raw, output, error),
        // Date/number starts
        b'0' => decode_zero_prefix(value, true, raw, output, error),
        b'1'..=b'9' => decode_datetime_or_float_or_integer(value, raw, output, error),
        // Report as if they were numbers because it's most likely a typo
        b'.' => {
            let kind = ScalarKind::Float;
            let stream = raw.as_str();
            ensure_float(stream, raw, error);
            decode_float_or_integer(stream, raw, kind, output, error)
        }
        b'i' | b'I' => {
            const SYMBOL: &str = "inf";
            let kind = ScalarKind::Float;
            if value != SYMBOL {
                // Wrong case or spelling: report, then decode as `inf` anyway.
                let expected = &[Expected::Literal(SYMBOL)];
                let start = value.offset_from(&raw.as_str());
                let end = start + value.len();
                error.report_error(
                    ParseError::new(kind.invalid_description())
                        .with_context(Span::new_unchecked(0, raw.len()))
                        .with_expected(expected)
                        .with_unexpected(Span::new_unchecked(start, end)),
                );
                decode_as(raw, SYMBOL, kind, output, error)
            } else {
                decode_as_is(raw, kind, output, error)
            }
        }
        b'n' | b'N' => {
            const SYMBOL: &str = "nan";
            let kind = ScalarKind::Float;
            if value != SYMBOL {
                // Wrong case or spelling: report, then decode as `nan` anyway.
                let expected = &[Expected::Literal(SYMBOL)];
                let start = value.offset_from(&raw.as_str());
                let end = start + value.len();
                error.report_error(
                    ParseError::new(kind.invalid_description())
                        .with_context(Span::new_unchecked(0, raw.len()))
                        .with_expected(expected)
                        .with_unexpected(Span::new_unchecked(start, end)),
                );
                decode_as(raw, SYMBOL, kind, output, error)
            } else {
                decode_as_is(raw, kind, output, error)
            }
        }
        _ => decode_invalid(raw, output, error),
    }
}
+
+pub(crate) fn decode_zero_prefix<'i>(
+ value: &'i str,
+ signed: bool,
+ raw: Raw<'i>,
+ output: &mut dyn StringBuilder<'i>,
+ error: &mut dyn ErrorSink,
+) -> ScalarKind {
+ debug_assert_eq!(value.as_bytes()[0], b'0');
+ if value.len() == 1 {
+ let kind = ScalarKind::Integer(IntegerRadix::Dec);
+ // No extra validation needed
+ decode_float_or_integer(raw.as_str(), raw, kind, output, error)
+ } else {
+ let radix = value.as_bytes()[1];
+ match radix {
+ b'x' | b'X' => {
+ if signed {
+ error.report_error(
+ ParseError::new("integers with a radix cannot be signed")
+ .with_context(Span::new_unchecked(0, raw.len()))
+ .with_expected(&[])
+ .with_unexpected(Span::new_unchecked(0, 1)),
+ );
+ }
+ if radix == b'X' {
+ let start = value.offset_from(&raw.as_str());
+ let end = start + 2;
+ error.report_error(
+ ParseError::new("radix must be lowercase")
+ .with_context(Span::new_unchecked(0, raw.len()))
+ .with_expected(&[Expected::Literal("0x")])
+ .with_unexpected(Span::new_unchecked(start, end)),
+ );
+ }
+ let radix = IntegerRadix::Hex;
+ let kind = ScalarKind::Integer(radix);
+ let stream = &value[2..];
+ ensure_radixed_value(stream, raw, radix, error);
+ decode_float_or_integer(stream, raw, kind, output, error)
+ }
+ b'o' | b'O' => {
+ if signed {
+ error.report_error(
+ ParseError::new("integers with a radix cannot be signed")
+ .with_context(Span::new_unchecked(0, raw.len()))
+ .with_expected(&[])
+ .with_unexpected(Span::new_unchecked(0, 1)),
+ );
+ }
+ if radix == b'O' {
+ let start = value.offset_from(&raw.as_str());
+ let end = start + 2;
+ error.report_error(
+ ParseError::new("radix must be lowercase")
+ .with_context(Span::new_unchecked(0, raw.len()))
+ .with_expected(&[Expected::Literal("0o")])
+ .with_unexpected(Span::new_unchecked(start, end)),
+ );
+ }
+ let radix = IntegerRadix::Oct;
+ let kind = ScalarKind::Integer(radix);
+ let stream = &value[2..];
+ ensure_radixed_value(stream, raw, radix, error);
+ decode_float_or_integer(stream, raw, kind, output, error)
+ }
+ b'b' | b'B' => {
+ if signed {
+ error.report_error(
+ ParseError::new("integers with a radix cannot be signed")
+ .with_context(Span::new_unchecked(0, raw.len()))
+ .with_expected(&[])
+ .with_unexpected(Span::new_unchecked(0, 1)),
+ );
+ }
+ if radix == b'B' {
+ let start = value.offset_from(&raw.as_str());
+ let end = start + 2;
+ error.report_error(
+ ParseError::new("radix must be lowercase")
+ .with_context(Span::new_unchecked(0, raw.len()))
+ .with_expected(&[Expected::Literal("0b")])
+ .with_unexpected(Span::new_unchecked(start, end)),
+ );
+ }
+ let radix = IntegerRadix::Bin;
+ let kind = ScalarKind::Integer(radix);
+ let stream = &value[2..];
+ ensure_radixed_value(stream, raw, radix, error);
+ decode_float_or_integer(stream, raw, kind, output, error)
+ }
+ b'd' | b'D' => {
+ if signed {
+ error.report_error(
+ ParseError::new("integers with a radix cannot be signed")
+ .with_context(Span::new_unchecked(0, raw.len()))
+ .with_expected(&[])
+ .with_unexpected(Span::new_unchecked(0, 1)),
+ );
+ }
+ let radix = IntegerRadix::Dec;
+ let kind = ScalarKind::Integer(radix);
+ let stream = &value[2..];
+ error.report_error(
+ ParseError::new("redundant integer number prefix")
+ .with_context(Span::new_unchecked(0, raw.len()))
+ .with_expected(&[])
+ .with_unexpected(Span::new_unchecked(0, 2)),
+ );
+ ensure_radixed_value(stream, raw, radix, error);
+ decode_float_or_integer(stream, raw, kind, output, error)
+ }
+ _ => decode_datetime_or_float_or_integer(value, raw, output, error),
+ }
+ }
+}
+
/// Distinguish date-times, floats, and decimal integers once `value` is
/// known to start with digits (or a `_` typo).
///
/// Scans the leading ASCII digit run, then classifies by what follows it:
/// `-`/`:` starts a date or time, a space makes the value invalid,
/// `.`/`e`/`E` makes it a float, `_` continues an integer, and end-of-input
/// means the whole value was a plain decimal integer.
pub(crate) fn decode_datetime_or_float_or_integer<'i>(
    value: &'i str,
    raw: Raw<'i>,
    output: &mut dyn StringBuilder<'i>,
    error: &mut dyn ErrorSink,
) -> ScalarKind {
    let Some(digit_end) = value
        .as_bytes()
        .offset_for(|b| !(b'0'..=b'9').contains_token(b))
    else {
        // Every byte was a digit: a plain decimal integer.
        let kind = ScalarKind::Integer(IntegerRadix::Dec);
        let stream = raw.as_str();
        ensure_no_leading_zero(value, raw, error);
        return decode_float_or_integer(stream, raw, kind, output, error);
    };

    #[cfg(feature = "unsafe")] // SAFETY: ascii digits ensures UTF-8 boundary
    let rest = unsafe { &value.get_unchecked(digit_end..) };
    #[cfg(not(feature = "unsafe"))]
    let rest = &value[digit_end..];

    if rest.starts_with("-") || rest.starts_with(":") {
        // `1979-…` / `07:…`: only dates and times continue with these.
        decode_as_is(raw, ScalarKind::DateTime, output, error)
    } else if rest.contains(" ") {
        decode_invalid(raw, output, error)
    } else if is_float(rest) {
        let kind = ScalarKind::Float;
        let stream = raw.as_str();
        ensure_float(value, raw, error);
        decode_float_or_integer(stream, raw, kind, output, error)
    } else if rest.starts_with("_") {
        // `_` separators keep it an integer; placement is validated later.
        let kind = ScalarKind::Integer(IntegerRadix::Dec);
        let stream = raw.as_str();
        ensure_no_leading_zero(value, raw, error);
        decode_float_or_integer(stream, raw, kind, output, error)
    } else {
        decode_invalid(raw, output, error)
    }
}
+
/// Validate that `value` spells a well-formed TOML float; all problems are
/// reported through `error` with spans relative to `raw`.
///
/// ```abnf
/// float = float-int-part ( exp / frac [ exp ] )
///
/// float-int-part = dec-int
/// frac = decimal-point zero-prefixable-int
/// decimal-point = %x2E ; .
/// zero-prefixable-int = DIGIT *( DIGIT / underscore DIGIT )
///
/// exp = "e" float-exp-part
/// float-exp-part = [ minus / plus ] zero-prefixable-int
/// ```
pub(crate) fn ensure_float<'i>(mut value: &'i str, raw: Raw<'i>, error: &mut dyn ErrorSink) {
    // Integer (mantissa) part: no leading zero allowed.
    ensure_dec_uint(&mut value, raw, false, "invalid mantissa", error);

    if value.starts_with(".") {
        let _ = value.next_token();
        // Fraction digits may start with `0`.
        ensure_dec_uint(&mut value, raw, true, "invalid fraction", error);
    }

    if value.starts_with(['e', 'E']) {
        let _ = value.next_token();
        if value.starts_with(['+', '-']) {
            let _ = value.next_token();
        }
        // Exponent digits may start with `0`.
        ensure_dec_uint(&mut value, raw, true, "invalid exponent", error);
    }

    // Anything left over after mantissa/fraction/exponent is junk.
    if !value.is_empty() {
        let start = value.offset_from(&raw.as_str());
        let end = raw.len();
        error.report_error(
            ParseError::new(ScalarKind::Float.invalid_description())
                .with_context(Span::new_unchecked(0, raw.len()))
                .with_expected(&[])
                .with_unexpected(Span::new_unchecked(start, end)),
        );
    }
}
+
/// Validate one digit run (with `_` separators) of a decimal number,
/// advancing `value` past it.
///
/// Reports a "digits expected" error when the run is empty, and — unless
/// `zero_prefix` is set (fraction/exponent parts may start with `0`) — a
/// leading-zero error for multi-digit runs starting with `0`.
pub(crate) fn ensure_dec_uint<'i>(
    value: &mut &'i str,
    raw: Raw<'i>,
    zero_prefix: bool,
    invalid_description: &'static str,
    error: &mut dyn ErrorSink,
) {
    // Remember where the run began for span/leading-zero checks below.
    let start = *value;
    let mut digit_count = 0;
    while let Some(current) = value.chars().next() {
        if current.is_ascii_digit() {
            digit_count += 1;
        } else if current == '_' {
            // Separators are skipped here; their placement is validated in
            // `decode_float_or_integer`.
        } else {
            break;
        }
        let _ = value.next_token();
    }

    match digit_count {
        0 => {
            let start = start.offset_from(&raw.as_str());
            let end = start;
            error.report_error(
                ParseError::new(invalid_description)
                    .with_context(Span::new_unchecked(0, raw.len()))
                    .with_expected(&[Expected::Description("digits")])
                    .with_unexpected(Span::new_unchecked(start, end)),
            );
        }
        // A single digit can never be a disallowed leading zero.
        1 => {}
        _ if start.starts_with("0") && !zero_prefix => {
            let start = start.offset_from(&raw.as_str());
            let end = start + 1;
            error.report_error(
                ParseError::new("unexpected leading zero")
                    .with_context(Span::new_unchecked(0, raw.len()))
                    .with_expected(&[])
                    .with_unexpected(Span::new_unchecked(start, end)),
            );
        }
        _ => {}
    }
}
+
+pub(crate) fn ensure_no_leading_zero<'i>(value: &'i str, raw: Raw<'i>, error: &mut dyn ErrorSink) {
+ if value.starts_with("0") {
+ let start = value.offset_from(&raw.as_str());
+ let end = start + 1;
+ error.report_error(
+ ParseError::new("unexpected leading zero")
+ .with_context(Span::new_unchecked(0, raw.len()))
+ .with_expected(&[])
+ .with_unexpected(Span::new_unchecked(start, end)),
+ );
+ }
+}
+
/// Validate the digits of a radix-prefixed integer (`value` is the text
/// after the `0x`/`0o`/`0b` prefix): reports a stray sign and every
/// character outside the radix's digit set (`_` separators are allowed
/// here; their placement is checked in `decode_float_or_integer`).
pub(crate) fn ensure_radixed_value(
    value: &str,
    raw: Raw<'_>,
    radix: IntegerRadix,
    error: &mut dyn ErrorSink,
) {
    let invalid = ['+', '-'];
    let value = if let Some(value) = value.strip_prefix(invalid) {
        // Sign after the prefix, e.g. `0x-1`.
        let pos = raw.as_str().find(invalid).unwrap();
        error.report_error(
            ParseError::new("unexpected sign")
                .with_context(Span::new_unchecked(0, raw.len()))
                .with_expected(&[])
                .with_unexpected(Span::new_unchecked(pos, pos + 1)),
        );
        value
    } else {
        value
    };

    let valid = radix.validator();
    for (index, c) in value.char_indices() {
        if !valid(c) && c != '_' {
            let pos = value.offset_from(&raw.as_str()) + index;
            error.report_error(
                ParseError::new(radix.invalid_description())
                    .with_context(Span::new_unchecked(0, raw.len()))
                    .with_unexpected(Span::new_unchecked(pos, pos)),
            );
        }
    }
}
+
/// Copy a validated number into `output`, stripping `_` separators, and
/// return `kind` unchanged.
///
/// While splitting on `_` this also reports separators that are not
/// strictly between digits (leading, trailing, doubled, or adjacent to a
/// non-digit such as `.`/`e`/sign). Spans are relative to `raw`.
pub(crate) fn decode_float_or_integer<'i>(
    stream: &'i str,
    raw: Raw<'i>,
    kind: ScalarKind,
    output: &mut dyn StringBuilder<'i>,
    error: &mut dyn ErrorSink,
) -> ScalarKind {
    output.clear();

    let underscore = "_";

    if has_underscore(stream) {
        if stream.starts_with(underscore) {
            error.report_error(
                ParseError::new("`_` may only go between digits")
                    .with_context(Span::new_unchecked(0, raw.len()))
                    .with_expected(&[])
                    .with_unexpected(Span::new_unchecked(0, underscore.len())),
            );
        }
        if 1 < stream.len() && stream.ends_with(underscore) {
            let start = stream.offset_from(&raw.as_str());
            let end = start + stream.len();
            error.report_error(
                ParseError::new("`_` may only go between digits")
                    .with_context(Span::new_unchecked(0, raw.len()))
                    .with_expected(&[])
                    .with_unexpected(Span::new_unchecked(end - underscore.len(), end)),
            );
        }

        // Each `part` is a separator-free run; validate the characters that
        // touch the separators on either side, then append the run.
        for part in stream.split(underscore) {
            let part_start = part.offset_from(&raw.as_str());
            let part_end = part_start + part.len();

            if 0 < part_start {
                // Check the character just after the preceding `_`.
                let first = part.as_bytes().first().copied().unwrap_or(b'0');
                if !is_any_digit(first, kind) {
                    let start = part_start - 1;
                    let end = part_start;
                    debug_assert_eq!(&raw.as_str()[start..end], underscore);
                    error.report_error(
                        ParseError::new("`_` may only go between digits")
                            .with_context(Span::new_unchecked(0, raw.len()))
                            .with_unexpected(Span::new_unchecked(start, end)),
                    );
                }
            }
            if 1 < part.len() && part_end < raw.len() {
                // Check the character just before the following `_`.
                let last = part.as_bytes().last().copied().unwrap_or(b'0');
                if !is_any_digit(last, kind) {
                    let start = part_end;
                    let end = start + underscore.len();
                    debug_assert_eq!(&raw.as_str()[start..end], underscore);
                    error.report_error(
                        ParseError::new("`_` may only go between digits")
                            .with_context(Span::new_unchecked(0, raw.len()))
                            .with_unexpected(Span::new_unchecked(start, end)),
                    );
                }
            }

            // An interior empty part means two adjacent separators (`1__2`).
            if part.is_empty() && part_start != 0 && part_end != raw.len() {
                let start = part_start;
                let end = start + 1;
                error.report_error(
                    ParseError::new("`_` may only go between digits")
                        .with_context(Span::new_unchecked(0, raw.len()))
                        .with_unexpected(Span::new_unchecked(start, end)),
                );
            }

            if !part.is_empty() && !output.push_str(part) {
                error.report_error(
                    ParseError::new(ALLOCATION_ERROR)
                        .with_unexpected(Span::new_unchecked(part_start, part_end)),
                );
            }
        }
    } else {
        // No separators: the text can be passed through in one piece.
        if !output.push_str(stream) {
            error.report_error(
                ParseError::new(ALLOCATION_ERROR)
                    .with_unexpected(Span::new_unchecked(0, raw.len())),
            );
        }
    }

    kind
}
+
+fn is_any_digit(b: u8, kind: ScalarKind) -> bool {
+ if kind == ScalarKind::Float {
+ is_dec_integer_digit(b)
+ } else {
+ is_any_integer_digit(b)
+ }
+}
+
/// `0-9`, `a-f`, `A-F` — the digit set of the widest integer radix (hex).
fn is_any_integer_digit(b: u8) -> bool {
    b.is_ascii_hexdigit()
}
+
/// `0-9` only — the digit set used inside floats.
fn is_dec_integer_digit(b: u8) -> bool {
    b.is_ascii_digit()
}
+
/// Whether the number's text contains any `_` separator at all.
fn has_underscore(raw: &str) -> bool {
    raw.as_bytes().contains(&b'_')
}
+
/// Whether the text contains a float marker: a decimal point or an exponent.
fn is_float(raw: &str) -> bool {
    raw.bytes().any(|b| matches!(b, b'.' | b'e' | b'E'))
}
+
+pub(crate) fn decode_as_is<'i>(
+ raw: Raw<'i>,
+ kind: ScalarKind,
+ output: &mut dyn StringBuilder<'i>,
+ error: &mut dyn ErrorSink,
+) -> ScalarKind {
+ let kind = decode_as(raw, raw.as_str(), kind, output, error);
+ kind
+}
+
+pub(crate) fn decode_as<'i>(
+ raw: Raw<'i>,
+ symbol: &'i str,
+ kind: ScalarKind,
+ output: &mut dyn StringBuilder<'i>,
+ error: &mut dyn ErrorSink,
+) -> ScalarKind {
+ output.clear();
+ if !output.push_str(symbol) {
+ error.report_error(
+ ParseError::new(ALLOCATION_ERROR).with_unexpected(Span::new_unchecked(0, raw.len())),
+ );
+ }
+ kind
+}
+
+pub(crate) fn decode_symbol<'i>(
+ raw: Raw<'i>,
+ symbol: &'static str,
+ kind: ScalarKind,
+ expected: &'static [Expected],
+ output: &mut dyn StringBuilder<'i>,
+ error: &mut dyn ErrorSink,
+) -> ScalarKind {
+ if raw.as_str() != symbol {
+ if raw.as_str().contains(" ") {
+ return decode_invalid(raw, output, error);
+ } else {
+ error.report_error(
+ ParseError::new(kind.invalid_description())
+ .with_context(Span::new_unchecked(0, raw.len()))
+ .with_expected(expected)
+ .with_unexpected(Span::new_unchecked(0, raw.len())),
+ );
+ }
+ }
+
+ decode_as(raw, symbol, kind, output, error)
+}
+
/// Fallback for text that cannot be any TOML scalar: report a "must be
/// quoted" (or, when the text ends with a quote, "missing opening quote")
/// error, copy the raw text through unchanged, and classify it as a string.
pub(crate) fn decode_invalid<'i>(
    raw: Raw<'i>,
    output: &mut dyn StringBuilder<'i>,
    error: &mut dyn ErrorSink,
) -> ScalarKind {
    // Check the longer (multi-line) delimiters before the single-quote ones.
    if raw.as_str().ends_with("'''") {
        error.report_error(
            ParseError::new("missing opening quote")
                .with_context(Span::new_unchecked(0, raw.len()))
                .with_expected(&[Expected::Literal(r#"'''"#)])
                .with_unexpected(Span::new_unchecked(0, 0)),
        );
    } else if raw.as_str().ends_with(r#"""""#) {
        // NOTE(review): `with_expected` is called twice here; if it replaces
        // rather than appends, the "multi-line basic string" description is
        // lost — confirm against `ParseError::with_expected` semantics.
        error.report_error(
            ParseError::new("missing opening quote")
                .with_context(Span::new_unchecked(0, raw.len()))
                .with_expected(&[Expected::Description("multi-line basic string")])
                .with_expected(&[Expected::Literal(r#"""""#)])
                .with_unexpected(Span::new_unchecked(0, 0)),
        );
    } else if raw.as_str().ends_with("'") {
        error.report_error(
            ParseError::new("missing opening quote")
                .with_context(Span::new_unchecked(0, raw.len()))
                .with_expected(&[Expected::Literal(r#"'"#)])
                .with_unexpected(Span::new_unchecked(0, 0)),
        );
    } else if raw.as_str().ends_with(r#"""#) {
        error.report_error(
            ParseError::new("missing opening quote")
                .with_context(Span::new_unchecked(0, raw.len()))
                .with_expected(&[Expected::Literal(r#"""#)])
                .with_unexpected(Span::new_unchecked(0, 0)),
        );
    } else {
        error.report_error(
            ParseError::new("string values must be quoted")
                .with_context(Span::new_unchecked(0, raw.len()))
                .with_expected(&[Expected::Description("literal string")])
                .with_unexpected(Span::new_unchecked(0, raw.len())),
        );
    }

    // Pass the text through so downstream consumers still see a value.
    output.clear();
    if !output.push_str(raw.as_str()) {
        error.report_error(
            ParseError::new(ALLOCATION_ERROR).with_unexpected(Span::new_unchecked(0, raw.len())),
        );
    }
    ScalarKind::String
}
diff --git a/third_party/rust/toml_parser/src/decoder/string.rs b/third_party/rust/toml_parser/src/decoder/string.rs
@@ -0,0 +1,1265 @@
+use core::ops::RangeInclusive;
+
+use winnow::stream::ContainsToken as _;
+use winnow::stream::Offset as _;
+use winnow::stream::Stream as _;
+
+use crate::decoder::StringBuilder;
+use crate::lexer::APOSTROPHE;
+use crate::lexer::ML_BASIC_STRING_DELIM;
+use crate::lexer::ML_LITERAL_STRING_DELIM;
+use crate::lexer::QUOTATION_MARK;
+use crate::lexer::WSCHAR;
+use crate::ErrorSink;
+use crate::Expected;
+use crate::ParseError;
+use crate::Raw;
+use crate::Span;
+
/// Reported when a `StringBuilder` refuses content (e.g. a borrowed builder
/// that would need to allocate).
const ALLOCATION_ERROR: &str = "could not allocate for string";
+
/// Parse literal string
///
/// Strips the surrounding apostrophes, validates every byte against
/// `literal-char`, and copies the content into `output` verbatim (literal
/// strings have no escapes). Missing quotes are reported but decoding
/// continues with whatever text is there.
///
/// ```bnf
/// ;; Literal String
///
/// literal-string = apostrophe *literal-char apostrophe
///
/// apostrophe = %x27 ; ' apostrophe
///
/// literal-char = %x09 / %x20-26 / %x28-7E / non-ascii
/// ```
pub(crate) fn decode_literal_string<'i>(
    raw: Raw<'i>,
    output: &mut dyn StringBuilder<'i>,
    error: &mut dyn ErrorSink,
) {
    const INVALID_STRING: &str = "invalid literal string";

    output.clear();

    let s = raw.as_str();
    // Recover from a missing opening quote by decoding the text anyway.
    let s = if let Some(stripped) = s.strip_prefix(APOSTROPHE as char) {
        stripped
    } else {
        error.report_error(
            ParseError::new(INVALID_STRING)
                .with_context(Span::new_unchecked(0, raw.len()))
                .with_expected(&[Expected::Literal("'")])
                .with_unexpected(Span::new_unchecked(0, 0)),
        );
        s
    };
    // Same for a missing closing quote.
    let s = if let Some(stripped) = s.strip_suffix(APOSTROPHE as char) {
        stripped
    } else {
        error.report_error(
            ParseError::new(INVALID_STRING)
                .with_context(Span::new_unchecked(0, raw.len()))
                .with_expected(&[Expected::Literal("'")])
                .with_unexpected(Span::new_unchecked(raw.len(), raw.len())),
        );
        s
    };

    // Per-byte validation: no escapes exist, so content needs no transformation.
    for (i, b) in s.as_bytes().iter().enumerate() {
        if !LITERAL_CHAR.contains_token(b) {
            let offset = (&s.as_bytes()[i..]).offset_from(&raw.as_bytes());
            error.report_error(
                ParseError::new(INVALID_STRING)
                    .with_context(Span::new_unchecked(0, raw.len()))
                    .with_expected(&[Expected::Description("non-single-quote visible characters")])
                    .with_unexpected(Span::new_unchecked(offset, offset)),
            );
        }
    }

    if !output.push_str(s) {
        error.report_error(
            ParseError::new(ALLOCATION_ERROR).with_unexpected(Span::new_unchecked(0, raw.len())),
        );
    }
}
+
/// `literal-char = %x09 / %x20-26 / %x28-7E / non-ascii`
///
/// Validation is per-byte, so `NON_ASCII` below deliberately accepts every
/// byte `>= 0x80`; the input is a `&str` and therefore already valid UTF-8.
const LITERAL_CHAR: (
    u8,
    RangeInclusive<u8>,
    RangeInclusive<u8>,
    RangeInclusive<u8>,
) = (0x9, 0x20..=0x26, 0x28..=0x7E, NON_ASCII);

/// `non-ascii = %x80-D7FF / %xE000-10FFFF`
/// - ASCII is 0xxxxxxx
/// - First byte for UTF-8 is 11xxxxxx
/// - Subsequent UTF-8 bytes are 10xxxxxx
const NON_ASCII: RangeInclusive<u8> = 0x80..=0xff;
+
/// Parse multi-line literal string
///
/// Strips the `'''` delimiters (and the newline TOML allows right after the
/// opening delimiter), validates the bytes, and copies the content verbatim.
/// Missing delimiters are reported but decoding continues.
///
/// ```bnf
/// ;; Multiline Literal String
///
/// ml-literal-string = ml-literal-string-delim [ newline ] ml-literal-body
/// ml-literal-string-delim
/// ml-literal-string-delim = 3apostrophe
/// ml-literal-body = *mll-content *( mll-quotes 1*mll-content ) [ mll-quotes ]
///
/// mll-content = mll-char / newline
/// mll-quotes = 1*2apostrophe
/// ```
pub(crate) fn decode_ml_literal_string<'i>(
    raw: Raw<'i>,
    output: &mut dyn StringBuilder<'i>,
    error: &mut dyn ErrorSink,
) {
    const INVALID_STRING: &str = "invalid multi-line literal string";
    output.clear();

    let s = raw.as_str();
    let s = if let Some(stripped) = s.strip_prefix(ML_LITERAL_STRING_DELIM) {
        stripped
    } else {
        error.report_error(
            ParseError::new(INVALID_STRING)
                .with_context(Span::new_unchecked(0, raw.len()))
                .with_expected(&[Expected::Literal("'")])
                .with_unexpected(Span::new_unchecked(0, 0)),
        );
        s
    };
    // A newline immediately after the opening delimiter is trimmed (grammar).
    let s = strip_start_newline(s);
    let s = if let Some(stripped) = s.strip_suffix(ML_LITERAL_STRING_DELIM) {
        stripped
    } else {
        error.report_error(
            ParseError::new(INVALID_STRING)
                .with_context(Span::new_unchecked(0, raw.len()))
                .with_expected(&[Expected::Literal("'")])
                .with_unexpected(Span::new_unchecked(raw.len(), raw.len())),
        );
        // NOTE(review): recovery path — this trims *all* trailing quotes,
        // including legitimate content quotes; confirm this is intended.
        s.trim_end_matches('\'')
    };

    for (i, b) in s.as_bytes().iter().enumerate() {
        if *b == b'\'' || *b == b'\n' {
            // Quotes (up to the delimiter, already stripped) and newlines are
            // valid multi-line content.
        } else if *b == b'\r' {
            // A bare `\r` is only valid as part of a `\r\n` pair.
            if s.as_bytes().get(i + 1) != Some(&b'\n') {
                let offset = (&s.as_bytes()[i + 1..]).offset_from(&raw.as_bytes());
                error.report_error(
                    ParseError::new("carriage return must be followed by newline")
                        .with_context(Span::new_unchecked(0, raw.len()))
                        .with_expected(&[Expected::Literal("\n")])
                        .with_unexpected(Span::new_unchecked(offset, offset)),
                );
            }
        } else if !MLL_CHAR.contains_token(b) {
            let offset = (&s.as_bytes()[i..]).offset_from(&raw.as_bytes());
            error.report_error(
                ParseError::new(INVALID_STRING)
                    .with_context(Span::new_unchecked(0, raw.len()))
                    .with_expected(&[Expected::Description("non-single-quote characters")])
                    .with_unexpected(Span::new_unchecked(offset, offset)),
            );
        }
    }

    if !output.push_str(s) {
        error.report_error(
            ParseError::new(ALLOCATION_ERROR).with_unexpected(Span::new_unchecked(0, raw.len())),
        );
    }
}
+
/// `mll-char = %x09 / %x20-26 / %x28-7E / non-ascii`
///
/// Quotes and newlines are excluded here; they are handled separately in
/// `decode_ml_literal_string`'s validation loop.
const MLL_CHAR: (
    u8,
    RangeInclusive<u8>,
    RangeInclusive<u8>,
    RangeInclusive<u8>,
) = (0x9, 0x20..=0x26, 0x28..=0x7E, NON_ASCII);
+
/// Parse basic string
///
/// Strips the surrounding quotation marks, then alternates between copying
/// runs of unescaped content and decoding `\`-escape sequences into
/// `output`. Missing quotes and invalid characters are reported but
/// decoding continues.
///
/// ```bnf
/// ;; Basic String
///
/// basic-string = quotation-mark *basic-char quotation-mark
///
/// basic-char = basic-unescaped / escaped
///
/// escaped = escape escape-seq-char
/// ```
pub(crate) fn decode_basic_string<'i>(
    raw: Raw<'i>,
    output: &mut dyn StringBuilder<'i>,
    error: &mut dyn ErrorSink,
) {
    const INVALID_STRING: &str = "invalid basic string";
    output.clear();

    let s = raw.as_str();
    let s = if let Some(stripped) = s.strip_prefix(QUOTATION_MARK as char) {
        stripped
    } else {
        error.report_error(
            ParseError::new(INVALID_STRING)
                .with_context(Span::new_unchecked(0, raw.len()))
                .with_expected(&[Expected::Literal("\"")])
                .with_unexpected(Span::new_unchecked(0, 0)),
        );
        s
    };
    let mut s = if let Some(stripped) = s.strip_suffix(QUOTATION_MARK as char) {
        stripped
    } else {
        error.report_error(
            ParseError::new(INVALID_STRING)
                .with_context(Span::new_unchecked(0, raw.len()))
                .with_expected(&[Expected::Literal("\"")])
                .with_unexpected(Span::new_unchecked(raw.len(), raw.len())),
        );
        s
    };

    // Leading unescaped run, then alternate escape-or-invalid / unescaped.
    let segment = basic_unescaped(&mut s);
    if !output.push_str(segment) {
        error.report_error(
            ParseError::new(ALLOCATION_ERROR).with_unexpected(Span::new_unchecked(0, raw.len())),
        );
    }
    while !s.is_empty() {
        if s.starts_with("\\") {
            let _ = s.next_token();

            let c = escape_seq_char(&mut s, raw, error);
            if !output.push_char(c) {
                error.report_error(
                    ParseError::new(ALLOCATION_ERROR)
                        .with_unexpected(Span::new_unchecked(0, raw.len())),
                );
            }
        } else {
            // Not an escape and not valid content: report the run, but keep
            // it in the output so the value is still usable.
            let invalid = basic_invalid(&mut s);
            let start = invalid.offset_from(&raw.as_str());
            let end = start + invalid.len();
            error.report_error(
                ParseError::new(INVALID_STRING)
                    .with_context(Span::new_unchecked(0, raw.len()))
                    .with_expected(&[
                        Expected::Description("non-double-quote visible characters"),
                        Expected::Literal("\\"),
                    ])
                    .with_unexpected(Span::new_unchecked(start, end)),
            );
            let _ = output.push_str(invalid);
        }

        let segment = basic_unescaped(&mut s);
        if !output.push_str(segment) {
            let start = segment.offset_from(&raw.as_str());
            let end = start + segment.len();
            error.report_error(
                ParseError::new(ALLOCATION_ERROR).with_unexpected(Span::new_unchecked(start, end)),
            );
        }
    }
}
+
/// Consume and return the maximal leading run of characters that need no
/// escaping, advancing `stream` past it.
///
/// `basic-unescaped = wschar / %x21 / %x23-5B / %x5D-7E / non-ascii`
fn basic_unescaped<'i>(stream: &mut &'i str) -> &'i str {
    let offset = stream
        .as_bytes()
        .offset_for(|b| !BASIC_UNESCAPED.contains_token(b))
        .unwrap_or(stream.len());
    #[cfg(feature = "unsafe")] // SAFETY: BASIC_UNESCAPED ensure `offset` is along UTF-8 boundary
    unsafe {
        stream.next_slice_unchecked(offset)
    }
    #[cfg(not(feature = "unsafe"))]
    stream.next_slice(offset)
}
+
/// Consume and return the maximal run of bytes that are neither valid
/// basic-string content nor the `\` that starts an escape; used by
/// `decode_basic_string` for error recovery.
fn basic_invalid<'i>(stream: &mut &'i str) -> &'i str {
    let offset = stream
        .as_bytes()
        .offset_for(|b| (BASIC_UNESCAPED, ESCAPE).contains_token(b))
        .unwrap_or(stream.len());
    #[cfg(feature = "unsafe")] // SAFETY: BASIC_UNESCAPED ensure `offset` is along UTF-8 boundary
    unsafe {
        stream.next_slice_unchecked(offset)
    }
    #[cfg(not(feature = "unsafe"))]
    stream.next_slice(offset)
}
+
/// `basic-unescaped = wschar / %x21 / %x23-5B / %x5D-7E / non-ascii`
///
/// Excludes `"` (0x22) and `\` (0x5C), which terminate an unescaped run.
#[allow(clippy::type_complexity)]
const BASIC_UNESCAPED: (
    (u8, u8),
    u8,
    RangeInclusive<u8>,
    RangeInclusive<u8>,
    RangeInclusive<u8>,
) = (WSCHAR, 0x21, 0x23..=0x5B, 0x5D..=0x7E, NON_ASCII);

/// `escape = %x5C ; \`
const ESCAPE: u8 = b'\\';
+
+/// ```bnf
+/// escape-seq-char = %x22 ; " quotation mark U+0022
+/// escape-seq-char =/ %x5C ; \ reverse solidus U+005C
+/// escape-seq-char =/ %x62 ; b backspace U+0008
+/// escape-seq-char =/ %x66 ; f form feed U+000C
+/// escape-seq-char =/ %x6E ; n line feed U+000A
+/// escape-seq-char =/ %x72 ; r carriage return U+000D
+/// escape-seq-char =/ %x74 ; t tab U+0009
+/// escape-seq-char =/ %x75 4HEXDIG ; uXXXX U+XXXX
+/// escape-seq-char =/ %x55 8HEXDIG ; UXXXXXXXX U+XXXXXXXX
+/// ```
+fn escape_seq_char(stream: &mut &str, raw: Raw<'_>, error: &mut dyn ErrorSink) -> char {
+ const EXPECTED_ESCAPES: &[Expected] = &[
+ Expected::Literal("b"),
+ Expected::Literal("f"),
+ Expected::Literal("n"),
+ Expected::Literal("r"),
+ Expected::Literal("\\"),
+ Expected::Literal("\""),
+ Expected::Literal("u"),
+ Expected::Literal("U"),
+ ];
+
+ let start = stream.checkpoint();
+ let Some(id) = stream.next_token() else {
+ let offset = stream.offset_from(&raw.as_str());
+ error.report_error(
+ ParseError::new("missing escaped value")
+ .with_context(Span::new_unchecked(0, raw.len()))
+ .with_expected(EXPECTED_ESCAPES)
+ .with_unexpected(Span::new_unchecked(offset, offset)),
+ );
+ return '\\';
+ };
+ match id {
+ 'b' => '\u{8}',
+ 'f' => '\u{c}',
+ 'n' => '\n',
+ 'r' => '\r',
+ 't' => '\t',
+ 'u' => hexescape(stream, 4, raw, error),
+ 'U' => hexescape(stream, 8, raw, error),
+ '\\' => '\\',
+ '"' => '"',
+ _ => {
+ stream.reset(&start);
+ let offset = stream.offset_from(&raw.as_str());
+ error.report_error(
+ ParseError::new("missing escaped value")
+ .with_context(Span::new_unchecked(0, raw.len()))
+ .with_expected(EXPECTED_ESCAPES)
+ .with_unexpected(Span::new_unchecked(offset, offset)),
+ );
+ '\\'
+ }
+ }
+}
+
/// Decode the `4HEXDIG`/`8HEXDIG` payload of a `\u`/`\U` escape, advancing
/// `stream` past the digits consumed.
///
/// Returns U+FFFD (replacement character) after reporting truncated digit
/// runs or values `char::from_u32` rejects (surrogates, > U+10FFFF).
fn hexescape(
    stream: &mut &str,
    num_digits: usize,
    raw: Raw<'_>,
    error: &mut dyn ErrorSink,
) -> char {
    // Take at most `num_digits` leading hex digits.
    let offset = stream
        .as_bytes()
        .offset_for(|b| !HEXDIG.contains_token(b))
        .unwrap_or_else(|| stream.eof_offset())
        .min(num_digits);
    #[cfg(feature = "unsafe")] // SAFETY: HEXDIG ensure `offset` is along UTF-8 boundary
    let value = unsafe { stream.next_slice_unchecked(offset) };
    #[cfg(not(feature = "unsafe"))]
    let value = stream.next_slice(offset);

    if value.len() != num_digits {
        let offset = stream.offset_from(&raw.as_str());
        error.report_error(
            ParseError::new("too few unicode value digits")
                .with_context(Span::new_unchecked(0, raw.len()))
                .with_expected(&[Expected::Description("unicode hexadecimal value")])
                .with_unexpected(Span::new_unchecked(offset, offset)),
        );
        return '�';
    }

    let Some(value) = u32::from_str_radix(value, 16).ok().and_then(char::from_u32) else {
        let offset = value.offset_from(&raw.as_str());
        error.report_error(
            ParseError::new("invalid value")
                .with_context(Span::new_unchecked(0, raw.len()))
                .with_expected(&[Expected::Description("unicode hexadecimal value")])
                .with_unexpected(Span::new_unchecked(offset, offset)),
        );
        return '�';
    };

    value
}
+
+/// `HEXDIG = DIGIT / "A" / "B" / "C" / "D" / "E" / "F"`
+const HEXDIG: (RangeInclusive<u8>, RangeInclusive<u8>, RangeInclusive<u8>) =
+ (DIGIT, b'A'..=b'F', b'a'..=b'f');
+
+/// `DIGIT = %x30-39 ; 0-9`
+const DIGIT: RangeInclusive<u8> = b'0'..=b'9';
+
+/// Trim at most one leading newline (`LF` or `CRLF`).
+///
+/// Implements the `[ newline ]` immediately after a multi-line string's
+/// opening delimiter; subsequent newlines are content and are preserved.
+fn strip_start_newline(s: &str) -> &str {
+    s.strip_prefix('\n')
+        .or_else(|| s.strip_prefix("\r\n"))
+        .unwrap_or(s)
+}
+
+/// Parse multi-line basic string
+///
+/// ```bnf
+/// ;; Multiline Basic String
+///
+/// ml-basic-string = ml-basic-string-delim [ newline ] ml-basic-body
+/// ml-basic-string-delim
+/// ml-basic-string-delim = 3quotation-mark
+///
+/// ml-basic-body = *mlb-content *( mlb-quotes 1*mlb-content ) [ mlb-quotes ]
+///
+/// mlb-content = mlb-char / newline / mlb-escaped-nl
+/// mlb-char = mlb-unescaped / escaped
+/// mlb-quotes = 1*2quotation-mark
+/// ```
+pub(crate) fn decode_ml_basic_string<'i>(
+ raw: Raw<'i>,
+ output: &mut dyn StringBuilder<'i>,
+ error: &mut dyn ErrorSink,
+) {
+ const INVALID_STRING: &str = "invalid multi-line basic string";
+
+ let s = raw.as_str();
+ let s = if let Some(stripped) = s.strip_prefix(ML_BASIC_STRING_DELIM) {
+ stripped
+ } else {
+ error.report_error(
+ ParseError::new(INVALID_STRING)
+ .with_context(Span::new_unchecked(0, raw.len()))
+ .with_expected(&[Expected::Literal("\"")])
+ .with_unexpected(Span::new_unchecked(0, 0)),
+ );
+ s
+ };
+ let s = strip_start_newline(s);
+ let mut s = if let Some(stripped) = s.strip_suffix(ML_BASIC_STRING_DELIM) {
+ stripped
+ } else {
+ error.report_error(
+ ParseError::new(INVALID_STRING)
+ .with_context(Span::new_unchecked(0, raw.len()))
+ .with_expected(&[Expected::Literal("\"")])
+ .with_unexpected(Span::new_unchecked(raw.len(), raw.len())),
+ );
+ s
+ };
+
+ let segment = mlb_unescaped(&mut s);
+ if !output.push_str(segment) {
+ error.report_error(
+ ParseError::new(ALLOCATION_ERROR).with_unexpected(Span::new_unchecked(0, raw.len())),
+ );
+ }
+ while !s.is_empty() {
+ if s.starts_with("\\") {
+ let _ = s.next_token();
+
+ if s.as_bytes()
+ .first()
+ .map(|b| (WSCHAR, b'\r', b'\n').contains_token(b))
+ .unwrap_or(false)
+ {
+ mlb_escaped_nl(&mut s, raw, error);
+ } else {
+ let c = escape_seq_char(&mut s, raw, error);
+ if !output.push_char(c) {
+ error.report_error(
+ ParseError::new(ALLOCATION_ERROR)
+ .with_unexpected(Span::new_unchecked(0, raw.len())),
+ );
+ }
+ }
+ } else if s.starts_with("\r") {
+ let offset = if s.starts_with("\r\n") {
+ "\r\n".len()
+ } else {
+ let start = s.offset_from(&raw.as_str()) + 1;
+ error.report_error(
+ ParseError::new("carriage return must be followed by newline")
+ .with_context(Span::new_unchecked(0, raw.len()))
+ .with_expected(&[Expected::Literal("\n")])
+ .with_unexpected(Span::new_unchecked(start, start)),
+ );
+ "\r".len()
+ };
+ #[cfg(feature = "unsafe")]
+ // SAFETY: Newlines ensure `offset` is along UTF-8 boundary
+ let newline = unsafe { s.next_slice_unchecked(offset) };
+ #[cfg(not(feature = "unsafe"))]
+ let newline = s.next_slice(offset);
+ if !output.push_str(newline) {
+ let start = newline.offset_from(&raw.as_str());
+ let end = start + newline.len();
+ error.report_error(
+ ParseError::new(ALLOCATION_ERROR)
+ .with_unexpected(Span::new_unchecked(start, end)),
+ );
+ }
+ } else {
+ let invalid = mlb_invalid(&mut s);
+ let start = invalid.offset_from(&raw.as_str());
+ let end = start + invalid.len();
+ error.report_error(
+ ParseError::new(INVALID_STRING)
+ .with_context(Span::new_unchecked(0, raw.len()))
+ .with_expected(&[Expected::Literal("\\"), Expected::Description("characters")])
+ .with_unexpected(Span::new_unchecked(start, end)),
+ );
+ let _ = output.push_str(invalid);
+ }
+
+ let segment = mlb_unescaped(&mut s);
+ if !output.push_str(segment) {
+ let start = segment.offset_from(&raw.as_str());
+ let end = start + segment.len();
+ error.report_error(
+ ParseError::new(ALLOCATION_ERROR).with_unexpected(Span::new_unchecked(start, end)),
+ );
+ }
+ }
+}
+
+/// ```bnf
+/// mlb-escaped-nl = escape ws newline *( wschar / newline )
+/// ```
+fn mlb_escaped_nl(stream: &mut &str, raw: Raw<'_>, error: &mut dyn ErrorSink) {
+ const INVALID_STRING: &str = "invalid multi-line basic string";
+ let ws_offset = stream
+ .as_bytes()
+ .offset_for(|b| !WSCHAR.contains_token(b))
+ .unwrap_or(stream.len());
+ #[cfg(feature = "unsafe")] // SAFETY: WSCHAR ensure `offset` is along UTF-8 boundary
+ unsafe {
+ stream.next_slice_unchecked(ws_offset);
+ }
+ #[cfg(not(feature = "unsafe"))]
+ stream.next_slice(ws_offset);
+
+ let start = stream.checkpoint();
+ match stream.next_token() {
+ Some('\n') => {}
+ Some('\r') => {
+ if stream.as_bytes().first() == Some(&b'\n') {
+ let _ = stream.next_token();
+ } else {
+ let start = stream.offset_from(&raw.as_str());
+ let end = start;
+ error.report_error(
+ ParseError::new("carriage return must be followed by newline")
+ .with_context(Span::new_unchecked(0, raw.len()))
+ .with_expected(&[Expected::Literal("\n")])
+ .with_unexpected(Span::new_unchecked(start, end)),
+ );
+ }
+ }
+ _ => {
+ stream.reset(&start);
+
+ let start = stream.offset_from(&raw.as_str());
+ let end = start;
+ error.report_error(
+ ParseError::new(INVALID_STRING)
+ .with_context(Span::new_unchecked(0, raw.len()))
+ .with_expected(&[Expected::Literal("\n")])
+ .with_unexpected(Span::new_unchecked(start, end)),
+ );
+ }
+ }
+
+ loop {
+ let start_offset = stream.offset_from(&raw.as_str());
+
+ let offset = stream
+ .as_bytes()
+ .offset_for(|b| !(WSCHAR, b'\n').contains_token(b))
+ .unwrap_or(stream.len());
+ #[cfg(feature = "unsafe")] // SAFETY: WSCHAR ensure `offset` is along UTF-8 boundary
+ unsafe {
+ stream.next_slice_unchecked(offset);
+ }
+ #[cfg(not(feature = "unsafe"))]
+ stream.next_slice(offset);
+
+ if stream.starts_with("\r") {
+ let offset = if stream.starts_with("\r\n") {
+ "\r\n".len()
+ } else {
+ let start = stream.offset_from(&raw.as_str()) + 1;
+ error.report_error(
+ ParseError::new("carriage return must be followed by newline")
+ .with_context(Span::new_unchecked(0, raw.len()))
+ .with_expected(&[Expected::Literal("\n")])
+ .with_unexpected(Span::new_unchecked(start, start)),
+ );
+ "\r".len()
+ };
+ #[cfg(feature = "unsafe")]
+ // SAFETY: Newlines ensure `offset` is along UTF-8 boundary
+ let _ = unsafe { stream.next_slice_unchecked(offset) };
+ #[cfg(not(feature = "unsafe"))]
+ let _ = stream.next_slice(offset);
+ }
+
+ let end_offset = stream.offset_from(&raw.as_str());
+ if start_offset == end_offset {
+ break;
+ }
+ }
+}
+
+/// `mlb-unescaped` extended with `mlb-quotes` and `LF`
+///
+/// **warning:** `newline` is not validated
+///
+/// ```bnf
+/// ml-basic-body = *mlb-content *( mlb-quotes 1*mlb-content ) [ mlb-quotes ]
+///
+/// mlb-content = mlb-char / newline / mlb-escaped-nl
+/// mlb-char = mlb-unescaped / escaped
+/// mlb-quotes = 1*2quotation-mark
+/// mlb-unescaped = wschar / %x21 / %x23-5B / %x5D-7E / non-ascii
+/// ```
+fn mlb_unescaped<'i>(stream: &mut &'i str) -> &'i str {
+    let offset = stream
+        .as_bytes()
+        .offset_for(|b| !(MLB_UNESCAPED, b'"', b'\n').contains_token(b))
+        .unwrap_or(stream.len());
+    #[cfg(feature = "unsafe")] // SAFETY: every byte >= 0x80 is in `MLB_UNESCAPED` (via `NON_ASCII`), so the stop byte is ASCII and `offset` lies on a UTF-8 boundary
+    unsafe {
+        stream.next_slice_unchecked(offset)
+    }
+    #[cfg(not(feature = "unsafe"))]
+    stream.next_slice(offset)
+}
+
+fn mlb_invalid<'i>(stream: &mut &'i str) -> &'i str {
+ let offset = stream
+ .as_bytes()
+ .offset_for(|b| (MLB_UNESCAPED, b'"', b'\n', ESCAPE, '\r').contains_token(b))
+ .unwrap_or(stream.len());
+ #[cfg(feature = "unsafe")] // SAFETY: BASIC_UNESCAPED ensure `offset` is along UTF-8 boundary
+ unsafe {
+ stream.next_slice_unchecked(offset)
+ }
+ #[cfg(not(feature = "unsafe"))]
+ stream.next_slice(offset)
+}
+
+/// `mlb-unescaped = wschar / %x21 / %x23-5B / %x5D-7E / non-ascii`
+#[allow(clippy::type_complexity)]
+const MLB_UNESCAPED: (
+ (u8, u8),
+ u8,
+ RangeInclusive<u8>,
+ RangeInclusive<u8>,
+ RangeInclusive<u8>,
+) = (WSCHAR, 0x21, 0x23..=0x5B, 0x5D..=0x7E, NON_ASCII);
+
+/// Parse unquoted key
+///
+/// ```bnf
+/// unquoted-key = 1*( ALPHA / DIGIT / %x2D / %x5F ) ; A-Z / a-z / 0-9 / - / _
+/// ```
+pub(crate) fn decode_unquoted_key<'i>(
+    raw: Raw<'i>,
+    output: &mut dyn StringBuilder<'i>,
+    error: &mut dyn ErrorSink,
+) {
+    let s = raw.as_str();
+
+    if s.is_empty() {
+        error.report_error(
+            ParseError::new("unquoted keys cannot be empty")
+                .with_context(Span::new_unchecked(0, s.len()))
+                .with_expected(&[
+                    Expected::Description("letters"),
+                    Expected::Description("numbers"),
+                    Expected::Literal("-"),
+                    Expected::Literal("_"),
+                ])
+                .with_unexpected(Span::new_unchecked(0, s.len())),
+        );
+    }
+
+    // NOTE(review): `i` is a byte index, so a multi-byte UTF-8 character yields
+    // one error per byte — presumably acceptable for recovery; confirm intended.
+    for (i, b) in s.as_bytes().iter().enumerate() {
+        if !UNQUOTED_CHAR.contains_token(b) {
+            error.report_error(
+                ParseError::new("invalid unquoted key")
+                    .with_context(Span::new_unchecked(0, s.len()))
+                    .with_expected(&[
+                        Expected::Description("letters"),
+                        Expected::Description("numbers"),
+                        Expected::Literal("-"),
+                        Expected::Literal("_"),
+                    ])
+                    .with_unexpected(Span::new_unchecked(i, i)),
+            );
+        }
+    }
+
+    // The key is passed through verbatim even when errors were reported above.
+    if !output.push_str(s) {
+        error.report_error(
+            ParseError::new(ALLOCATION_ERROR).with_unexpected(Span::new_unchecked(0, raw.len())),
+        );
+    }
+}
+
+/// `unquoted-key = 1*( ALPHA / DIGIT / %x2D / %x5F ) ; A-Z / a-z / 0-9 / - / _`
+const UNQUOTED_CHAR: (
+ RangeInclusive<u8>,
+ RangeInclusive<u8>,
+ RangeInclusive<u8>,
+ u8,
+ u8,
+) = (b'A'..=b'Z', b'a'..=b'z', b'0'..=b'9', b'-', b'_');
+
+#[cfg(test)]
+#[cfg(feature = "std")]
+mod test {
+ use super::*;
+ use crate::decoder::Encoding;
+
+ use alloc::borrow::Cow;
+
+ use snapbox::assert_data_eq;
+ use snapbox::prelude::*;
+ use snapbox::str;
+
+ #[test]
+ fn literal_string() {
+ let cases = [
+ (
+ r"'C:\Users\nodejs\templates'",
+ str![[r#"C:\Users\nodejs\templates"#]].raw(),
+ str![[r#"
+[]
+
+"#]]
+ .raw(),
+ ),
+ (
+ r"'\\ServerX\admin$\system32\'",
+ str![[r#"\\ServerX\admin$\system32\"#]].raw(),
+ str![[r#"
+[]
+
+"#]]
+ .raw(),
+ ),
+ (
+ r#"'Tom "Dubs" Preston-Werner'"#,
+ str![[r#"Tom "Dubs" Preston-Werner"#]].raw(),
+ str![[r#"
+[]
+
+"#]]
+ .raw(),
+ ),
+ (
+ r"'<\i\c*\s*>'",
+ str![[r#"<\i\c*\s*>"#]].raw(),
+ str![[r#"
+[]
+
+"#]]
+ .raw(),
+ ),
+ ];
+ for (input, expected, expected_error) in cases {
+ let mut error = Vec::new();
+ let mut actual = Cow::Borrowed("");
+ decode_literal_string(
+ Raw::new_unchecked(input, Some(Encoding::LiteralString), Default::default()),
+ &mut actual,
+ &mut error,
+ );
+ assert_data_eq!(actual.as_ref(), expected);
+ assert_data_eq!(error.to_debug(), expected_error);
+ }
+ }
+
+ #[test]
+ fn ml_literal_string() {
+ let cases = [
+ (
+ r"'''I [dw]on't need \d{2} apples'''",
+ str![[r#"I [dw]on't need \d{2} apples"#]].raw(),
+ str![[r#"
+[]
+
+"#]]
+ .raw(),
+ ),
+ (
+ r#"''''one_quote''''"#,
+ str!["'one_quote'"].raw(),
+ str![[r#"
+[]
+
+"#]]
+ .raw(),
+ ),
+ (
+ r#"'''
+The first newline is
+trimmed in raw strings.
+ All other whitespace
+ is preserved.
+'''"#,
+ str![[r#"
+The first newline is
+trimmed in raw strings.
+ All other whitespace
+ is preserved.
+
+"#]]
+ .raw(),
+ str![[r#"
+[]
+
+"#]]
+ .raw(),
+ ),
+ ];
+ for (input, expected, expected_error) in cases {
+ let mut error = Vec::new();
+ let mut actual = Cow::Borrowed("");
+ decode_ml_literal_string(
+ Raw::new_unchecked(input, Some(Encoding::MlLiteralString), Default::default()),
+ &mut actual,
+ &mut error,
+ );
+ assert_data_eq!(actual.as_ref(), expected);
+ assert_data_eq!(error.to_debug(), expected_error);
+ }
+ }
+
+ #[test]
+ fn basic_string() {
+ let cases = [
+ (
+ r#""""#,
+ str![""].raw(),
+ str![[r#"
+[]
+
+"#]]
+ .raw(),
+ ),
+ (
+ r#""content\"trailing""#,
+ str![[r#"content"trailing"#]].raw(),
+ str![[r#"
+[]
+
+"#]]
+ .raw(),
+ ),
+ (
+ r#""content\""#,
+ str![[r#"content\"#]].raw(),
+ str![[r#"
+[
+ ParseError {
+ context: Some(
+ 0..10,
+ ),
+ description: "missing escaped value",
+ expected: Some(
+ [
+ Literal(
+ "b",
+ ),
+ Literal(
+ "f",
+ ),
+ Literal(
+ "n",
+ ),
+ Literal(
+ "r",
+ ),
+ Literal(
+ "\\",
+ ),
+ Literal(
+ "\"",
+ ),
+ Literal(
+ "u",
+ ),
+ Literal(
+ "U",
+ ),
+ ],
+ ),
+ unexpected: Some(
+ 9..9,
+ ),
+ },
+]
+
+"#]]
+ .raw(),
+ ),
+ (
+ r#""content
+trailing""#,
+ str![[r#"
+content
+trailing
+"#]]
+ .raw(),
+ str![[r#"
+[
+ ParseError {
+ context: Some(
+ 0..18,
+ ),
+ description: "invalid basic string",
+ expected: Some(
+ [
+ Description(
+ "non-double-quote visible characters",
+ ),
+ Literal(
+ "\\",
+ ),
+ ],
+ ),
+ unexpected: Some(
+ 8..9,
+ ),
+ },
+]
+
+"#]]
+ .raw(),
+ ),
+ (
+ r#""I'm a string. \"You can quote me\". Name\tJos\u00E9\nLocation\tSF. \U0002070E""#,
+ str![[r#"
+I'm a string. "You can quote me". Name José
+Location SF. 𠜎
+"#]]
+ .raw(),
+ str![[r#"
+[]
+
+"#]]
+ .raw(),
+ ),
+ ];
+ for (input, expected, expected_error) in cases {
+ let mut error = Vec::new();
+ let mut actual = Cow::Borrowed("");
+ decode_basic_string(
+ Raw::new_unchecked(input, Some(Encoding::BasicString), Default::default()),
+ &mut actual,
+ &mut error,
+ );
+ assert_data_eq!(actual.as_ref(), expected);
+ assert_data_eq!(error.to_debug(), expected_error);
+ }
+ }
+
+ #[test]
+ fn ml_basic_string() {
+ let cases = [
+ (
+ r#""""
+Roses are red
+Violets are blue""""#,
+ str![[r#"
+Roses are red
+Violets are blue
+"#]]
+ .raw(),
+ str![[r#"
+[]
+
+"#]]
+ .raw(),
+ ),
+ (
+ r#"""" \""" """"#,
+ str![[r#" """ "#]].raw(),
+ str![[r#"
+[]
+
+"#]]
+ .raw(),
+ ),
+ (
+ r#"""" \\""""#,
+ str![[r#" \"#]].raw(),
+ str![[r#"
+[]
+
+"#]]
+ .raw(),
+ ),
+ (
+ r#""""
+The quick brown \
+
+
+ fox jumps over \
+ the lazy dog.""""#,
+ str!["The quick brown fox jumps over the lazy dog."].raw(),
+ str![[r#"
+[]
+
+"#]]
+ .raw(),
+ ),
+ (
+ r#""""\
+ The quick brown \
+ fox jumps over \
+ the lazy dog.\
+ """"#,
+ str!["The quick brown fox jumps over the lazy dog."].raw(),
+ str![[r#"
+[]
+
+"#]]
+ .raw(),
+ ),
+ (
+ r#""""\
+ """"#,
+ str![""].raw(),
+ str![[r#"
+[]
+
+"#]]
+ .raw(),
+ ),
+ (
+ r#""""
+\
+ \
+""""#,
+ str![""].raw(),
+ str![[r#"
+[]
+
+"#]]
+ .raw(),
+ ),
+ (
+ r#"""" """#,
+ str![[r#" """#]].raw(),
+ str![[r#"
+[
+ ParseError {
+ context: Some(
+ 0..7,
+ ),
+ description: "invalid multi-line basic string",
+ expected: Some(
+ [
+ Literal(
+ "\"",
+ ),
+ ],
+ ),
+ unexpected: Some(
+ 7..7,
+ ),
+ },
+]
+
+"#]]
+ .raw(),
+ ),
+ (
+ r#"""" \""""#,
+ str![[r#" \"#]].raw(),
+ str![[r#"
+[
+ ParseError {
+ context: Some(
+ 0..9,
+ ),
+ description: "missing escaped value",
+ expected: Some(
+ [
+ Literal(
+ "b",
+ ),
+ Literal(
+ "f",
+ ),
+ Literal(
+ "n",
+ ),
+ Literal(
+ "r",
+ ),
+ Literal(
+ "\\",
+ ),
+ Literal(
+ "\"",
+ ),
+ Literal(
+ "u",
+ ),
+ Literal(
+ "U",
+ ),
+ ],
+ ),
+ unexpected: Some(
+ 6..6,
+ ),
+ },
+]
+
+"#]]
+ .raw(),
+ ),
+ ];
+ for (input, expected, expected_error) in cases {
+ let mut error = Vec::new();
+ let mut actual = Cow::Borrowed("");
+ decode_ml_basic_string(
+ Raw::new_unchecked(input, Some(Encoding::MlBasicString), Default::default()),
+ &mut actual,
+ &mut error,
+ );
+ assert_data_eq!(actual.as_ref(), expected);
+ assert_data_eq!(error.to_debug(), expected_error);
+ }
+ }
+
+ #[test]
+ fn unquoted_keys() {
+ let cases = [
+ (
+ "a",
+ str!["a"].raw(),
+ str![[r#"
+[]
+
+"#]]
+ .raw(),
+ ),
+ (
+ "hello",
+ str!["hello"].raw(),
+ str![[r#"
+[]
+
+"#]]
+ .raw(),
+ ),
+ (
+ "-",
+ str!["-"].raw(),
+ str![[r#"
+[]
+
+"#]]
+ .raw(),
+ ),
+ (
+ "_",
+ str!["_"].raw(),
+ str![[r#"
+[]
+
+"#]]
+ .raw(),
+ ),
+ (
+ "-hello-world-",
+ str!["-hello-world-"].raw(),
+ str![[r#"
+[]
+
+"#]]
+ .raw(),
+ ),
+ (
+ "_hello_world_",
+ str!["_hello_world_"].raw(),
+ str![[r#"
+[]
+
+"#]]
+ .raw(),
+ ),
+ (
+ "",
+ str![""].raw(),
+ str![[r#"
+[
+ ParseError {
+ context: Some(
+ 0..0,
+ ),
+ description: "unquoted keys cannot be empty",
+ expected: Some(
+ [
+ Description(
+ "letters",
+ ),
+ Description(
+ "numbers",
+ ),
+ Literal(
+ "-",
+ ),
+ Literal(
+ "_",
+ ),
+ ],
+ ),
+ unexpected: Some(
+ 0..0,
+ ),
+ },
+]
+
+"#]]
+ .raw(),
+ ),
+ ];
+
+ for (input, expected, expected_error) in cases {
+ let mut error = Vec::new();
+ let mut actual = Cow::Borrowed("");
+ decode_unquoted_key(
+ Raw::new_unchecked(input, None, Default::default()),
+ &mut actual,
+ &mut error,
+ );
+ assert_data_eq!(actual.as_ref(), expected);
+ assert_data_eq!(error.to_debug(), expected_error);
+ }
+ }
+}
diff --git a/third_party/rust/toml_parser/src/decoder/ws.rs b/third_party/rust/toml_parser/src/decoder/ws.rs
@@ -0,0 +1,76 @@
+use core::ops::RangeInclusive;
+
+use winnow::stream::ContainsToken as _;
+
+use crate::lexer::COMMENT_START_SYMBOL;
+use crate::ErrorSink;
+use crate::Expected;
+use crate::ParseError;
+use crate::Raw;
+use crate::Span;
+
+/// Parse comment
+///
+/// ```bnf
+/// ;; Comment
+///
+/// comment-start-symbol = %x23 ; #
+/// non-ascii = %x80-D7FF / %xE000-10FFFF
+/// non-eol = %x09 / %x20-7F / non-ascii
+/// ```
+///
+/// NOTE(review): the ABNF above says `%x20-7F` but the enforced `NON_EOL`
+/// const below uses `%x20-7E` (rejecting DEL) — confirm which TOML grammar
+/// revision is targeted.
+///
+/// ```bnf
+/// comment = comment-start-symbol *non-eol
+/// ```
+pub(crate) fn decode_comment(raw: Raw<'_>, error: &mut dyn ErrorSink) {
+    let s = raw.as_bytes();
+
+    if s.first() != Some(&COMMENT_START_SYMBOL) {
+        error.report_error(
+            ParseError::new("missing comment start")
+                .with_context(Span::new_unchecked(0, raw.len()))
+                .with_expected(&[Expected::Literal("#")])
+                .with_unexpected(Span::new_unchecked(0, 0)),
+        );
+    }
+
+    // Byte-wise scan: `NON_EOL` admits every byte >= 0x80, so multi-byte UTF-8
+    // sequences never trigger a false error here.
+    for (i, b) in s.iter().copied().enumerate() {
+        if !NON_EOL.contains_token(b) {
+            error.report_error(
+                ParseError::new("invalid comment character")
+                    .with_context(Span::new_unchecked(0, raw.len()))
+                    .with_expected(&[Expected::Description("printable characters")])
+                    .with_unexpected(Span::new_unchecked(i, i)),
+            );
+        }
+    }
+}
+
+// non-ascii = %x80-D7FF / %xE000-10FFFF
+// - ASCII is 0xxxxxxx
+// - First byte for UTF-8 is 11xxxxxx
+// - Subsequent UTF-8 bytes are 10xxxxxx
+pub(crate) const NON_ASCII: RangeInclusive<u8> = 0x80..=0xff;
+
+// non-eol = %x09 / %x20-7E / non-ascii
+pub(crate) const NON_EOL: (u8, RangeInclusive<u8>, RangeInclusive<u8>) =
+ (0x09, 0x20..=0x7E, NON_ASCII);
+
+/// Parse newline
+///
+/// ```bnf
+///;; Newline
+///
+/// newline = %x0A ; LF
+/// newline =/ %x0D.0A ; CRLF
+/// ```
+pub(crate) fn decode_newline(raw: Raw<'_>, error: &mut dyn ErrorSink) {
+    let s = raw.as_str();
+
+    // Only a bare CR needs rejecting: the lexer's newline tokens are presumably
+    // limited to "\n", "\r\n", or a lone "\r" (see `lex_crlf`) — confirm.
+    if s == "\r" {
+        error.report_error(
+            ParseError::new("carriage return must be followed by newline")
+                .with_context(Span::new_unchecked(0, raw.len()))
+                .with_expected(&[Expected::Literal("\n")])
+                .with_unexpected(Span::new_unchecked(raw.len(), raw.len())),
+        );
+    }
+}
diff --git a/third_party/rust/toml_parser/src/error.rs b/third_party/rust/toml_parser/src/error.rs
@@ -0,0 +1,102 @@
+use crate::Span;
+
+/// Destination for [`ParseError`]s discovered while lexing/decoding.
+pub trait ErrorSink {
+    /// Record one error; the implementation decides whether to keep, count, or drop it.
+    fn report_error(&mut self, error: ParseError);
+}
+
+/// Any `FnMut(ParseError)` closure acts as a sink.
+impl<F> ErrorSink for F
+where
+    F: FnMut(ParseError),
+{
+    fn report_error(&mut self, error: ParseError) {
+        (self)(error);
+    }
+}
+
+/// `()` silently discards every error.
+impl ErrorSink for () {
+    fn report_error(&mut self, _error: ParseError) {}
+}
+
+/// Keeps only the *first* reported error; later reports are dropped.
+impl ErrorSink for Option<ParseError> {
+    fn report_error(&mut self, error: ParseError) {
+        self.get_or_insert(error);
+    }
+}
+
+/// Collects every reported error, in report order.
+#[cfg(feature = "alloc")]
+#[allow(unused_qualifications)]
+impl ErrorSink for alloc::vec::Vec<ParseError> {
+    fn report_error(&mut self, error: ParseError) {
+        self.push(error);
+    }
+}
+
+/// A single parse diagnostic with optional span information.
+///
+/// Spans are byte offsets; they start relative to the chunk being decoded and
+/// are shifted to document offsets by [`rebase_spans`][Self::rebase_spans].
+#[derive(Clone, PartialEq, Eq, Debug)]
+#[non_exhaustive]
+pub struct ParseError {
+    context: Option<Span>,
+    description: ErrorStr,
+    expected: Option<&'static [Expected]>,
+    unexpected: Option<Span>,
+}
+
+impl ParseError {
+    /// Create an error carrying only a human-readable description.
+    pub fn new(description: impl Into<ErrorStr>) -> Self {
+        Self {
+            context: None,
+            description: description.into(),
+            expected: None,
+            unexpected: None,
+        }
+    }
+
+    /// Builder: span of the enclosing construct (e.g. the whole literal).
+    pub fn with_context(mut self, context: Span) -> Self {
+        self.context = Some(context);
+        self
+    }
+
+    /// Builder: tokens/classes that would have been valid at the error position.
+    pub fn with_expected(mut self, expected: &'static [Expected]) -> Self {
+        self.expected = Some(expected);
+        self
+    }
+
+    /// Builder: span of the offending input (may be empty to mark a position).
+    pub fn with_unexpected(mut self, unexpected: Span) -> Self {
+        self.unexpected = Some(unexpected);
+        self
+    }
+
+    pub fn context(&self) -> Option<Span> {
+        self.context
+    }
+    pub fn description(&self) -> &str {
+        &self.description
+    }
+    pub fn expected(&self) -> Option<&'static [Expected]> {
+        self.expected
+    }
+    pub fn unexpected(&self) -> Option<Span> {
+        self.unexpected
+    }
+
+    /// Shift both spans forward by `offset`, translating chunk-relative
+    /// positions into whole-document positions.
+    pub(crate) fn rebase_spans(mut self, offset: usize) -> Self {
+        if let Some(context) = self.context.as_mut() {
+            *context += offset;
+        }
+        if let Some(unexpected) = self.unexpected.as_mut() {
+            *unexpected += offset;
+        }
+        self
+    }
+}
+
+#[cfg(feature = "alloc")]
+type ErrorStr = alloc::borrow::Cow<'static, str>;
+#[cfg(not(feature = "alloc"))]
+type ErrorStr = &'static str;
+
+#[derive(Copy, Clone, PartialEq, Eq, Debug)]
+#[non_exhaustive]
+pub enum Expected {
+ Literal(&'static str),
+ Description(&'static str),
+}
diff --git a/third_party/rust/toml_parser/src/lexer/mod.rs b/third_party/rust/toml_parser/src/lexer/mod.rs
@@ -0,0 +1,627 @@
+//! Lex TOML tokens
+//!
+//! To get started, see [`Source::lex`][crate::Source::lex]
+
+#[cfg(test)]
+#[cfg(feature = "std")]
+mod test;
+mod token;
+
+#[cfg(feature = "alloc")]
+use alloc::vec::Vec;
+
+use winnow::stream::AsBStr as _;
+use winnow::stream::ContainsToken as _;
+use winnow::stream::FindSlice as _;
+use winnow::stream::Location;
+use winnow::stream::Stream as _;
+
+use crate::Span;
+
+pub use token::Token;
+pub use token::TokenKind;
+
+/// Lex TOML [tokens][Token]
+///
+/// To get started, see [`Source::lex`][crate::Source::lex]
+pub struct Lexer<'i> {
+ stream: Stream<'i>,
+ eof: bool,
+}
+
+impl<'i> Lexer<'i> {
+ pub(crate) fn new(input: &'i str) -> Self {
+ let mut stream = Stream::new(input);
+ if input.as_bytes().starts_with(BOM) {
+ let offset = BOM.len();
+ #[cfg(feature = "unsafe")] // SAFETY: only called when next character is ASCII
+ unsafe {
+ stream.next_slice_unchecked(offset)
+ };
+ #[cfg(not(feature = "unsafe"))]
+ stream.next_slice(offset);
+ }
+ Lexer { stream, eof: false }
+ }
+
+ #[cfg(feature = "alloc")]
+ pub fn into_vec(self) -> Vec<Token> {
+ #![allow(unused_qualifications)] // due to MSRV of 1.66
+ let capacity = core::cmp::min(
+ self.stream.len(),
+ usize::MAX / core::mem::size_of::<Token>(),
+ );
+ let mut vec = Vec::with_capacity(capacity);
+ vec.extend(self);
+ vec
+ }
+}
+
+impl Iterator for Lexer<'_> {
+ type Item = Token;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ let Some(peek_byte) = self.stream.as_bstr().first() else {
+ if self.eof {
+ return None;
+ } else {
+ self.eof = true;
+ let start = self.stream.current_token_start();
+ let span = Span::new_unchecked(start, start);
+ return Some(Token::new(TokenKind::Eof, span));
+ }
+ };
+ Some(process_token(*peek_byte, &mut self.stream))
+ }
+}
+
+const BOM: &[u8] = b"\xEF\xBB\xBF";
+
+pub(crate) type Stream<'i> = winnow::stream::LocatingSlice<&'i str>;
+
+fn process_token(peek_byte: u8, stream: &mut Stream<'_>) -> Token {
+ let token = match peek_byte {
+ b'.' => lex_ascii_char(stream, TokenKind::Dot),
+ b'=' => lex_ascii_char(stream, TokenKind::Equals),
+ b',' => lex_ascii_char(stream, TokenKind::Comma),
+ b'[' => lex_ascii_char(stream, TokenKind::LeftSquareBracket),
+ b']' => lex_ascii_char(stream, TokenKind::RightSquareBracket),
+ b'{' => lex_ascii_char(stream, TokenKind::LeftCurlyBracket),
+ b'}' => lex_ascii_char(stream, TokenKind::RightCurlyBracket),
+ b' ' => lex_whitespace(stream),
+ b'\t' => lex_whitespace(stream),
+ b'#' => lex_comment(stream),
+ b'\r' => lex_crlf(stream),
+ b'\n' => lex_ascii_char(stream, TokenKind::Newline),
+ b'\'' => {
+ if stream.starts_with(ML_LITERAL_STRING_DELIM) {
+ lex_ml_literal_string(stream)
+ } else {
+ lex_literal_string(stream)
+ }
+ }
+ b'"' => {
+ if stream.starts_with(ML_BASIC_STRING_DELIM) {
+ lex_ml_basic_string(stream)
+ } else {
+ lex_basic_string(stream)
+ }
+ }
+ _ => lex_atom(stream),
+ };
+ token
+}
+
+/// Process an ASCII character token
+///
+/// # Safety
+///
+/// - `stream` must be UTF-8
+/// - `stream` must be non-empty
+/// - `stream[0]` must be ASCII
+fn lex_ascii_char(stream: &mut Stream<'_>, kind: TokenKind) -> Token {
+ debug_assert!(!stream.is_empty());
+ let start = stream.current_token_start();
+
+ let offset = 1; // an ascii character
+ #[cfg(feature = "unsafe")] // SAFETY: only called when next character is ASCII
+ unsafe {
+ stream.next_slice_unchecked(offset)
+ };
+ #[cfg(not(feature = "unsafe"))]
+ stream.next_slice(offset);
+
+ let end = stream.previous_token_end();
+ let span = Span::new_unchecked(start, end);
+ Token::new(kind, span)
+}
+
+/// Process Whitespace
+///
+/// ```bnf
+/// ;; Whitespace
+///
+/// ws = *wschar
+/// wschar = %x20 ; Space
+/// wschar =/ %x09 ; Horizontal tab
+/// ```
+///
+/// # Safety
+///
+/// - `stream` must be UTF-8
+/// - `stream` must be non-empty
+fn lex_whitespace(stream: &mut Stream<'_>) -> Token {
+ debug_assert!(!stream.is_empty());
+ let start = stream.current_token_start();
+
+ let offset = stream
+ .as_bstr()
+ .offset_for(|b| !WSCHAR.contains_token(b))
+ .unwrap_or(stream.eof_offset());
+ #[cfg(feature = "unsafe")] // SAFETY: WSCHAR ensures `offset` will be at UTF-8 boundary
+ unsafe {
+ stream.next_slice_unchecked(offset)
+ };
+ #[cfg(not(feature = "unsafe"))]
+ stream.next_slice(offset);
+
+ let end = stream.previous_token_end();
+ let span = Span::new_unchecked(start, end);
+ Token::new(TokenKind::Whitespace, span)
+}
+
+/// ```bnf
+/// wschar = %x20 ; Space
+/// wschar =/ %x09 ; Horizontal tab
+/// ```
+pub(crate) const WSCHAR: (u8, u8) = (b' ', b'\t');
+
+/// Process Comment
+///
+/// ```bnf
+/// ;; Comment
+///
+/// comment-start-symbol = %x23 ; #
+/// non-ascii = %x80-D7FF / %xE000-10FFFF
+/// non-eol = %x09 / %x20-7F / non-ascii
+///
+/// comment = comment-start-symbol *non-eol
+/// ```
+///
+/// # Safety
+///
+/// - `stream` must be UTF-8
+/// - `stream[0] == b'#'`
+fn lex_comment(stream: &mut Stream<'_>) -> Token {
+ let start = stream.current_token_start();
+
+ let offset = stream
+ .as_bytes()
+ .find_slice((b'\r', b'\n'))
+ .map(|s| s.start)
+ .unwrap_or_else(|| stream.eof_offset());
+ #[cfg(feature = "unsafe")] // SAFETY: newlines ensure `offset` is along UTF-8 boundary
+ unsafe {
+ stream.next_slice_unchecked(offset)
+ };
+ #[cfg(not(feature = "unsafe"))]
+ stream.next_slice(offset);
+
+ let end = stream.previous_token_end();
+ let span = Span::new_unchecked(start, end);
+ Token::new(TokenKind::Comment, span)
+}
+
+/// `comment-start-symbol = %x23 ; #`
+pub(crate) const COMMENT_START_SYMBOL: u8 = b'#';
+
+/// Process Newline
+///
+/// ```bnf
+/// ;; Newline
+///
+/// newline = %x0A ; LF
+/// newline =/ %x0D.0A ; CRLF
+/// ```
+///
+/// # Safety
+///
+/// - `stream` must be UTF-8
+/// - `stream[0] == b'\r'`
+fn lex_crlf(stream: &mut Stream<'_>) -> Token {
+ let start = stream.current_token_start();
+
+ let mut offset = '\r'.len_utf8();
+ let has_lf = stream.as_bstr().get(1) == Some(&b'\n');
+ if has_lf {
+ offset += '\n'.len_utf8();
+ }
+
+ #[cfg(feature = "unsafe")] // SAFETY: newlines ensure `offset` is along UTF-8 boundary
+ unsafe {
+ stream.next_slice_unchecked(offset)
+ };
+ #[cfg(not(feature = "unsafe"))]
+ stream.next_slice(offset);
+ let end = stream.previous_token_end();
+ let span = Span::new_unchecked(start, end);
+
+ Token::new(TokenKind::Newline, span)
+}
+
+/// Process literal string
+///
+/// ```bnf
+/// ;; Literal String
+///
+/// literal-string = apostrophe *literal-char apostrophe
+///
+/// apostrophe = %x27 ; ' apostrophe
+///
+/// literal-char = %x09 / %x20-26 / %x28-7E / non-ascii
+/// ```
+///
+/// # Safety
+///
+/// - `stream` must be UTF-8
+/// - `stream[0] == b'\''`
+fn lex_literal_string(stream: &mut Stream<'_>) -> Token {
+ let start = stream.current_token_start();
+
+ let offset = 1; // APOSTROPHE
+ #[cfg(feature = "unsafe")] // SAFETY: only called when next character is ASCII
+ unsafe {
+ stream.next_slice_unchecked(offset)
+ };
+ #[cfg(not(feature = "unsafe"))]
+ stream.next_slice(offset);
+
+ let offset = match stream.as_bstr().find_slice((APOSTROPHE, b'\n')) {
+ Some(span) => {
+ if stream.as_bstr()[span.start] == APOSTROPHE {
+ span.end
+ } else {
+ span.start
+ }
+ }
+ None => stream.eof_offset(),
+ };
+ #[cfg(feature = "unsafe")]
+ // SAFETY: `APOSTROPHE`/newline ensure `offset` is along UTF-8 boundary
+ unsafe {
+ stream.next_slice_unchecked(offset)
+ };
+ #[cfg(not(feature = "unsafe"))]
+ stream.next_slice(offset);
+
+ let end = stream.previous_token_end();
+ let span = Span::new_unchecked(start, end);
+ Token::new(TokenKind::LiteralString, span)
+}
+
+/// `apostrophe = %x27 ; ' apostrophe`
+pub(crate) const APOSTROPHE: u8 = b'\'';
+
+/// Process multi-line literal string
+///
+/// ```bnf
+/// ;; Multiline Literal String
+///
+/// ml-literal-string = ml-literal-string-delim [ newline ] ml-literal-body
+/// ml-literal-string-delim
+/// ml-literal-string-delim = 3apostrophe
+/// ml-literal-body = *mll-content *( mll-quotes 1*mll-content ) [ mll-quotes ]
+///
+/// mll-content = mll-char / newline
+/// mll-char = %x09 / %x20-26 / %x28-7E / non-ascii
+/// mll-quotes = 1*2apostrophe
+/// ```
+///
+/// # Safety
+///
+/// - `stream` must be UTF-8
+/// - `stream.starts_with(ML_LITERAL_STRING_DELIM)`
+fn lex_ml_literal_string(stream: &mut Stream<'_>) -> Token {
+ let start = stream.current_token_start();
+
+ let offset = ML_LITERAL_STRING_DELIM.len();
+ #[cfg(feature = "unsafe")] // SAFETY: only called when next character is ASCII
+ unsafe {
+ stream.next_slice_unchecked(offset)
+ };
+ #[cfg(not(feature = "unsafe"))]
+ stream.next_slice(offset);
+
+ let offset = match stream.as_bstr().find_slice(ML_LITERAL_STRING_DELIM) {
+ Some(span) => span.end,
+ None => stream.eof_offset(),
+ };
+ #[cfg(feature = "unsafe")]
+ // SAFETY: `ML_LITERAL_STRING_DELIM` ensure `offset` is along UTF-8 boundary
+ unsafe {
+ stream.next_slice_unchecked(offset)
+ };
+ #[cfg(not(feature = "unsafe"))]
+ stream.next_slice(offset);
+
+ if stream.as_bstr().peek_token() == Some(APOSTROPHE) {
+ let offset = 1;
+ #[cfg(feature = "unsafe")] // SAFETY: `APOSTROPHE` ensure `offset` is along UTF-8 boundary
+ unsafe {
+ stream.next_slice_unchecked(offset)
+ };
+ #[cfg(not(feature = "unsafe"))]
+ stream.next_slice(offset);
+
+ if stream.as_bstr().peek_token() == Some(APOSTROPHE) {
+ let offset = 1;
+ #[cfg(feature = "unsafe")]
+ // SAFETY: `APOSTROPHE` ensure `offset` is along UTF-8 boundary
+ unsafe {
+ stream.next_slice_unchecked(offset)
+ };
+ #[cfg(not(feature = "unsafe"))]
+ stream.next_slice(offset);
+ }
+ }
+
+ let end = stream.previous_token_end();
+ let span = Span::new_unchecked(start, end);
+ Token::new(TokenKind::MlLiteralString, span)
+}
+
+/// `ml-literal-string-delim = 3apostrophe`
+pub(crate) const ML_LITERAL_STRING_DELIM: &str = "'''";
+
+/// Process basic string
+///
+/// ```bnf
+/// ;; Basic String
+///
+/// basic-string = quotation-mark *basic-char quotation-mark
+///
+/// quotation-mark = %x22 ; "
+///
+/// basic-char = basic-unescaped / escaped
+/// basic-unescaped = wschar / %x21 / %x23-5B / %x5D-7E / non-ascii
+/// escaped = escape escape-seq-char
+///
+/// escape = %x5C ; \
+/// escape-seq-char = %x22 ; " quotation mark U+0022
+/// escape-seq-char =/ %x5C ; \ reverse solidus U+005C
+/// escape-seq-char =/ %x62 ; b backspace U+0008
+/// escape-seq-char =/ %x66 ; f form feed U+000C
+/// escape-seq-char =/ %x6E ; n line feed U+000A
+/// escape-seq-char =/ %x72 ; r carriage return U+000D
+/// escape-seq-char =/ %x74 ; t tab U+0009
+/// escape-seq-char =/ %x75 4HEXDIG ; uXXXX U+XXXX
+/// escape-seq-char =/ %x55 8HEXDIG ; UXXXXXXXX U+XXXXXXXX
+/// ```
+///
+/// # Safety
+///
+/// - `stream` must be UTF-8
+/// - `stream[0] == b'"'`
+fn lex_basic_string(stream: &mut Stream<'_>) -> Token {
+    let start = stream.current_token_start();
+
+    let offset = 1; // QUOTATION_MARK
+    #[cfg(feature = "unsafe")] // SAFETY: only called when next character is ASCII
+    unsafe {
+        stream.next_slice_unchecked(offset)
+    };
+    #[cfg(not(feature = "unsafe"))]
+    stream.next_slice(offset);
+
+    loop {
+        // newline is present for error recovery
+        match stream.as_bstr().find_slice((QUOTATION_MARK, ESCAPE, b'\n')) {
+            Some(span) => {
+                let found = stream.as_bstr()[span.start];
+                if found == QUOTATION_MARK {
+                    let offset = span.end;
+                    #[cfg(feature = "unsafe")]
+                    // SAFETY: `QUOTATION_MARK` ensure `offset` is along UTF-8 boundary
+                    unsafe {
+                        stream.next_slice_unchecked(offset)
+                    };
+                    #[cfg(not(feature = "unsafe"))]
+                    stream.next_slice(offset);
+                    break;
+                } else if found == ESCAPE {
+                    let offset = span.end;
+                    #[cfg(feature = "unsafe")]
+                    // SAFETY: `ESCAPE` / newline ensure `offset` is along UTF-8 boundary
+                    unsafe {
+                        stream.next_slice_unchecked(offset)
+                    };
+                    #[cfg(not(feature = "unsafe"))]
+                    stream.next_slice(offset);
+
+                    let peek = stream.as_bstr().peek_token();
+                    match peek {
+                        Some(ESCAPE) | Some(QUOTATION_MARK) => {
+                            let offset = 1; // ESCAPE / QUOTATION_MARK
+                            // consume the escaped `"`/`\` so it can't be taken as a delimiter
+                            #[cfg(feature = "unsafe")]
+                            // SAFETY: `ESCAPE`/`QUOTATION_MARK` are ASCII, so `offset` is along UTF-8 boundary
+                            unsafe {
+                                stream.next_slice_unchecked(offset)
+                            };
+                            #[cfg(not(feature = "unsafe"))]
+                            stream.next_slice(offset);
+                        }
+                        _ => {}
+                    }
+                    continue;
+                } else if found == b'\n' {
+                    let offset = span.start;
+                    #[cfg(feature = "unsafe")]
+                    // SAFETY: newline ensure `offset` is along UTF-8 boundary
+                    unsafe {
+                        stream.next_slice_unchecked(offset)
+                    };
+                    #[cfg(not(feature = "unsafe"))]
+                    stream.next_slice(offset);
+                    break;
+                } else {
+                    unreachable!("found `{found}`");
+                }
+            }
+            None => {
+                stream.finish();
+                break;
+            }
+        }
+    }
+
+    let end = stream.previous_token_end();
+    let span = Span::new_unchecked(start, end);
+    Token::new(TokenKind::BasicString, span)
+}
+
+/// `quotation-mark = %x22 ; "`
+pub(crate) const QUOTATION_MARK: u8 = b'"';
+
+/// `escape = %x5C ; \`
+pub(crate) const ESCAPE: u8 = b'\\';
+
+/// Process multi-line basic string
+///
+/// ```bnf
+/// ;; Multiline Basic String
+///
+/// ml-basic-string = ml-basic-string-delim [ newline ] ml-basic-body
+/// ml-basic-string-delim
+/// ml-basic-string-delim = 3quotation-mark
+/// ml-basic-body = *mlb-content *( mlb-quotes 1*mlb-content ) [ mlb-quotes ]
+///
+/// mlb-content = mlb-char / newline / mlb-escaped-nl
+/// mlb-char = mlb-unescaped / escaped
+/// mlb-quotes = 1*2quotation-mark
+/// mlb-unescaped = wschar / %x21 / %x23-5B / %x5D-7E / non-ascii
+/// mlb-escaped-nl = escape ws newline *( wschar / newline )
+/// ```
+///
+/// # Safety
+///
+/// - `stream` must be UTF-8
+/// - `stream.starts_with(ML_BASIC_STRING_DELIM)`
+fn lex_ml_basic_string(stream: &mut Stream<'_>) -> Token {
+ let start = stream.current_token_start();
+
+ // Consume the opening `"""` delimiter.
+ let offset = ML_BASIC_STRING_DELIM.len();
+ #[cfg(feature = "unsafe")] // SAFETY: only called when next character is ASCII
+ unsafe {
+ stream.next_slice_unchecked(offset)
+ };
+ #[cfg(not(feature = "unsafe"))]
+ stream.next_slice(offset);
+
+ loop {
+ // Scan for either the closing `"""` delimiter or a `\` escape; newlines
+ // are allowed content in a multi-line string, so they are not stop bytes.
+ match stream.as_bstr().find_slice((ML_BASIC_STRING_DELIM, "\\")) {
+ Some(span) => {
+ let found = stream.as_bstr()[span.start];
+ if found == QUOTATION_MARK {
+ // Found `"""`: consume through it and end the body scan.
+ let offset = span.end;
+ #[cfg(feature = "unsafe")]
+ // SAFETY: `QUOTATION_MARK` ensure `offset` is along UTF-8 boundary
+ unsafe {
+ stream.next_slice_unchecked(offset)
+ };
+ #[cfg(not(feature = "unsafe"))]
+ stream.next_slice(offset);
+ break;
+ } else if found == ESCAPE {
+ let offset = span.end;
+ #[cfg(feature = "unsafe")]
+ // SAFETY: `ESCAPE` ensure `offset` is along UTF-8 boundary
+ unsafe {
+ stream.next_slice_unchecked(offset)
+ };
+ #[cfg(not(feature = "unsafe"))]
+ stream.next_slice(offset);
+
+ // Consume an escaped `\` or `"` so it cannot be mistaken for part
+ // of the closing delimiter on the next iteration.
+ let peek = stream.as_bstr().peek_token();
+ match peek {
+ Some(ESCAPE) | Some(QUOTATION_MARK) => {
+ let offset = 1; // ESCAPE / QUOTATION_MARK
+ #[cfg(feature = "unsafe")]
+ // SAFETY: `QUOTATION_MARK`/`ESCAPE` ensure `offset` is along UTF-8 boundary
+ unsafe {
+ stream.next_slice_unchecked(offset)
+ };
+ #[cfg(not(feature = "unsafe"))]
+ stream.next_slice(offset);
+ }
+ _ => {}
+ }
+ continue;
+ } else {
+ unreachable!("found `{found}`");
+ }
+ }
+ None => {
+ // Unterminated string: consume the rest of the input for error recovery.
+ stream.finish();
+ break;
+ }
+ }
+ }
+ // Per `mlb-quotes`, content may end in one or two quotation marks directly
+ // before the closing delimiter (e.g. `""""""""`); the delimiter search above
+ // stops at the *first* `"""`, so consume up to two extra trailing quotes here.
+ if stream.as_bstr().peek_token() == Some(QUOTATION_MARK) {
+ let offset = 1;
+ #[cfg(feature = "unsafe")]
+ // SAFETY: `QUOTATION_MARK` ensure `offset` is along UTF-8 boundary
+ unsafe {
+ stream.next_slice_unchecked(offset)
+ };
+ #[cfg(not(feature = "unsafe"))]
+ stream.next_slice(offset);
+ if stream.as_bstr().peek_token() == Some(QUOTATION_MARK) {
+ let offset = 1;
+ #[cfg(feature = "unsafe")]
+ // SAFETY: `QUOTATION_MARK` ensure `offset` is along UTF-8 boundary
+ unsafe {
+ stream.next_slice_unchecked(offset)
+ };
+ #[cfg(not(feature = "unsafe"))]
+ stream.next_slice(offset);
+ }
+ }
+
+ let end = stream.previous_token_end();
+ let span = Span::new_unchecked(start, end);
+ Token::new(TokenKind::MlBasicString, span)
+}
+
+/// `ml-basic-string-delim = 3quotation-mark`
+pub(crate) const ML_BASIC_STRING_DELIM: &str = "\"\"\"";
+
+/// Process Atom
+///
+/// This is everything else
+///
+/// # Safety
+///
+/// - `stream` must be UTF-8
+/// - `stream` must be non-empty
+fn lex_atom(stream: &mut Stream<'_>) -> Token {
+ let start = stream.current_token_start();
+
+ // Intentionally leaves off quotes in case the opening quote was missing
+ const TOKEN_START: &[u8] = b".=,[]{} \t#\r\n";
+ // Take everything up to the next byte that starts another token kind;
+ // if none is found, the atom runs to end of input.
+ let offset = stream
+ .as_bstr()
+ .offset_for(|b| TOKEN_START.contains_token(b))
+ .unwrap_or_else(|| stream.eof_offset());
+ #[cfg(feature = "unsafe")] // SAFETY: `TOKEN_START` ensure `offset` is along UTF-8 boundary
+ unsafe {
+ stream.next_slice_unchecked(offset)
+ };
+ #[cfg(not(feature = "unsafe"))]
+ stream.next_slice(offset);
+
+ let end = stream.previous_token_end();
+ let span = Span::new_unchecked(start, end);
+ Token::new(TokenKind::Atom, span)
+}
diff --git a/third_party/rust/toml_parser/src/lexer/test.rs b/third_party/rust/toml_parser/src/lexer/test.rs
@@ -0,0 +1,1700 @@
+use super::*;
+
+use snapbox::assert_data_eq;
+use snapbox::prelude::*;
+use snapbox::str;
+
+#[test]
+fn test_lex_ascii_char() {
+ let cases = [(
+ ".trailing",
+ str![[r#"
+Token {
+ kind: Dot,
+ span: 0..1,
+}
+
+"#]]
+ .raw(),
+ str!["trailing"].raw(),
+ )];
+ for (stream, expected_tokens, expected_stream) in cases {
+ dbg!(stream);
+ let mut stream = Stream::new(stream);
+ let actual_tokens = lex_ascii_char(&mut stream, TokenKind::Dot);
+ assert_data_eq!(actual_tokens.to_debug(), expected_tokens.raw());
+ let stream = *stream;
+ assert_data_eq!(stream, expected_stream.raw());
+ }
+}
+
+#[test]
+fn test_lex_whitespace() {
+ let cases = [
+ (
+ " ",
+ str![[r#"
+Token {
+ kind: Whitespace,
+ span: 0..1,
+}
+
+"#]]
+ .raw(),
+ str![].raw(),
+ ),
+ (
+ " \t \t \t ",
+ str![[r#"
+Token {
+ kind: Whitespace,
+ span: 0..9,
+}
+
+"#]]
+ .raw(),
+ str![].raw(),
+ ),
+ (
+ " \n",
+ str![[r#"
+Token {
+ kind: Whitespace,
+ span: 0..1,
+}
+
+"#]]
+ .raw(),
+ str![[r#"
+
+
+"#]]
+ .raw(),
+ ),
+ (
+ " #",
+ str![[r#"
+Token {
+ kind: Whitespace,
+ span: 0..1,
+}
+
+"#]]
+ .raw(),
+ str!["#"].raw(),
+ ),
+ (
+ " a",
+ str![[r#"
+Token {
+ kind: Whitespace,
+ span: 0..1,
+}
+
+"#]]
+ .raw(),
+ str!["a"].raw(),
+ ),
+ ];
+ for (stream, expected_tokens, expected_stream) in cases {
+ dbg!(stream);
+ let mut stream = Stream::new(stream);
+ let actual_tokens = lex_whitespace(&mut stream);
+ assert_data_eq!(actual_tokens.to_debug(), expected_tokens.raw());
+ let stream = *stream;
+ assert_data_eq!(stream, expected_stream.raw());
+ }
+}
+
+#[test]
+fn test_lex_comment() {
+ let cases = [
+ (
+ "#",
+ str![[r#"
+Token {
+ kind: Comment,
+ span: 0..1,
+}
+
+"#]]
+ .raw(),
+ str![""].raw(),
+ ),
+ (
+ "# content",
+ str![[r#"
+Token {
+ kind: Comment,
+ span: 0..9,
+}
+
+"#]]
+ .raw(),
+ str![""].raw(),
+ ),
+ (
+ "# content \ntrailing",
+ str![[r#"
+Token {
+ kind: Comment,
+ span: 0..10,
+}
+
+"#]]
+ .raw(),
+ str![[r#"
+
+trailing
+"#]]
+ .raw(),
+ ),
+ (
+ "# content \r\ntrailing",
+ str![[r#"
+Token {
+ kind: Comment,
+ span: 0..10,
+}
+
+"#]]
+ .raw(),
+ str![[r#"
+
+trailing
+"#]]
+ .raw(),
+ ),
+ (
+ "# content \0continue",
+ str![[r#"
+Token {
+ kind: Comment,
+ span: 0..19,
+}
+
+"#]]
+ .raw(),
+ str![""].raw(),
+ ),
+ ];
+ for (stream, expected_tokens, expected_stream) in cases {
+ dbg!(stream);
+ let mut stream = Stream::new(stream);
+ let actual_tokens = lex_comment(&mut stream);
+ assert_data_eq!(actual_tokens.to_debug(), expected_tokens.raw());
+ let stream = *stream;
+ assert_data_eq!(stream, expected_stream.raw());
+ }
+}
+
+#[test]
+fn test_lex_crlf() {
+ let cases = [
+ (
+ "\r\ntrailing",
+ str![[r#"
+Token {
+ kind: Newline,
+ span: 0..2,
+}
+
+"#]]
+ .raw(),
+ str!["trailing"].raw(),
+ ),
+ (
+ "\rtrailing",
+ str![[r#"
+Token {
+ kind: Newline,
+ span: 0..1,
+}
+
+"#]]
+ .raw(),
+ str!["trailing"].raw(),
+ ),
+ ];
+ for (stream, expected_tokens, expected_stream) in cases {
+ dbg!(stream);
+ let mut stream = Stream::new(stream);
+ let actual_tokens = lex_crlf(&mut stream);
+ assert_data_eq!(actual_tokens.to_debug(), expected_tokens.raw());
+ let stream = *stream;
+ assert_data_eq!(stream, expected_stream.raw());
+ }
+}
+
+#[test]
+fn test_lex_literal_string() {
+ let cases = [
+ (
+ "''",
+ str![[r#"
+Token {
+ kind: LiteralString,
+ span: 0..2,
+}
+
+"#]]
+ .raw(),
+ str![""].raw(),
+ ),
+ (
+ "''trailing",
+ str![[r#"
+Token {
+ kind: LiteralString,
+ span: 0..2,
+}
+
+"#]]
+ .raw(),
+ str!["trailing"].raw(),
+ ),
+ (
+ "'content'trailing",
+ str![[r#"
+Token {
+ kind: LiteralString,
+ span: 0..9,
+}
+
+"#]]
+ .raw(),
+ str!["trailing"].raw(),
+ ),
+ (
+ "'content",
+ str![[r#"
+Token {
+ kind: LiteralString,
+ span: 0..8,
+}
+
+"#]]
+ .raw(),
+ str![""].raw(),
+ ),
+ (
+ "'content\ntrailing",
+ str![[r#"
+Token {
+ kind: LiteralString,
+ span: 0..8,
+}
+
+"#]]
+ .raw(),
+ str![[r#"
+
+trailing
+"#]]
+ .raw(),
+ ),
+ ];
+ for (stream, expected_tokens, expected_stream) in cases {
+ dbg!(stream);
+ let mut stream = Stream::new(stream);
+ let actual_tokens = lex_literal_string(&mut stream);
+ assert_data_eq!(actual_tokens.to_debug(), expected_tokens.raw());
+ let stream = *stream;
+ assert_data_eq!(stream, expected_stream.raw());
+ }
+}
+
+#[test]
+fn test_lex_ml_literal_string() {
+ let cases = [
+ (
+ "''''''",
+ str![[r#"
+Token {
+ kind: MlLiteralString,
+ span: 0..6,
+}
+
+"#]]
+ .raw(),
+ str![""].raw(),
+ ),
+ (
+ "''''''trailing",
+ str![[r#"
+Token {
+ kind: MlLiteralString,
+ span: 0..6,
+}
+
+"#]]
+ .raw(),
+ str!["trailing"].raw(),
+ ),
+ (
+ "'''content'''trailing",
+ str![[r#"
+Token {
+ kind: MlLiteralString,
+ span: 0..13,
+}
+
+"#]]
+ .raw(),
+ str!["trailing"].raw(),
+ ),
+ (
+ "'''content",
+ str![[r#"
+Token {
+ kind: MlLiteralString,
+ span: 0..10,
+}
+
+"#]]
+ .raw(),
+ str![""].raw(),
+ ),
+ (
+ "'''content'",
+ str![[r#"
+Token {
+ kind: MlLiteralString,
+ span: 0..11,
+}
+
+"#]]
+ .raw(),
+ str![""].raw(),
+ ),
+ (
+ "'''content''",
+ str![[r#"
+Token {
+ kind: MlLiteralString,
+ span: 0..12,
+}
+
+"#]]
+ .raw(),
+ str![""].raw(),
+ ),
+ (
+ "'''content\ntrailing",
+ str![[r#"
+Token {
+ kind: MlLiteralString,
+ span: 0..19,
+}
+
+"#]]
+ .raw(),
+ str![""].raw(),
+ ),
+ (
+ "'''''''trailing",
+ str![[r#"
+Token {
+ kind: MlLiteralString,
+ span: 0..7,
+}
+
+"#]]
+ .raw(),
+ str!["trailing"].raw(),
+ ),
+ (
+ "''''''''trailing",
+ str![[r#"
+Token {
+ kind: MlLiteralString,
+ span: 0..8,
+}
+
+"#]]
+ .raw(),
+ str!["trailing"].raw(),
+ ),
+ (
+ "'''''''''trailing",
+ str![[r#"
+Token {
+ kind: MlLiteralString,
+ span: 0..8,
+}
+
+"#]]
+ .raw(),
+ str!["'trailing"].raw(),
+ ),
+ (
+ "'''''content''''trailing",
+ str![[r#"
+Token {
+ kind: MlLiteralString,
+ span: 0..16,
+}
+
+"#]]
+ .raw(),
+ str!["trailing"].raw(),
+ ),
+ (
+ "'''''content'''''trailing",
+ str![[r#"
+Token {
+ kind: MlLiteralString,
+ span: 0..17,
+}
+
+"#]]
+ .raw(),
+ str!["trailing"].raw(),
+ ),
+ (
+ "'''''content''''''trailing",
+ str![[r#"
+Token {
+ kind: MlLiteralString,
+ span: 0..17,
+}
+
+"#]]
+ .raw(),
+ str!["'trailing"].raw(),
+ ),
+ ];
+ for (stream, expected_tokens, expected_stream) in cases {
+ dbg!(stream);
+ let mut stream = Stream::new(stream);
+ let actual_tokens = lex_ml_literal_string(&mut stream);
+ assert_data_eq!(actual_tokens.to_debug(), expected_tokens.raw());
+ let stream = *stream;
+ assert_data_eq!(stream, expected_stream.raw());
+ }
+}
+
+#[test]
+fn test_lex_basic_string() {
+ let cases = [
+ (
+ r#""""#,
+ str![[r#"
+Token {
+ kind: BasicString,
+ span: 0..2,
+}
+
+"#]]
+ .raw(),
+ str![].raw(),
+ ),
+ (
+ r#"""trailing"#,
+ str![[r#"
+Token {
+ kind: BasicString,
+ span: 0..2,
+}
+
+"#]]
+ .raw(),
+ str!["trailing"].raw(),
+ ),
+ (
+ r#""content"trailing"#,
+ str![[r#"
+Token {
+ kind: BasicString,
+ span: 0..9,
+}
+
+"#]]
+ .raw(),
+ str!["trailing"].raw(),
+ ),
+ (
+ r#""content"#,
+ str![[r#"
+Token {
+ kind: BasicString,
+ span: 0..8,
+}
+
+"#]]
+ .raw(),
+ str![].raw(),
+ ),
+ (
+ r#""content\ntrailing"#,
+ str![[r#"
+Token {
+ kind: BasicString,
+ span: 0..18,
+}
+
+"#]]
+ .raw(),
+ str![].raw(),
+ ),
+ ];
+ for (stream, expected_tokens, expected_stream) in cases {
+ dbg!(stream);
+ let mut stream = Stream::new(stream);
+ let actual_tokens = lex_basic_string(&mut stream);
+ assert_data_eq!(actual_tokens.to_debug(), expected_tokens.raw());
+ let stream = *stream;
+ assert_data_eq!(stream, expected_stream.raw());
+ }
+}
+
+#[test]
+fn test_lex_atom() {
+ let cases = [
+ (
+ "hello",
+ str![[r#"
+Token {
+ kind: Atom,
+ span: 0..5,
+}
+
+"#]]
+ .raw(),
+ str![""].raw(),
+ ),
+ (
+ "hello = world",
+ str![[r#"
+Token {
+ kind: Atom,
+ span: 0..5,
+}
+
+"#]]
+ .raw(),
+ str![" = world"].raw(),
+ ),
+ (
+ "1.100e100 ]",
+ str![[r#"
+Token {
+ kind: Atom,
+ span: 0..1,
+}
+
+"#]]
+ .raw(),
+ str![".100e100 ]"].raw(),
+ ),
+ (
+ "a.b.c = 5",
+ str![[r#"
+Token {
+ kind: Atom,
+ span: 0..1,
+}
+
+"#]]
+ .raw(),
+ str![".b.c = 5"].raw(),
+ ),
+ (
+ "true ]",
+ str![[r#"
+Token {
+ kind: Atom,
+ span: 0..4,
+}
+
+"#]]
+ .raw(),
+ str![" ]"].raw(),
+ ),
+ ];
+ for (stream, expected_tokens, expected_stream) in cases {
+ dbg!(stream);
+ let mut stream = Stream::new(stream);
+ let actual_tokens = lex_atom(&mut stream);
+ assert_data_eq!(actual_tokens.to_debug(), expected_tokens.raw());
+ let stream = *stream;
+ assert_data_eq!(stream, expected_stream.raw());
+ }
+}
+
+#[track_caller]
+fn t(input: &str, expected: impl IntoData) {
+ // Lex the input and compare the token stream against the snapshot.
+ let actual = crate::Source::new(input).lex().into_vec();
+ assert_data_eq!(actual.to_debug(), expected);
+
+ // The lexed spans must tile the input exactly: the first starts at 0, the
+ // last ends at `input.len()`, and adjacent spans share a boundary.
+ if let (Some(first), Some(last)) = (actual.first(), actual.last()) {
+ assert_eq!(first.span().start(), 0);
+ assert_eq!(last.span().end(), input.len());
+ for pair in actual.windows(2) {
+ assert_eq!(pair[0].span().end(), pair[1].span().start());
+ }
+ }
+}
+
+#[test]
+fn literal_strings() {
+ t(
+ "''",
+ str![[r#"
+[
+ Token {
+ kind: LiteralString,
+ span: 0..2,
+ },
+ Token {
+ kind: Eof,
+ span: 2..2,
+ },
+]
+
+"#]]
+ .raw(),
+ );
+ t(
+ "''''''",
+ str![[r#"
+[
+ Token {
+ kind: MlLiteralString,
+ span: 0..6,
+ },
+ Token {
+ kind: Eof,
+ span: 6..6,
+ },
+]
+
+"#]]
+ .raw(),
+ );
+ t(
+ "'''\n'''",
+ str![[r#"
+[
+ Token {
+ kind: MlLiteralString,
+ span: 0..7,
+ },
+ Token {
+ kind: Eof,
+ span: 7..7,
+ },
+]
+
+"#]]
+ .raw(),
+ );
+ t(
+ "'a'",
+ str![[r#"
+[
+ Token {
+ kind: LiteralString,
+ span: 0..3,
+ },
+ Token {
+ kind: Eof,
+ span: 3..3,
+ },
+]
+
+"#]]
+ .raw(),
+ );
+ t(
+ "'\"a'",
+ str![[r#"
+[
+ Token {
+ kind: LiteralString,
+ span: 0..4,
+ },
+ Token {
+ kind: Eof,
+ span: 4..4,
+ },
+]
+
+"#]]
+ .raw(),
+ );
+ t(
+ "''''a'''",
+ str![[r#"
+[
+ Token {
+ kind: MlLiteralString,
+ span: 0..8,
+ },
+ Token {
+ kind: Eof,
+ span: 8..8,
+ },
+]
+
+"#]]
+ .raw(),
+ );
+ t(
+ "'''\n'a\n'''",
+ str![[r#"
+[
+ Token {
+ kind: MlLiteralString,
+ span: 0..10,
+ },
+ Token {
+ kind: Eof,
+ span: 10..10,
+ },
+]
+
+"#]]
+ .raw(),
+ );
+ t(
+ "'''a\n'a\r\n'''",
+ str![[r#"
+[
+ Token {
+ kind: MlLiteralString,
+ span: 0..12,
+ },
+ Token {
+ kind: Eof,
+ span: 12..12,
+ },
+]
+
+"#]]
+ .raw(),
+ );
+}
+
+#[test]
+fn basic_strings() {
+ t(
+ r#""""#,
+ str![[r#"
+[
+ Token {
+ kind: BasicString,
+ span: 0..2,
+ },
+ Token {
+ kind: Eof,
+ span: 2..2,
+ },
+]
+
+"#]]
+ .raw(),
+ );
+ t(
+ r#""""""""#,
+ str![[r#"
+[
+ Token {
+ kind: MlBasicString,
+ span: 0..6,
+ },
+ Token {
+ kind: Eof,
+ span: 6..6,
+ },
+]
+
+"#]]
+ .raw(),
+ );
+ t(
+ r#""a""#,
+ str![[r#"
+[
+ Token {
+ kind: BasicString,
+ span: 0..3,
+ },
+ Token {
+ kind: Eof,
+ span: 3..3,
+ },
+]
+
+"#]]
+ .raw(),
+ );
+ t(
+ r#""""a""""#,
+ str![[r#"
+[
+ Token {
+ kind: MlBasicString,
+ span: 0..7,
+ },
+ Token {
+ kind: Eof,
+ span: 7..7,
+ },
+]
+
+"#]]
+ .raw(),
+ );
+ t(
+ r#""\t""#,
+ str![[r#"
+[
+ Token {
+ kind: BasicString,
+ span: 0..4,
+ },
+ Token {
+ kind: Eof,
+ span: 4..4,
+ },
+]
+
+"#]]
+ .raw(),
+ );
+ t(
+ r#""\u0000""#,
+ str![[r#"
+[
+ Token {
+ kind: BasicString,
+ span: 0..8,
+ },
+ Token {
+ kind: Eof,
+ span: 8..8,
+ },
+]
+
+"#]]
+ .raw(),
+ );
+ t(
+ r#""\U00000000""#,
+ str![[r#"
+[
+ Token {
+ kind: BasicString,
+ span: 0..12,
+ },
+ Token {
+ kind: Eof,
+ span: 12..12,
+ },
+]
+
+"#]]
+ .raw(),
+ );
+ t(
+ r#""\U000A0000""#,
+ str![[r#"
+[
+ Token {
+ kind: BasicString,
+ span: 0..12,
+ },
+ Token {
+ kind: Eof,
+ span: 12..12,
+ },
+]
+
+"#]]
+ .raw(),
+ );
+ t(
+ r#""\\t""#,
+ str![[r#"
+[
+ Token {
+ kind: BasicString,
+ span: 0..5,
+ },
+ Token {
+ kind: Eof,
+ span: 5..5,
+ },
+]
+
+"#]]
+ .raw(),
+ );
+ t(
+ "\"\t\"",
+ str![[r#"
+[
+ Token {
+ kind: BasicString,
+ span: 0..3,
+ },
+ Token {
+ kind: Eof,
+ span: 3..3,
+ },
+]
+
+"#]]
+ .raw(),
+ );
+ t(
+ "\"\"\"\n\t\"\"\"",
+ str![[r#"
+[
+ Token {
+ kind: MlBasicString,
+ span: 0..8,
+ },
+ Token {
+ kind: Eof,
+ span: 8..8,
+ },
+]
+
+"#]]
+ .raw(),
+ );
+ t(
+ "\"\"\"\\\n\"\"\"",
+ str![[r#"
+[
+ Token {
+ kind: MlBasicString,
+ span: 0..8,
+ },
+ Token {
+ kind: Eof,
+ span: 8..8,
+ },
+]
+
+"#]]
+ .raw(),
+ );
+ t(
+ "\"\"\"\\\n \t \t \\\r\n \t \n \t \r\n\"\"\"",
+ str![[r#"
+[
+ Token {
+ kind: MlBasicString,
+ span: 0..34,
+ },
+ Token {
+ kind: Eof,
+ span: 34..34,
+ },
+]
+
+"#]]
+ .raw(),
+ );
+ t(
+ r#""\r""#,
+ str![[r#"
+[
+ Token {
+ kind: BasicString,
+ span: 0..4,
+ },
+ Token {
+ kind: Eof,
+ span: 4..4,
+ },
+]
+
+"#]]
+ .raw(),
+ );
+ t(
+ r#""\n""#,
+ str![[r#"
+[
+ Token {
+ kind: BasicString,
+ span: 0..4,
+ },
+ Token {
+ kind: Eof,
+ span: 4..4,
+ },
+]
+
+"#]]
+ .raw(),
+ );
+ t(
+ r#""\b""#,
+ str![[r#"
+[
+ Token {
+ kind: BasicString,
+ span: 0..4,
+ },
+ Token {
+ kind: Eof,
+ span: 4..4,
+ },
+]
+
+"#]]
+ .raw(),
+ );
+ t(
+ r#""a\fa""#,
+ str![[r#"
+[
+ Token {
+ kind: BasicString,
+ span: 0..6,
+ },
+ Token {
+ kind: Eof,
+ span: 6..6,
+ },
+]
+
+"#]]
+ .raw(),
+ );
+ t(
+ r#""\"a""#,
+ str![[r#"
+[
+ Token {
+ kind: BasicString,
+ span: 0..5,
+ },
+ Token {
+ kind: Eof,
+ span: 5..5,
+ },
+]
+
+"#]]
+ .raw(),
+ );
+ t(
+ "\"\"\"\na\"\"\"",
+ str![[r#"
+[
+ Token {
+ kind: MlBasicString,
+ span: 0..8,
+ },
+ Token {
+ kind: Eof,
+ span: 8..8,
+ },
+]
+
+"#]]
+ .raw(),
+ );
+ t(
+ "\"\"\"\n\"\"\"",
+ str![[r#"
+[
+ Token {
+ kind: MlBasicString,
+ span: 0..7,
+ },
+ Token {
+ kind: Eof,
+ span: 7..7,
+ },
+]
+
+"#]]
+ .raw(),
+ );
+ t(
+ r#""""a\"""b""""#,
+ str![[r#"
+[
+ Token {
+ kind: MlBasicString,
+ span: 0..12,
+ },
+ Token {
+ kind: Eof,
+ span: 12..12,
+ },
+]
+
+"#]]
+ .raw(),
+ );
+ t(
+ r#""\a"#,
+ str![[r#"
+[
+ Token {
+ kind: BasicString,
+ span: 0..3,
+ },
+ Token {
+ kind: Eof,
+ span: 3..3,
+ },
+]
+
+"#]]
+ .raw(),
+ );
+ t(
+ "\"\\\n",
+ str![[r#"
+[
+ Token {
+ kind: BasicString,
+ span: 0..2,
+ },
+ Token {
+ kind: Newline,
+ span: 2..3,
+ },
+ Token {
+ kind: Eof,
+ span: 3..3,
+ },
+]
+
+"#]]
+ .raw(),
+ );
+ t(
+ "\"\\\r\n",
+ str![[r#"
+[
+ Token {
+ kind: BasicString,
+ span: 0..3,
+ },
+ Token {
+ kind: Newline,
+ span: 3..4,
+ },
+ Token {
+ kind: Eof,
+ span: 4..4,
+ },
+]
+
+"#]]
+ .raw(),
+ );
+ t(
+ "\"\\",
+ str![[r#"
+[
+ Token {
+ kind: BasicString,
+ span: 0..2,
+ },
+ Token {
+ kind: Eof,
+ span: 2..2,
+ },
+]
+
+"#]]
+ .raw(),
+ );
+ t(
+ "\"\u{0}",
+ str![[r#"
+[
+ Token {
+ kind: BasicString,
+ span: 0..2,
+ },
+ Token {
+ kind: Eof,
+ span: 2..2,
+ },
+]
+
+"#]]
+ .raw(),
+ );
+ t(
+ r#""\U00""#,
+ str![[r#"
+[
+ Token {
+ kind: BasicString,
+ span: 0..6,
+ },
+ Token {
+ kind: Eof,
+ span: 6..6,
+ },
+]
+
+"#]]
+ .raw(),
+ );
+ t(
+ r#""\U00"#,
+ str![[r#"
+[
+ Token {
+ kind: BasicString,
+ span: 0..5,
+ },
+ Token {
+ kind: Eof,
+ span: 5..5,
+ },
+]
+
+"#]]
+ .raw(),
+ );
+ t(
+ r#""\uD800"#,
+ str![[r#"
+[
+ Token {
+ kind: BasicString,
+ span: 0..7,
+ },
+ Token {
+ kind: Eof,
+ span: 7..7,
+ },
+]
+
+"#]]
+ .raw(),
+ );
+ t(
+ r#""\UFFFFFFFF"#,
+ str![[r#"
+[
+ Token {
+ kind: BasicString,
+ span: 0..11,
+ },
+ Token {
+ kind: Eof,
+ span: 11..11,
+ },
+]
+
+"#]]
+ .raw(),
+ );
+}
+
+#[test]
+fn keylike() {
+ t(
+ "foo",
+ str![[r#"
+[
+ Token {
+ kind: Atom,
+ span: 0..3,
+ },
+ Token {
+ kind: Eof,
+ span: 3..3,
+ },
+]
+
+"#]]
+ .raw(),
+ );
+ t(
+ "0bar",
+ str![[r#"
+[
+ Token {
+ kind: Atom,
+ span: 0..4,
+ },
+ Token {
+ kind: Eof,
+ span: 4..4,
+ },
+]
+
+"#]]
+ .raw(),
+ );
+ t(
+ "bar0",
+ str![[r#"
+[
+ Token {
+ kind: Atom,
+ span: 0..4,
+ },
+ Token {
+ kind: Eof,
+ span: 4..4,
+ },
+]
+
+"#]]
+ .raw(),
+ );
+ t(
+ "1234",
+ str![[r#"
+[
+ Token {
+ kind: Atom,
+ span: 0..4,
+ },
+ Token {
+ kind: Eof,
+ span: 4..4,
+ },
+]
+
+"#]]
+ .raw(),
+ );
+ t(
+ "a-b",
+ str![[r#"
+[
+ Token {
+ kind: Atom,
+ span: 0..3,
+ },
+ Token {
+ kind: Eof,
+ span: 3..3,
+ },
+]
+
+"#]]
+ .raw(),
+ );
+ t(
+ "a_B",
+ str![[r#"
+[
+ Token {
+ kind: Atom,
+ span: 0..3,
+ },
+ Token {
+ kind: Eof,
+ span: 3..3,
+ },
+]
+
+"#]]
+ .raw(),
+ );
+ t(
+ "-_-",
+ str![[r#"
+[
+ Token {
+ kind: Atom,
+ span: 0..3,
+ },
+ Token {
+ kind: Eof,
+ span: 3..3,
+ },
+]
+
+"#]]
+ .raw(),
+ );
+ t(
+ "___",
+ str![[r#"
+[
+ Token {
+ kind: Atom,
+ span: 0..3,
+ },
+ Token {
+ kind: Eof,
+ span: 3..3,
+ },
+]
+
+"#]]
+ .raw(),
+ );
+}
+
+#[test]
+fn all() {
+ t(
+ " a ",
+ str![[r#"
+[
+ Token {
+ kind: Whitespace,
+ span: 0..1,
+ },
+ Token {
+ kind: Atom,
+ span: 1..2,
+ },
+ Token {
+ kind: Whitespace,
+ span: 2..3,
+ },
+ Token {
+ kind: Eof,
+ span: 3..3,
+ },
+]
+
+"#]]
+ .raw(),
+ );
+
+ t(
+ " a\t [[]] \t [] {} , . =\n# foo \r\n#foo \n ",
+ str![[r#"
+[
+ Token {
+ kind: Whitespace,
+ span: 0..1,
+ },
+ Token {
+ kind: Atom,
+ span: 1..2,
+ },
+ Token {
+ kind: Whitespace,
+ span: 2..4,
+ },
+ Token {
+ kind: LeftSquareBracket,
+ span: 4..5,
+ },
+ Token {
+ kind: LeftSquareBracket,
+ span: 5..6,
+ },
+ Token {
+ kind: RightSquareBracket,
+ span: 6..7,
+ },
+ Token {
+ kind: RightSquareBracket,
+ span: 7..8,
+ },
+ Token {
+ kind: Whitespace,
+ span: 8..11,
+ },
+ Token {
+ kind: LeftSquareBracket,
+ span: 11..12,
+ },
+ Token {
+ kind: RightSquareBracket,
+ span: 12..13,
+ },
+ Token {
+ kind: Whitespace,
+ span: 13..14,
+ },
+ Token {
+ kind: LeftCurlyBracket,
+ span: 14..15,
+ },
+ Token {
+ kind: RightCurlyBracket,
+ span: 15..16,
+ },
+ Token {
+ kind: Whitespace,
+ span: 16..17,
+ },
+ Token {
+ kind: Comma,
+ span: 17..18,
+ },
+ Token {
+ kind: Whitespace,
+ span: 18..19,
+ },
+ Token {
+ kind: Dot,
+ span: 19..20,
+ },
+ Token {
+ kind: Whitespace,
+ span: 20..21,
+ },
+ Token {
+ kind: Equals,
+ span: 21..22,
+ },
+ Token {
+ kind: Newline,
+ span: 22..23,
+ },
+ Token {
+ kind: Comment,
+ span: 23..29,
+ },
+ Token {
+ kind: Newline,
+ span: 29..31,
+ },
+ Token {
+ kind: Comment,
+ span: 31..36,
+ },
+ Token {
+ kind: Newline,
+ span: 36..37,
+ },
+ Token {
+ kind: Whitespace,
+ span: 37..38,
+ },
+ Token {
+ kind: Eof,
+ span: 38..38,
+ },
+]
+
+"#]]
+ .raw(),
+ );
+}
+
+#[test]
+fn bare_cr_bad() {
+ t(
+ "\r",
+ str![[r#"
+[
+ Token {
+ kind: Newline,
+ span: 0..1,
+ },
+ Token {
+ kind: Eof,
+ span: 1..1,
+ },
+]
+
+"#]]
+ .raw(),
+ );
+ t(
+ "'\n",
+ str![[r#"
+[
+ Token {
+ kind: LiteralString,
+ span: 0..1,
+ },
+ Token {
+ kind: Newline,
+ span: 1..2,
+ },
+ Token {
+ kind: Eof,
+ span: 2..2,
+ },
+]
+
+"#]]
+ .raw(),
+ );
+ t(
+ "'\u{0}",
+ str![[r#"
+[
+ Token {
+ kind: LiteralString,
+ span: 0..2,
+ },
+ Token {
+ kind: Eof,
+ span: 2..2,
+ },
+]
+
+"#]]
+ .raw(),
+ );
+ t(
+ "'",
+ str![[r#"
+[
+ Token {
+ kind: LiteralString,
+ span: 0..1,
+ },
+ Token {
+ kind: Eof,
+ span: 1..1,
+ },
+]
+
+"#]]
+ .raw(),
+ );
+ t(
+ "\u{0}",
+ str![[r#"
+[
+ Token {
+ kind: Atom,
+ span: 0..1,
+ },
+ Token {
+ kind: Eof,
+ span: 1..1,
+ },
+]
+
+"#]]
+ .raw(),
+ );
+}
+
+#[test]
+fn bad_comment() {
+ t(
+ "#\u{0}",
+ str![[r#"
+[
+ Token {
+ kind: Comment,
+ span: 0..2,
+ },
+ Token {
+ kind: Eof,
+ span: 2..2,
+ },
+]
+
+"#]]
+ .raw(),
+ );
+}
diff --git a/third_party/rust/toml_parser/src/lexer/token.rs b/third_party/rust/toml_parser/src/lexer/token.rs
@@ -0,0 +1,106 @@
+//! Lexed TOML tokens
+
+use super::Span;
+use super::APOSTROPHE;
+use super::COMMENT_START_SYMBOL;
+use super::QUOTATION_MARK;
+use super::WSCHAR;
+use crate::decoder::Encoding;
+
+/// An unvalidated TOML Token
+#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)]
+pub struct Token {
+ pub(super) kind: TokenKind,
+ pub(super) span: Span,
+}
+
+impl Token {
+ /// Pairs a [`TokenKind`] with the source span it was lexed from
+ pub(super) fn new(kind: TokenKind, span: Span) -> Self {
+ Self { kind, span }
+ }
+
+ /// The kind of token that was lexed
+ #[inline(always)]
+ pub fn kind(&self) -> TokenKind {
+ self.kind
+ }
+
+ /// Byte range of this token within the original source
+ #[inline(always)]
+ pub fn span(&self) -> Span {
+ self.span
+ }
+}
+
+#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)]
+// Where a token has a single introducing byte, the discriminant reuses that
+// byte; multi-byte-delimited tokens get small arbitrary values.
+#[repr(u8)]
+pub enum TokenKind {
+ /// Either for dotted-key or float
+ Dot = b'.',
+ /// Key-value separator
+ Equals = b'=',
+ /// Value separator
+ Comma = b',',
+ /// Either array or standard-table start
+ LeftSquareBracket = b'[',
+ /// Either array or standard-table end
+ RightSquareBracket = b']',
+ /// Inline table start
+ LeftCurlyBracket = b'{',
+ /// Inline table end
+ RightCurlyBracket = b'}',
+ /// Run of whitespace (see [`WSCHAR`])
+ Whitespace = WSCHAR.0,
+ /// Comment (see [`COMMENT_START_SYMBOL`])
+ Comment = COMMENT_START_SYMBOL,
+ /// Line ending
+ Newline = b'\n',
+ /// `'`-delimited string
+ LiteralString = APOSTROPHE,
+ /// `"`-delimited string
+ BasicString = QUOTATION_MARK,
+ /// `'''`-delimited string; no single introducing byte, so the value is arbitrary
+ MlLiteralString = 1,
+ /// `"""`-delimited string
+ MlBasicString,
+
+ /// Anything else
+ Atom,
+
+ /// End of input
+ Eof,
+}
+
+impl TokenKind {
+ /// Human-readable name for this token kind, for use in diagnostics
+ pub const fn description(&self) -> &'static str {
+ match self {
+ Self::Dot => "`.`",
+ Self::Equals => "`=`",
+ Self::Comma => "`,`",
+ Self::LeftSquareBracket => "`[`",
+ Self::RightSquareBracket => "`]`",
+ Self::LeftCurlyBracket => "`{`",
+ Self::RightCurlyBracket => "`}`",
+ Self::Whitespace => "whitespace",
+ Self::Comment => "comment",
+ Self::Newline => "newline",
+ Self::LiteralString => "literal string",
+ Self::BasicString => "basic string",
+ Self::MlLiteralString => "multi-line literal string",
+ Self::MlBasicString => "multi-line basic string",
+ Self::Atom => "token",
+ Self::Eof => "end-of-input",
+ }
+ }
+
+ /// The string [`Encoding`] this token selects, or `None` for non-string tokens
+ //
+ // Deliberately exhaustive (no `_` arm) so adding a variant forces a review here.
+ pub fn encoding(&self) -> Option<Encoding> {
+ match self {
+ Self::LiteralString => Some(Encoding::LiteralString),
+ Self::BasicString => Some(Encoding::BasicString),
+ Self::MlLiteralString => Some(Encoding::MlLiteralString),
+ Self::MlBasicString => Some(Encoding::MlBasicString),
+ Self::Atom
+ | Self::LeftSquareBracket
+ | Self::RightSquareBracket
+ | Self::Dot
+ | Self::Equals
+ | Self::Comma
+ | Self::RightCurlyBracket
+ | Self::LeftCurlyBracket
+ | Self::Whitespace
+ | Self::Newline
+ | Self::Comment
+ | Self::Eof => None,
+ }
+ }
+}
diff --git a/third_party/rust/toml_parser/src/lib.rs b/third_party/rust/toml_parser/src/lib.rs
@@ -0,0 +1,49 @@
+//! TOML lexer and parser
+//!
+//! Characteristics:
+//! - Error recovery
+//! - Lazy validation
+//! - `forbid(unsafe)` by default, requiring the `unsafe` feature otherwise
+//! - `no_std` support, including putting users in charge of allocation choices (including not
+//! allocating)
+//!
+//! Full parsing is broken into three phases:
+//! 1. [Lexing tokens][lexer]
+//! 2. [Parsing tokens][parser] (push parser)
+//! 3. Organizing the physical layout into the logical layout,
+//! including [decoding keys and values][decoder]
+
+#![cfg_attr(all(not(feature = "std"), not(test)), no_std)]
+#![cfg_attr(docsrs, feature(doc_cfg))]
+#![cfg_attr(not(feature = "unsafe"), forbid(unsafe_code))]
+#![warn(clippy::std_instead_of_core)]
+#![warn(clippy::std_instead_of_alloc)]
+#![warn(clippy::print_stderr)]
+#![warn(clippy::print_stdout)]
+
+#[cfg(feature = "alloc")]
+extern crate alloc;
+
+#[macro_use]
+mod macros;
+
+#[cfg(feature = "debug")]
+pub(crate) mod debug;
+mod error;
+mod source;
+
+pub mod decoder;
+pub mod lexer;
+pub mod parser;
+
+pub use error::ErrorSink;
+pub use error::Expected;
+pub use error::ParseError;
+pub use source::Raw;
+pub use source::Source;
+pub use source::SourceIndex;
+pub use source::Span;
+
+#[doc = include_str!("../README.md")]
+#[cfg(doctest)]
+pub struct ReadmeDoctests;
diff --git a/third_party/rust/toml_parser/src/macros.rs b/third_party/rust/toml_parser/src/macros.rs
@@ -0,0 +1 @@
+
diff --git a/third_party/rust/toml_parser/src/parser/document.rs b/third_party/rust/toml_parser/src/parser/document.rs
@@ -0,0 +1,1641 @@
+use winnow::stream::Offset as _;
+use winnow::stream::Stream as _;
+use winnow::stream::TokenSlice;
+
+use super::EventReceiver;
+#[cfg(feature = "debug")]
+use crate::debug::DebugErrorSink;
+#[cfg(feature = "debug")]
+use crate::debug::DebugEventReceiver;
+use crate::decoder::Encoding;
+use crate::lexer::Token;
+use crate::lexer::TokenKind;
+use crate::ErrorSink;
+use crate::Expected;
+use crate::ParseError;
+
+/// Parse lexed tokens into [`Event`][super::Event]s
+pub fn parse_document(
+    tokens: &[Token],
+    receiver: &mut dyn EventReceiver,
+    error: &mut dyn ErrorSink,
+) {
+    let mut tokens = TokenSlice::new(tokens); // cursor over the lexed tokens
+    #[cfg(feature = "debug")] // with "debug", wrap receiver and sink in tracing adapters
+    let mut receiver = DebugEventReceiver::new(receiver);
+    #[cfg(feature = "debug")]
+    let receiver = &mut receiver; // re-borrow so the call sites below are feature-agnostic
+    #[cfg(feature = "debug")]
+    let mut error = DebugErrorSink::new(error);
+    #[cfg(feature = "debug")]
+    let error = &mut error;
+    document(&mut tokens, receiver, error); // parse every expression in the document
+    eof(&mut tokens, receiver, error); // then handle the trailing end-of-input token
+}
+
+/// Parse lexed tokens into [`Event`][super::Event]s
+pub fn parse_key(tokens: &[Token], receiver: &mut dyn EventReceiver, error: &mut dyn ErrorSink) {
+    let mut tokens = TokenSlice::new(tokens); // cursor over the lexed tokens
+    #[cfg(feature = "debug")] // with "debug", wrap receiver and sink in tracing adapters
+    let mut receiver = DebugEventReceiver::new(receiver);
+    #[cfg(feature = "debug")]
+    let receiver = &mut receiver;
+    #[cfg(feature = "debug")]
+    let mut error = DebugErrorSink::new(error);
+    #[cfg(feature = "debug")]
+    let error = &mut error;
+    key(&mut tokens, "invalid key", receiver, error); // a full (possibly dotted) key
+    eof(&mut tokens, receiver, error); // nothing may follow the key
+}
+
+/// Parse lexed tokens into [`Event`][super::Event]s
+pub fn parse_simple_key(
+    tokens: &[Token],
+    receiver: &mut dyn EventReceiver,
+    error: &mut dyn ErrorSink,
+) {
+    let mut tokens = TokenSlice::new(tokens); // cursor over the lexed tokens
+    #[cfg(feature = "debug")] // with "debug", wrap receiver and sink in tracing adapters
+    let mut receiver = DebugEventReceiver::new(receiver);
+    #[cfg(feature = "debug")]
+    let receiver = &mut receiver;
+    #[cfg(feature = "debug")]
+    let mut error = DebugErrorSink::new(error);
+    #[cfg(feature = "debug")]
+    let error = &mut error;
+    simple_key(&mut tokens, "invalid key", receiver, error); // exactly one key segment, no dots
+    eof(&mut tokens, receiver, error); // nothing may follow the key
+}
+
+/// Parse lexed tokens into [`Event`][super::Event]s
+pub fn parse_value(tokens: &[Token], receiver: &mut dyn EventReceiver, error: &mut dyn ErrorSink) {
+    let mut tokens = TokenSlice::new(tokens); // cursor over the lexed tokens
+    #[cfg(feature = "debug")] // with "debug", wrap receiver and sink in tracing adapters
+    let mut receiver = DebugEventReceiver::new(receiver);
+    #[cfg(feature = "debug")]
+    let receiver = &mut receiver;
+    #[cfg(feature = "debug")]
+    let mut error = DebugErrorSink::new(error);
+    #[cfg(feature = "debug")]
+    let error = &mut error;
+    value(&mut tokens, receiver, error); // a single TOML value
+    eof(&mut tokens, receiver, error); // nothing may follow the value
+}
+
+type Stream<'i> = TokenSlice<'i, Token>; // the parser's cursor type over lexed tokens
+
+/// Parse a TOML Document
+///
+/// Only the order of [`Event`][super::Event]s is validated and not [`Event`][super::Event] content nor semantics like duplicate
+/// keys.
+///
+/// ```bnf
+/// toml = expression *( newline expression )
+///
+/// expression = ws [ comment ]
+/// expression =/ ws keyval ws [ comment ]
+/// expression =/ ws table ws [ comment ]
+///
+/// ;; Key-Value pairs
+///
+/// keyval = key keyval-sep val
+///
+/// key = simple-key / dotted-key
+/// simple-key = quoted-key / unquoted-key
+///
+/// quoted-key = basic-string / literal-string
+/// dotted-key = simple-key 1*( dot-sep simple-key )
+///
+/// dot-sep = ws %x2E ws ; . Period
+/// keyval-sep = ws %x3D ws ; =
+///
+/// val = string / boolean / array / inline-table / date-time / float / integer
+///
+/// ;; Array
+///
+/// array = array-open [ array-values ] ws-comment-newline array-close
+///
+/// array-open = %x5B ; [
+/// array-close = %x5D ; ]
+///
+/// array-values = ws-comment-newline val ws-comment-newline array-sep array-values
+/// array-values =/ ws-comment-newline val ws-comment-newline [ array-sep ]
+///
+/// array-sep = %x2C ; , Comma
+///
+/// ;; Table
+///
+/// table = std-table / array-table
+///
+/// ;; Standard Table
+///
+/// std-table = std-table-open key std-table-close
+///
+/// ;; Inline Table
+///
+/// inline-table = inline-table-open [ inline-table-keyvals ] inline-table-close
+///
+/// inline-table-keyvals = keyval [ inline-table-sep inline-table-keyvals ]
+///
+/// ;; Array Table
+///
+/// array-table = array-table-open key array-table-close
+/// ```
+fn document(tokens: &mut Stream<'_>, receiver: &mut dyn EventReceiver, error: &mut dyn ErrorSink) {
+    while let Some(current_token) = tokens.next_token() {
+        match current_token.kind() { // dispatch on the first token of each expression
+            TokenKind::LeftSquareBracket => on_table(tokens, current_token, receiver, error),
+            TokenKind::RightSquareBracket => { // `]` with no `[`: recover
+                on_missing_std_table(tokens, current_token, receiver, error);
+            }
+            TokenKind::LiteralString => on_expression_key(
+                tokens,
+                current_token,
+                Some(Encoding::LiteralString),
+                receiver,
+                error,
+            ),
+            TokenKind::BasicString => on_expression_key(
+                tokens,
+                current_token,
+                Some(Encoding::BasicString),
+                receiver,
+                error,
+            ),
+            TokenKind::MlLiteralString => on_expression_key(
+                tokens,
+                current_token,
+                Some(Encoding::MlLiteralString),
+                receiver,
+                error,
+            ),
+            TokenKind::MlBasicString => on_expression_key(
+                tokens,
+                current_token,
+                Some(Encoding::MlBasicString),
+                receiver,
+                error,
+            ),
+            TokenKind::Atom => on_expression_key(tokens, current_token, None, receiver, error),
+            TokenKind::Equals => { // `= value` with no key
+                let fake_key = current_token.span().before(); // zero-width placeholder key
+                let encoding = None;
+                receiver.simple_key(fake_key, encoding, error);
+                on_expression_key_val_sep(tokens, current_token, receiver, error);
+            }
+            TokenKind::Dot => { // `.key = ...`: leading dot with no first segment
+                on_expression_dot(tokens, current_token, receiver, error);
+            }
+            TokenKind::Comma | TokenKind::RightCurlyBracket | TokenKind::LeftCurlyBracket => {
+                on_missing_expression_key(tokens, current_token, receiver, error); // cannot start an expression
+            }
+            TokenKind::Whitespace => receiver.whitespace(current_token.span(), error),
+            TokenKind::Newline => receiver.newline(current_token.span(), error),
+            TokenKind::Comment => on_comment(tokens, current_token, receiver, error),
+            TokenKind::Eof => {
+                break; // end of input; caller reports the Eof event
+            }
+        }
+    }
+}
+
+/// Start a table from the open token
+///
+/// This eats to EOL
+///
+/// ```bnf
+/// ;; Table
+///
+/// table = std-table / array-table
+///
+/// ;; Standard Table
+///
+/// std-table = std-table-open key std-table-close
+///
+/// ;; Array Table
+///
+/// array-table = array-table-open key array-table-close
+/// ```
+fn on_table(
+    tokens: &mut Stream<'_>,
+    open_token: &Token,
+    receiver: &mut dyn EventReceiver,
+    error: &mut dyn ErrorSink,
+) {
+    let is_array_table = if let Some(second_open_token) = // a second `[` selects array-of-tables
+        next_token_if(tokens, |k| matches!(k, TokenKind::LeftSquareBracket))
+    {
+        let span = open_token.span().append(second_open_token.span()); // whole `[[`
+        receiver.array_table_open(span, error);
+        true
+    } else {
+        let span = open_token.span();
+        receiver.std_table_open(span, error);
+        false
+    };
+
+    opt_whitespace(tokens, receiver, error);
+
+    let valid_key = key(tokens, "invalid table", receiver, error);
+
+    opt_whitespace(tokens, receiver, error);
+
+    let mut success = false;
+    if let Some(close_token) = next_token_if(tokens, |k| matches!(k, TokenKind::RightSquareBracket))
+    {
+        if is_array_table {
+            if let Some(second_close_token) = // array tables need the matching `]]`
+                next_token_if(tokens, |k| matches!(k, TokenKind::RightSquareBracket))
+            {
+                let span = close_token.span().append(second_close_token.span());
+                receiver.array_table_close(span, error);
+                success = true;
+            } else {
+                let context = open_token.span().append(close_token.span());
+                error.report_error(
+                    ParseError::new("unclosed array table")
+                        .with_context(context)
+                        .with_expected(&[Expected::Literal("]")])
+                        .with_unexpected(close_token.span().after()),
+                );
+            }
+        } else {
+            receiver.std_table_close(close_token.span(), error);
+            success = true;
+        }
+    } else if valid_key { // key parsed but no `]` at all: point at where it should have been
+        let last_key_token = tokens
+            .previous_tokens()
+            .find(|t| t.kind() != TokenKind::Whitespace) // last substantive token of the key
+            .unwrap_or(open_token);
+        let context = open_token.span().append(last_key_token.span());
+        if is_array_table {
+            error.report_error(
+                ParseError::new("unclosed array table")
+                    .with_context(context)
+                    .with_expected(&[Expected::Literal("]]")])
+                    .with_unexpected(last_key_token.span().after()),
+            );
+        } else {
+            error.report_error(
+                ParseError::new("unclosed table")
+                    .with_context(context)
+                    .with_expected(&[Expected::Literal("]")])
+                    .with_unexpected(last_key_token.span().after()),
+            );
+        }
+    }
+
+    if success {
+        ws_comment_newline(tokens, receiver, error); // consume the rest of the line normally
+    } else {
+        ignore_to_newline(tokens, receiver, error); // error recovery: discard to end of line
+    }
+}
+
+/// Parse a TOML key
+///
+/// ```bnf
+/// ;; Key-Value pairs
+///
+/// key = simple-key / dotted-key
+/// simple-key = quoted-key / unquoted-key
+///
+/// quoted-key = basic-string / literal-string
+/// dotted-key = simple-key 1*( dot-sep simple-key )
+///
+/// dot-sep = ws %x2E ws ; . Period
+/// ```
+fn key(
+    tokens: &mut Stream<'_>,
+    invalid_description: &'static str,
+    receiver: &mut dyn EventReceiver,
+    error: &mut dyn ErrorSink,
+) -> bool { // true when a complete key (including any dotted tail) was parsed
+    while let Some(current_token) = tokens.next_token() {
+        let encoding = match current_token.kind() {
+            TokenKind::RightSquareBracket // none of these can start a key
+            | TokenKind::Comment
+            | TokenKind::Equals
+            | TokenKind::Comma
+            | TokenKind::LeftSquareBracket
+            | TokenKind::LeftCurlyBracket
+            | TokenKind::RightCurlyBracket
+            | TokenKind::Newline
+            | TokenKind::Eof => {
+                let fake_key = current_token.span().before(); // zero-width placeholder key
+                let encoding = None;
+                receiver.simple_key(fake_key, encoding, error);
+                seek(tokens, -1); // put the token back for the caller
+                return false;
+            }
+            TokenKind::Whitespace => {
+                receiver.whitespace(current_token.span(), error);
+                continue; // skip leading whitespace
+            }
+            TokenKind::Dot => { // leading `.`: empty first key segment
+                let fake_key = current_token.span().before();
+                let encoding = None;
+                receiver.simple_key(fake_key, encoding, error);
+                receiver.key_sep(current_token.span(), error);
+                continue;
+            }
+            TokenKind::LiteralString => Some(Encoding::LiteralString),
+            TokenKind::BasicString => Some(Encoding::BasicString),
+            TokenKind::MlLiteralString => Some(Encoding::MlLiteralString),
+            TokenKind::MlBasicString => Some(Encoding::MlBasicString),
+            TokenKind::Atom => None, // unquoted key
+        };
+        receiver.simple_key(current_token.span(), encoding, error);
+        return opt_dot_keys(tokens, receiver, error); // continue with `.`-separated segments
+    }
+
+    let previous_span = tokens // input exhausted: anchor the error on the last substantive token
+        .previous_tokens()
+        .find(|t| {
+            !matches!(
+                t.kind(),
+                TokenKind::Whitespace | TokenKind::Comment | TokenKind::Newline | TokenKind::Eof
+            )
+        })
+        .map(|t| t.span())
+        .unwrap_or_default();
+    error.report_error(
+        ParseError::new(invalid_description)
+            .with_context(previous_span)
+            .with_expected(&[Expected::Description("key")])
+            .with_unexpected(previous_span.after()),
+    );
+    false
+}
+
+/// Start an expression from a key compatible token type
+///
+/// ```abnf
+/// expression = ws [ comment ]
+/// expression =/ ws keyval ws [ comment ]
+/// expression =/ ws table ws [ comment ]
+///
+/// ;; Key-Value pairs
+///
+/// keyval = key keyval-sep val
+/// ```
+fn on_expression_key<'i>(
+    tokens: &mut Stream<'i>,
+    key_token: &'i Token, // already consumed by the caller
+    encoding: Option<Encoding>, // None for unquoted (Atom) keys
+    receiver: &mut dyn EventReceiver,
+    error: &mut dyn ErrorSink,
+) {
+    receiver.simple_key(key_token.span(), encoding, error);
+    opt_dot_keys(tokens, receiver, error); // any `.`-separated segments after the first
+
+    opt_whitespace(tokens, receiver, error);
+
+    let Some(eq_token) = next_token_if(tokens, |k| matches!(k, TokenKind::Equals)) else { // no `=` after the key
+        if let Some(peek_token) = tokens.first() {
+            let span = peek_token.span().before(); // zero-width: where `=` should have been
+            error.report_error(
+                ParseError::new("key with no value")
+                    .with_context(span)
+                    .with_expected(&[Expected::Literal("=")])
+                    .with_unexpected(span),
+            );
+        }
+        ignore_to_newline(tokens, receiver, error); // recovery: discard to end of line
+        return;
+    };
+    on_expression_key_val_sep(tokens, eq_token, receiver, error);
+}
+
+fn on_expression_dot<'i>( // expression starting with `.`: treat the first key segment as empty
+    tokens: &mut Stream<'i>,
+    dot_token: &'i Token,
+    receiver: &mut dyn EventReceiver,
+    error: &mut dyn ErrorSink,
+) {
+    receiver.simple_key(dot_token.span().before(), None, error); // zero-width placeholder key
+    seek(tokens, -1); // put the `.` back so opt_dot_keys sees it as a separator
+    opt_dot_keys(tokens, receiver, error);
+
+    opt_whitespace(tokens, receiver, error);
+
+    let Some(eq_token) = next_token_if(tokens, |k| matches!(k, TokenKind::Equals)) else { // no `=` after the key
+        if let Some(peek_token) = tokens.first() {
+            let span = peek_token.span().before(); // zero-width: where `=` should have been
+            error.report_error(
+                ParseError::new("missing value for key")
+                    .with_context(span)
+                    .with_expected(&[Expected::Literal("=")])
+                    .with_unexpected(span),
+            );
+        }
+        ignore_to_newline(tokens, receiver, error); // recovery: discard to end of line
+        return;
+    };
+    on_expression_key_val_sep(tokens, eq_token, receiver, error);
+}
+
+fn on_expression_key_val_sep<'i>( // from the `=` onward: `= val ws [comment] newline`
+    tokens: &mut Stream<'i>,
+    eq_token: &'i Token, // already consumed by the caller
+    receiver: &mut dyn EventReceiver,
+    error: &mut dyn ErrorSink,
+) {
+    receiver.key_val_sep(eq_token.span(), error);
+
+    opt_whitespace(tokens, receiver, error);
+
+    value(tokens, receiver, error); // the right-hand side
+
+    ws_comment_newline(tokens, receiver, error); // trailing whitespace/comment/newline
+}
+
+/// Parse a TOML simple key
+///
+/// ```bnf
+/// ;; Key-Value pairs
+///
+/// simple-key = quoted-key / unquoted-key
+///
+/// quoted-key = basic-string / literal-string
+/// ```
+fn simple_key(
+    tokens: &mut Stream<'_>,
+    invalid_description: &'static str,
+    receiver: &mut dyn EventReceiver,
+    error: &mut dyn ErrorSink,
+) {
+    let Some(current_token) = tokens.next_token() else { // no tokens at all
+        let previous_span = tokens // anchor the error on the last substantive token
+            .previous_tokens()
+            .find(|t| {
+                !matches!(
+                    t.kind(),
+                    TokenKind::Whitespace
+                        | TokenKind::Comment
+                        | TokenKind::Newline
+                        | TokenKind::Eof
+                )
+            })
+            .map(|t| t.span())
+            .unwrap_or_default();
+        error.report_error(
+            ParseError::new(invalid_description)
+                .with_context(previous_span)
+                .with_expected(&[Expected::Description("key")])
+                .with_unexpected(previous_span.after()),
+        );
+        return;
+    };
+
+    const EXPECTED_KEYS: [Expected; 3] = [ // what a simple key may legally be
+        Expected::Description(Encoding::LiteralString.description()),
+        Expected::Description(Encoding::BasicString.description()),
+        Expected::Description(UNQUOTED_STRING),
+    ];
+
+    let kind = match current_token.kind() {
+        TokenKind::Dot // none of these can be a simple key
+        | TokenKind::RightSquareBracket
+        | TokenKind::Comment
+        | TokenKind::Equals
+        | TokenKind::Comma
+        | TokenKind::LeftSquareBracket
+        | TokenKind::LeftCurlyBracket
+        | TokenKind::RightCurlyBracket
+        | TokenKind::Newline
+        | TokenKind::Eof
+        | TokenKind::Whitespace => {
+            on_missing_key(tokens, current_token, invalid_description, receiver, error);
+            return;
+        }
+        TokenKind::LiteralString => Some(Encoding::LiteralString),
+        TokenKind::BasicString => Some(Encoding::BasicString),
+        TokenKind::MlLiteralString => { // multi-line strings are not valid keys; report but keep going
+            error.report_error(
+                ParseError::new(invalid_description)
+                    .with_context(current_token.span())
+                    .with_expected(&EXPECTED_KEYS)
+                    .with_unexpected(current_token.span()),
+            );
+            Some(Encoding::MlLiteralString)
+        }
+        TokenKind::MlBasicString => { // same: invalid as a key, still emitted for recovery
+            error.report_error(
+                ParseError::new(invalid_description)
+                    .with_context(current_token.span())
+                    .with_expected(&EXPECTED_KEYS)
+                    .with_unexpected(current_token.span()),
+            );
+            Some(Encoding::MlBasicString)
+        }
+        TokenKind::Atom => None, // unquoted key
+    };
+    receiver.simple_key(current_token.span(), kind, error);
+}
+
+/// Start a key from the first key compatible token type
+///
+/// Returns the last key on success
+///
+/// This will swallow the trailing [`TokenKind::Whitespace`]
+///
+/// ```abnf
+/// key = simple-key / dotted-key
+/// simple-key = quoted-key / unquoted-key
+///
+/// quoted-key = basic-string / literal-string
+/// dotted-key = simple-key 1*( dot-sep simple-key )
+///
+/// dot-sep = ws %x2E ws ; . Period
+/// ```
+fn opt_dot_keys(
+    tokens: &mut Stream<'_>,
+    receiver: &mut dyn EventReceiver,
+    error: &mut dyn ErrorSink,
+) -> bool { // false when a dotted segment was missing/invalid
+    opt_whitespace(tokens, receiver, error);
+
+    let mut success = true;
+    'dot: while let Some(dot_token) = next_token_if(tokens, |k| matches!(k, TokenKind::Dot)) { // one pass per `.` separator
+        receiver.key_sep(dot_token.span(), error);
+
+        while let Some(current_token) = tokens.next_token() {
+            let kind = match current_token.kind() {
+                TokenKind::Equals // a dot followed by a non-key token
+                | TokenKind::Comma
+                | TokenKind::LeftSquareBracket
+                | TokenKind::RightSquareBracket
+                | TokenKind::LeftCurlyBracket
+                | TokenKind::RightCurlyBracket
+                | TokenKind::Comment
+                | TokenKind::Newline
+                | TokenKind::Eof => {
+                    let fake_key = current_token.span().before(); // zero-width placeholder segment
+                    let encoding = None;
+                    receiver.simple_key(fake_key, encoding, error);
+                    seek(tokens, -1); // put the token back for the caller
+
+                    success = false;
+                    break 'dot;
+                }
+                TokenKind::Whitespace => {
+                    receiver.whitespace(current_token.span(), error);
+                    continue; // whitespace is allowed around dot-sep
+                }
+                TokenKind::Dot => { // `..`: empty segment between two dots
+                    let fake_key = current_token.span().before();
+                    let encoding = None;
+                    receiver.simple_key(fake_key, encoding, error);
+                    receiver.key_sep(current_token.span(), error);
+                    continue;
+                }
+                TokenKind::LiteralString => Some(Encoding::LiteralString),
+                TokenKind::BasicString => Some(Encoding::BasicString),
+                TokenKind::MlLiteralString => Some(Encoding::MlLiteralString),
+                TokenKind::MlBasicString => Some(Encoding::MlBasicString),
+                TokenKind::Atom => None, // unquoted segment
+            };
+            receiver.simple_key(current_token.span(), kind, error);
+            opt_whitespace(tokens, receiver, error);
+            continue 'dot; // segment done; look for another separator
+        }
+
+        let fake_key = dot_token.span().after(); // input ended right after a dot
+        let encoding = None;
+        receiver.simple_key(fake_key, encoding, error);
+    }
+
+    success
+}
+
+/// Parse a value
+///
+/// ```abnf
+/// val = string / boolean / array / inline-table / date-time / float / integer
+/// ```
+fn value(tokens: &mut Stream<'_>, receiver: &mut dyn EventReceiver, error: &mut dyn ErrorSink) {
+    let Some(current_token) = tokens.next_token() else { // no tokens left at all
+        let previous_span = tokens // anchor the error on the last substantive token
+            .previous_tokens()
+            .find(|t| {
+                !matches!(
+                    t.kind(),
+                    TokenKind::Whitespace
+                        | TokenKind::Comment
+                        | TokenKind::Newline
+                        | TokenKind::Eof
+                )
+            })
+            .map(|t| t.span())
+            .unwrap_or_default();
+        error.report_error(
+            ParseError::new("missing value")
+                .with_context(previous_span)
+                .with_expected(&[Expected::Description("value")])
+                .with_unexpected(previous_span.after()),
+        );
+        return;
+    };
+
+    match current_token.kind() {
+        TokenKind::Comment // nothing that could be a value: emit a zero-width scalar
+        | TokenKind::Comma
+        | TokenKind::Newline
+        | TokenKind::Eof
+        | TokenKind::Whitespace => {
+            let fake_key = current_token.span().before();
+            let encoding = None;
+            receiver.scalar(fake_key, encoding, error);
+            seek(tokens, -1); // put the token back for the caller
+        }
+        TokenKind::Equals => { // stray `=`: flag it and retry with the next token
+            error.report_error(
+                ParseError::new("extra `=`")
+                    .with_context(current_token.span())
+                    .with_expected(&[])
+                    .with_unexpected(current_token.span()),
+            );
+            receiver.error(current_token.span(), error);
+            value(tokens, receiver, error);
+        }
+        TokenKind::LeftCurlyBracket => {
+            on_inline_table_open(tokens, current_token, receiver, error);
+        }
+        TokenKind::RightCurlyBracket => { // `}` with no `{`: synthesize an empty inline table
+            error.report_error(
+                ParseError::new("missing inline table opening")
+                    .with_context(current_token.span())
+                    .with_expected(&[Expected::Literal("{")])
+                    .with_unexpected(current_token.span().before()),
+            );
+
+            let _ = receiver.inline_table_open(current_token.span().before(), error);
+            receiver.inline_table_close(current_token.span(), error);
+        }
+        TokenKind::LeftSquareBracket => {
+            on_array_open(tokens, current_token, receiver, error);
+        }
+        TokenKind::RightSquareBracket => { // `]` with no `[`: synthesize an empty array
+            error.report_error(
+                ParseError::new("missing array opening")
+                    .with_context(current_token.span())
+                    .with_expected(&[Expected::Literal("[")])
+                    .with_unexpected(current_token.span().before()),
+            );
+
+            let _ = receiver.array_open(current_token.span().before(), error);
+            receiver.array_close(current_token.span(), error);
+        }
+        TokenKind::LiteralString
+        | TokenKind::BasicString
+        | TokenKind::MlLiteralString
+        | TokenKind::MlBasicString
+        | TokenKind::Dot
+        | TokenKind::Atom => {
+            on_scalar(tokens, current_token, receiver, error);
+        }
+    }
+}
+
+/// Parse a scalar value
+///
+/// ```abnf
+/// val = string / boolean / array / inline-table / date-time / float / integer
+/// ```
+fn on_scalar(
+    tokens: &mut Stream<'_>,
+    scalar: &Token, // already consumed by the caller
+    receiver: &mut dyn EventReceiver,
+    error: &mut dyn ErrorSink,
+) {
+    let mut span = scalar.span();
+    let encoding = match scalar.kind() {
+        TokenKind::Comment // callers (`value`, array/table loops) never route these here
+        | TokenKind::Comma
+        | TokenKind::Newline
+        | TokenKind::Eof
+        | TokenKind::Whitespace
+        | TokenKind::Equals
+        | TokenKind::LeftCurlyBracket
+        | TokenKind::RightCurlyBracket
+        | TokenKind::LeftSquareBracket
+        | TokenKind::RightSquareBracket => {
+            unreachable!()
+        }
+        TokenKind::LiteralString => Some(Encoding::LiteralString),
+        TokenKind::BasicString => Some(Encoding::BasicString),
+        TokenKind::MlLiteralString => Some(Encoding::MlLiteralString),
+        TokenKind::MlBasicString => Some(Encoding::MlBasicString),
+        TokenKind::Dot | TokenKind::Atom => { // merge adjacent atoms/dots into one scalar span
+            while let Some(next_token) = tokens.first() {
+                match next_token.kind() {
+                    TokenKind::Comment // anything else ends the scalar
+                    | TokenKind::Comma
+                    | TokenKind::Newline
+                    | TokenKind::Eof
+                    | TokenKind::Equals
+                    | TokenKind::LeftCurlyBracket
+                    | TokenKind::RightCurlyBracket
+                    | TokenKind::LeftSquareBracket
+                    | TokenKind::RightSquareBracket
+                    | TokenKind::LiteralString
+                    | TokenKind::BasicString
+                    | TokenKind::MlLiteralString
+                    | TokenKind::MlBasicString => {
+                        break;
+                    }
+                    TokenKind::Whitespace => {
+                        if let Some(second) = tokens.get(1) { // only merge `ws atom` pairs
+                            // NOTE(review): presumably for space-separated date-times — confirm
+                            if second.kind() == TokenKind::Atom {
+                                span = span.append(second.span());
+                                let _ = tokens.next_slice(2); // consume both whitespace and atom
+                                continue;
+                            }
+                        }
+                        break; // trailing whitespace is not part of the scalar
+                    }
+                    TokenKind::Dot | TokenKind::Atom => {
+                        span = span.append(next_token.span());
+                        let _ = tokens.next_token();
+                    }
+                }
+            }
+            None // unquoted scalar; decoding happens later
+        }
+    };
+    receiver.scalar(span, encoding, error);
+}
+
+/// Parse an array
+///
+/// ```abnf
+/// ;; Array
+///
+/// array = array-open [ array-values ] ws-comment-newline array-close
+///
+/// array-values = ws-comment-newline val ws-comment-newline array-sep array-values
+/// array-values =/ ws-comment-newline val ws-comment-newline [ array-sep ]
+/// ```
+fn on_array_open(
+    tokens: &mut Stream<'_>,
+    array_open: &Token, // already consumed by the caller
+    receiver: &mut dyn EventReceiver,
+    error: &mut dyn ErrorSink,
+) {
+    if !receiver.array_open(array_open.span(), error) { // receiver declined: skip to the matching `]`
+        ignore_to_value_close(tokens, TokenKind::RightSquareBracket, receiver, error);
+        return;
+    }
+
+    enum State { // alternates value / comma until `]`
+        NeedsValue,
+        NeedsComma,
+    }
+
+    let mut state = State::NeedsValue;
+    while let Some(current_token) = tokens.next_token() {
+        match current_token.kind() {
+            TokenKind::Comment => { // comments and newlines are allowed inside arrays
+                on_comment(tokens, current_token, receiver, error);
+            }
+            TokenKind::Whitespace => {
+                receiver.whitespace(current_token.span(), error);
+            }
+            TokenKind::Newline => {
+                receiver.newline(current_token.span(), error);
+            }
+            TokenKind::Eof => { // input ended inside the array: close it implicitly
+                error.report_error(
+                    ParseError::new("unclosed array")
+                        .with_context(array_open.span())
+                        .with_expected(&[Expected::Literal("]")])
+                        .with_unexpected(current_token.span()),
+                );
+                receiver.array_close(current_token.span().before(), error);
+                return;
+            }
+            TokenKind::Comma => match state {
+                State::NeedsValue => { // `,,` or leading comma
+                    error.report_error(
+                        ParseError::new("extra comma in array")
+                            .with_context(array_open.span())
+                            .with_expected(&[Expected::Description("value")])
+                            .with_unexpected(current_token.span()),
+                    );
+                    receiver.error(current_token.span(), error);
+                }
+                State::NeedsComma => {
+                    receiver.value_sep(current_token.span(), error);
+
+                    state = State::NeedsValue;
+                }
+            },
+            TokenKind::Equals => { // `=` never belongs in an array
+                error.report_error(
+                    ParseError::new("unexpected `=` in array")
+                        .with_context(array_open.span())
+                        .with_expected(&[Expected::Description("value"), Expected::Literal("]")])
+                        .with_unexpected(current_token.span()),
+                );
+                receiver.error(current_token.span(), error);
+            }
+            TokenKind::LeftCurlyBracket => { // nested inline table
+                if !matches!(state, State::NeedsValue) { // two values with no comma between
+                    error.report_error(
+                        ParseError::new("missing comma between array elements")
+                            .with_context(array_open.span())
+                            .with_expected(&[Expected::Literal(",")])
+                            .with_unexpected(current_token.span().before()),
+                    );
+                    receiver.value_sep(current_token.span().before(), error); // synthesize the separator
+                }
+
+                on_inline_table_open(tokens, current_token, receiver, error);
+
+                state = State::NeedsComma;
+            }
+            TokenKind::RightCurlyBracket => { // `}` with no `{`: synthesize an empty inline table
+                if !matches!(state, State::NeedsValue) {
+                    error.report_error(
+                        ParseError::new("missing comma between array elements")
+                            .with_context(array_open.span())
+                            .with_expected(&[Expected::Literal(",")])
+                            .with_unexpected(current_token.span().before()),
+                    );
+                    receiver.value_sep(current_token.span().before(), error);
+                }
+
+                error.report_error(
+                    ParseError::new("missing inline table opening")
+                        .with_context(current_token.span())
+                        .with_expected(&[Expected::Literal("{")])
+                        .with_unexpected(current_token.span().before()),
+                );
+
+                let _ = receiver.inline_table_open(current_token.span().before(), error);
+                receiver.inline_table_close(current_token.span(), error);
+
+                state = State::NeedsComma;
+            }
+            TokenKind::LeftSquareBracket => { // nested array
+                if !matches!(state, State::NeedsValue) {
+                    error.report_error(
+                        ParseError::new("missing comma between array elements")
+                            .with_context(array_open.span())
+                            .with_expected(&[Expected::Literal(",")])
+                            .with_unexpected(current_token.span().before()),
+                    );
+                    receiver.value_sep(current_token.span().before(), error);
+                }
+
+                on_array_open(tokens, current_token, receiver, error); // recurse
+
+                state = State::NeedsComma;
+            }
+            TokenKind::RightSquareBracket => { // normal close
+                receiver.array_close(current_token.span(), error);
+
+                return;
+            }
+            TokenKind::LiteralString
+            | TokenKind::BasicString
+            | TokenKind::MlLiteralString
+            | TokenKind::MlBasicString
+            | TokenKind::Dot
+            | TokenKind::Atom => { // scalar element
+                if !matches!(state, State::NeedsValue) {
+                    error.report_error(
+                        ParseError::new("missing comma between array elements")
+                            .with_context(array_open.span())
+                            .with_expected(&[Expected::Literal(",")])
+                            .with_unexpected(current_token.span().before()),
+                    );
+                    receiver.value_sep(current_token.span().before(), error);
+                }
+
+                on_scalar(tokens, current_token, receiver, error);
+
+                state = State::NeedsComma;
+            }
+        }
+    }
+
+    let previous_span = tokens // stream exhausted without ever seeing `]`
+        .previous_tokens()
+        .find(|t| {
+            !matches!(
+                t.kind(),
+                TokenKind::Whitespace | TokenKind::Comment | TokenKind::Newline | TokenKind::Eof
+            )
+        })
+        .map(|t| t.span())
+        .unwrap_or_default();
+    error.report_error(
+        ParseError::new("unclosed array")
+            .with_context(array_open.span())
+            .with_expected(&[Expected::Literal("]")])
+            .with_unexpected(previous_span.after()),
+    );
+    receiver.array_close(previous_span.after(), error); // close implicitly so events stay balanced
+}
+
+/// Parse an inline table
+///
+/// ```abnf
+/// ;; Inline Table
+///
+/// inline-table = inline-table-open [ inline-table-keyvals ] inline-table-close
+///
+/// inline-table-keyvals = keyval [ inline-table-sep inline-table-keyvals ]
+/// ```
+fn on_inline_table_open(
+ tokens: &mut Stream<'_>,
+ inline_table_open: &Token,
+ receiver: &mut dyn EventReceiver,
+ error: &mut dyn ErrorSink,
+) {
+ if !receiver.inline_table_open(inline_table_open.span(), error) {
+ ignore_to_value_close(tokens, TokenKind::RightCurlyBracket, receiver, error);
+ return;
+ }
+
+ #[allow(clippy::enum_variant_names)]
+ #[derive(Debug)]
+ enum State {
+ NeedsKey,
+ NeedsEquals,
+ NeedsValue,
+ NeedsComma,
+ }
+
+ impl State {
+ fn expected(&self) -> &'static [Expected] {
+ match self {
+ Self::NeedsKey => &[Expected::Description("key")],
+ Self::NeedsEquals => &[Expected::Literal("=")],
+ Self::NeedsValue => &[Expected::Description("value")],
+ Self::NeedsComma => &[Expected::Literal(",")],
+ }
+ }
+ }
+
+ let mut empty = true;
+ let mut state = State::NeedsKey;
+ while let Some(current_token) = tokens.next_token() {
+ match current_token.kind() {
+ TokenKind::Comment => {
+ error.report_error(
+ ParseError::new("comments are unsupported in inline tables")
+ .with_context(inline_table_open.span())
+ .with_expected(&[])
+ .with_unexpected(current_token.span()),
+ );
+
+ on_comment(tokens, current_token, receiver, error);
+ }
+ TokenKind::Whitespace => {
+ receiver.whitespace(current_token.span(), error);
+ }
+ TokenKind::Newline => {
+ error.report_error(
+ ParseError::new("newlines are unsupported in inline tables")
+ .with_context(inline_table_open.span())
+ .with_expected(&[])
+ .with_unexpected(current_token.span()),
+ );
+
+ receiver.newline(current_token.span(), error);
+ }
+ TokenKind::Eof => {
+ error.report_error(
+ ParseError::new("unclosed inline table")
+ .with_context(inline_table_open.span())
+ .with_expected(&[Expected::Literal("}")])
+ .with_unexpected(current_token.span()),
+ );
+
+ receiver.inline_table_close(current_token.span().before(), error);
+ return;
+ }
+ TokenKind::Comma => match state {
+ State::NeedsKey | State::NeedsEquals | State::NeedsValue => {
+ error.report_error(
+ ParseError::new("extra comma in inline table")
+ .with_context(inline_table_open.span())
+ .with_expected(state.expected())
+ .with_unexpected(current_token.span().before()),
+ );
+ receiver.error(current_token.span(), error);
+ }
+ State::NeedsComma => {
+ receiver.value_sep(current_token.span(), error);
+
+ state = State::NeedsKey;
+ }
+ },
+ TokenKind::Equals => match state {
+ State::NeedsKey => {
+ let fake_key = current_token.span().before();
+ let encoding = None;
+ receiver.simple_key(fake_key, encoding, error);
+
+ receiver.key_val_sep(current_token.span(), error);
+
+ empty = false;
+ state = State::NeedsValue;
+ }
+ State::NeedsEquals => {
+ receiver.key_val_sep(current_token.span(), error);
+
+ empty = false;
+ state = State::NeedsValue;
+ }
+ State::NeedsValue | State::NeedsComma => {
+ error.report_error(
+ ParseError::new("extra assignment between key-value pairs")
+ .with_context(inline_table_open.span())
+ .with_expected(state.expected())
+ .with_unexpected(current_token.span().before()),
+ );
+ receiver.error(current_token.span(), error);
+ }
+ },
+ TokenKind::LeftCurlyBracket => match state {
+ State::NeedsKey | State::NeedsComma => {
+ error.report_error(
+ ParseError::new("missing key for inline table element")
+ .with_context(inline_table_open.span())
+ .with_expected(state.expected())
+ .with_unexpected(current_token.span().before()),
+ );
+ receiver.error(current_token.span(), error);
+ ignore_to_value_close(tokens, TokenKind::RightCurlyBracket, receiver, error);
+ }
+ State::NeedsEquals => {
+ error.report_error(
+ ParseError::new("missing assignment between key-value pairs")
+ .with_context(inline_table_open.span())
+ .with_expected(state.expected())
+ .with_unexpected(current_token.span().before()),
+ );
+
+ on_inline_table_open(tokens, current_token, receiver, error);
+
+ empty = false;
+ state = State::NeedsComma;
+ }
+ State::NeedsValue => {
+ on_inline_table_open(tokens, current_token, receiver, error);
+
+ empty = false;
+ state = State::NeedsComma;
+ }
+ },
+ TokenKind::RightCurlyBracket => {
+ if !empty && !matches!(state, State::NeedsComma) {
+ let unexpected = tokens
+ .previous_tokens()
+ .find(|t| t.kind() == TokenKind::Comma)
+ .map(|t| t.span())
+ .unwrap_or_else(|| current_token.span().before());
+ error.report_error(
+ ParseError::new("trailing commas are not supported in inline tables")
+ .with_context(inline_table_open.span())
+ .with_expected(&[])
+ .with_unexpected(unexpected),
+ );
+ }
+ receiver.inline_table_close(current_token.span(), error);
+
+ return;
+ }
+ TokenKind::LeftSquareBracket => match state {
+ State::NeedsKey | State::NeedsComma => {
+ error.report_error(
+ ParseError::new("missing key for inline table element")
+ .with_context(inline_table_open.span())
+ .with_expected(state.expected())
+ .with_unexpected(current_token.span().before()),
+ );
+ receiver.error(current_token.span(), error);
+ ignore_to_value_close(tokens, TokenKind::RightSquareBracket, receiver, error);
+ }
+ State::NeedsEquals => {
+ error.report_error(
+ ParseError::new("missing assignment between key-value pairs")
+ .with_context(inline_table_open.span())
+ .with_expected(state.expected())
+ .with_unexpected(current_token.span().before()),
+ );
+
+ on_array_open(tokens, current_token, receiver, error);
+
+ empty = false;
+ state = State::NeedsComma;
+ }
+ State::NeedsValue => {
+ on_array_open(tokens, current_token, receiver, error);
+
+ empty = false;
+ state = State::NeedsComma;
+ }
+ },
+ TokenKind::RightSquareBracket => match state {
+ State::NeedsKey | State::NeedsEquals | State::NeedsComma => {
+ error.report_error(
+ ParseError::new("invalid inline table element")
+ .with_context(inline_table_open.span())
+ .with_expected(state.expected())
+ .with_unexpected(current_token.span().before()),
+ );
+ receiver.error(current_token.span(), error);
+ }
+ State::NeedsValue => {
+ error.report_error(
+ ParseError::new("missing array opening")
+ .with_context(current_token.span())
+ .with_expected(&[Expected::Literal("[")])
+ .with_unexpected(current_token.span().before()),
+ );
+
+ let _ = receiver.array_open(current_token.span().before(), error);
+ receiver.array_close(current_token.span(), error);
+
+ empty = false;
+ state = State::NeedsComma;
+ }
+ },
+ TokenKind::LiteralString
+ | TokenKind::BasicString
+ | TokenKind::MlLiteralString
+ | TokenKind::MlBasicString
+ | TokenKind::Dot
+ | TokenKind::Atom => match state {
+ State::NeedsKey => {
+ if current_token.kind() == TokenKind::Dot {
+ receiver.simple_key(
+ current_token.span().before(),
+ current_token.kind().encoding(),
+ error,
+ );
+ seek(tokens, -1);
+ opt_dot_keys(tokens, receiver, error);
+ empty = false;
+ state = State::NeedsEquals;
+ } else {
+ receiver.simple_key(
+ current_token.span(),
+ current_token.kind().encoding(),
+ error,
+ );
+ opt_dot_keys(tokens, receiver, error);
+ empty = false;
+ state = State::NeedsEquals;
+ }
+ }
+ State::NeedsEquals => {
+ error.report_error(
+ ParseError::new("missing assignment between key-value pairs")
+ .with_context(inline_table_open.span())
+ .with_expected(state.expected())
+ .with_unexpected(current_token.span().before()),
+ );
+
+ on_scalar(tokens, current_token, receiver, error);
+
+ empty = false;
+ state = State::NeedsComma;
+ }
+ State::NeedsValue => {
+ on_scalar(tokens, current_token, receiver, error);
+
+ empty = false;
+ state = State::NeedsComma;
+ }
+ State::NeedsComma => {
+ error.report_error(
+ ParseError::new("missing comma between key-value pairs")
+ .with_context(inline_table_open.span())
+ .with_expected(state.expected())
+ .with_unexpected(current_token.span().before()),
+ );
+
+ if current_token.kind() == TokenKind::Dot {
+ receiver.simple_key(
+ current_token.span().before(),
+ current_token.kind().encoding(),
+ error,
+ );
+ seek(tokens, -1);
+ opt_dot_keys(tokens, receiver, error);
+ empty = false;
+ state = State::NeedsEquals;
+ } else {
+ receiver.simple_key(
+ current_token.span(),
+ current_token.kind().encoding(),
+ error,
+ );
+ opt_dot_keys(tokens, receiver, error);
+ empty = false;
+ state = State::NeedsEquals;
+ }
+ }
+ },
+ }
+ }
+
+ let previous_span = tokens
+ .previous_tokens()
+ .find(|t| {
+ !matches!(
+ t.kind(),
+ TokenKind::Whitespace | TokenKind::Comment | TokenKind::Newline | TokenKind::Eof
+ )
+ })
+ .map(|t| t.span())
+ .unwrap_or_default();
+ error.report_error(
+ ParseError::new("unclosed inline table")
+ .with_context(inline_table_open.span())
+ .with_expected(&[Expected::Literal("}")])
+ .with_unexpected(previous_span.after()),
+ );
+ receiver.array_close(previous_span.after(), error);
+}
+
+/// Parse whitespace, if present
+///
+/// ```bnf
+/// ws = *wschar
+/// ```
+fn opt_whitespace(
+    tokens: &mut Stream<'_>,
+    receiver: &mut dyn EventReceiver,
+    error: &mut dyn ErrorSink,
+) {
+    // Consume at most one whitespace token and report it as decor.
+    let ws = next_token_if(tokens, |kind| kind == TokenKind::Whitespace);
+    if let Some(ws_token) = ws {
+        receiver.whitespace(ws_token.span(), error);
+    }
+}
+
+/// Parse EOL decor, if present
+///
+/// Consumes whitespace until a comment, newline, or EOF ends the line; any
+/// other token is reported as an error and the rest of the line is skipped.
+///
+/// ```bnf
+/// toml = expression *( newline expression )
+///
+/// expression = ws [ on_comment ]
+/// expression =/ ws keyval ws [ on_comment ]
+/// expression =/ ws table ws [ on_comment ]
+///
+/// ;; Whitespace
+///
+/// ws = *wschar
+/// wschar = %x20 ; Space
+/// wschar =/ %x09 ; Horizontal tab
+///
+/// ;; Newline
+///
+/// newline = %x0A ; LF
+/// newline =/ %x0D.0A ; CRLF
+///
+/// ;; Comment
+///
+/// comment = comment-start-symbol *non-eol
+/// ```
+fn ws_comment_newline(
+    tokens: &mut Stream<'_>,
+    receiver: &mut dyn EventReceiver,
+    error: &mut dyn ErrorSink,
+) {
+    // Span of the first token consumed here, kept so error context can start
+    // at the beginning of the decor run rather than at the offending token.
+    let mut first = None;
+    while let Some(current_token) = tokens.next_token() {
+        // Shadowed: from here `first` is `&mut Span`, seeded on the first pass.
+        let first = first.get_or_insert(current_token.span());
+        match current_token.kind() {
+            // Key/value-like content is not allowed after an expression on
+            // the same line.
+            TokenKind::Dot
+            | TokenKind::Equals
+            | TokenKind::Comma
+            | TokenKind::LeftSquareBracket
+            | TokenKind::RightSquareBracket
+            | TokenKind::LeftCurlyBracket
+            | TokenKind::RightCurlyBracket
+            | TokenKind::LiteralString
+            | TokenKind::BasicString
+            | TokenKind::MlLiteralString
+            | TokenKind::MlBasicString
+            | TokenKind::Atom => {
+                let context = first.append(current_token.span());
+                error.report_error(
+                    ParseError::new("unexpected key or value")
+                        .with_context(context)
+                        .with_expected(&[Expected::Literal("\n"), Expected::Literal("#")])
+                        .with_unexpected(current_token.span().before()),
+                );
+
+                receiver.error(current_token.span(), error);
+                ignore_to_newline(tokens, receiver, error);
+                break;
+            }
+            TokenKind::Comment => {
+                // `on_comment` consumes through the trailing newline/EOF.
+                on_comment(tokens, current_token, receiver, error);
+                break;
+            }
+            TokenKind::Whitespace => {
+                receiver.whitespace(current_token.span(), error);
+                continue;
+            }
+            TokenKind::Newline => {
+                receiver.newline(current_token.span(), error);
+                break;
+            }
+            TokenKind::Eof => {
+                break;
+            }
+        }
+    }
+}
+
+/// Start EOL from [`TokenKind::Comment`]
+///
+/// After emitting the comment, exactly one more token is examined: only a
+/// newline or EOF may follow.  Anything else is reported and the rest of the
+/// line is skipped.
+fn on_comment(
+    tokens: &mut Stream<'_>,
+    comment_token: &Token,
+    receiver: &mut dyn EventReceiver,
+    error: &mut dyn ErrorSink,
+) {
+    receiver.comment(comment_token.span(), error);
+
+    let Some(current_token) = tokens.next_token() else {
+        return;
+    };
+    match current_token.kind() {
+        // NOTE(review): `Whitespace` and `Comment` are treated as unexpected
+        // here too — presumably a comment token runs to end-of-line per the
+        // `*non-eol` grammar, so they cannot legitimately follow; confirm.
+        TokenKind::Dot
+        | TokenKind::Equals
+        | TokenKind::Comma
+        | TokenKind::LeftSquareBracket
+        | TokenKind::RightSquareBracket
+        | TokenKind::LeftCurlyBracket
+        | TokenKind::RightCurlyBracket
+        | TokenKind::Whitespace
+        | TokenKind::Comment
+        | TokenKind::LiteralString
+        | TokenKind::BasicString
+        | TokenKind::MlLiteralString
+        | TokenKind::MlBasicString
+        | TokenKind::Atom => {
+            let context = comment_token.span().append(current_token.span());
+            error.report_error(
+                ParseError::new("unexpected content between comment and newline")
+                    .with_context(context)
+                    .with_expected(&[Expected::Literal("\n")])
+                    .with_unexpected(current_token.span().before()),
+            );
+
+            receiver.error(current_token.span(), error);
+            ignore_to_newline(tokens, receiver, error);
+        }
+        TokenKind::Newline => {
+            receiver.newline(current_token.span(), error);
+        }
+        TokenKind::Eof => {}
+    }
+}
+
+/// Verify nothing but [`TokenKind::Eof`] remains in the stream
+///
+/// Any other token produces a single parse error, after which every remaining
+/// non-EOF token is forwarded as an error event.
+fn eof(tokens: &mut Stream<'_>, receiver: &mut dyn EventReceiver, error: &mut dyn ErrorSink) {
+    let Some(current_token) = tokens.next_token() else {
+        return;
+    };
+
+    match current_token.kind() {
+        TokenKind::Dot
+        | TokenKind::Equals
+        | TokenKind::Comma
+        | TokenKind::LeftSquareBracket
+        | TokenKind::RightSquareBracket
+        | TokenKind::LeftCurlyBracket
+        | TokenKind::RightCurlyBracket
+        | TokenKind::LiteralString
+        | TokenKind::BasicString
+        | TokenKind::MlLiteralString
+        | TokenKind::MlBasicString
+        | TokenKind::Atom
+        | TokenKind::Comment
+        | TokenKind::Whitespace
+        | TokenKind::Newline => {
+            error.report_error(
+                ParseError::new("unexpected content")
+                    .with_context(current_token.span())
+                    .with_expected(&[])
+                    .with_unexpected(current_token.span().before()),
+            );
+
+            receiver.error(current_token.span(), error);
+            // Drain the stream, skipping EOF markers and flagging the rest.
+            while let Some(current_token) = tokens.next_token() {
+                if current_token.kind() == TokenKind::Eof {
+                    continue;
+                }
+                receiver.error(current_token.span(), error);
+            }
+        }
+        TokenKind::Eof => {}
+    }
+}
+
+// Don't bother recovering until [`TokenKind::Newline`]
+//
+// Content tokens are forwarded as error events; whitespace, comments, and the
+// terminating newline are still reported as normal decor.
+#[cold]
+fn ignore_to_newline(
+    tokens: &mut Stream<'_>,
+    receiver: &mut dyn EventReceiver,
+    error: &mut dyn ErrorSink,
+) {
+    while let Some(current_token) = tokens.next_token() {
+        match current_token.kind() {
+            TokenKind::Dot
+            | TokenKind::Equals
+            | TokenKind::Comma
+            | TokenKind::LeftSquareBracket
+            | TokenKind::RightSquareBracket
+            | TokenKind::LeftCurlyBracket
+            | TokenKind::RightCurlyBracket
+            | TokenKind::LiteralString
+            | TokenKind::BasicString
+            | TokenKind::MlLiteralString
+            | TokenKind::MlBasicString
+            | TokenKind::Atom => {
+                receiver.error(current_token.span(), error);
+            }
+            TokenKind::Comment => {
+                // `on_comment` also consumes the line terminator, so recovery
+                // ends here.
+                on_comment(tokens, current_token, receiver, error);
+                break;
+            }
+            TokenKind::Whitespace => {
+                receiver.whitespace(current_token.span(), error);
+            }
+            TokenKind::Newline => {
+                receiver.newline(current_token.span(), error);
+                break;
+            }
+            TokenKind::Eof => {
+                break;
+            }
+        }
+    }
+}
+
+/// Don't bother recovering until the matching [`TokenKind`]
+///
+/// Attempts to ignore nested `[]`, `{}`.
+#[cold]
+fn ignore_to_value_close(
+    tokens: &mut Stream<'_>,
+    closing_kind: TokenKind,
+    receiver: &mut dyn EventReceiver,
+    error: &mut dyn ErrorSink,
+) {
+    // Nesting counters so a close bracket that belongs to a nested value does
+    // not end recovery prematurely.
+    let mut array_count: usize = 0;
+    let mut inline_table_count: usize = 0;
+    while let Some(current_token) = tokens.next_token() {
+        match current_token.kind() {
+            TokenKind::Dot
+            | TokenKind::Equals
+            | TokenKind::Comma
+            | TokenKind::LiteralString
+            | TokenKind::BasicString
+            | TokenKind::MlLiteralString
+            | TokenKind::MlBasicString
+            | TokenKind::Atom => {
+                receiver.error(current_token.span(), error);
+            }
+            TokenKind::Comment => {
+                on_comment(tokens, current_token, receiver, error);
+            }
+            TokenKind::Whitespace => {
+                receiver.whitespace(current_token.span(), error);
+            }
+            TokenKind::Newline => {
+                receiver.newline(current_token.span(), error);
+            }
+            TokenKind::LeftSquareBracket => {
+                receiver.error(current_token.span(), error);
+                array_count += 1;
+            }
+            TokenKind::RightSquareBracket => {
+                // `kind() == closing_kind` asks whether `]` is the closer we
+                // are seeking; combined with a zero nesting count it ends
+                // recovery.
+                if array_count == 0 && current_token.kind() == closing_kind {
+                    receiver.array_close(current_token.span(), error);
+                    break;
+                } else {
+                    receiver.error(current_token.span(), error);
+                    // Extra closers beyond the opens seen are tolerated
+                    // without underflowing the counter.
+                    array_count = array_count.saturating_sub(1);
+                }
+            }
+            TokenKind::LeftCurlyBracket => {
+                receiver.error(current_token.span(), error);
+                inline_table_count += 1;
+            }
+            TokenKind::RightCurlyBracket => {
+                if inline_table_count == 0 && current_token.kind() == closing_kind {
+                    receiver.inline_table_close(current_token.span(), error);
+                    break;
+                } else {
+                    receiver.error(current_token.span(), error);
+                    inline_table_count = inline_table_count.saturating_sub(1);
+                }
+            }
+            TokenKind::Eof => {
+                break;
+            }
+        }
+    }
+}
+
+/// Report a construct that required a key where none appeared
+///
+/// The offending token is routed to the most specific receiver callback.
+#[cold]
+fn on_missing_key(
+    tokens: &mut Stream<'_>,
+    token: &Token,
+    invalid_description: &'static str,
+    receiver: &mut dyn EventReceiver,
+    error: &mut dyn ErrorSink,
+) {
+    error.report_error(
+        ParseError::new(invalid_description)
+            .with_context(token.span())
+            .with_expected(&[Expected::Description("key")])
+            .with_unexpected(token.span().before()),
+    );
+
+    match token.kind() {
+        TokenKind::Eof => {}
+        TokenKind::Newline => receiver.newline(token.span(), error),
+        TokenKind::Comment => on_comment(tokens, token, receiver, error),
+        _ => receiver.error(token.span(), error),
+    }
+}
+
+/// Recover from an expression that starts with something other than a key
+///
+/// Reports the error, flags the token, and skips the rest of the line.
+#[cold]
+fn on_missing_expression_key(
+    tokens: &mut Stream<'_>,
+    token: &Token,
+    receiver: &mut dyn EventReceiver,
+    error: &mut dyn ErrorSink,
+) {
+    error.report_error(
+        ParseError::new("invalid key-value pair")
+            .with_context(token.span())
+            .with_expected(&[Expected::Description("key")])
+            .with_unexpected(token.span().before()),
+    );
+
+    receiver.error(token.span(), error);
+    ignore_to_newline(tokens, receiver, error);
+}
+
+/// Recover from a table header missing its opening `[`
+///
+/// Reports the error, flags the token, and skips the rest of the line.
+#[cold]
+fn on_missing_std_table(
+    tokens: &mut Stream<'_>,
+    token: &Token,
+    receiver: &mut dyn EventReceiver,
+    error: &mut dyn ErrorSink,
+) {
+    error.report_error(
+        ParseError::new("missing table open")
+            .with_context(token.span())
+            .with_expected(&[Expected::Literal("[")])
+            .with_unexpected(token.span().before()),
+    );
+
+    receiver.error(token.span(), error);
+    ignore_to_newline(tokens, receiver, error);
+}
+
+/// Advance the stream only when the next token's kind satisfies `pred`
+fn next_token_if<'i, F: Fn(TokenKind) -> bool>(
+    tokens: &mut Stream<'i>,
+    pred: F,
+) -> Option<&'i Token> {
+    // Peek at the kind first (it is a plain value) so the shared borrow ends
+    // before the stream is advanced.
+    let upcoming = tokens.first()?.kind();
+    if pred(upcoming) {
+        tokens.next_token()
+    } else {
+        None
+    }
+}
+
+/// Reposition `stream` by `offset` tokens relative to the current position
+///
+/// Rewinds to the start checkpoint, then advances to the adjusted offset;
+/// positions at or past the end finish the stream instead.
+fn seek(stream: &mut Stream<'_>, offset: isize) {
+    let current = stream.checkpoint();
+    stream.reset_to_start();
+    let start = stream.checkpoint();
+    // Number of tokens that had been consumed, measured against the
+    // freshly-reset stream.
+    let old_offset = current.offset_from(&start);
+    // NOTE(review): a negative sum wraps to a huge usize here, which lands in
+    // the `finish()` branch below; visible callers only `seek(tokens, -1)`
+    // after consuming a token, so old_offset >= 1 — confirm for new callers.
+    let new_offset = (old_offset as isize).saturating_add(offset) as usize;
+    if new_offset < stream.eof_offset() {
+        #[cfg(feature = "unsafe")] // SAFETY: bounds were checked
+        unsafe {
+            stream.next_slice_unchecked(new_offset)
+        };
+        #[cfg(not(feature = "unsafe"))]
+        stream.next_slice(new_offset);
+    } else {
+        stream.finish();
+    }
+}
+
+const UNQUOTED_STRING: &str = "unquoted string";
diff --git a/third_party/rust/toml_parser/src/parser/event.rs b/third_party/rust/toml_parser/src/parser/event.rs
@@ -0,0 +1,543 @@
+use crate::decoder::Encoding;
+use crate::ErrorSink;
+use crate::ParseError;
+use crate::Source;
+use crate::Span;
+
+/// Receiver of TOML parse events
+///
+/// Every callback defaults to a no-op, so implementers only override what
+/// they need.  Each `Span` indexes into the original source text.
+pub trait EventReceiver {
+    fn std_table_open(&mut self, _span: Span, _error: &mut dyn ErrorSink) {}
+    fn std_table_close(&mut self, _span: Span, _error: &mut dyn ErrorSink) {}
+    fn array_table_open(&mut self, _span: Span, _error: &mut dyn ErrorSink) {}
+    fn array_table_close(&mut self, _span: Span, _error: &mut dyn ErrorSink) {}
+    /// Returns if entering the inline table is allowed
+    #[must_use]
+    fn inline_table_open(&mut self, _span: Span, _error: &mut dyn ErrorSink) -> bool {
+        true
+    }
+    fn inline_table_close(&mut self, _span: Span, _error: &mut dyn ErrorSink) {}
+    /// Returns if entering the array is allowed
+    #[must_use]
+    fn array_open(&mut self, _span: Span, _error: &mut dyn ErrorSink) -> bool {
+        true
+    }
+    fn array_close(&mut self, _span: Span, _error: &mut dyn ErrorSink) {}
+    fn simple_key(&mut self, _span: Span, _kind: Option<Encoding>, _error: &mut dyn ErrorSink) {}
+    fn key_sep(&mut self, _span: Span, _error: &mut dyn ErrorSink) {}
+    fn key_val_sep(&mut self, _span: Span, _error: &mut dyn ErrorSink) {}
+    fn scalar(&mut self, _span: Span, _kind: Option<Encoding>, _error: &mut dyn ErrorSink) {}
+    fn value_sep(&mut self, _span: Span, _error: &mut dyn ErrorSink) {}
+    fn whitespace(&mut self, _span: Span, _error: &mut dyn ErrorSink) {}
+    fn comment(&mut self, _span: Span, _error: &mut dyn ErrorSink) {}
+    fn newline(&mut self, _span: Span, _error: &mut dyn ErrorSink) {}
+    /// Source content being skipped, e.g. during error recovery
+    fn error(&mut self, _span: Span, _error: &mut dyn ErrorSink) {}
+}
+
+/// Adapt any `FnMut(Event)` closure into a receiver
+///
+/// Each callback materializes an [`Event`] (an encoding is carried only for
+/// keys and scalars) and hands it to the closure; the error sink is unused.
+impl<F> EventReceiver for F
+where
+    F: FnMut(Event),
+{
+    fn std_table_open(&mut self, span: Span, _error: &mut dyn ErrorSink) {
+        (self)(Event {
+            kind: EventKind::StdTableOpen,
+            encoding: None,
+            span,
+        });
+    }
+    fn std_table_close(&mut self, span: Span, _error: &mut dyn ErrorSink) {
+        (self)(Event {
+            kind: EventKind::StdTableClose,
+            encoding: None,
+            span,
+        });
+    }
+    fn array_table_open(&mut self, span: Span, _error: &mut dyn ErrorSink) {
+        (self)(Event {
+            kind: EventKind::ArrayTableOpen,
+            encoding: None,
+            span,
+        });
+    }
+    fn array_table_close(&mut self, span: Span, _error: &mut dyn ErrorSink) {
+        (self)(Event {
+            kind: EventKind::ArrayTableClose,
+            encoding: None,
+            span,
+        });
+    }
+    // Always permits entry; depth limiting is a wrapper's job (see
+    // `RecursionGuard`).
+    fn inline_table_open(&mut self, span: Span, _error: &mut dyn ErrorSink) -> bool {
+        (self)(Event {
+            kind: EventKind::InlineTableOpen,
+            encoding: None,
+            span,
+        });
+        true
+    }
+    fn inline_table_close(&mut self, span: Span, _error: &mut dyn ErrorSink) {
+        (self)(Event {
+            kind: EventKind::InlineTableClose,
+            encoding: None,
+            span,
+        });
+    }
+    fn array_open(&mut self, span: Span, _error: &mut dyn ErrorSink) -> bool {
+        (self)(Event {
+            kind: EventKind::ArrayOpen,
+            encoding: None,
+            span,
+        });
+        true
+    }
+    fn array_close(&mut self, span: Span, _error: &mut dyn ErrorSink) {
+        (self)(Event {
+            kind: EventKind::ArrayClose,
+            encoding: None,
+            span,
+        });
+    }
+    fn simple_key(&mut self, span: Span, encoding: Option<Encoding>, _error: &mut dyn ErrorSink) {
+        (self)(Event {
+            kind: EventKind::SimpleKey,
+            encoding,
+            span,
+        });
+    }
+    fn key_sep(&mut self, span: Span, _error: &mut dyn ErrorSink) {
+        (self)(Event {
+            kind: EventKind::KeySep,
+            encoding: None,
+            span,
+        });
+    }
+    fn key_val_sep(&mut self, span: Span, _error: &mut dyn ErrorSink) {
+        (self)(Event {
+            kind: EventKind::KeyValSep,
+            encoding: None,
+            span,
+        });
+    }
+    fn scalar(&mut self, span: Span, encoding: Option<Encoding>, _error: &mut dyn ErrorSink) {
+        (self)(Event {
+            kind: EventKind::Scalar,
+            encoding,
+            span,
+        });
+    }
+    fn value_sep(&mut self, span: Span, _error: &mut dyn ErrorSink) {
+        (self)(Event {
+            kind: EventKind::ValueSep,
+            encoding: None,
+            span,
+        });
+    }
+    fn whitespace(&mut self, span: Span, _error: &mut dyn ErrorSink) {
+        (self)(Event {
+            kind: EventKind::Whitespace,
+            encoding: None,
+            span,
+        });
+    }
+    fn comment(&mut self, span: Span, _error: &mut dyn ErrorSink) {
+        (self)(Event {
+            kind: EventKind::Comment,
+            encoding: None,
+            span,
+        });
+    }
+    fn newline(&mut self, span: Span, _error: &mut dyn ErrorSink) {
+        (self)(Event {
+            kind: EventKind::Newline,
+            encoding: None,
+            span,
+        });
+    }
+    fn error(&mut self, span: Span, _error: &mut dyn ErrorSink) {
+        (self)(Event {
+            kind: EventKind::Error,
+            encoding: None,
+            span,
+        });
+    }
+}
+
+/// Collect parse events into a `Vec`, preserving emission order
+#[cfg(feature = "alloc")]
+#[allow(unused_qualifications)]
+impl EventReceiver for alloc::vec::Vec<Event> {
+    fn std_table_open(&mut self, span: Span, _error: &mut dyn ErrorSink) {
+        self.push(Event {
+            kind: EventKind::StdTableOpen,
+            encoding: None,
+            span,
+        });
+    }
+    fn std_table_close(&mut self, span: Span, _error: &mut dyn ErrorSink) {
+        self.push(Event {
+            kind: EventKind::StdTableClose,
+            encoding: None,
+            span,
+        });
+    }
+    fn array_table_open(&mut self, span: Span, _error: &mut dyn ErrorSink) {
+        self.push(Event {
+            kind: EventKind::ArrayTableOpen,
+            encoding: None,
+            span,
+        });
+    }
+    fn array_table_close(&mut self, span: Span, _error: &mut dyn ErrorSink) {
+        self.push(Event {
+            kind: EventKind::ArrayTableClose,
+            encoding: None,
+            span,
+        });
+    }
+    // Always permits entry; depth limiting is a wrapper's job (see
+    // `RecursionGuard`).
+    fn inline_table_open(&mut self, span: Span, _error: &mut dyn ErrorSink) -> bool {
+        self.push(Event {
+            kind: EventKind::InlineTableOpen,
+            encoding: None,
+            span,
+        });
+        true
+    }
+    fn inline_table_close(&mut self, span: Span, _error: &mut dyn ErrorSink) {
+        self.push(Event {
+            kind: EventKind::InlineTableClose,
+            encoding: None,
+            span,
+        });
+    }
+    fn array_open(&mut self, span: Span, _error: &mut dyn ErrorSink) -> bool {
+        self.push(Event {
+            kind: EventKind::ArrayOpen,
+            encoding: None,
+            span,
+        });
+        true
+    }
+    fn array_close(&mut self, span: Span, _error: &mut dyn ErrorSink) {
+        self.push(Event {
+            kind: EventKind::ArrayClose,
+            encoding: None,
+            span,
+        });
+    }
+    fn simple_key(&mut self, span: Span, encoding: Option<Encoding>, _error: &mut dyn ErrorSink) {
+        self.push(Event {
+            kind: EventKind::SimpleKey,
+            encoding,
+            span,
+        });
+    }
+    fn key_sep(&mut self, span: Span, _error: &mut dyn ErrorSink) {
+        self.push(Event {
+            kind: EventKind::KeySep,
+            encoding: None,
+            span,
+        });
+    }
+    fn key_val_sep(&mut self, span: Span, _error: &mut dyn ErrorSink) {
+        self.push(Event {
+            kind: EventKind::KeyValSep,
+            encoding: None,
+            span,
+        });
+    }
+    fn scalar(&mut self, span: Span, encoding: Option<Encoding>, _error: &mut dyn ErrorSink) {
+        self.push(Event {
+            kind: EventKind::Scalar,
+            encoding,
+            span,
+        });
+    }
+    fn value_sep(&mut self, span: Span, _error: &mut dyn ErrorSink) {
+        self.push(Event {
+            kind: EventKind::ValueSep,
+            encoding: None,
+            span,
+        });
+    }
+    fn whitespace(&mut self, span: Span, _error: &mut dyn ErrorSink) {
+        self.push(Event {
+            kind: EventKind::Whitespace,
+            encoding: None,
+            span,
+        });
+    }
+    fn comment(&mut self, span: Span, _error: &mut dyn ErrorSink) {
+        self.push(Event {
+            kind: EventKind::Comment,
+            encoding: None,
+            span,
+        });
+    }
+    fn newline(&mut self, span: Span, _error: &mut dyn ErrorSink) {
+        self.push(Event {
+            kind: EventKind::Newline,
+            encoding: None,
+            span,
+        });
+    }
+    fn error(&mut self, span: Span, _error: &mut dyn ErrorSink) {
+        self.push(Event {
+            kind: EventKind::Error,
+            encoding: None,
+            span,
+        });
+    }
+}
+
+impl EventReceiver for () {}
+
+/// Centralize validation for all whitespace-like content
+///
+/// Wraps another receiver and decodes whitespace, comment, and newline spans
+/// against `source` before forwarding them, so invalid characters are
+/// reported through the error sink.
+pub struct ValidateWhitespace<'r, 's> {
+    receiver: &'r mut dyn EventReceiver,
+    source: Source<'s>,
+}
+
+impl<'r, 's> ValidateWhitespace<'r, 's> {
+    /// Wrap `receiver`, validating decor spans against `source`
+    pub fn new(receiver: &'r mut dyn EventReceiver, source: Source<'s>) -> Self {
+        Self { receiver, source }
+    }
+}
+
+/// Forwards every event; decor events (whitespace, comment, newline) are
+/// decoded against the source text first so invalid characters get reported.
+impl EventReceiver for ValidateWhitespace<'_, '_> {
+    fn std_table_open(&mut self, span: Span, error: &mut dyn ErrorSink) {
+        self.receiver.std_table_open(span, error);
+    }
+    fn std_table_close(&mut self, span: Span, error: &mut dyn ErrorSink) {
+        self.receiver.std_table_close(span, error);
+    }
+    fn array_table_open(&mut self, span: Span, error: &mut dyn ErrorSink) {
+        self.receiver.array_table_open(span, error);
+    }
+    fn array_table_close(&mut self, span: Span, error: &mut dyn ErrorSink) {
+        self.receiver.array_table_close(span, error);
+    }
+    fn inline_table_open(&mut self, span: Span, error: &mut dyn ErrorSink) -> bool {
+        self.receiver.inline_table_open(span, error)
+    }
+    fn inline_table_close(&mut self, span: Span, error: &mut dyn ErrorSink) {
+        self.receiver.inline_table_close(span, error);
+    }
+    fn array_open(&mut self, span: Span, error: &mut dyn ErrorSink) -> bool {
+        self.receiver.array_open(span, error)
+    }
+    fn array_close(&mut self, span: Span, error: &mut dyn ErrorSink) {
+        self.receiver.array_close(span, error);
+    }
+    fn simple_key(&mut self, span: Span, encoding: Option<Encoding>, error: &mut dyn ErrorSink) {
+        self.receiver.simple_key(span, encoding, error);
+    }
+    fn key_sep(&mut self, span: Span, error: &mut dyn ErrorSink) {
+        self.receiver.key_sep(span, error);
+    }
+    fn key_val_sep(&mut self, span: Span, error: &mut dyn ErrorSink) {
+        self.receiver.key_val_sep(span, error);
+    }
+    fn scalar(&mut self, span: Span, encoding: Option<Encoding>, error: &mut dyn ErrorSink) {
+        self.receiver.scalar(span, encoding, error);
+    }
+    fn value_sep(&mut self, span: Span, error: &mut dyn ErrorSink) {
+        self.receiver.value_sep(span, error);
+    }
+    fn whitespace(&mut self, span: Span, error: &mut dyn ErrorSink) {
+        // Decoding reports any invalid whitespace characters into `error`.
+        #[cfg(feature = "unsafe")] // SAFETY: callers must use valid span
+        let raw = unsafe { self.source.get_unchecked(span) };
+        #[cfg(not(feature = "unsafe"))]
+        let raw = self.source.get(span).expect("token spans are valid");
+        raw.decode_whitespace(error);
+
+        self.receiver.whitespace(span, error);
+    }
+    fn comment(&mut self, span: Span, error: &mut dyn ErrorSink) {
+        #[cfg(feature = "unsafe")] // SAFETY: callers must use valid span
+        let raw = unsafe { self.source.get_unchecked(span) };
+        #[cfg(not(feature = "unsafe"))]
+        let raw = self.source.get(span).expect("token spans are valid");
+        raw.decode_comment(error);
+
+        self.receiver.comment(span, error);
+    }
+    fn newline(&mut self, span: Span, error: &mut dyn ErrorSink) {
+        #[cfg(feature = "unsafe")] // SAFETY: callers must use valid span
+        let raw = unsafe { self.source.get_unchecked(span) };
+        #[cfg(not(feature = "unsafe"))]
+        let raw = self.source.get(span).expect("token spans are valid");
+        raw.decode_newline(error);
+
+        self.receiver.newline(span, error);
+    }
+    fn error(&mut self, span: Span, error: &mut dyn ErrorSink) {
+        self.receiver.error(span, error);
+    }
+}
+
+/// Enforce a maximum nesting depth for arrays and inline tables
+pub struct RecursionGuard<'r> {
+    receiver: &'r mut dyn EventReceiver,
+    /// Maximum permitted nesting depth
+    max_depth: u32,
+    /// Current depth; `i64` so unbalanced close events can drive it negative
+    /// without overflow
+    depth: i64,
+}
+
+impl<'r> RecursionGuard<'r> {
+    /// Wrap `receiver`, refusing to enter nested values deeper than `max_depth`
+    pub fn new(receiver: &'r mut dyn EventReceiver, max_depth: u32) -> Self {
+        Self {
+            receiver,
+            max_depth,
+            depth: 0,
+        }
+    }
+
+    fn within_depth(&self) -> bool {
+        self.depth <= self.max_depth as i64
+    }
+}
+
+/// Forwards every event, vetoing `inline_table_open`/`array_open` once the
+/// configured depth is exceeded.
+impl EventReceiver for RecursionGuard<'_> {
+    fn std_table_open(&mut self, span: Span, error: &mut dyn ErrorSink) {
+        self.receiver.std_table_open(span, error);
+    }
+    fn std_table_close(&mut self, span: Span, error: &mut dyn ErrorSink) {
+        self.receiver.std_table_close(span, error);
+    }
+    fn array_table_open(&mut self, span: Span, error: &mut dyn ErrorSink) {
+        self.receiver.array_table_open(span, error);
+    }
+    fn array_table_close(&mut self, span: Span, error: &mut dyn ErrorSink) {
+        self.receiver.array_table_close(span, error);
+    }
+    fn inline_table_open(&mut self, span: Span, error: &mut dyn ErrorSink) -> bool {
+        let allowed = self.receiver.inline_table_open(span, error);
+        // Depth is incremented unconditionally so the matching close
+        // rebalances it even when entry is denied.
+        self.depth += 1;
+        let within_depth = self.within_depth();
+        // Only report the depth error when the wrapped receiver would
+        // otherwise have allowed entry.
+        if allowed && !within_depth {
+            error.report_error(
+                ParseError::new("cannot recurse further; max recursion depth met")
+                    .with_unexpected(span),
+            );
+        }
+        allowed && within_depth
+    }
+    fn inline_table_close(&mut self, span: Span, error: &mut dyn ErrorSink) {
+        self.depth -= 1;
+        self.receiver.inline_table_close(span, error);
+    }
+    fn array_open(&mut self, span: Span, error: &mut dyn ErrorSink) -> bool {
+        let allowed = self.receiver.array_open(span, error);
+        // Same unconditional bookkeeping as `inline_table_open`.
+        self.depth += 1;
+        let within_depth = self.within_depth();
+        if allowed && !within_depth {
+            error.report_error(
+                ParseError::new("cannot recurse further; max recursion depth met")
+                    .with_unexpected(span),
+            );
+        }
+        allowed && within_depth
+    }
+    fn array_close(&mut self, span: Span, error: &mut dyn ErrorSink) {
+        self.depth -= 1;
+        self.receiver.array_close(span, error);
+    }
+    fn simple_key(&mut self, span: Span, encoding: Option<Encoding>, error: &mut dyn ErrorSink) {
+        self.receiver.simple_key(span, encoding, error);
+    }
+    fn key_sep(&mut self, span: Span, error: &mut dyn ErrorSink) {
+        self.receiver.key_sep(span, error);
+    }
+    fn key_val_sep(&mut self, span: Span, error: &mut dyn ErrorSink) {
+        self.receiver.key_val_sep(span, error);
+    }
+    fn scalar(&mut self, span: Span, encoding: Option<Encoding>, error: &mut dyn ErrorSink) {
+        self.receiver.scalar(span, encoding, error);
+    }
+    fn value_sep(&mut self, span: Span, error: &mut dyn ErrorSink) {
+        self.receiver.value_sep(span, error);
+    }
+    fn whitespace(&mut self, span: Span, error: &mut dyn ErrorSink) {
+        self.receiver.whitespace(span, error);
+    }
+    fn comment(&mut self, span: Span, error: &mut dyn ErrorSink) {
+        self.receiver.comment(span, error);
+    }
+    fn newline(&mut self, span: Span, error: &mut dyn ErrorSink) {
+        self.receiver.newline(span, error);
+    }
+    fn error(&mut self, span: Span, error: &mut dyn ErrorSink) {
+        self.receiver.error(span, error);
+    }
+}
+
+/// A parse event paired with the source [`Span`] it covers
+#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)]
+pub struct Event {
+    kind: EventKind,
+    /// `Some` only for key and scalar events (see the receiver impls above)
+    encoding: Option<Encoding>,
+    span: Span,
+}
+
+impl Event {
+    /// Construct an event without validating that `encoding` is appropriate
+    /// for `kind`
+    pub fn new_unchecked(kind: EventKind, encoding: Option<Encoding>, span: Span) -> Self {
+        Self {
+            kind,
+            encoding,
+            span,
+        }
+    }
+
+    #[inline(always)]
+    pub fn kind(&self) -> EventKind {
+        self.kind
+    }
+
+    #[inline(always)]
+    pub fn encoding(&self) -> Option<Encoding> {
+        self.encoding
+    }
+
+    #[inline(always)]
+    pub fn span(&self) -> Span {
+        self.span
+    }
+}
+
+/// The kind of a parse [`Event`]
+#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)]
+pub enum EventKind {
+    StdTableOpen,
+    StdTableClose,
+    ArrayTableOpen,
+    ArrayTableClose,
+    InlineTableOpen,
+    InlineTableClose,
+    ArrayOpen,
+    ArrayClose,
+    SimpleKey,
+    KeySep,
+    KeyValSep,
+    Scalar,
+    ValueSep,
+    Whitespace,
+    Comment,
+    Newline,
+    Error,
+}
+
+impl EventKind {
+    /// Human-readable name for this event kind, suitable for diagnostics
+    pub const fn description(&self) -> &'static str {
+        match self {
+            Self::StdTableOpen => "std-table open",
+            Self::StdTableClose => "std-table close",
+            Self::ArrayTableOpen => "array-table open",
+            Self::ArrayTableClose => "array-table close",
+            Self::InlineTableOpen => "inline-table open",
+            Self::InlineTableClose => "inline-table close",
+            Self::ArrayOpen => "array open",
+            Self::ArrayClose => "array close",
+            Self::SimpleKey => "key",
+            Self::KeySep => "key separator",
+            Self::KeyValSep => "key-value separator",
+            Self::Scalar => "value",
+            Self::ValueSep => "value separator",
+            Self::Whitespace => "whitespace",
+            Self::Comment => "comment",
+            Self::Newline => "newline",
+            Self::Error => "error",
+        }
+    }
+}
diff --git a/third_party/rust/toml_parser/src/parser/mod.rs b/third_party/rust/toml_parser/src/parser/mod.rs
@@ -0,0 +1,16 @@
+//! A TOML push [parser][parse_document]
+//!
+//! This takes TOML [tokens][crate::lexer::Token] and [emits][EventReceiver] [events][Event].
+
+mod document;
+mod event;
+
+pub use document::parse_document;
+pub use document::parse_key;
+pub use document::parse_simple_key;
+pub use document::parse_value;
+pub use event::Event;
+pub use event::EventKind;
+pub use event::EventReceiver;
+pub use event::RecursionGuard;
+pub use event::ValidateWhitespace;
diff --git a/third_party/rust/toml_parser/src/source.rs b/third_party/rust/toml_parser/src/source.rs
@@ -0,0 +1,394 @@
+use crate::decoder::Encoding;
+use crate::decoder::StringBuilder;
+use crate::lexer::Lexer;
+use crate::ErrorSink;
+use crate::Expected;
+
+/// Data encoded as TOML
+#[derive(Copy, Clone, Debug, PartialEq, Eq)]
+pub struct Source<'i> {
+ input: &'i str,
+}
+
+impl<'i> Source<'i> {
+ pub fn new(input: &'i str) -> Self {
+ Self { input }
+ }
+
+ /// Start lexing the TOML encoded data
+ pub fn lex(&self) -> Lexer<'i> {
+ Lexer::new(self.input)
+ }
+
+ /// Access the TOML encoded `&str`
+ pub fn input(&self) -> &'i str {
+ self.input
+ }
+
+ /// Return a subslice of the input
+ pub fn get(&self, span: impl SourceIndex) -> Option<Raw<'i>> {
+ span.get(self)
+ }
+
+ /// Return an unchecked subslice of the input
+ ///
+ /// ## Safety
+ ///
+ /// Callers of this function are responsible that these preconditions are satisfied:
+ /// - The starting index must not exceed the ending index;
+ /// - Indexes must be within bounds of the original slice;
+ /// - Indexes must lie on UTF-8 sequence boundaries.
+ ///
+ /// Or one of:
+ /// - `span` came from [`Source::lex`]
+ ///
+ /// Failing any of those, the returned string slice may reference invalid memory or violate the invariants communicated by `str` type.
+ #[cfg(feature = "unsafe")]
+ pub unsafe fn get_unchecked(&self, span: impl SourceIndex) -> Raw<'i> {
+ // SAFETY: Same safety guarantees are required
+ unsafe { span.get_unchecked(self) }
+ }
+
+ /// Return a subslice of the input
+ fn get_raw_str(&self, span: Span) -> Option<&'i str> {
+ let index = span.start()..span.end();
+ self.input.get(index)
+ }
+
+ /// Return an unchecked subslice of the input
+ ///
+ /// ## Safety
+ ///
+ /// Callers of this function are responsible that these preconditions are satisfied:
+ /// - The starting index must not exceed the ending index;
+ /// - Indexes must be within bounds of the original slice;
+ /// - Indexes must lie on UTF-8 sequence boundaries.
+ ///
+ /// Or one of:
+ /// - `span` came from [`Source::lex`]
+ ///
+ /// Failing any of those, the returned string slice may reference invalid memory or violate the invariants communicated by `str` type.
+ #[cfg(feature = "unsafe")]
+ unsafe fn get_raw_str_unchecked(&self, span: Span) -> &'i str {
+ let index = span.start()..span.end();
+ // SAFETY: Same safety guarantees are required
+ unsafe { self.input.get_unchecked(index) }
+ }
+}
+
+/// A slice of [`Source`]
+#[derive(Copy, Clone, Debug)]
+pub struct Raw<'i> {
+ raw: &'i str,
+ encoding: Option<Encoding>,
+ span: Span,
+}
+
+impl<'i> Raw<'i> {
+ pub fn new_unchecked(raw: &'i str, encoding: Option<Encoding>, span: Span) -> Self {
+ Self {
+ raw,
+ encoding,
+ span,
+ }
+ }
+
+ pub fn decode_key(&self, output: &mut dyn StringBuilder<'i>, error: &mut dyn ErrorSink) {
+ let mut error = |err: crate::ParseError| {
+ error.report_error(err.rebase_spans(self.span.start));
+ };
+ match self.encoding {
+ Some(Encoding::LiteralString) => {
+ crate::decoder::string::decode_literal_string(*self, output, &mut error);
+ }
+ Some(Encoding::BasicString) => {
+ crate::decoder::string::decode_basic_string(*self, output, &mut error);
+ }
+ Some(Encoding::MlLiteralString) => {
+ error.report_error(
+ crate::ParseError::new("keys cannot be multi-line literal strings")
+ .with_expected(&[
+ Expected::Description("basic string"),
+ Expected::Description("literal string"),
+ ])
+ .with_unexpected(Span::new_unchecked(0, self.len())),
+ );
+ crate::decoder::string::decode_ml_literal_string(*self, output, &mut error);
+ }
+ Some(Encoding::MlBasicString) => {
+ error.report_error(
+ crate::ParseError::new("keys cannot be multi-line basic strings")
+ .with_expected(&[
+ Expected::Description("basic string"),
+ Expected::Description("literal string"),
+ ])
+ .with_unexpected(Span::new_unchecked(0, self.len())),
+ );
+ crate::decoder::string::decode_ml_basic_string(*self, output, &mut error);
+ }
+ None => crate::decoder::string::decode_unquoted_key(*self, output, &mut error),
+ }
+ }
+
+ #[must_use]
+ pub fn decode_scalar(
+ &self,
+ output: &mut dyn StringBuilder<'i>,
+ error: &mut dyn ErrorSink,
+ ) -> crate::decoder::scalar::ScalarKind {
+ let mut error = |err: crate::ParseError| {
+ error.report_error(err.rebase_spans(self.span.start));
+ };
+ match self.encoding {
+ Some(Encoding::LiteralString) => {
+ crate::decoder::string::decode_literal_string(*self, output, &mut error);
+ crate::decoder::scalar::ScalarKind::String
+ }
+ Some(Encoding::BasicString) => {
+ crate::decoder::string::decode_basic_string(*self, output, &mut error);
+ crate::decoder::scalar::ScalarKind::String
+ }
+ Some(Encoding::MlLiteralString) => {
+ crate::decoder::string::decode_ml_literal_string(*self, output, &mut error);
+ crate::decoder::scalar::ScalarKind::String
+ }
+ Some(Encoding::MlBasicString) => {
+ crate::decoder::string::decode_ml_basic_string(*self, output, &mut error);
+ crate::decoder::scalar::ScalarKind::String
+ }
+ None => crate::decoder::scalar::decode_unquoted_scalar(*self, output, &mut error),
+ }
+ }
+
+ pub fn decode_whitespace(&self, _error: &mut dyn ErrorSink) {
+ // whitespace is always valid
+ }
+
+ pub fn decode_comment(&self, error: &mut dyn ErrorSink) {
+ let mut error = |err: crate::ParseError| {
+ error.report_error(err.rebase_spans(self.span.start));
+ };
+ crate::decoder::ws::decode_comment(*self, &mut error);
+ }
+
+ pub fn decode_newline(&self, error: &mut dyn ErrorSink) {
+ let mut error = |err: crate::ParseError| {
+ error.report_error(err.rebase_spans(self.span.start));
+ };
+ crate::decoder::ws::decode_newline(*self, &mut error);
+ }
+
+ pub fn as_str(&self) -> &'i str {
+ self.raw
+ }
+
+ pub fn as_bytes(&self) -> &'i [u8] {
+ self.raw.as_bytes()
+ }
+
+ pub fn len(&self) -> usize {
+ self.raw.len()
+ }
+
+ pub fn is_empty(&self) -> bool {
+ self.raw.is_empty()
+ }
+}
+
+/// Location within the [`Source`]
+#[derive(Copy, Clone, Default, PartialEq, Eq, PartialOrd, Ord, Hash)]
+pub struct Span {
+ start: usize,
+ end: usize,
+}
+
+impl Span {
+ pub fn new_unchecked(start: usize, end: usize) -> Self {
+ Self { start, end }
+ }
+
+ pub fn is_empty(&self) -> bool {
+ self.end <= self.start
+ }
+
+ pub fn len(&self) -> usize {
+ self.end - self.start
+ }
+
+ pub fn start(&self) -> usize {
+ self.start
+ }
+
+ pub fn end(&self) -> usize {
+ self.end
+ }
+
+ pub fn before(&self) -> Self {
+ Self::new_unchecked(self.start, self.start)
+ }
+
+ pub fn after(&self) -> Self {
+ Self::new_unchecked(self.end, self.end)
+ }
+
+ /// Extend this `Raw` to the end of `after`
+ #[must_use]
+ pub fn append(&self, after: Self) -> Self {
+ Self::new_unchecked(self.start, after.end)
+ }
+}
+
+impl core::fmt::Debug for Span {
+ fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
+ (self.start..self.end).fmt(f)
+ }
+}
+
+impl core::ops::Add<usize> for Span {
+ type Output = Self;
+
+ fn add(self, offset: usize) -> Self::Output {
+ Self::Output {
+ start: self.start + offset,
+ end: self.end + offset,
+ }
+ }
+}
+
+impl core::ops::Add<Span> for usize {
+ type Output = Span;
+
+ fn add(self, span: Span) -> Self::Output {
+ Self::Output {
+ start: span.start + self,
+ end: span.end + self,
+ }
+ }
+}
+
+impl core::ops::AddAssign<usize> for Span {
+ fn add_assign(&mut self, rhs: usize) {
+ self.start += rhs;
+ self.end += rhs;
+ }
+}
+
+/// A helper trait used for indexing operations on [`Source`]
+pub trait SourceIndex: sealed::Sealed {
+ /// Return a subslice of the input
+ fn get<'i>(self, source: &Source<'i>) -> Option<Raw<'i>>;
+
+ /// Return an unchecked subslice of the input
+ ///
+ /// ## Safety
+ ///
+ /// Callers of this function are responsible that these preconditions are satisfied:
+ /// - The starting index must not exceed the ending index;
+ /// - Indexes must be within bounds of the original slice;
+ /// - Indexes must lie on UTF-8 sequence boundaries.
+ ///
+ /// Or one of:
+ /// - `span` came from [`Source::lex`]
+ ///
+ /// Failing any of those, the returned string slice may reference invalid memory or violate the invariants communicated by `str` type.
+ #[cfg(feature = "unsafe")]
+ unsafe fn get_unchecked<'i>(self, source: &Source<'i>) -> Raw<'i>;
+}
+
+impl SourceIndex for Span {
+ fn get<'i>(self, source: &Source<'i>) -> Option<Raw<'i>> {
+ (&self).get(source)
+ }
+
+ #[cfg(feature = "unsafe")]
+ unsafe fn get_unchecked<'i>(self, source: &Source<'i>) -> Raw<'i> {
+ // SAFETY: Same safety guarantees are required
+ unsafe { (&self).get_unchecked(source) }
+ }
+}
+
+impl SourceIndex for &Span {
+ fn get<'i>(self, source: &Source<'i>) -> Option<Raw<'i>> {
+ let encoding = None;
+ source
+ .get_raw_str(*self)
+ .map(|s| Raw::new_unchecked(s, encoding, *self))
+ }
+
+ #[cfg(feature = "unsafe")]
+ unsafe fn get_unchecked<'i>(self, source: &Source<'i>) -> Raw<'i> {
+ let encoding = None;
+ // SAFETY: Same safety guarantees are required
+ let raw = unsafe { source.get_raw_str_unchecked(*self) };
+ Raw::new_unchecked(raw, encoding, *self)
+ }
+}
+
+impl SourceIndex for crate::lexer::Token {
+ fn get<'i>(self, source: &Source<'i>) -> Option<Raw<'i>> {
+ (&self).get(source)
+ }
+
+ #[cfg(feature = "unsafe")]
+ unsafe fn get_unchecked<'i>(self, source: &Source<'i>) -> Raw<'i> {
+ // SAFETY: Same safety guarantees are required
+ unsafe { (&self).get_unchecked(source) }
+ }
+}
+
+impl SourceIndex for &crate::lexer::Token {
+ fn get<'i>(self, source: &Source<'i>) -> Option<Raw<'i>> {
+ let encoding = self.kind().encoding();
+ source
+ .get_raw_str(self.span())
+ .map(|s| Raw::new_unchecked(s, encoding, self.span()))
+ }
+
+ #[cfg(feature = "unsafe")]
+ unsafe fn get_unchecked<'i>(self, source: &Source<'i>) -> Raw<'i> {
+ let encoding = self.kind().encoding();
+ // SAFETY: Same safety guarantees are required
+ let raw = unsafe { source.get_raw_str_unchecked(self.span()) };
+ Raw::new_unchecked(raw, encoding, self.span())
+ }
+}
+
+impl SourceIndex for crate::parser::Event {
+ fn get<'i>(self, source: &Source<'i>) -> Option<Raw<'i>> {
+ (&self).get(source)
+ }
+
+ #[cfg(feature = "unsafe")]
+ unsafe fn get_unchecked<'i>(self, source: &Source<'i>) -> Raw<'i> {
+ // SAFETY: Same safety guarantees are required
+ unsafe { (&self).get_unchecked(source) }
+ }
+}
+
+impl SourceIndex for &crate::parser::Event {
+ fn get<'i>(self, source: &Source<'i>) -> Option<Raw<'i>> {
+ let encoding = self.encoding();
+ source
+ .get_raw_str(self.span())
+ .map(|s| Raw::new_unchecked(s, encoding, self.span()))
+ }
+
+ #[cfg(feature = "unsafe")]
+ unsafe fn get_unchecked<'i>(self, source: &Source<'i>) -> Raw<'i> {
+ let encoding = self.encoding();
+ // SAFETY: Same safety guarantees are required
+ let raw = unsafe { source.get_raw_str_unchecked(self.span()) };
+ Raw::new_unchecked(raw, encoding, self.span())
+ }
+}
+
+mod sealed {
+ pub trait Sealed {}
+
+ impl Sealed for crate::Span {}
+ impl Sealed for &crate::Span {}
+ impl Sealed for crate::lexer::Token {}
+ impl Sealed for &crate::lexer::Token {}
+ impl Sealed for crate::parser::Event {}
+ impl Sealed for &crate::parser::Event {}
+}
diff --git a/third_party/rust/toml_writer/.cargo-checksum.json b/third_party/rust/toml_writer/.cargo-checksum.json
@@ -0,0 +1 @@
+{"files":{"Cargo.lock":"5ece8330688e8a3e0cb438bd08e38a01278f646f95820b2b718926b520fbb96f","Cargo.toml":"5508d5917ca9a87850ad20371b298be8c9d9051c2f679d617dd610852611755d","LICENSE-APACHE":"c6596eb7be8581c18be736c846fb9173b69eccf6ef94c5135893ec56bd92ba08","LICENSE-MIT":"6efb0476a1cc085077ed49357026d8c173bf33017278ef440f222fb9cbcb66e6","README.md":"712ff0f28b2488fdac8c874c6f52ec47acdf269994162d5408f0f84efd83fcc9","src/integer.rs":"744df1e43452ef68f8be813c6b4eae4a83b06e79590af6f02b94afa3294b291f","src/key.rs":"69e65eb19dfe3ce1b59528b5789fb51c2847276c26bb1a4f0a5ae25b7e7ec127","src/lib.rs":"cfe8495977dd215fd689373305e2ba7ac92e2f3742fc2a9687648a042832d9b6","src/string.rs":"23aee37f00ffeff4eee5c249a6b0abe6a2b50f49ecbb87943ec0ae6e34b1386a","src/value.rs":"35a959654ab1bcedfd73e5cf5cd6e2c28926ef3ef6838b80f200e12d6bf8ed8c","src/write.rs":"7285be4d1c8d334387404dfaeeec21d060d625b9651a1c646275b3e6840e6c94"},"package":"df8b2b54733674ad286d16267dcfc7a71ed5c776e4ac7aa3c3e2561f7c637bf2"}
+\ No newline at end of file
diff --git a/third_party/rust/toml_writer/Cargo.lock b/third_party/rust/toml_writer/Cargo.lock
@@ -0,0 +1,561 @@
+# This file is automatically @generated by Cargo.
+# It is not intended for manual editing.
+version = 3
+
+[[package]]
+name = "anstream"
+version = "0.6.20"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3ae563653d1938f79b1ab1b5e668c87c76a9930414574a6583a7b7e11a8e6192"
+dependencies = [
+ "anstyle",
+ "anstyle-parse",
+ "anstyle-query",
+ "anstyle-wincon",
+ "colorchoice",
+ "is_terminal_polyfill",
+ "utf8parse",
+]
+
+[[package]]
+name = "anstyle"
+version = "1.0.11"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "862ed96ca487e809f1c8e5a8447f6ee2cf102f846893800b20cebdf541fc6bbd"
+
+[[package]]
+name = "anstyle-parse"
+version = "0.2.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4e7644824f0aa2c7b9384579234ef10eb7efb6a0deb83f9630a49594dd9c15c2"
+dependencies = [
+ "utf8parse",
+]
+
+[[package]]
+name = "anstyle-query"
+version = "1.1.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9e231f6134f61b71076a3eab506c379d4f36122f2af15a9ff04415ea4c3339e2"
+dependencies = [
+ "windows-sys 0.60.2",
+]
+
+[[package]]
+name = "anstyle-wincon"
+version = "3.0.10"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3e0633414522a32ffaac8ac6cc8f748e090c5717661fddeea04219e2344f5f2a"
+dependencies = [
+ "anstyle",
+ "once_cell_polyfill",
+ "windows-sys 0.60.2",
+]
+
+[[package]]
+name = "autocfg"
+version = "1.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c08606f8c3cbf4ce6ec8e28fb0014a2c086708fe954eaa885384a6165172e7e8"
+
+[[package]]
+name = "bit-set"
+version = "0.8.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "08807e080ed7f9d5433fa9b275196cfc35414f66a0c79d864dc51a0d825231a3"
+dependencies = [
+ "bit-vec",
+]
+
+[[package]]
+name = "bit-vec"
+version = "0.8.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5e764a1d40d510daf35e07be9eb06e75770908c27d411ee6c92109c9840eaaf7"
+
+[[package]]
+name = "bitflags"
+version = "2.9.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2261d10cca569e4643e526d8dc2e62e433cc8aba21ab764233731f8d369bf394"
+
+[[package]]
+name = "cfg-if"
+version = "1.0.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2fd1289c04a9ea8cb22300a459a72a385d7c73d3259e2ed7dcb2af674838cfa9"
+
+[[package]]
+name = "colorchoice"
+version = "1.0.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b05b61dc5112cbb17e4b6cd61790d9845d13888356391624cbe7e41efeac1e75"
+
+[[package]]
+name = "errno"
+version = "0.3.14"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "39cab71617ae0d63f51a36d69f866391735b51691dbda63cf6f96d042b63efeb"
+dependencies = [
+ "libc",
+ "windows-sys 0.61.0",
+]
+
+[[package]]
+name = "fastrand"
+version = "2.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be"
+
+[[package]]
+name = "fnv"
+version = "1.0.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1"
+
+[[package]]
+name = "getrandom"
+version = "0.3.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "26145e563e54f2cadc477553f1ec5ee650b00862f0a58bcd12cbdc5f0ea2d2f4"
+dependencies = [
+ "cfg-if",
+ "libc",
+ "r-efi",
+ "wasi",
+]
+
+[[package]]
+name = "is_terminal_polyfill"
+version = "1.70.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7943c866cc5cd64cbc25b2e01621d07fa8eb2a1a23160ee81ce38704e97b8ecf"
+
+[[package]]
+name = "lazy_static"
+version = "1.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe"
+
+[[package]]
+name = "libc"
+version = "0.2.175"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6a82ae493e598baaea5209805c49bbf2ea7de956d50d7da0da1164f9c6d28543"
+
+[[package]]
+name = "linux-raw-sys"
+version = "0.11.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "df1d3c3b53da64cf5760482273a98e575c651a67eec7f77df96b5b642de8f039"
+
+[[package]]
+name = "normalize-line-endings"
+version = "0.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "61807f77802ff30975e01f4f071c8ba10c022052f98b3294119f3e615d13e5be"
+
+[[package]]
+name = "num-traits"
+version = "0.2.19"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "071dfc062690e90b734c0b2273ce72ad0ffa95f0c74596bc250dcfd960262841"
+dependencies = [
+ "autocfg",
+]
+
+[[package]]
+name = "once_cell"
+version = "1.21.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "42f5e15c9953c5e4ccceeb2e7382a716482c34515315f7b03532b8b4e8393d2d"
+
+[[package]]
+name = "once_cell_polyfill"
+version = "1.70.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a4895175b425cb1f87721b59f0f286c2092bd4af812243672510e1ac53e2e0ad"
+
+[[package]]
+name = "ppv-lite86"
+version = "0.2.21"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "85eae3c4ed2f50dcfe72643da4befc30deadb458a9b590d720cde2f2b1e97da9"
+dependencies = [
+ "zerocopy",
+]
+
+[[package]]
+name = "proc-macro2"
+version = "1.0.101"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "89ae43fd86e4158d6db51ad8e2b80f313af9cc74f5c0e03ccb87de09998732de"
+dependencies = [
+ "unicode-ident",
+]
+
+[[package]]
+name = "proptest"
+version = "1.7.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6fcdab19deb5195a31cf7726a210015ff1496ba1464fd42cb4f537b8b01b471f"
+dependencies = [
+ "bit-set",
+ "bit-vec",
+ "bitflags",
+ "lazy_static",
+ "num-traits",
+ "rand",
+ "rand_chacha",
+ "rand_xorshift",
+ "regex-syntax",
+ "rusty-fork",
+ "tempfile",
+ "unarray",
+]
+
+[[package]]
+name = "quick-error"
+version = "1.2.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a1d01941d82fa2ab50be1e79e6714289dd7cde78eba4c074bc5a4374f650dfe0"
+
+[[package]]
+name = "quote"
+version = "1.0.40"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1885c039570dc00dcb4ff087a89e185fd56bae234ddc7f056a945bf36467248d"
+dependencies = [
+ "proc-macro2",
+]
+
+[[package]]
+name = "r-efi"
+version = "5.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "69cdb34c158ceb288df11e18b4bd39de994f6657d83847bdffdbd7f346754b0f"
+
+[[package]]
+name = "rand"
+version = "0.9.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6db2770f06117d490610c7488547d543617b21bfa07796d7a12f6f1bd53850d1"
+dependencies = [
+ "rand_chacha",
+ "rand_core",
+]
+
+[[package]]
+name = "rand_chacha"
+version = "0.9.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d3022b5f1df60f26e1ffddd6c66e8aa15de382ae63b3a0c1bfc0e4d3e3f325cb"
+dependencies = [
+ "ppv-lite86",
+ "rand_core",
+]
+
+[[package]]
+name = "rand_core"
+version = "0.9.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "99d9a13982dcf210057a8a78572b2217b667c3beacbf3a0d8b454f6f82837d38"
+dependencies = [
+ "getrandom",
+]
+
+[[package]]
+name = "rand_xorshift"
+version = "0.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "513962919efc330f829edb2535844d1b912b0fbe2ca165d613e4e8788bb05a5a"
+dependencies = [
+ "rand_core",
+]
+
+[[package]]
+name = "regex-syntax"
+version = "0.8.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "caf4aa5b0f434c91fe5c7f1ecb6a5ece2130b02ad2a590589dda5146df959001"
+
+[[package]]
+name = "rustix"
+version = "1.1.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "cd15f8a2c5551a84d56efdc1cd049089e409ac19a3072d5037a17fd70719ff3e"
+dependencies = [
+ "bitflags",
+ "errno",
+ "libc",
+ "linux-raw-sys",
+ "windows-sys 0.61.0",
+]
+
+[[package]]
+name = "rusty-fork"
+version = "0.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "cb3dcc6e454c328bb824492db107ab7c0ae8fcffe4ad210136ef014458c1bc4f"
+dependencies = [
+ "fnv",
+ "quick-error",
+ "tempfile",
+ "wait-timeout",
+]
+
+[[package]]
+name = "serde"
+version = "1.0.225"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "fd6c24dee235d0da097043389623fb913daddf92c76e9f5a1db88607a0bcbd1d"
+dependencies = [
+ "serde_core",
+]
+
+[[package]]
+name = "serde_core"
+version = "1.0.225"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "659356f9a0cb1e529b24c01e43ad2bdf520ec4ceaf83047b83ddcc2251f96383"
+dependencies = [
+ "serde_derive",
+]
+
+[[package]]
+name = "serde_derive"
+version = "1.0.225"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0ea936adf78b1f766949a4977b91d2f5595825bd6ec079aa9543ad2685fc4516"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "similar"
+version = "2.7.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bbbb5d9659141646ae647b42fe094daf6c6192d1620870b449d9557f748b2daa"
+
+[[package]]
+name = "snapbox"
+version = "0.6.21"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "96dcfc4581e3355d70ac2ee14cfdf81dce3d85c85f1ed9e2c1d3013f53b3436b"
+dependencies = [
+ "anstream",
+ "anstyle",
+ "normalize-line-endings",
+ "similar",
+ "snapbox-macros",
+]
+
+[[package]]
+name = "snapbox-macros"
+version = "0.3.10"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "16569f53ca23a41bb6f62e0a5084aa1661f4814a67fa33696a79073e03a664af"
+dependencies = [
+ "anstream",
+]
+
+[[package]]
+name = "syn"
+version = "2.0.106"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ede7c438028d4436d71104916910f5bb611972c5cfd7f89b8300a8186e6fada6"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "unicode-ident",
+]
+
+[[package]]
+name = "tempfile"
+version = "3.22.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "84fa4d11fadde498443cca10fd3ac23c951f0dc59e080e9f4b93d4df4e4eea53"
+dependencies = [
+ "fastrand",
+ "getrandom",
+ "once_cell",
+ "rustix",
+ "windows-sys 0.61.0",
+]
+
+[[package]]
+name = "toml"
+version = "0.5.11"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f4f7f0dd8d50a853a531c426359045b1998f04219d88799810762cd4ad314234"
+dependencies = [
+ "serde",
+]
+
+[[package]]
+name = "toml_writer"
+version = "1.0.4"
+dependencies = [
+ "proptest",
+ "snapbox",
+ "toml",
+]
+
+[[package]]
+name = "unarray"
+version = "0.1.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "eaea85b334db583fe3274d12b4cd1880032beab409c0d774be044d4480ab9a94"
+
+[[package]]
+name = "unicode-ident"
+version = "1.0.19"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f63a545481291138910575129486daeaf8ac54aee4387fe7906919f7830c7d9d"
+
+[[package]]
+name = "utf8parse"
+version = "0.2.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821"
+
+[[package]]
+name = "wait-timeout"
+version = "0.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "09ac3b126d3914f9849036f826e054cbabdc8519970b8998ddaf3b5bd3c65f11"
+dependencies = [
+ "libc",
+]
+
+[[package]]
+name = "wasi"
+version = "0.14.4+wasi-0.2.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "88a5f4a424faf49c3c2c344f166f0662341d470ea185e939657aaff130f0ec4a"
+dependencies = [
+ "wit-bindgen",
+]
+
+[[package]]
+name = "windows-link"
+version = "0.1.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5e6ad25900d524eaabdbbb96d20b4311e1e7ae1699af4fb28c17ae66c80d798a"
+
+[[package]]
+name = "windows-link"
+version = "0.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "45e46c0661abb7180e7b9c281db115305d49ca1709ab8242adf09666d2173c65"
+
+[[package]]
+name = "windows-sys"
+version = "0.60.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f2f500e4d28234f72040990ec9d39e3a6b950f9f22d3dba18416c35882612bcb"
+dependencies = [
+ "windows-targets",
+]
+
+[[package]]
+name = "windows-sys"
+version = "0.61.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e201184e40b2ede64bc2ea34968b28e33622acdbbf37104f0e4a33f7abe657aa"
+dependencies = [
+ "windows-link 0.2.0",
+]
+
+[[package]]
+name = "windows-targets"
+version = "0.53.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d5fe6031c4041849d7c496a8ded650796e7b6ecc19df1a431c1a363342e5dc91"
+dependencies = [
+ "windows-link 0.1.3",
+ "windows_aarch64_gnullvm",
+ "windows_aarch64_msvc",
+ "windows_i686_gnu",
+ "windows_i686_gnullvm",
+ "windows_i686_msvc",
+ "windows_x86_64_gnu",
+ "windows_x86_64_gnullvm",
+ "windows_x86_64_msvc",
+]
+
+[[package]]
+name = "windows_aarch64_gnullvm"
+version = "0.53.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "86b8d5f90ddd19cb4a147a5fa63ca848db3df085e25fee3cc10b39b6eebae764"
+
+[[package]]
+name = "windows_aarch64_msvc"
+version = "0.53.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c7651a1f62a11b8cbd5e0d42526e55f2c99886c77e007179efff86c2b137e66c"
+
+[[package]]
+name = "windows_i686_gnu"
+version = "0.53.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c1dc67659d35f387f5f6c479dc4e28f1d4bb90ddd1a5d3da2e5d97b42d6272c3"
+
+[[package]]
+name = "windows_i686_gnullvm"
+version = "0.53.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9ce6ccbdedbf6d6354471319e781c0dfef054c81fbc7cf83f338a4296c0cae11"
+
+[[package]]
+name = "windows_i686_msvc"
+version = "0.53.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "581fee95406bb13382d2f65cd4a908ca7b1e4c2f1917f143ba16efe98a589b5d"
+
+[[package]]
+name = "windows_x86_64_gnu"
+version = "0.53.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2e55b5ac9ea33f2fc1716d1742db15574fd6fc8dadc51caab1c16a3d3b4190ba"
+
+[[package]]
+name = "windows_x86_64_gnullvm"
+version = "0.53.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0a6e035dd0599267ce1ee132e51c27dd29437f63325753051e71dd9e42406c57"
+
+[[package]]
+name = "windows_x86_64_msvc"
+version = "0.53.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "271414315aff87387382ec3d271b52d7ae78726f5d44ac98b4f4030c91880486"
+
+[[package]]
+name = "wit-bindgen"
+version = "0.45.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5c573471f125075647d03df72e026074b7203790d41351cd6edc96f46bcccd36"
+
+[[package]]
+name = "zerocopy"
+version = "0.8.27"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0894878a5fa3edfd6da3f88c4805f4c8558e2b996227a3d864f47fe11e38282c"
+dependencies = [
+ "zerocopy-derive",
+]
+
+[[package]]
+name = "zerocopy-derive"
+version = "0.8.27"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "88d2b8d9c68ad2b9e4340d7832716a4d21a22a1154777ad56ea55c51a9cf3831"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
diff --git a/third_party/rust/toml_writer/Cargo.toml b/third_party/rust/toml_writer/Cargo.toml
@@ -0,0 +1,179 @@
+# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
+#
+# When uploading crates to the registry Cargo will automatically
+# "normalize" Cargo.toml files for maximal compatibility
+# with all versions of Cargo and also rewrite `path` dependencies
+# to registry (e.g., crates.io) dependencies.
+#
+# If you are reading this file be aware that the original Cargo.toml
+# will likely look very different (and much more reasonable).
+# See Cargo.toml.orig for the original contents.
+
+[package]
+edition = "2021"
+rust-version = "1.76"
+name = "toml_writer"
+version = "1.0.4"
+build = false
+include = [
+ "build.rs",
+ "src/**/*",
+ "Cargo.toml",
+ "Cargo.lock",
+ "LICENSE*",
+ "README.md",
+ "examples/**/*",
+]
+autolib = false
+autobins = false
+autoexamples = false
+autotests = false
+autobenches = false
+description = """
+A low-level interface for writing out TOML
+"""
+readme = "README.md"
+keywords = [
+ "encoding",
+ "toml",
+ "no_std",
+]
+categories = ["encoding"]
+license = "MIT OR Apache-2.0"
+repository = "https://github.com/toml-rs/toml"
+
+[package.metadata.docs.rs]
+all-features = true
+rustdoc-args = ["--generate-link-to-definition"]
+
+[[package.metadata.release.pre-release-replacements]]
+file = "CHANGELOG.md"
+search = "Unreleased"
+replace = "{{version}}"
+min = 1
+
+[[package.metadata.release.pre-release-replacements]]
+file = "CHANGELOG.md"
+search = '\.\.\.HEAD'
+replace = "...{{tag_name}}"
+exactly = 1
+
+[[package.metadata.release.pre-release-replacements]]
+file = "CHANGELOG.md"
+search = "ReleaseDate"
+replace = "{{date}}"
+min = 1
+
+[[package.metadata.release.pre-release-replacements]]
+file = "CHANGELOG.md"
+search = "<!-- next-header -->"
+replace = """
+<!-- next-header -->
+## [Unreleased] - ReleaseDate
+"""
+exactly = 1
+
+[[package.metadata.release.pre-release-replacements]]
+file = "CHANGELOG.md"
+search = "<!-- next-url -->"
+replace = """
+<!-- next-url -->
+[Unreleased]: https://github.com/toml-rs/toml/compare/{{tag_name}}...HEAD"""
+exactly = 1
+
+[features]
+alloc = []
+default = ["std"]
+std = ["alloc"]
+
+[lib]
+name = "toml_writer"
+path = "src/lib.rs"
+
+[dependencies]
+
+[dev-dependencies.proptest]
+version = "1.7.0"
+
+[dev-dependencies.snapbox]
+version = "0.6.21"
+
+[dev-dependencies.toml_old]
+version = "0.5.11"
+package = "toml"
+
+[lints.clippy]
+bool_assert_comparison = "allow"
+branches_sharing_code = "allow"
+checked_conversions = "warn"
+collapsible_else_if = "allow"
+create_dir = "warn"
+dbg_macro = "warn"
+debug_assert_with_mut_call = "warn"
+doc_markdown = "warn"
+empty_enum = "warn"
+enum_glob_use = "warn"
+expl_impl_clone_on_copy = "warn"
+explicit_deref_methods = "warn"
+explicit_into_iter_loop = "warn"
+fallible_impl_from = "warn"
+filter_map_next = "warn"
+flat_map_option = "warn"
+float_cmp_const = "warn"
+fn_params_excessive_bools = "warn"
+from_iter_instead_of_collect = "warn"
+get_first = "allow"
+if_same_then_else = "allow"
+implicit_clone = "warn"
+imprecise_flops = "warn"
+inconsistent_struct_constructor = "warn"
+inefficient_to_string = "warn"
+infinite_loop = "warn"
+invalid_upcast_comparisons = "warn"
+large_digit_groups = "warn"
+large_stack_arrays = "warn"
+large_types_passed_by_value = "warn"
+let_and_return = "allow"
+linkedlist = "warn"
+lossy_float_literal = "warn"
+macro_use_imports = "warn"
+mem_forget = "warn"
+mutex_integer = "warn"
+needless_bool = "allow"
+needless_continue = "allow"
+needless_for_each = "warn"
+negative_feature_names = "warn"
+path_buf_push_overwrite = "warn"
+ptr_as_ptr = "warn"
+rc_mutex = "warn"
+redundant_feature_names = "warn"
+ref_option_ref = "warn"
+rest_pat_in_fully_bound_structs = "warn"
+result_large_err = "allow"
+same_functions_in_if_condition = "warn"
+self_named_module_files = "warn"
+semicolon_if_nothing_returned = "warn"
+str_to_string = "warn"
+string_add = "warn"
+string_add_assign = "warn"
+string_lit_as_bytes = "warn"
+string_to_string = "warn"
+todo = "warn"
+trait_duplication_in_bounds = "warn"
+uninlined_format_args = "warn"
+use_self = "warn"
+verbose_file_reads = "warn"
+wildcard_imports = "warn"
+zero_sized_map_values = "warn"
+
+[lints.rust]
+unnameable_types = "allow"
+unreachable_pub = "warn"
+unsafe_op_in_unsafe_fn = "warn"
+unused_lifetimes = "warn"
+unused_macro_rules = "warn"
+unused_qualifications = "warn"
+
+[lints.rust.rust_2018_idioms]
+level = "warn"
+priority = -1
diff --git a/third_party/rust/toml_writer/LICENSE-APACHE b/third_party/rust/toml_writer/LICENSE-APACHE
@@ -0,0 +1,202 @@
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "{}"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright {yyyy} {name of copyright owner}
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+
diff --git a/third_party/rust/toml_writer/LICENSE-MIT b/third_party/rust/toml_writer/LICENSE-MIT
@@ -0,0 +1,19 @@
+Copyright (c) Individual contributors
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/third_party/rust/toml_writer/README.md b/third_party/rust/toml_writer/README.md
@@ -0,0 +1,22 @@
+# toml_writer
+
+[](https://crates.io/crates/toml)
+[](https://docs.rs/toml)
+
+A low-level interface for writing out TOML
+
+## License
+
+Licensed under either of
+
+* Apache License, Version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or <https://www.apache.org/licenses/LICENSE-2.0>)
+* MIT license ([LICENSE-MIT](LICENSE-MIT) or <https://opensource.org/license/mit>)
+
+at your option.
+
+### Contribution
+
+Unless you explicitly state otherwise, any contribution intentionally
+submitted for inclusion in the work by you, as defined in the Apache-2.0
+license, shall be dual-licensed as above, without any additional terms or
+conditions.
diff --git a/third_party/rust/toml_writer/src/integer.rs b/third_party/rust/toml_writer/src/integer.rs
@@ -0,0 +1,220 @@
+use core::fmt::{self, Display};
+
+/// Describes how a TOML integer should be formatted.
+///
+/// # Example
+///
+/// ```rust
+/// # #[cfg(feature = "alloc")] {
+/// # use toml_writer::ToTomlValue as _;
+/// let format = toml_writer::TomlIntegerFormat::new().as_hex_lower();
+/// let number = 10;
+/// let number = format.format(number).unwrap_or(toml_writer::TomlInteger::new(number));
+/// let number = number.to_toml_value();
+/// assert_eq!(number, "0xa");
+/// # }
+/// ```
+#[derive(Copy, Clone, Debug)]
+pub struct TomlIntegerFormat {
+ radix: Radix,
+}
+
+impl TomlIntegerFormat {
+ /// Creates a new integer format (decimal).
+ pub fn new() -> Self {
+ Self {
+ radix: Radix::Decimal,
+ }
+ }
+
+ /// Sets the format to decimal.
+ pub fn as_decimal(mut self) -> Self {
+ self.radix = Radix::Decimal;
+ self
+ }
+
+ /// Sets the format to hexadecimal with all characters in uppercase.
+ pub fn as_hex_upper(mut self) -> Self {
+ self.radix = Radix::Hexadecimal {
+ case: HexCase::Upper,
+ };
+ self
+ }
+
+ /// Sets the format to hexadecimal with all characters in lowercase.
+ pub fn as_hex_lower(mut self) -> Self {
+ self.radix = Radix::Hexadecimal {
+ case: HexCase::Lower,
+ };
+ self
+ }
+
+ /// Sets the format to octal.
+ pub fn as_octal(mut self) -> Self {
+ self.radix = Radix::Octal;
+ self
+ }
+
+ /// Sets the format to binary.
+ pub fn as_binary(mut self) -> Self {
+ self.radix = Radix::Binary;
+ self
+ }
+
+ /// Formats `value` as a TOML integer.
+ ///
+ /// Returns `None` if the value cannot be formatted
+ /// (e.g. value is negative and the radix is not decimal).
+ pub fn format<N: PartialOrd<i32>>(self, value: N) -> Option<TomlInteger<N>>
+ where
+ TomlInteger<N>: crate::WriteTomlValue,
+ {
+ match self.radix {
+ Radix::Decimal => (),
+ Radix::Hexadecimal { .. } | Radix::Octal | Radix::Binary => {
+ if value < 0 {
+ return None;
+ }
+ }
+ }
+
+ Some(TomlInteger {
+ value,
+ format: self,
+ })
+ }
+}
+
+impl Default for TomlIntegerFormat {
+ fn default() -> Self {
+ Self::new()
+ }
+}
+
+/// Helper struct for formatting TOML integers.
+///
+/// This may be constructed by calling [`TomlIntegerFormat::format()`].
+#[derive(Copy, Clone, Debug)]
+pub struct TomlInteger<N> {
+ value: N,
+ format: TomlIntegerFormat,
+}
+
+impl<N> TomlInteger<N>
+where
+ Self: crate::WriteTomlValue,
+{
+ /// Apply default formatting
+ pub fn new(value: N) -> Self {
+ Self {
+ value,
+ format: TomlIntegerFormat::new(),
+ }
+ }
+}
+
+impl crate::WriteTomlValue for TomlInteger<u8> {
+ fn write_toml_value<W: crate::TomlWrite + ?Sized>(&self, writer: &mut W) -> fmt::Result {
+ write_toml_value(self.value, &self.format, writer)
+ }
+}
+
+impl crate::WriteTomlValue for TomlInteger<i8> {
+ fn write_toml_value<W: crate::TomlWrite + ?Sized>(&self, writer: &mut W) -> fmt::Result {
+ write_toml_value(self.value, &self.format, writer)
+ }
+}
+
+impl crate::WriteTomlValue for TomlInteger<u16> {
+ fn write_toml_value<W: crate::TomlWrite + ?Sized>(&self, writer: &mut W) -> fmt::Result {
+ write_toml_value(self.value, &self.format, writer)
+ }
+}
+
+impl crate::WriteTomlValue for TomlInteger<i16> {
+ fn write_toml_value<W: crate::TomlWrite + ?Sized>(&self, writer: &mut W) -> fmt::Result {
+ write_toml_value(self.value, &self.format, writer)
+ }
+}
+
+impl crate::WriteTomlValue for TomlInteger<u32> {
+ fn write_toml_value<W: crate::TomlWrite + ?Sized>(&self, writer: &mut W) -> fmt::Result {
+ write_toml_value(self.value, &self.format, writer)
+ }
+}
+
+impl crate::WriteTomlValue for TomlInteger<i32> {
+ fn write_toml_value<W: crate::TomlWrite + ?Sized>(&self, writer: &mut W) -> fmt::Result {
+ write_toml_value(self.value, &self.format, writer)
+ }
+}
+
+impl crate::WriteTomlValue for TomlInteger<u64> {
+ fn write_toml_value<W: crate::TomlWrite + ?Sized>(&self, writer: &mut W) -> fmt::Result {
+ write_toml_value(self.value, &self.format, writer)
+ }
+}
+
+impl crate::WriteTomlValue for TomlInteger<i64> {
+ fn write_toml_value<W: crate::TomlWrite + ?Sized>(&self, writer: &mut W) -> fmt::Result {
+ write_toml_value(self.value, &self.format, writer)
+ }
+}
+
+impl crate::WriteTomlValue for TomlInteger<u128> {
+ fn write_toml_value<W: crate::TomlWrite + ?Sized>(&self, writer: &mut W) -> fmt::Result {
+ write_toml_value(self.value, &self.format, writer)
+ }
+}
+
+impl crate::WriteTomlValue for TomlInteger<i128> {
+ fn write_toml_value<W: crate::TomlWrite + ?Sized>(&self, writer: &mut W) -> fmt::Result {
+ write_toml_value(self.value, &self.format, writer)
+ }
+}
+
+impl crate::WriteTomlValue for TomlInteger<usize> {
+ fn write_toml_value<W: crate::TomlWrite + ?Sized>(&self, writer: &mut W) -> fmt::Result {
+ write_toml_value(self.value, &self.format, writer)
+ }
+}
+
+impl crate::WriteTomlValue for TomlInteger<isize> {
+ fn write_toml_value<W: crate::TomlWrite + ?Sized>(&self, writer: &mut W) -> fmt::Result {
+ write_toml_value(self.value, &self.format, writer)
+ }
+}
+
+#[derive(Copy, Clone, Debug)]
+enum Radix {
+ Decimal,
+ Hexadecimal { case: HexCase },
+ Octal,
+ Binary,
+}
+
+#[derive(Copy, Clone, Debug)]
+enum HexCase {
+ Upper,
+ Lower,
+}
+
+fn write_toml_value<
+ N: Display + fmt::UpperHex + fmt::LowerHex + fmt::Octal + fmt::Binary,
+ W: crate::TomlWrite + ?Sized,
+>(
+ value: N,
+ format: &TomlIntegerFormat,
+ writer: &mut W,
+) -> fmt::Result {
+ match format.radix {
+ Radix::Decimal => write!(writer, "{value}")?,
+ Radix::Hexadecimal { case } => match case {
+ HexCase::Upper => write!(writer, "0x{value:X}")?,
+ HexCase::Lower => write!(writer, "0x{value:x}")?,
+ },
+ Radix::Octal => write!(writer, "0o{value:o}")?,
+ Radix::Binary => write!(writer, "0b{value:b}")?,
+ }
+ Ok(())
+}
diff --git a/third_party/rust/toml_writer/src/key.rs b/third_party/rust/toml_writer/src/key.rs
@@ -0,0 +1,55 @@
+#[cfg(feature = "alloc")]
+use alloc::borrow::Cow;
+#[cfg(feature = "alloc")]
+use alloc::string::String;
+
+use crate::TomlWrite;
+
+#[cfg(feature = "alloc")]
+pub trait ToTomlKey {
+ fn to_toml_key(&self) -> String;
+}
+
+#[cfg(feature = "alloc")]
+impl<T> ToTomlKey for T
+where
+ T: WriteTomlKey + ?Sized,
+{
+ fn to_toml_key(&self) -> String {
+ let mut result = String::new();
+ let _ = self.write_toml_key(&mut result);
+ result
+ }
+}
+
+pub trait WriteTomlKey {
+ fn write_toml_key<W: TomlWrite + ?Sized>(&self, writer: &mut W) -> core::fmt::Result;
+}
+
+impl WriteTomlKey for str {
+ fn write_toml_key<W: TomlWrite + ?Sized>(&self, writer: &mut W) -> core::fmt::Result {
+ crate::TomlKeyBuilder::new(self)
+ .as_default()
+ .write_toml_key(writer)
+ }
+}
+
+#[cfg(feature = "alloc")]
+impl WriteTomlKey for String {
+ fn write_toml_key<W: TomlWrite + ?Sized>(&self, writer: &mut W) -> core::fmt::Result {
+ self.as_str().write_toml_key(writer)
+ }
+}
+
+#[cfg(feature = "alloc")]
+impl WriteTomlKey for Cow<'_, str> {
+ fn write_toml_key<W: TomlWrite + ?Sized>(&self, writer: &mut W) -> core::fmt::Result {
+ self.as_ref().write_toml_key(writer)
+ }
+}
+
+impl<V: WriteTomlKey + ?Sized> WriteTomlKey for &V {
+ fn write_toml_key<W: TomlWrite + ?Sized>(&self, writer: &mut W) -> core::fmt::Result {
+ (*self).write_toml_key(writer)
+ }
+}
diff --git a/third_party/rust/toml_writer/src/lib.rs b/third_party/rust/toml_writer/src/lib.rs
@@ -0,0 +1,84 @@
+//! A low-level interface for writing out TOML
+//!
+//! Considerations when serializing arbitrary data:
+//! - Verify the implementation with [`toml-test-harness`](https://docs.rs/toml-test-harness)
+//! - Be sure to group keys under a table before writing another table
+//! - Watch for extra trailing newlines and leading newlines, both when starting with top-level
+//! keys or a table
+//! - When serializing an array-of-tables, be sure to verify that all elements of the array
+//! serialize as tables
+//! - Standard tables and inline tables may need separate implementations of corner cases,
+//! requiring verifying them both
+//!
+//! When serializing Rust data structures
+//! - `Option`: Skip key-value pairs with a value of `None`, otherwise error when seeing `None`
+//! - When skipping key-value pairs, be careful that a deeply nested `None` doesn't get skipped
+//! - Scalars and arrays are unsupported as top-level data types
+//! - Tuples and tuple variants serialize as arrays
+//! - Structs, struct variants, and maps serialize as tables
+//! - Newtype variants serialize as the inner type
+//! - Unit variants serialize to a string
+//! - Unit and unit structs don't have a clear meaning in TOML
+//!
+//! # Example
+//!
+//! ```rust
+//! use toml_writer::TomlWrite as _;
+//!
+//! # fn main() -> std::fmt::Result {
+//! let mut output = String::new();
+//! output.newline()?;
+//! output.open_table_header()?;
+//! output.key("table")?;
+//! output.close_table_header()?;
+//! output.newline()?;
+//!
+//! output.key("key")?;
+//! output.space()?;
+//! output.keyval_sep()?;
+//! output.space()?;
+//! output.value("value")?;
+//! output.newline()?;
+//!
+//! assert_eq!(output, r#"
+//! [table]
+//! key = "value"
+//! "#);
+//! # Ok(())
+//! # }
+//! ```
+
+#![cfg_attr(all(not(feature = "std"), not(test)), no_std)]
+#![cfg_attr(docsrs, feature(doc_cfg))]
+#![forbid(unsafe_code)]
+#![warn(clippy::std_instead_of_core)]
+#![warn(clippy::std_instead_of_alloc)]
+#![warn(clippy::print_stderr)]
+#![warn(clippy::print_stdout)]
+
+#[cfg(feature = "alloc")]
+extern crate alloc;
+
+mod integer;
+mod key;
+mod string;
+mod value;
+mod write;
+
+pub use integer::TomlInteger;
+pub use integer::TomlIntegerFormat;
+#[cfg(feature = "alloc")]
+pub use key::ToTomlKey;
+pub use key::WriteTomlKey;
+pub use string::TomlKey;
+pub use string::TomlKeyBuilder;
+pub use string::TomlString;
+pub use string::TomlStringBuilder;
+#[cfg(feature = "alloc")]
+pub use value::ToTomlValue;
+pub use value::WriteTomlValue;
+pub use write::TomlWrite;
+
+#[doc = include_str!("../README.md")]
+#[cfg(doctest)]
+pub struct ReadmeDoctests;
diff --git a/third_party/rust/toml_writer/src/string.rs b/third_party/rust/toml_writer/src/string.rs
@@ -0,0 +1,436 @@
+/// Describes how a TOML string (key or value) should be formatted.
+///
+/// # Example
+///
+/// ```rust
+/// # #[cfg(feature = "alloc")] {
+/// # use toml_writer::ToTomlValue as _;
+/// let string = "Hello
+/// world!
+/// ";
+/// let string = toml_writer::TomlStringBuilder::new(string).as_default();
+/// let string = string.to_toml_value();
+/// assert_eq!(string, r#""""
+/// Hello
+/// world!
+/// """"#);
+/// # }
+/// ```
+#[derive(Copy, Clone, Debug)]
+pub struct TomlStringBuilder<'s> {
+ decoded: &'s str,
+ metrics: ValueMetrics,
+}
+
+impl<'s> TomlStringBuilder<'s> {
+ pub fn new(decoded: &'s str) -> Self {
+ Self {
+ decoded,
+ metrics: ValueMetrics::calculate(decoded),
+ }
+ }
+
+ pub fn as_default(&self) -> TomlString<'s> {
+ self.as_basic_pretty()
+ .or_else(|| self.as_literal())
+ .or_else(|| self.as_ml_basic_pretty())
+ .or_else(|| self.as_ml_literal())
+ .unwrap_or_else(|| {
+ if self.metrics.newline {
+ self.as_ml_basic()
+ } else {
+ self.as_basic()
+ }
+ })
+ }
+
+ pub fn as_literal(&self) -> Option<TomlString<'s>> {
+ if self.metrics.escape_codes
+ || 0 < self.metrics.max_seq_single_quotes
+ || self.metrics.newline
+ {
+ None
+ } else {
+ Some(TomlString {
+ decoded: self.decoded,
+ encoding: Encoding::LiteralString,
+ newline: self.metrics.newline,
+ })
+ }
+ }
+
+ pub fn as_ml_literal(&self) -> Option<TomlString<'s>> {
+ if self.metrics.escape_codes || 2 < self.metrics.max_seq_single_quotes {
+ None
+ } else {
+ Some(TomlString {
+ decoded: self.decoded,
+ encoding: Encoding::MlLiteralString,
+ newline: self.metrics.newline,
+ })
+ }
+ }
+
+ pub fn as_basic_pretty(&self) -> Option<TomlString<'s>> {
+ if self.metrics.escape_codes
+ || self.metrics.escape
+ || 0 < self.metrics.max_seq_double_quotes
+ || self.metrics.newline
+ {
+ None
+ } else {
+ Some(self.as_basic())
+ }
+ }
+
+ pub fn as_ml_basic_pretty(&self) -> Option<TomlString<'s>> {
+ if self.metrics.escape_codes
+ || self.metrics.escape
+ || 2 < self.metrics.max_seq_double_quotes
+ {
+ None
+ } else {
+ Some(self.as_ml_basic())
+ }
+ }
+
+ pub fn as_basic(&self) -> TomlString<'s> {
+ TomlString {
+ decoded: self.decoded,
+ encoding: Encoding::BasicString,
+ newline: self.metrics.newline,
+ }
+ }
+
+ pub fn as_ml_basic(&self) -> TomlString<'s> {
+ TomlString {
+ decoded: self.decoded,
+ encoding: Encoding::MlBasicString,
+ newline: self.metrics.newline,
+ }
+ }
+}
+
+#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)]
+pub struct TomlString<'s> {
+ decoded: &'s str,
+ encoding: Encoding,
+ newline: bool,
+}
+
+impl crate::WriteTomlValue for TomlString<'_> {
+ fn write_toml_value<W: crate::TomlWrite + ?Sized>(&self, writer: &mut W) -> core::fmt::Result {
+ write_toml_value(self.decoded, Some(self.encoding), self.newline, writer)
+ }
+}
+
+#[derive(Copy, Clone, Debug)]
+pub struct TomlKeyBuilder<'s> {
+ decoded: &'s str,
+ metrics: KeyMetrics,
+}
+
+impl<'s> TomlKeyBuilder<'s> {
+ pub fn new(decoded: &'s str) -> Self {
+ Self {
+ decoded,
+ metrics: KeyMetrics::calculate(decoded),
+ }
+ }
+
+ pub fn as_default(&self) -> TomlKey<'s> {
+ self.as_unquoted()
+ .or_else(|| self.as_basic_pretty())
+ .or_else(|| self.as_literal())
+ .unwrap_or_else(|| self.as_basic())
+ }
+
+ pub fn as_unquoted(&self) -> Option<TomlKey<'s>> {
+ if self.metrics.unquoted {
+ Some(TomlKey {
+ decoded: self.decoded,
+ encoding: None,
+ })
+ } else {
+ None
+ }
+ }
+
+ pub fn as_literal(&self) -> Option<TomlKey<'s>> {
+ if self.metrics.escape_codes || self.metrics.single_quotes {
+ None
+ } else {
+ Some(TomlKey {
+ decoded: self.decoded,
+ encoding: Some(Encoding::LiteralString),
+ })
+ }
+ }
+
+ pub fn as_basic_pretty(&self) -> Option<TomlKey<'s>> {
+ if self.metrics.escape_codes || self.metrics.escape || self.metrics.double_quotes {
+ None
+ } else {
+ Some(self.as_basic())
+ }
+ }
+
+ pub fn as_basic(&self) -> TomlKey<'s> {
+ TomlKey {
+ decoded: self.decoded,
+ encoding: Some(Encoding::BasicString),
+ }
+ }
+}
+
+#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)]
+pub struct TomlKey<'s> {
+ decoded: &'s str,
+ encoding: Option<Encoding>,
+}
+
+impl crate::WriteTomlKey for TomlKey<'_> {
+ fn write_toml_key<W: crate::TomlWrite + ?Sized>(&self, writer: &mut W) -> core::fmt::Result {
+ let newline = false;
+ write_toml_value(self.decoded, self.encoding, newline, writer)
+ }
+}
+
+#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)]
+#[repr(u8)]
+#[allow(clippy::enum_variant_names)]
+enum Encoding {
+ LiteralString,
+ BasicString,
+ MlLiteralString,
+ MlBasicString,
+}
+
+impl Encoding {}
+
+fn write_toml_value<W: crate::TomlWrite + ?Sized>(
+ decoded: &str,
+ encoding: Option<Encoding>,
+ newline: bool,
+ writer: &mut W,
+) -> core::fmt::Result {
+ let delimiter = match encoding {
+ Some(Encoding::LiteralString) => "'",
+ Some(Encoding::BasicString) => "\"",
+ Some(Encoding::MlLiteralString) => "'''",
+ Some(Encoding::MlBasicString) => "\"\"\"",
+ None => "",
+ };
+ let escaped = match encoding {
+ Some(Encoding::LiteralString) | Some(Encoding::MlLiteralString) => false,
+ Some(Encoding::BasicString) | Some(Encoding::MlBasicString) => true,
+ None => false,
+ };
+ let is_ml = match encoding {
+ Some(Encoding::LiteralString) | Some(Encoding::BasicString) => false,
+ Some(Encoding::MlLiteralString) | Some(Encoding::MlBasicString) => true,
+ None => false,
+ };
+ let newline_prefix = newline && is_ml;
+
+ write!(writer, "{delimiter}")?;
+ if newline_prefix {
+ writer.newline()?;
+ }
+ if escaped {
+ // ```bnf
+ // basic-unescaped = wschar / %x21 / %x23-5B / %x5D-7E / non-ascii
+ // wschar = %x20 ; Space
+ // wschar =/ %x09 ; Horizontal tab
+ // escape = %x5C ; \
+ // ```
+ let max_seq_double_quotes = if is_ml { 2 } else { 0 };
+ let mut stream = decoded;
+ while !stream.is_empty() {
+ let mut unescaped_end = 0;
+ let mut escaped = None;
+ let mut seq_double_quotes = 0;
+ for (i, b) in stream.as_bytes().iter().enumerate() {
+ if *b == b'"' {
+ seq_double_quotes += 1;
+ if max_seq_double_quotes < seq_double_quotes {
+ escaped = Some(r#"\""#);
+ break;
+ }
+ } else {
+ seq_double_quotes = 0;
+ }
+
+ match *b {
+ 0x8 => {
+ escaped = Some(r#"\b"#);
+ break;
+ }
+ 0x9 => {
+ escaped = Some(r#"\t"#);
+ break;
+ }
+ 0xa => {
+ if !is_ml {
+ escaped = Some(r#"\n"#);
+ break;
+ }
+ }
+ 0xc => {
+ escaped = Some(r#"\f"#);
+ break;
+ }
+ 0xd => {
+ escaped = Some(r#"\r"#);
+ break;
+ }
+ 0x22 => {} // double quote handled earlier
+ 0x5c => {
+ escaped = Some(r#"\\"#);
+ break;
+ }
+ c if c <= 0x1f || c == 0x7f => {
+ break;
+ }
+ _ => {}
+ }
+
+ unescaped_end = i + 1;
+ }
+ let unescaped = &stream[0..unescaped_end];
+ let escaped_str = escaped.unwrap_or("");
+ let end = unescaped_end + if escaped.is_some() { 1 } else { 0 };
+ stream = &stream[end..];
+ write!(writer, "{unescaped}{escaped_str}")?;
+ if escaped.is_none() && !stream.is_empty() {
+ let b = stream.as_bytes().first().unwrap();
+ write!(writer, "\\u{:04X}", *b as u32)?;
+ stream = &stream[1..];
+ }
+ }
+ } else {
+ write!(writer, "{decoded}")?;
+ }
+ write!(writer, "{delimiter}")?;
+ Ok(())
+}
+
+#[derive(Copy, Clone, Debug)]
+struct ValueMetrics {
+ max_seq_single_quotes: u8,
+ max_seq_double_quotes: u8,
+ escape_codes: bool,
+ escape: bool,
+ newline: bool,
+}
+
+impl ValueMetrics {
+ fn new() -> Self {
+ Self {
+ max_seq_single_quotes: 0,
+ max_seq_double_quotes: 0,
+ escape_codes: false,
+ escape: false,
+ newline: false,
+ }
+ }
+
+ fn calculate(s: &str) -> Self {
+ let mut metrics = Self::new();
+
+ let mut prev_single_quotes = 0;
+ let mut prev_double_quotes = 0;
+ for byte in s.as_bytes() {
+ if *byte == b'\'' {
+ prev_single_quotes += 1;
+ metrics.max_seq_single_quotes =
+ metrics.max_seq_single_quotes.max(prev_single_quotes);
+ } else {
+ prev_single_quotes = 0;
+ }
+ if *byte == b'"' {
+ prev_double_quotes += 1;
+ metrics.max_seq_double_quotes =
+ metrics.max_seq_double_quotes.max(prev_double_quotes);
+ } else {
+ prev_double_quotes = 0;
+ }
+
+ // ```bnf
+ // literal-char = %x09 / %x20-26 / %x28-7E / non-ascii
+ //
+ // basic-unescaped = wschar / %x21 / %x23-5B / %x5D-7E / non-ascii
+ // wschar = %x20 ; Space
+ // wschar =/ %x09 ; Horizontal tab
+ // escape = %x5C ; \
+ // ```
+ match *byte {
+ b'\\' => metrics.escape = true,
+ // Escape codes are needed if any ascii control
+ // characters are present, including \b \f \r.
+ b'\t' => {} // always allowed; remaining neutral on this
+ b'\n' => metrics.newline = true,
+ c if c <= 0x1f || c == 0x7f => metrics.escape_codes = true,
+ _ => {}
+ }
+ }
+
+ metrics
+ }
+}
+
+#[derive(Copy, Clone, Debug)]
+struct KeyMetrics {
+ unquoted: bool,
+ single_quotes: bool,
+ double_quotes: bool,
+ escape_codes: bool,
+ escape: bool,
+}
+
+impl KeyMetrics {
+ fn new() -> Self {
+ Self {
+ unquoted: true,
+ single_quotes: false,
+ double_quotes: false,
+ escape_codes: false,
+ escape: false,
+ }
+ }
+
+ fn calculate(s: &str) -> Self {
+ let mut metrics = Self::new();
+
+ metrics.unquoted = !s.is_empty();
+
+ for byte in s.as_bytes() {
+ if !matches!(*byte, b'a'..=b'z' | b'A'..=b'Z' | b'0'..=b'9' | b'-' | b'_') {
+ metrics.unquoted = false;
+ }
+
+ // ```bnf
+ // unquoted-key = 1*( ALPHA / DIGIT / %x2D / %x5F ) ; A-Z / a-z / 0-9 / - / _
+ //
+ // literal-char = %x09 / %x20-26 / %x28-7E / non-ascii
+ //
+ // basic-unescaped = wschar / %x21 / %x23-5B / %x5D-7E / non-ascii
+ // wschar = %x20 ; Space
+ // wschar =/ %x09 ; Horizontal tab
+ // escape = %x5C ; \
+ // ```
+ match *byte {
+ b'\'' => metrics.single_quotes = true,
+ b'"' => metrics.double_quotes = true,
+ b'\\' => metrics.escape = true,
+ // Escape codes are needed if any ascii control
+ // characters are present, including \b \f \r.
+ b'\t' => {} // always allowed
+ c if c <= 0x1f || c == 0x7f => metrics.escape_codes = true,
+ _ => {}
+ }
+ }
+
+ metrics
+ }
+}
diff --git a/third_party/rust/toml_writer/src/value.rs b/third_party/rust/toml_writer/src/value.rs
@@ -0,0 +1,249 @@
+#[cfg(feature = "alloc")]
+use alloc::borrow::Cow;
+#[cfg(feature = "alloc")]
+use alloc::string::String;
+#[cfg(feature = "alloc")]
+use alloc::vec::Vec;
+
+use crate::TomlWrite;
+use crate::WriteTomlKey;
+
+#[cfg(feature = "alloc")]
+pub trait ToTomlValue {
+ fn to_toml_value(&self) -> String;
+}
+
+#[cfg(feature = "alloc")]
+impl<T> ToTomlValue for T
+where
+ T: WriteTomlValue + ?Sized,
+{
+ fn to_toml_value(&self) -> String {
+ let mut result = String::new();
+ let _ = self.write_toml_value(&mut result);
+ result
+ }
+}
+
+pub trait WriteTomlValue {
+ fn write_toml_value<W: TomlWrite + ?Sized>(&self, writer: &mut W) -> core::fmt::Result;
+}
+
+impl WriteTomlValue for bool {
+ fn write_toml_value<W: TomlWrite + ?Sized>(&self, writer: &mut W) -> core::fmt::Result {
+ write!(writer, "{self}")
+ }
+}
+
+impl WriteTomlValue for u8 {
+ fn write_toml_value<W: TomlWrite + ?Sized>(&self, writer: &mut W) -> core::fmt::Result {
+ write!(writer, "{self}")
+ }
+}
+
+impl WriteTomlValue for i8 {
+ fn write_toml_value<W: TomlWrite + ?Sized>(&self, writer: &mut W) -> core::fmt::Result {
+ write!(writer, "{self}")
+ }
+}
+
+impl WriteTomlValue for u16 {
+ fn write_toml_value<W: TomlWrite + ?Sized>(&self, writer: &mut W) -> core::fmt::Result {
+ write!(writer, "{self}")
+ }
+}
+
+impl WriteTomlValue for i16 {
+ fn write_toml_value<W: TomlWrite + ?Sized>(&self, writer: &mut W) -> core::fmt::Result {
+ write!(writer, "{self}")
+ }
+}
+
+impl WriteTomlValue for u32 {
+ fn write_toml_value<W: TomlWrite + ?Sized>(&self, writer: &mut W) -> core::fmt::Result {
+ write!(writer, "{self}")
+ }
+}
+
+impl WriteTomlValue for i32 {
+ fn write_toml_value<W: TomlWrite + ?Sized>(&self, writer: &mut W) -> core::fmt::Result {
+ write!(writer, "{self}")
+ }
+}
+
+impl WriteTomlValue for u64 {
+ fn write_toml_value<W: TomlWrite + ?Sized>(&self, writer: &mut W) -> core::fmt::Result {
+ write!(writer, "{self}")
+ }
+}
+
+impl WriteTomlValue for i64 {
+ fn write_toml_value<W: TomlWrite + ?Sized>(&self, writer: &mut W) -> core::fmt::Result {
+ write!(writer, "{self}")
+ }
+}
+
+impl WriteTomlValue for u128 {
+ fn write_toml_value<W: TomlWrite + ?Sized>(&self, writer: &mut W) -> core::fmt::Result {
+ write!(writer, "{self}")
+ }
+}
+
+impl WriteTomlValue for i128 {
+ fn write_toml_value<W: TomlWrite + ?Sized>(&self, writer: &mut W) -> core::fmt::Result {
+ write!(writer, "{self}")
+ }
+}
+
+impl WriteTomlValue for f32 {
+ fn write_toml_value<W: TomlWrite + ?Sized>(&self, writer: &mut W) -> core::fmt::Result {
+ match (self.is_sign_negative(), self.is_nan(), *self == 0.0) {
+ (true, true, _) => write!(writer, "-nan"),
+ (false, true, _) => write!(writer, "nan"),
+ (true, false, true) => write!(writer, "-0.0"),
+ (false, false, true) => write!(writer, "0.0"),
+ (_, false, false) => {
+ if self % 1.0 == 0.0 {
+ write!(writer, "{self}.0")
+ } else {
+ write!(writer, "{self}")
+ }
+ }
+ }
+ }
+}
+
+impl WriteTomlValue for f64 {
+ fn write_toml_value<W: TomlWrite + ?Sized>(&self, writer: &mut W) -> core::fmt::Result {
+ match (self.is_sign_negative(), self.is_nan(), *self == 0.0) {
+ (true, true, _) => write!(writer, "-nan"),
+ (false, true, _) => write!(writer, "nan"),
+ (true, false, true) => write!(writer, "-0.0"),
+ (false, false, true) => write!(writer, "0.0"),
+ (_, false, false) => {
+ if self % 1.0 == 0.0 {
+ write!(writer, "{self}.0")
+ } else {
+ write!(writer, "{self}")
+ }
+ }
+ }
+ }
+}
+
+impl WriteTomlValue for char {
+ fn write_toml_value<W: TomlWrite + ?Sized>(&self, writer: &mut W) -> core::fmt::Result {
+ let mut buf = [0; 4];
+ let v = self.encode_utf8(&mut buf);
+ v.write_toml_value(writer)
+ }
+}
+
+impl WriteTomlValue for str {
+ fn write_toml_value<W: TomlWrite + ?Sized>(&self, writer: &mut W) -> core::fmt::Result {
+ crate::TomlStringBuilder::new(self)
+ .as_default()
+ .write_toml_value(writer)
+ }
+}
+
+#[cfg(feature = "alloc")]
+impl WriteTomlValue for String {
+ fn write_toml_value<W: TomlWrite + ?Sized>(&self, writer: &mut W) -> core::fmt::Result {
+ self.as_str().write_toml_value(writer)
+ }
+}
+
+#[cfg(feature = "alloc")]
+impl WriteTomlValue for Cow<'_, str> {
+ fn write_toml_value<W: TomlWrite + ?Sized>(&self, writer: &mut W) -> core::fmt::Result {
+ self.as_ref().write_toml_value(writer)
+ }
+}
+
+impl<V: WriteTomlValue> WriteTomlValue for [V] {
+ fn write_toml_value<W: TomlWrite + ?Sized>(&self, writer: &mut W) -> core::fmt::Result {
+ writer.open_array()?;
+ let mut iter = self.iter();
+ if let Some(v) = iter.next() {
+ writer.value(v)?;
+ }
+ for v in iter {
+ writer.val_sep()?;
+ writer.space()?;
+ writer.value(v)?;
+ }
+ writer.close_array()?;
+ Ok(())
+ }
+}
+
+impl<V: WriteTomlValue, const N: usize> WriteTomlValue for [V; N] {
+ fn write_toml_value<W: TomlWrite + ?Sized>(&self, writer: &mut W) -> core::fmt::Result {
+ self.as_slice().write_toml_value(writer)
+ }
+}
+
+#[cfg(feature = "alloc")]
+impl<V: WriteTomlValue> WriteTomlValue for Vec<V> {
+ fn write_toml_value<W: TomlWrite + ?Sized>(&self, writer: &mut W) -> core::fmt::Result {
+ self.as_slice().write_toml_value(writer)
+ }
+}
+
+#[cfg(feature = "alloc")]
+impl<K: WriteTomlKey, V: WriteTomlValue> WriteTomlValue for alloc::collections::BTreeMap<K, V> {
+ fn write_toml_value<W: TomlWrite + ?Sized>(&self, writer: &mut W) -> core::fmt::Result {
+ write_toml_inline_table(self.iter(), writer)
+ }
+}
+
+#[cfg(feature = "std")]
+impl<K: WriteTomlKey, V: WriteTomlValue> WriteTomlValue for std::collections::HashMap<K, V> {
+ fn write_toml_value<W: TomlWrite + ?Sized>(&self, writer: &mut W) -> core::fmt::Result {
+ write_toml_inline_table(self.iter(), writer)
+ }
+}
+
+fn write_toml_inline_table<
+ 'i,
+ I: Iterator<Item = (&'i K, &'i V)>,
+ K: WriteTomlKey + 'i,
+ V: WriteTomlValue + 'i,
+ W: TomlWrite + ?Sized,
+>(
+ mut iter: I,
+ writer: &mut W,
+) -> core::fmt::Result {
+ writer.open_inline_table()?;
+ let mut trailing_space = false;
+ if let Some((key, value)) = iter.next() {
+ writer.space()?;
+ writer.key(key)?;
+ writer.space()?;
+ writer.keyval_sep()?;
+ writer.space()?;
+ writer.value(value)?;
+ trailing_space = true;
+ }
+ for (key, value) in iter {
+ writer.val_sep()?;
+ writer.space()?;
+ writer.key(key)?;
+ writer.space()?;
+ writer.keyval_sep()?;
+ writer.space()?;
+ writer.value(value)?;
+ }
+ if trailing_space {
+ writer.space()?;
+ }
+ writer.close_inline_table()?;
+ Ok(())
+}
+
+impl<V: WriteTomlValue + ?Sized> WriteTomlValue for &V {
+ fn write_toml_value<W: TomlWrite + ?Sized>(&self, writer: &mut W) -> core::fmt::Result {
+ (*self).write_toml_value(writer)
+ }
+}
diff --git a/third_party/rust/toml_writer/src/write.rs b/third_party/rust/toml_writer/src/write.rs
@@ -0,0 +1,88 @@
+pub trait TomlWrite: core::fmt::Write {
+ fn open_table_header(&mut self) -> core::fmt::Result {
+ write!(self, "[")
+ }
+ fn close_table_header(&mut self) -> core::fmt::Result {
+ write!(self, "]")
+ }
+
+ fn open_array_of_tables_header(&mut self) -> core::fmt::Result {
+ write!(self, "[[")
+ }
+ fn close_array_of_tables_header(&mut self) -> core::fmt::Result {
+ write!(self, "]]")
+ }
+
+ fn open_inline_table(&mut self) -> core::fmt::Result {
+ write!(self, "{{")
+ }
+ fn close_inline_table(&mut self) -> core::fmt::Result {
+ write!(self, "}}")
+ }
+
+ fn open_array(&mut self) -> core::fmt::Result {
+ write!(self, "[")
+ }
+ fn close_array(&mut self) -> core::fmt::Result {
+ write!(self, "]")
+ }
+
+ fn key_sep(&mut self) -> core::fmt::Result {
+ write!(self, ".")
+ }
+
+ fn keyval_sep(&mut self) -> core::fmt::Result {
+ write!(self, "=")
+ }
+
+ /// Write an encoded TOML key
+ ///
+ /// To customize the encoding, see [`TomlStringBuilder`][crate::TomlStringBuilder].
+ fn key(&mut self, value: impl crate::WriteTomlKey) -> core::fmt::Result {
+ value.write_toml_key(self)
+ }
+
+ /// Write an encoded TOML scalar value
+ ///
+ /// To customize the encoding, see
+ /// - [`TomlStringBuilder`][crate::TomlStringBuilder]
+ /// - [`TomlIntegerFormat`][crate::TomlIntegerFormat]
+ ///
+ /// <div class="warning">
+ ///
+ /// For floats, this preserves the sign bit for [`f32::NAN`] / [`f64::NAN`] for the sake of
+ /// format-preserving editing.
+ /// However, in most cases the sign bit is indeterminate and outputting signed NANs can be a
+ /// cause of non-repeatable behavior.
+ ///
+ /// For general serialization, you should discard the sign bit. For example:
+ /// ```
+ /// # let mut v = f64::NAN;
+ /// if v.is_nan() {
+ /// v = v.copysign(1.0);
+ /// }
+ /// ```
+ ///
+ /// </div>
+ fn value(&mut self, value: impl crate::WriteTomlValue) -> core::fmt::Result {
+ value.write_toml_value(self)
+ }
+
+ fn val_sep(&mut self) -> core::fmt::Result {
+ write!(self, ",")
+ }
+
+ fn space(&mut self) -> core::fmt::Result {
+ write!(self, " ")
+ }
+
+ fn open_comment(&mut self) -> core::fmt::Result {
+ write!(self, "#")
+ }
+
+ fn newline(&mut self) -> core::fmt::Result {
+ writeln!(self)
+ }
+}
+
+impl<W> TomlWrite for W where W: core::fmt::Write {}
diff --git a/third_party/rust/winnow/.cargo-checksum.json b/third_party/rust/winnow/.cargo-checksum.json
@@ -1 +1 @@
-{"files":{"Cargo.lock":"d4ccbe26e163b2acad4574fb30873a521c8dec2eccf839893db4d6848cc48c59","Cargo.toml":"f90be032ff9119a110b4dcd4085187acd118491b475a5fbbbd51722314541feb","LICENSE-MIT":"cb5aedb296c5246d1f22e9099f925a65146f9f0d6b4eebba97fd27a6cdbbab2d","README.md":"0b7e4cad5ef0cbb47dc1c1c34ff3b5d090aaba2cc3b3c5401539e5c4a22b7938","benches/contains_token.rs":"aa7a4d51fff528436f37cc91e79bee52c71d11a14fddb1fc923e1ab3641493f3","benches/find_slice.rs":"7536f32e73ee75aaecb8450e55d2b7a1f65fb10e7b21927010c066756ab8dced","benches/iter.rs":"e190201a65a24170ac0a80b8e43f117bafa683584255ed3c28cab3d7fe550143","benches/next_slice.rs":"820681d2e707eb4360bec25efbaef3cbb6e84344b158059b53bb0afbf0b85e51","benches/number.rs":"c801bd72bac2b59ec3990068796644359a68c13e38e2cfad5a645eca51ed42f1","examples/arithmetic/bench.rs":"a162e4f91ce2ed19b5b53f79d77c92eb52278f0788da9af8fee3ee62cebfe0a9","examples/arithmetic/main.rs":"b55ead482dd72a28ffbae141daa9d1837286862bf8950ac594f57afe2a3288ad","examples/arithmetic/parser.rs":"cc2ee2de1e10bca9fc10ab7b22bc77f7690b0bc287dabfdf006c61c51a44ee9b","examples/arithmetic/parser_ast.rs":"eeb12535c386632e88ce18781d23e50c79512ff63f2349231f0d423df00bc4ad","examples/arithmetic/parser_lexer.rs":"354bcc2882fed52d7951f668a0d3932b96e08ff6ea56dbaef64b0496d3c9d93b","examples/arithmetic/test_parser.rs":"3f4568e039c4a5a66cdc76b41a8b0c1352f44a664bc2932d9386849b88d49095","examples/arithmetic/test_parser_ast.rs":"19a9f18a4fb719a976bd470d5b8b403665e9a871bf9ab47dcb1c1c92b0083e0f","examples/arithmetic/test_parser_lexer.rs":"bcfa81f0ecda25f19f46115af73e7896a99624cc082ce42f3989db7fe84f2e1b","examples/css/main.rs":"06647d68a57f8ef47198bda81dd88b2e123e70c7e1a79a0109a759ead1212904","examples/css/parser.rs":"5adb8d1c3f0d5c838e3d942be31c65d3592b0971ce3377d3e6381fe78638dd85","examples/custom_error.rs":"64bf5a6bde9feb457e6e5332328e431f4815c0ba455d8bb95cb6fd53549514b4","examples/http/bench.rs":"b6d3e9e5833b53ac58f8336617cc42a2f3f4c5e5ce88feb23184fcdbb36843f6","examples/http/main.rs":"ae6
fbf6b7083e73b3bd29cb63094f0aace6f7834c90504fa1f3abdca87d81826","examples/http/parser.rs":"f3bd2faf2a459f8991bd324fc346894ce211b5afabebfb97a6130173a003e4b3","examples/http/parser_streaming.rs":"ee1b7dc0d43ec150c9a2398215743c45be8ebbca3c48f87ad87a4720645620a0","examples/ini/bench.rs":"eefe731c56eb47f84689724f861cc249f9dffc0703cbb93d86d394947f42140d","examples/ini/main.rs":"b3b6d508a6e8fa99e34fb400a625d346da6eeeea909ad5e365636acd1a8a354f","examples/ini/parser.rs":"51151f5782c3b0da6a2e1ee238d6d50e6413fe51bda99f93e101b547f21d0658","examples/ini/parser_str.rs":"ded7877edce1ad8222fad206f6f9df62862dbcadf1e82c7a96f852d7f6b5dea8","examples/iterator.rs":"3058436efd78dd3d50bbeff3f61f76c1b560c1b1acc5b48bc2371a3271c755fa","examples/json/bench.rs":"a2a7389fe5e55a3d60d89438958dc4fcde1c3891a22dbce49c065a658ac9d205","examples/json/json.rs":"fd9fa5e4b76ba1f7d9180ae8127751d2c665892ad6d1c95f4eba16ceca6ff576","examples/json/main.rs":"eb72a1b50766d594e1452ebcbc6e953147c2d0961b8e62e08181455dfc658be9","examples/json/parser_alt.rs":"0ebfa22e0fcc840808c48d9dfd606a94a865b4fad2894bd32f84f62754cbd8d1","examples/json/parser_dispatch.rs":"055ed9a28d96339c49124edab67b2e1e605fc62210df5f795df37f1304423b2e","examples/json/parser_partial.rs":"38af4364f0a99f1aa0310b2073089179a3591be5490ac504f1799b2af86506fa","examples/json_iterator.rs":"9e6661a4826bf76bf395ee3eb794ac51376e2a6b5ea659c44ff66f7fe685138a","examples/ndjson/example.ndjson":"c44c130731008bca76181348d40f46b183577949b5a4d229e0e1a56e1e405e5d","examples/ndjson/main.rs":"597fce15ca0e0c76e0c2d02641d2bb65b157449bdc758ef9071da96173052039","examples/ndjson/parser.rs":"90629e692c1a27767982cca02ae2b5c4e342f7885ded31270fcf096903313715","examples/s_expression/main.rs":"5a2de0b477807c7d6ab8bb72579f03692e8f5ea89d5b6f3a8342cfdbe4d84b96","examples/s_expression/parser.rs":"1daa2702c08559716970b50bada9670e855670fadfdd46c23a67dd8acb1d73cd","examples/string/main.rs":"5537748a97de502ba5965f70fc6b9dcb66a4c051e88dbe5dfe604ce1bc0cae35","examples/string/parser.rs":"701
b748fd2751c2218dac36b086743d89ad0ccd77e750dba93e9b49037969d24","src/_topic/arithmetic.rs":"8270c897c8799d837026d3b8720280d661df6e6c879601fdf707cb7bf2deab79","src/_topic/error.rs":"98fad96735246bd12757c596926f65757b2942564a8cccc159f74715631defc7","src/_topic/fromstr.rs":"01abdab296cd30067ae5f508b2b21ebe97c0571ace58e7817876eb2110e8d23a","src/_topic/http.rs":"19b9ec78a031fe5b3086fb34d04d6c13308c50b6e7bfe30229f5b682d3605ac8","src/_topic/ini.rs":"b2b04d48eac3158f4e26ee9dce748a699d02acaa0b12ae8d54421cad0fdc4ad7","src/_topic/json.rs":"c5d1d102eed4d44e2ba9975309c6476a54a0a7940c34b11c928ed24e0f73d2a4","src/_topic/language.rs":"94046b75711f4bf1c4b0802aa8c61b8e87e1503db78e44eab75a132394aa64b9","src/_topic/lexing.rs":"cdd9e6c038051ab7189ff4e89c6bf1294fe0063b7f98d2f6238e0b19dbbf1a44","src/_topic/mod.rs":"a4c880e35cba897d5b3deab9811ddb7b97b6ab9d1ad9a374bcb5d478772f165a","src/_topic/nom.rs":"1f446972d1329cb489bb29250b569a663f38a0172bdbdecf98fd8e14b87c0d63","src/_topic/partial.rs":"f9c4574219fc28720fad4e43266debe62352290039114c208c97157430073031","src/_topic/performance.rs":"115b636769c307bd80ecc5b89ffad47877352bc93ef47bf664093d3d7a2562cc","src/_topic/s_expression.rs":"6ca9a22a5c3344e120421f2ab353a1e822777aebfa1faa0acffc9632f7668cb2","src/_topic/stream.rs":"474a35b9a05f9f858618a97c398352b8feb57287ef9489ae7af5991cb4226a7b","src/_topic/why.rs":"a1152f88b8faa987352be093ef07c3de911b47a9e129fd6070e873ebc2b681ef","src/_tutorial/chapter_0.rs":"dbda2c50ccfc3bc04c798a57d7019ec93a99954327a733a7f48449da8c2ceba5","src/_tutorial/chapter_1.rs":"26379bba2b6e677f619f57227da75882f38f411ad2d356ccd6e3f6ff228cf563","src/_tutorial/chapter_2.rs":"c0922c261f76f31f1805d360799dcb9711711f09da85cf4d8bd7e43abb3b7335","src/_tutorial/chapter_3.rs":"e2d6ab100599d9680e1effcc55cc43e1ad4703f61bb57d5237981e9df0beafd6","src/_tutorial/chapter_4.rs":"58591edcf1fe0f58b914d51fc5c9d64f930bb7b8973c5962e371197d32219e64","src/_tutorial/chapter_5.rs":"40cc87cffd0801a2126246aa223b49e7c98699243fba3ede2fbfbe289b225774","src/_tut
orial/chapter_6.rs":"9438e227406cc76eb4a2f462ab4f8e628ee63d0aa42936c511baefdc2f1e2a0b","src/_tutorial/chapter_7.rs":"858c68b5d7eb69df6305faf4dce73ef0acca39e2490e06c4d7de1949034343d6","src/_tutorial/chapter_8.rs":"1af16a8bc947d64093c0a61187fbb17e14c88bbd40027edf8587dc6347e381ec","src/_tutorial/mod.rs":"afba818d7d1c8309bee5399dc8f695d93b08e93f406734d41e7e66c019b43c53","src/ascii/mod.rs":"88d999dcbe7a83831a68a1d41b1b4fe7dda69d600342b35bf2a7c090586fd505","src/ascii/tests.rs":"aa2c930e0421465be2d1c0d0959fc7164af582354f345375f84303ba9c9ac648","src/binary/bits/mod.rs":"0b7d0a71d1b833816ab84535dccb54bc04fd5e6aa554af339404b238cbf230e9","src/binary/bits/tests.rs":"fc2629882bb5124e365fe732e38bf670397b4c7defcbbee687d0f45d12e0916b","src/binary/mod.rs":"66a70f0cb08bc3a5a1d424eba74ab6c6ac6b87a3946742d13d5a6389fd036592","src/binary/tests.rs":"390df3945b0e1bd0fd534b47494f3de5582856152d2848c53662ca2a70ce3d66","src/combinator/branch.rs":"1587e18a6f9a7c6ce427ab79bfeb90ea7e36647edef9c32e99dcdb4c625aec9d","src/combinator/core.rs":"6c53923e4a81e2fdd0f0297cc6499b5e2dd9cf0f5706dd5959ffb22c3e72d472","src/combinator/debug/internals.rs":"c2a94a8ab4836dc4ea5b337e1a0957932b38f27f579e38c7b5fd8427e3d7a0cc","src/combinator/debug/mod.rs":"589134d981d06051eab75dfbc14f068d0e092aca077e64b899cfb00dfe98aee2","src/combinator/impls.rs":"14368309fa23642823b4c4acb587eb050c22ddab3b4b9a099c1e6ae237fed8ab","src/combinator/mod.rs":"0ed8c965225d8eef38b00149ce45ba0ed9446563af99be2c4be0ecdb5a50c915","src/combinator/multi.rs":"ff9afe8063ae97da8583d4edc4b730e08b6946ebffef72337968784eabfd53f4","src/combinator/sequence.rs":"4d7b9ed0e37928e97f549496fa6aa19c7137f01051d0d206f6e868a1d5fb2c5a","src/combinator/tests.rs":"b321998366b6a7405194020fb48c32dadf9c5625b9f778671ae56ca4912cf267","src/error.rs":"6f2cac3b69cc073fa0e4405bf90c5aec9b0c8d53f8f3286b41d4b46accd46405","src/lib.rs":"27605b25d55ff29c566c9baec8011ccbad5b7ac3edb9ca1771a86820f3668cdf","src/macros/dispatch.rs":"c01e13dc2050ac6917080120e353142eaed3389da451de5da182939
1f11b6790","src/macros/mod.rs":"873d60cd38902cfe5f7ab8c462aa1794e13cd7f7902e2d224baef5f4d4d94877","src/macros/seq.rs":"02c798c652c7807945b476fce1e8e7c0b69e436f341995ccd0a056846822b650","src/macros/tests.rs":"6d43e0027083b7da950eb93b76da7689bfbbe48aeb06effffce71cd888996f2d","src/parser.rs":"11abeaa4c32f0e0bfe395a2d315414e0db4d7998ee060c26827cbf53d80dd7a9","src/stream/bstr.rs":"c1a2dab0c95216345c8915bce8d27ff020e41800c221a21f556011f72fcb2bca","src/stream/bytes.rs":"a3dbf1f6526c6d8e84d615da0e6d9100d80dd23d8cb561c44d70846f2ec2a5c0","src/stream/locating.rs":"1faabc9d47c3b555fbf9b066480904f1572c2f20fc975e63997d83479bd190aa","src/stream/mod.rs":"0c0693cb8ea5d2a1135421ff8612446264973f9dc9e119cb41fec4b47a497afb","src/stream/partial.rs":"4c2e250a41183e2d67563de9f0ba90b4e50a02ec792d409e46000dd0735df090","src/stream/range.rs":"9add51533636d72f172cc02c3e6838717fd4fea15f32af09d188373c8a693154","src/stream/recoverable.rs":"79fe903fe73c31f210a72b1c5fb4d055d41d0e3a77be71a7053ba03227809a45","src/stream/stateful.rs":"8139b944e114f9bd2e07c6a9d7b75df43c8a14f3ff47c9dd4a990913aa63a577","src/stream/tests.rs":"f4b950271678b36262dd61fa7616ecec84be0add56f3130e567ea08097a4f93c","src/stream/token.rs":"3831f503f5dd7f8ac7524e5565e21bb812d388344543c8d097987237d58c741b","src/token/mod.rs":"1b357a65b13e9331a9d24ed6052726dc31f187e7cb2a0f0a46cd720a0866244e","src/token/tests.rs":"d959295d62a861ec68f9c231b6c1c786bbf868fd429f3dcb10f064a512c0d91c"},"package":"d9fb597c990f03753e08d3c29efbfcf2019a003b4bf4ba19225c158e1549f0f3"}
-\ No newline at end of file
+{"files":{"Cargo.lock":"ee369a938cfd7c65dcc5e1421f00142d284263d660b895870df13d94f18904c1","Cargo.toml":"ece60664a01610ae4be890fb1a534ef75aa419ee1bccbef1d9a8f2ed5de77b03","LICENSE-MIT":"cb5aedb296c5246d1f22e9099f925a65146f9f0d6b4eebba97fd27a6cdbbab2d","README.md":"0b7e4cad5ef0cbb47dc1c1c34ff3b5d090aaba2cc3b3c5401539e5c4a22b7938","examples/arithmetic/bench.rs":"a162e4f91ce2ed19b5b53f79d77c92eb52278f0788da9af8fee3ee62cebfe0a9","examples/arithmetic/main.rs":"b55ead482dd72a28ffbae141daa9d1837286862bf8950ac594f57afe2a3288ad","examples/arithmetic/parser.rs":"cc2ee2de1e10bca9fc10ab7b22bc77f7690b0bc287dabfdf006c61c51a44ee9b","examples/arithmetic/parser_ast.rs":"eeb12535c386632e88ce18781d23e50c79512ff63f2349231f0d423df00bc4ad","examples/arithmetic/parser_lexer.rs":"ff80d4d01947384a4a9382b88eb83cd5bb72181557058ab6ef5814ee29b14ea0","examples/arithmetic/test_parser.rs":"3f4568e039c4a5a66cdc76b41a8b0c1352f44a664bc2932d9386849b88d49095","examples/arithmetic/test_parser_ast.rs":"19a9f18a4fb719a976bd470d5b8b403665e9a871bf9ab47dcb1c1c92b0083e0f","examples/arithmetic/test_parser_lexer.rs":"ea6825693dece6fb60f39bdc80f6714b94dddac5482e27b17090355ed699a0d6","examples/css/main.rs":"06647d68a57f8ef47198bda81dd88b2e123e70c7e1a79a0109a759ead1212904","examples/css/parser.rs":"5adb8d1c3f0d5c838e3d942be31c65d3592b0971ce3377d3e6381fe78638dd85","examples/custom_error.rs":"64bf5a6bde9feb457e6e5332328e431f4815c0ba455d8bb95cb6fd53549514b4","examples/http/bench.rs":"b6d3e9e5833b53ac58f8336617cc42a2f3f4c5e5ce88feb23184fcdbb36843f6","examples/http/main.rs":"ae6fbf6b7083e73b3bd29cb63094f0aace6f7834c90504fa1f3abdca87d81826","examples/http/parser.rs":"f3bd2faf2a459f8991bd324fc346894ce211b5afabebfb97a6130173a003e4b3","examples/http/parser_streaming.rs":"ee1b7dc0d43ec150c9a2398215743c45be8ebbca3c48f87ad87a4720645620a0","examples/ini/bench.rs":"eefe731c56eb47f84689724f861cc249f9dffc0703cbb93d86d394947f42140d","examples/ini/main.rs":"b3b6d508a6e8fa99e34fb400a625d346da6eeeea909ad5e365636acd1a8a354f","examples
/ini/parser.rs":"51151f5782c3b0da6a2e1ee238d6d50e6413fe51bda99f93e101b547f21d0658","examples/ini/parser_str.rs":"ded7877edce1ad8222fad206f6f9df62862dbcadf1e82c7a96f852d7f6b5dea8","examples/iterator.rs":"3058436efd78dd3d50bbeff3f61f76c1b560c1b1acc5b48bc2371a3271c755fa","examples/json/bench.rs":"a2a7389fe5e55a3d60d89438958dc4fcde1c3891a22dbce49c065a658ac9d205","examples/json/json.rs":"fd9fa5e4b76ba1f7d9180ae8127751d2c665892ad6d1c95f4eba16ceca6ff576","examples/json/main.rs":"eb72a1b50766d594e1452ebcbc6e953147c2d0961b8e62e08181455dfc658be9","examples/json/parser_alt.rs":"0ebfa22e0fcc840808c48d9dfd606a94a865b4fad2894bd32f84f62754cbd8d1","examples/json/parser_dispatch.rs":"055ed9a28d96339c49124edab67b2e1e605fc62210df5f795df37f1304423b2e","examples/json/parser_partial.rs":"38af4364f0a99f1aa0310b2073089179a3591be5490ac504f1799b2af86506fa","examples/json_iterator.rs":"9e6661a4826bf76bf395ee3eb794ac51376e2a6b5ea659c44ff66f7fe685138a","examples/ndjson/example.ndjson":"c44c130731008bca76181348d40f46b183577949b5a4d229e0e1a56e1e405e5d","examples/ndjson/main.rs":"597fce15ca0e0c76e0c2d02641d2bb65b157449bdc758ef9071da96173052039","examples/ndjson/parser.rs":"90629e692c1a27767982cca02ae2b5c4e342f7885ded31270fcf096903313715","examples/s_expression/main.rs":"5a2de0b477807c7d6ab8bb72579f03692e8f5ea89d5b6f3a8342cfdbe4d84b96","examples/s_expression/parser.rs":"1daa2702c08559716970b50bada9670e855670fadfdd46c23a67dd8acb1d73cd","examples/string/main.rs":"5537748a97de502ba5965f70fc6b9dcb66a4c051e88dbe5dfe604ce1bc0cae35","examples/string/parser.rs":"701b748fd2751c2218dac36b086743d89ad0ccd77e750dba93e9b49037969d24","src/_topic/arithmetic.rs":"8270c897c8799d837026d3b8720280d661df6e6c879601fdf707cb7bf2deab79","src/_topic/error.rs":"98fad96735246bd12757c596926f65757b2942564a8cccc159f74715631defc7","src/_topic/fromstr.rs":"01abdab296cd30067ae5f508b2b21ebe97c0571ace58e7817876eb2110e8d23a","src/_topic/http.rs":"19b9ec78a031fe5b3086fb34d04d6c13308c50b6e7bfe30229f5b682d3605ac8","src/_topic/ini.rs":"b2b
04d48eac3158f4e26ee9dce748a699d02acaa0b12ae8d54421cad0fdc4ad7","src/_topic/json.rs":"c5d1d102eed4d44e2ba9975309c6476a54a0a7940c34b11c928ed24e0f73d2a4","src/_topic/language.rs":"94046b75711f4bf1c4b0802aa8c61b8e87e1503db78e44eab75a132394aa64b9","src/_topic/lexing.rs":"cdd9e6c038051ab7189ff4e89c6bf1294fe0063b7f98d2f6238e0b19dbbf1a44","src/_topic/mod.rs":"a4c880e35cba897d5b3deab9811ddb7b97b6ab9d1ad9a374bcb5d478772f165a","src/_topic/nom.rs":"1f446972d1329cb489bb29250b569a663f38a0172bdbdecf98fd8e14b87c0d63","src/_topic/partial.rs":"f9c4574219fc28720fad4e43266debe62352290039114c208c97157430073031","src/_topic/performance.rs":"115b636769c307bd80ecc5b89ffad47877352bc93ef47bf664093d3d7a2562cc","src/_topic/s_expression.rs":"6ca9a22a5c3344e120421f2ab353a1e822777aebfa1faa0acffc9632f7668cb2","src/_topic/stream.rs":"474a35b9a05f9f858618a97c398352b8feb57287ef9489ae7af5991cb4226a7b","src/_topic/why.rs":"856608165593e2b0b38c1c45e57dfb1064d954a4caeaaed190049d58f6f091fc","src/_tutorial/chapter_0.rs":"dbda2c50ccfc3bc04c798a57d7019ec93a99954327a733a7f48449da8c2ceba5","src/_tutorial/chapter_1.rs":"26379bba2b6e677f619f57227da75882f38f411ad2d356ccd6e3f6ff228cf563","src/_tutorial/chapter_2.rs":"c0922c261f76f31f1805d360799dcb9711711f09da85cf4d8bd7e43abb3b7335","src/_tutorial/chapter_3.rs":"e2d6ab100599d9680e1effcc55cc43e1ad4703f61bb57d5237981e9df0beafd6","src/_tutorial/chapter_4.rs":"58591edcf1fe0f58b914d51fc5c9d64f930bb7b8973c5962e371197d32219e64","src/_tutorial/chapter_5.rs":"40cc87cffd0801a2126246aa223b49e7c98699243fba3ede2fbfbe289b225774","src/_tutorial/chapter_6.rs":"9438e227406cc76eb4a2f462ab4f8e628ee63d0aa42936c511baefdc2f1e2a0b","src/_tutorial/chapter_7.rs":"858c68b5d7eb69df6305faf4dce73ef0acca39e2490e06c4d7de1949034343d6","src/_tutorial/chapter_8.rs":"1af16a8bc947d64093c0a61187fbb17e14c88bbd40027edf8587dc6347e381ec","src/_tutorial/mod.rs":"afba818d7d1c8309bee5399dc8f695d93b08e93f406734d41e7e66c019b43c53","src/ascii/mod.rs":"88d999dcbe7a83831a68a1d41b1b4fe7dda69d600342b35bf2a7c090586f
d505","src/ascii/tests.rs":"aa2c930e0421465be2d1c0d0959fc7164af582354f345375f84303ba9c9ac648","src/binary/bits/mod.rs":"0b7d0a71d1b833816ab84535dccb54bc04fd5e6aa554af339404b238cbf230e9","src/binary/bits/tests.rs":"fc2629882bb5124e365fe732e38bf670397b4c7defcbbee687d0f45d12e0916b","src/binary/mod.rs":"66a70f0cb08bc3a5a1d424eba74ab6c6ac6b87a3946742d13d5a6389fd036592","src/binary/tests.rs":"390df3945b0e1bd0fd534b47494f3de5582856152d2848c53662ca2a70ce3d66","src/combinator/branch.rs":"1587e18a6f9a7c6ce427ab79bfeb90ea7e36647edef9c32e99dcdb4c625aec9d","src/combinator/core.rs":"6c53923e4a81e2fdd0f0297cc6499b5e2dd9cf0f5706dd5959ffb22c3e72d472","src/combinator/debug/internals.rs":"b5b1285b58c56d4cad69cf1579a36264faa536af8aed38382c02ad9a0ae9fcab","src/combinator/debug/mod.rs":"589134d981d06051eab75dfbc14f068d0e092aca077e64b899cfb00dfe98aee2","src/combinator/impls.rs":"14368309fa23642823b4c4acb587eb050c22ddab3b4b9a099c1e6ae237fed8ab","src/combinator/mod.rs":"0ed8c965225d8eef38b00149ce45ba0ed9446563af99be2c4be0ecdb5a50c915","src/combinator/multi.rs":"ff9afe8063ae97da8583d4edc4b730e08b6946ebffef72337968784eabfd53f4","src/combinator/sequence.rs":"4d7b9ed0e37928e97f549496fa6aa19c7137f01051d0d206f6e868a1d5fb2c5a","src/combinator/tests.rs":"b321998366b6a7405194020fb48c32dadf9c5625b9f778671ae56ca4912cf267","src/error.rs":"6f2cac3b69cc073fa0e4405bf90c5aec9b0c8d53f8f3286b41d4b46accd46405","src/lib.rs":"71303746596712961ab917a5825529154c2476a836899149b330f923eed67c83","src/macros/dispatch.rs":"c01e13dc2050ac6917080120e353142eaed3389da451de5da1829391f11b6790","src/macros/mod.rs":"887bf50d983cfc4807f89b93f1a375f83526abefbce5ad9c6c53d90123c09578","src/macros/seq.rs":"02c798c652c7807945b476fce1e8e7c0b69e436f341995ccd0a056846822b650","src/macros/tests.rs":"6d43e0027083b7da950eb93b76da7689bfbbe48aeb06effffce71cd888996f2d","src/parser.rs":"bf6818b19c6e596545325e9224156484cf5c8561b29153fb8b247b09a4fa79c4","src/stream/bstr.rs":"51556d12802b7d1bed8c67a452b06615a4c14cf02a9648f7f10649ff35571e58","src
/stream/bytes.rs":"15d7c67b3fe55474e96c50cc9e220b5d3e061f9f8680703c97da00cfaac79c1d","src/stream/locating.rs":"7cac856554c62a46e47026844c34e1a38bb565bac7484b368f4f700b15b21580","src/stream/mod.rs":"96a504da4ea3d2a1cfc79dd2ce83e6dbfa74b7095dd6e26380fcb6b24638ad06","src/stream/partial.rs":"4c2e250a41183e2d67563de9f0ba90b4e50a02ec792d409e46000dd0735df090","src/stream/range.rs":"9add51533636d72f172cc02c3e6838717fd4fea15f32af09d188373c8a693154","src/stream/recoverable.rs":"79fe903fe73c31f210a72b1c5fb4d055d41d0e3a77be71a7053ba03227809a45","src/stream/stateful.rs":"d56242bd30c82551889121b60f6f0232a57689b0b903a695c1566bf66cbbbea4","src/stream/tests.rs":"f4b950271678b36262dd61fa7616ecec84be0add56f3130e567ea08097a4f93c","src/stream/token.rs":"4f9a94a4a8837bf48382b2b566018575260ebebaafe633a38714bc6cdc7fef5e","src/token/mod.rs":"1b357a65b13e9331a9d24ed6052726dc31f187e7cb2a0f0a46cd720a0866244e","src/token/tests.rs":"d959295d62a861ec68f9c231b6c1c786bbf868fd429f3dcb10f064a512c0d91c"},"package":"21a0236b59786fed61e2a80582dd500fe61f18b5dca67a4a067d0bc9039339cf"}
+\ No newline at end of file
diff --git a/third_party/rust/winnow/Cargo.lock b/third_party/rust/winnow/Cargo.lock
@@ -117,9 +117,9 @@ checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa"
[[package]]
name = "automod"
-version = "1.0.14"
+version = "1.0.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "edf3ee19dbc0a46d740f6f0926bde8c50f02bdbc7b536842da28f6ac56513a8b"
+checksum = "ebb4bd301db2e2ca1f5be131c24eb8ebf2d9559bc3744419e93baf8ddea7e670"
dependencies = [
"proc-macro2",
"quote",
@@ -1534,7 +1534,7 @@ checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec"
[[package]]
name = "winnow"
-version = "0.7.9"
+version = "0.7.13"
dependencies = [
"annotate-snippets",
"anstream 0.3.2",
@@ -1543,7 +1543,7 @@ dependencies = [
"automod",
"circular",
"criterion",
- "is-terminal",
+ "is_terminal_polyfill",
"lexopt",
"memchr",
"proptest",
diff --git a/third_party/rust/winnow/Cargo.toml b/third_party/rust/winnow/Cargo.toml
@@ -13,7 +13,7 @@
edition = "2021"
rust-version = "1.65.0"
name = "winnow"
-version = "0.7.9"
+version = "0.7.13"
build = false
include = [
"build.rs",
@@ -22,7 +22,6 @@ include = [
"Cargo.lock",
"LICENSE*",
"README.md",
- "benches/**/*",
"examples/**/*",
]
autolib = false
@@ -53,44 +52,44 @@ rustdoc-args = [
[[package.metadata.release.pre-release-replacements]]
file = "CHANGELOG.md"
-min = 1
-replace = "{{version}}"
search = "Unreleased"
+replace = "{{version}}"
+min = 1
[[package.metadata.release.pre-release-replacements]]
-exactly = 1
file = "CHANGELOG.md"
-replace = "...{{tag_name}}"
search = '\.\.\.HEAD'
+replace = "...{{tag_name}}"
+exactly = 1
[[package.metadata.release.pre-release-replacements]]
file = "CHANGELOG.md"
-min = 1
-replace = "{{date}}"
search = "ReleaseDate"
+replace = "{{date}}"
+min = 1
[[package.metadata.release.pre-release-replacements]]
-exactly = 1
file = "CHANGELOG.md"
+search = "<!-- next-header -->"
replace = """
<!-- next-header -->
## [Unreleased] - ReleaseDate
"""
-search = "<!-- next-header -->"
+exactly = 1
[[package.metadata.release.pre-release-replacements]]
-exactly = 1
file = "CHANGELOG.md"
+search = "<!-- next-url -->"
replace = """
<!-- next-url -->
[Unreleased]: https://github.com/winnow-rs/winnow/compare/{{tag_name}}...HEAD"""
-search = "<!-- next-url -->"
+exactly = 1
[[package.metadata.release.pre-release-replacements]]
-exactly = 1
file = "src/lib.rs"
-replace = "blob/v{{version}}/CHANGELOG.md"
search = 'blob/v.+\..+\..+/CHANGELOG.md'
+replace = "blob/v{{version}}/CHANGELOG.md"
+exactly = 1
[features]
alloc = []
@@ -98,7 +97,7 @@ debug = [
"std",
"dep:anstream",
"dep:anstyle",
- "dep:is-terminal",
+ "dep:is_terminal_polyfill",
"dep:terminal_size",
]
default = ["std"]
@@ -186,16 +185,6 @@ harness = false
required-features = ["alloc"]
[[bench]]
-name = "contains_token"
-path = "benches/contains_token.rs"
-harness = false
-
-[[bench]]
-name = "find_slice"
-path = "benches/find_slice.rs"
-harness = false
-
-[[bench]]
name = "http"
path = "examples/http/bench.rs"
harness = false
@@ -208,26 +197,11 @@ harness = false
required-features = ["std"]
[[bench]]
-name = "iter"
-path = "benches/iter.rs"
-harness = false
-
-[[bench]]
name = "json"
path = "examples/json/bench.rs"
harness = false
required-features = ["std"]
-[[bench]]
-name = "next_slice"
-path = "benches/next_slice.rs"
-harness = false
-
-[[bench]]
-name = "number"
-path = "benches/number.rs"
-harness = false
-
[dependencies.anstream]
version = "0.3.2"
optional = true
@@ -236,8 +210,8 @@ optional = true
version = "1.0.1"
optional = true
-[dependencies.is-terminal]
-version = "0.4.9"
+[dependencies.is_terminal_polyfill]
+version = "1.48.0"
optional = true
[dependencies.memchr]
diff --git a/third_party/rust/winnow/benches/contains_token.rs b/third_party/rust/winnow/benches/contains_token.rs
@@ -1,114 +0,0 @@
-use criterion::black_box;
-
-use winnow::combinator::alt;
-use winnow::combinator::repeat;
-use winnow::prelude::*;
-use winnow::token::take_till;
-use winnow::token::take_while;
-
-fn contains_token(c: &mut criterion::Criterion) {
- let data = [
- ("contiguous", CONTIGUOUS),
- ("interleaved", INTERLEAVED),
- ("canada", CANADA),
- ];
- let mut group = c.benchmark_group("contains_token");
- for (name, sample) in data {
- let len = sample.len();
- group.throughput(criterion::Throughput::Bytes(len as u64));
-
- group.bench_with_input(criterion::BenchmarkId::new("slice", name), &len, |b, _| {
- b.iter(|| black_box(parser_slice.parse_peek(black_box(sample)).unwrap()));
- });
- group.bench_with_input(criterion::BenchmarkId::new("array", name), &len, |b, _| {
- b.iter(|| black_box(parser_array.parse_peek(black_box(sample)).unwrap()));
- });
- group.bench_with_input(criterion::BenchmarkId::new("tuple", name), &len, |b, _| {
- b.iter(|| black_box(parser_tuple.parse_peek(black_box(sample)).unwrap()));
- });
- group.bench_with_input(
- criterion::BenchmarkId::new("closure-or", name),
- &len,
- |b, _| {
- b.iter(|| black_box(parser_closure_or.parse_peek(black_box(sample)).unwrap()));
- },
- );
- group.bench_with_input(
- criterion::BenchmarkId::new("closure-matches", name),
- &len,
- |b, _| {
- b.iter(|| {
- black_box(
- parser_closure_matches
- .parse_peek(black_box(sample))
- .unwrap(),
- )
- });
- },
- );
- }
- group.finish();
-}
-
-fn parser_slice(input: &mut &str) -> ModalResult<usize> {
- let contains = &['0', '1', '2', '3', '4', '5', '6', '7', '8', '9'][..];
- repeat(
- 0..,
- alt((take_while(1.., contains), take_till(1.., contains))),
- )
- .parse_next(input)
-}
-
-fn parser_array(input: &mut &str) -> ModalResult<usize> {
- let contains = ['0', '1', '2', '3', '4', '5', '6', '7', '8', '9'];
- repeat(
- 0..,
- alt((take_while(1.., contains), take_till(1.., contains))),
- )
- .parse_next(input)
-}
-
-fn parser_tuple(input: &mut &str) -> ModalResult<usize> {
- let contains = ('0', '1', '2', '3', '4', '5', '6', '7', '8', '9');
- repeat(
- 0..,
- alt((take_while(1.., contains), take_till(1.., contains))),
- )
- .parse_next(input)
-}
-
-fn parser_closure_or(input: &mut &str) -> ModalResult<usize> {
- let contains = |c: char| {
- c == '0'
- || c == '1'
- || c == '2'
- || c == '3'
- || c == '4'
- || c == '5'
- || c == '6'
- || c == '7'
- || c == '8'
- || c == '9'
- };
- repeat(
- 0..,
- alt((take_while(1.., contains), take_till(1.., contains))),
- )
- .parse_next(input)
-}
-
-fn parser_closure_matches(input: &mut &str) -> ModalResult<usize> {
- let contains = |c: char| matches!(c, '0' | '1' | '2' | '3' | '4' | '5' | '6' | '7' | '8' | '9');
- repeat(
- 0..,
- alt((take_while(1.., contains), take_till(1.., contains))),
- )
- .parse_next(input)
-}
-
-const CONTIGUOUS: &str = "012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789";
-const INTERLEAVED: &str = "0123456789abc0123456789ab0123456789ab0123456789ab0123456789ab0123456789ab0123456789ab0123456789ab0123456789ab0123456789ab0123456789ab0123456789ab0123456789ab0123456789ab0123456789ab0123456789ab0123456789ab0123456789ab0123456789ab0123456789ab0123456789ab0123456789ab0123456789ab";
-const CANADA: &str = include_str!("../third_party/nativejson-benchmark/data/canada.json");
-
-criterion::criterion_group!(benches, contains_token);
-criterion::criterion_main!(benches);
diff --git a/third_party/rust/winnow/benches/find_slice.rs b/third_party/rust/winnow/benches/find_slice.rs
@@ -1,50 +0,0 @@
-use criterion::black_box;
-
-use winnow::combinator::repeat;
-use winnow::prelude::*;
-use winnow::token::take_until;
-
-fn find_slice(c: &mut criterion::Criterion) {
- let empty = "";
- let start_byte = "\r".repeat(100);
- let start_slice = "\r\n".repeat(100);
- let small = format!("{:>10}\r\n", "").repeat(100);
- let large = format!("{:>10000}\r\n", "").repeat(100);
-
- let data = [
- ("empty", (empty, empty)),
- ("start", (&start_byte, &start_slice)),
- ("medium", (&small, &small)),
- ("large", (&large, &large)),
- ];
- let mut group = c.benchmark_group("find_slice");
- for (name, samples) in data {
- group.bench_with_input(
- criterion::BenchmarkId::new("byte", name),
- samples.0,
- |b, sample| {
- b.iter(|| black_box(parser_byte.parse_peek(black_box(sample)).unwrap()));
- },
- );
-
- group.bench_with_input(
- criterion::BenchmarkId::new("slice", name),
- samples.1,
- |b, sample| {
- b.iter(|| black_box(parser_slice.parse_peek(black_box(sample)).unwrap()));
- },
- );
- }
- group.finish();
-}
-
-fn parser_byte(input: &mut &str) -> ModalResult<usize> {
- repeat(0.., (take_until(0.., "\r"), "\r")).parse_next(input)
-}
-
-fn parser_slice(input: &mut &str) -> ModalResult<usize> {
- repeat(0.., (take_until(0.., "\r\n"), "\r\n")).parse_next(input)
-}
-
-criterion::criterion_group!(benches, find_slice);
-criterion::criterion_main!(benches);
diff --git a/third_party/rust/winnow/benches/iter.rs b/third_party/rust/winnow/benches/iter.rs
@@ -1,120 +0,0 @@
-use criterion::black_box;
-
-use winnow::combinator::opt;
-use winnow::prelude::*;
-use winnow::stream::AsChar;
-use winnow::token::one_of;
-
-fn iter(c: &mut criterion::Criterion) {
- let data = [
- ("contiguous", CONTIGUOUS.as_bytes()),
- ("interleaved", INTERLEAVED.as_bytes()),
- ("canada", CANADA.as_bytes()),
- ];
- let mut group = c.benchmark_group("iter");
- for (name, sample) in data {
- let len = sample.len();
- group.throughput(criterion::Throughput::Bytes(len as u64));
-
- group.bench_with_input(
- criterion::BenchmarkId::new("iterate", name),
- &len,
- |b, _| {
- b.iter(|| black_box(iterate.parse_peek(black_box(sample)).unwrap()));
- },
- );
- group.bench_with_input(
- criterion::BenchmarkId::new("next_token", name),
- &len,
- |b, _| {
- b.iter(|| black_box(next_token.parse_peek(black_box(sample)).unwrap()));
- },
- );
- group.bench_with_input(
- criterion::BenchmarkId::new("opt(one_of)", name),
- &len,
- |b, _| {
- b.iter(|| black_box(opt_one_of.parse_peek(black_box(sample)).unwrap()));
- },
- );
- group.bench_with_input(
- criterion::BenchmarkId::new("take_while", name),
- &len,
- |b, _| {
- b.iter(|| black_box(take_while.parse_peek(black_box(sample)).unwrap()));
- },
- );
- group.bench_with_input(criterion::BenchmarkId::new("repeat", name), &len, |b, _| {
- b.iter(|| black_box(repeat.parse_peek(black_box(sample)).unwrap()));
- });
- }
- group.finish();
-}
-
-fn iterate(input: &mut &[u8]) -> ModalResult<usize> {
- let mut count = 0;
- for byte in input.iter() {
- if byte.is_dec_digit() {
- count += 1;
- }
- }
- input.finish();
- Ok(count)
-}
-
-fn next_token(input: &mut &[u8]) -> ModalResult<usize> {
- let mut count = 0;
- while let Some(byte) = input.next_token() {
- if byte.is_dec_digit() {
- count += 1;
- }
- }
- Ok(count)
-}
-
-fn opt_one_of(input: &mut &[u8]) -> ModalResult<usize> {
- let mut count = 0;
- while !input.is_empty() {
- while opt(one_of(AsChar::is_dec_digit))
- .parse_next(input)?
- .is_some()
- {
- count += 1;
- }
- while opt(one_of(|b: u8| !b.is_dec_digit()))
- .parse_next(input)?
- .is_some()
- {}
- }
- Ok(count)
-}
-
-fn take_while(input: &mut &[u8]) -> ModalResult<usize> {
- let mut count = 0;
- while !input.is_empty() {
- count += winnow::token::take_while(0.., AsChar::is_dec_digit)
- .parse_next(input)?
- .len();
- let _ = winnow::token::take_while(0.., |b: u8| !b.is_dec_digit()).parse_next(input)?;
- }
- Ok(count)
-}
-
-fn repeat(input: &mut &[u8]) -> ModalResult<usize> {
- let mut count = 0;
- while !input.is_empty() {
- count += winnow::combinator::repeat(0.., one_of(AsChar::is_dec_digit))
- .map(|count: usize| count)
- .parse_next(input)?;
- let () =
- winnow::combinator::repeat(0.., one_of(|b: u8| !b.is_dec_digit())).parse_next(input)?;
- }
- Ok(count)
-}
-
-const CONTIGUOUS: &str = "012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789";
-const INTERLEAVED: &str = "0123456789abc0123456789ab0123456789ab0123456789ab0123456789ab0123456789ab0123456789ab0123456789ab0123456789ab0123456789ab0123456789ab0123456789ab0123456789ab0123456789ab0123456789ab0123456789ab0123456789ab0123456789ab0123456789ab0123456789ab0123456789ab0123456789ab0123456789ab";
-const CANADA: &str = include_str!("../third_party/nativejson-benchmark/data/canada.json");
-
-criterion::criterion_group!(benches, iter);
-criterion::criterion_main!(benches);
diff --git a/third_party/rust/winnow/benches/next_slice.rs b/third_party/rust/winnow/benches/next_slice.rs
@@ -1,133 +0,0 @@
-use criterion::black_box;
-
-use winnow::combinator::repeat;
-use winnow::prelude::*;
-use winnow::token::literal;
-use winnow::token::one_of;
-
-fn next_slice(c: &mut criterion::Criterion) {
- let mut group = c.benchmark_group("next_slice");
-
- let name = "ascii";
- let sample = "h".repeat(100);
- let sample = sample.as_str();
- group.bench_with_input(
- criterion::BenchmarkId::new("char", name),
- sample,
- |b, sample| {
- b.iter(|| black_box(parser_ascii_char.parse_peek(black_box(sample)).unwrap()));
- },
- );
- group.bench_with_input(
- criterion::BenchmarkId::new("str", name),
- sample,
- |b, sample| {
- b.iter(|| black_box(parser_ascii_str.parse_peek(black_box(sample)).unwrap()));
- },
- );
- group.bench_with_input(
- criterion::BenchmarkId::new("one_of", name),
- sample,
- |b, sample| {
- b.iter(|| black_box(parser_ascii_one_of.parse_peek(black_box(sample)).unwrap()));
- },
- );
- group.bench_with_input(
- criterion::BenchmarkId::new("tag_char", name),
- sample,
- |b, sample| {
- b.iter(|| black_box(parser_ascii_tag_char.parse_peek(black_box(sample)).unwrap()));
- },
- );
- group.bench_with_input(
- criterion::BenchmarkId::new("tag_str", name),
- sample,
- |b, sample| {
- b.iter(|| black_box(parser_ascii_tag_str.parse_peek(black_box(sample)).unwrap()));
- },
- );
-
- let name = "utf8";
- let sample = "🧑".repeat(100);
- let sample = sample.as_str();
- group.bench_with_input(
- criterion::BenchmarkId::new("char", name),
- sample,
- |b, sample| {
- b.iter(|| black_box(parser_utf8_char.parse_peek(black_box(sample)).unwrap()));
- },
- );
- group.bench_with_input(
- criterion::BenchmarkId::new("str", name),
- sample,
- |b, sample| {
- b.iter(|| black_box(parser_utf8_str.parse_peek(black_box(sample)).unwrap()));
- },
- );
- group.bench_with_input(
- criterion::BenchmarkId::new("one_of", name),
- sample,
- |b, sample| {
- b.iter(|| black_box(parser_utf8_one_of.parse_peek(black_box(sample)).unwrap()));
- },
- );
- group.bench_with_input(
- criterion::BenchmarkId::new("tag_char", name),
- sample,
- |b, sample| {
- b.iter(|| black_box(parser_utf8_tag_char.parse_peek(black_box(sample)).unwrap()));
- },
- );
- group.bench_with_input(
- criterion::BenchmarkId::new("tag_str", name),
- sample,
- |b, sample| {
- b.iter(|| black_box(parser_utf8_tag_str.parse_peek(black_box(sample)).unwrap()));
- },
- );
-
- group.finish();
-}
-
-fn parser_ascii_char(input: &mut &str) -> ModalResult<usize> {
- repeat(0.., 'h').parse_next(input)
-}
-
-fn parser_ascii_str(input: &mut &str) -> ModalResult<usize> {
- repeat(0.., "h").parse_next(input)
-}
-
-fn parser_ascii_one_of(input: &mut &str) -> ModalResult<usize> {
- repeat(0.., one_of('h')).parse_next(input)
-}
-
-fn parser_ascii_tag_char(input: &mut &str) -> ModalResult<usize> {
- repeat(0.., literal('h')).parse_next(input)
-}
-
-fn parser_ascii_tag_str(input: &mut &str) -> ModalResult<usize> {
- repeat(0.., literal("h")).parse_next(input)
-}
-
-fn parser_utf8_char(input: &mut &str) -> ModalResult<usize> {
- repeat(0.., '🧑').parse_next(input)
-}
-
-fn parser_utf8_str(input: &mut &str) -> ModalResult<usize> {
- repeat(0.., "🧑").parse_next(input)
-}
-
-fn parser_utf8_one_of(input: &mut &str) -> ModalResult<usize> {
- repeat(0.., one_of('🧑')).parse_next(input)
-}
-
-fn parser_utf8_tag_char(input: &mut &str) -> ModalResult<usize> {
- repeat(0.., literal('🧑')).parse_next(input)
-}
-
-fn parser_utf8_tag_str(input: &mut &str) -> ModalResult<usize> {
- repeat(0.., literal("🧑")).parse_next(input)
-}
-
-criterion::criterion_group!(benches, next_slice);
-criterion::criterion_main!(benches);
diff --git a/third_party/rust/winnow/benches/number.rs b/third_party/rust/winnow/benches/number.rs
@@ -1,68 +0,0 @@
-#[macro_use]
-extern crate criterion;
-
-use criterion::Criterion;
-
-use winnow::ascii::float;
-use winnow::binary::be_u64;
-use winnow::error::InputError;
-use winnow::error::ParserError;
-use winnow::prelude::*;
-use winnow::stream::ParseSlice;
-
-type Stream<'i> = &'i [u8];
-
-fn parser(i: &mut Stream<'_>) -> ModalResult<u64> {
- be_u64.parse_next(i)
-}
-
-fn number(c: &mut Criterion) {
- let data = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12];
-
- parser
- .parse_peek(&data[..])
- .expect("should parse correctly");
- c.bench_function("number", move |b| {
- b.iter(|| parser.parse_peek(&data[..]).unwrap());
- });
-}
-
-fn float_bytes(c: &mut Criterion) {
- println!(
- "float_bytes result: {:?}",
- float::<_, f64, InputError<_>>.parse_peek(&b"-1.234E-12"[..])
- );
- c.bench_function("float bytes", |b| {
- b.iter(|| float::<_, f64, InputError<_>>.parse_peek(&b"-1.234E-12"[..]));
- });
-}
-
-fn float_str(c: &mut Criterion) {
- println!(
- "float_str result: {:?}",
- float::<_, f64, InputError<_>>.parse_peek("-1.234E-12")
- );
- c.bench_function("float str", |b| {
- b.iter(|| float::<_, f64, InputError<_>>.parse_peek("-1.234E-12"));
- });
-}
-
-fn std_float(input: &mut &[u8]) -> ModalResult<f64> {
- match input.parse_slice() {
- Some(n) => Ok(n),
- None => Err(ParserError::from_input(input)),
- }
-}
-
-fn std_float_bytes(c: &mut Criterion) {
- println!(
- "std_float_bytes result: {:?}",
- std_float.parse_peek(&b"-1.234E-12"[..])
- );
- c.bench_function("std_float bytes", |b| {
- b.iter(|| std_float.parse_peek(&b"-1.234E-12"[..]));
- });
-}
-
-criterion_group!(benches, number, float_bytes, std_float_bytes, float_str);
-criterion_main!(benches);
diff --git a/third_party/rust/winnow/examples/arithmetic/parser_lexer.rs b/third_party/rust/winnow/examples/arithmetic/parser_lexer.rs
@@ -29,12 +29,26 @@ pub(crate) fn expr2(i: &mut &str) -> Result<Expr> {
expr.parse_next(&mut tokens)
}
-#[derive(Clone, Debug, PartialEq, Eq)]
+#[derive(Clone, PartialEq, Eq)]
pub(crate) struct Token<'s> {
kind: TokenKind,
raw: &'s str,
}
+impl fmt::Debug for Token<'_> {
+ fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
+ // Customized for brevity for a better `winnow/debug` experience
+ match self.kind {
+ TokenKind::Value => Debug::fmt(self.raw, fmt),
+ TokenKind::Oper(oper) => Debug::fmt(&oper, fmt),
+ TokenKind::OpenParen => fmt.write_str("OpenParen"),
+ TokenKind::CloseParen => fmt.write_str("CloseParen"),
+ TokenKind::Unknown => fmt.write_str("Unknown"),
+ TokenKind::Eof => fmt.write_str("Eof"),
+ }
+ }
+}
+
impl PartialEq<TokenKind> for Token<'_> {
fn eq(&self, other: &TokenKind) -> bool {
self.kind == *other
diff --git a/third_party/rust/winnow/examples/arithmetic/test_parser_lexer.rs b/third_party/rust/winnow/examples/arithmetic/test_parser_lexer.rs
@@ -13,14 +13,8 @@ Ok(
(
"",
[
- Token {
- kind: Value,
- raw: "3",
- },
- Token {
- kind: Eof,
- raw: "",
- },
+ "3",
+ Eof,
],
),
)
@@ -34,14 +28,8 @@ Ok(
(
"",
[
- Token {
- kind: Value,
- raw: "24",
- },
- Token {
- kind: Eof,
- raw: "",
- },
+ "24",
+ Eof,
],
),
)
@@ -55,34 +43,12 @@ Ok(
(
"",
[
- Token {
- kind: Value,
- raw: "12",
- },
- Token {
- kind: Oper(
- Mul,
- ),
- raw: "*",
- },
- Token {
- kind: Value,
- raw: "2",
- },
- Token {
- kind: Oper(
- Div,
- ),
- raw: "/",
- },
- Token {
- kind: Value,
- raw: "3",
- },
- Token {
- kind: Eof,
- raw: "",
- },
+ "12",
+ Mul,
+ "2",
+ Div,
+ "3",
+ Eof,
],
),
)
@@ -96,62 +62,18 @@ Ok(
(
"",
[
- Token {
- kind: Value,
- raw: "2",
- },
- Token {
- kind: Oper(
- Mul,
- ),
- raw: "*",
- },
- Token {
- kind: Value,
- raw: "2",
- },
- Token {
- kind: Oper(
- Div,
- ),
- raw: "/",
- },
- Token {
- kind: OpenParen,
- raw: "(",
- },
- Token {
- kind: Value,
- raw: "5",
- },
- Token {
- kind: Oper(
- Sub,
- ),
- raw: "-",
- },
- Token {
- kind: Value,
- raw: "1",
- },
- Token {
- kind: CloseParen,
- raw: ")",
- },
- Token {
- kind: Oper(
- Add,
- ),
- raw: "+",
- },
- Token {
- kind: Value,
- raw: "3",
- },
- Token {
- kind: Eof,
- raw: "",
- },
+ "2",
+ Mul,
+ "2",
+ Div,
+ OpenParen,
+ "5",
+ Sub,
+ "1",
+ CloseParen,
+ Add,
+ "3",
+ Eof,
],
),
)
@@ -166,24 +88,9 @@ fn factor_test() {
let expected = str![[r#"
Ok(
(
- TokenSlice {
- initial: [
- Token {
- kind: Value,
- raw: "3",
- },
- Token {
- kind: Eof,
- raw: "",
- },
- ],
- input: [
- Token {
- kind: Eof,
- raw: "",
- },
- ],
- },
+ [
+ Eof,
+ ],
Value(
3,
),
@@ -199,24 +106,9 @@ Ok(
let expected = str![[r#"
Ok(
(
- TokenSlice {
- initial: [
- Token {
- kind: Value,
- raw: "12",
- },
- Token {
- kind: Eof,
- raw: "",
- },
- ],
- input: [
- Token {
- kind: Eof,
- raw: "",
- },
- ],
- },
+ [
+ Eof,
+ ],
Value(
12,
),
@@ -232,24 +124,9 @@ Ok(
let expected = str![[r#"
Ok(
(
- TokenSlice {
- initial: [
- Token {
- kind: Value,
- raw: "537",
- },
- Token {
- kind: Eof,
- raw: "",
- },
- ],
- input: [
- Token {
- kind: Eof,
- raw: "",
- },
- ],
- },
+ [
+ Eof,
+ ],
Value(
537,
),
@@ -265,24 +142,9 @@ Ok(
let expected = str![[r#"
Ok(
(
- TokenSlice {
- initial: [
- Token {
- kind: Value,
- raw: "24",
- },
- Token {
- kind: Eof,
- raw: "",
- },
- ],
- input: [
- Token {
- kind: Eof,
- raw: "",
- },
- ],
- },
+ [
+ Eof,
+ ],
Value(
24,
),
@@ -301,44 +163,9 @@ fn term_test() {
let expected = str![[r#"
Ok(
(
- TokenSlice {
- initial: [
- Token {
- kind: Value,
- raw: "12",
- },
- Token {
- kind: Oper(
- Mul,
- ),
- raw: "*",
- },
- Token {
- kind: Value,
- raw: "2",
- },
- Token {
- kind: Oper(
- Div,
- ),
- raw: "/",
- },
- Token {
- kind: Value,
- raw: "3",
- },
- Token {
- kind: Eof,
- raw: "",
- },
- ],
- input: [
- Token {
- kind: Eof,
- raw: "",
- },
- ],
- },
+ [
+ Eof,
+ ],
Div(
Mul(
Value(
@@ -364,44 +191,9 @@ Ok(
let expected = str![[r#"
Ok(
(
- TokenSlice {
- initial: [
- Token {
- kind: Value,
- raw: "12",
- },
- Token {
- kind: Oper(
- Mul,
- ),
- raw: "*",
- },
- Token {
- kind: Value,
- raw: "2",
- },
- Token {
- kind: Oper(
- Div,
- ),
- raw: "/",
- },
- Token {
- kind: Value,
- raw: "3",
- },
- Token {
- kind: Eof,
- raw: "",
- },
- ],
- input: [
- Token {
- kind: Eof,
- raw: "",
- },
- ],
- },
+ [
+ Eof,
+ ],
Div(
Mul(
Value(
@@ -427,64 +219,9 @@ Ok(
let expected = str![[r#"
Ok(
(
- TokenSlice {
- initial: [
- Token {
- kind: Value,
- raw: "2",
- },
- Token {
- kind: Oper(
- Mul,
- ),
- raw: "*",
- },
- Token {
- kind: Value,
- raw: "3",
- },
- Token {
- kind: Oper(
- Mul,
- ),
- raw: "*",
- },
- Token {
- kind: Value,
- raw: "2",
- },
- Token {
- kind: Oper(
- Mul,
- ),
- raw: "*",
- },
- Token {
- kind: Value,
- raw: "2",
- },
- Token {
- kind: Oper(
- Div,
- ),
- raw: "/",
- },
- Token {
- kind: Value,
- raw: "3",
- },
- Token {
- kind: Eof,
- raw: "",
- },
- ],
- input: [
- Token {
- kind: Eof,
- raw: "",
- },
- ],
- },
+ [
+ Eof,
+ ],
Div(
Mul(
Mul(
@@ -520,44 +257,9 @@ Ok(
let expected = str![[r#"
Ok(
(
- TokenSlice {
- initial: [
- Token {
- kind: Value,
- raw: "48",
- },
- Token {
- kind: Oper(
- Div,
- ),
- raw: "/",
- },
- Token {
- kind: Value,
- raw: "3",
- },
- Token {
- kind: Oper(
- Div,
- ),
- raw: "/",
- },
- Token {
- kind: Value,
- raw: "2",
- },
- Token {
- kind: Eof,
- raw: "",
- },
- ],
- input: [
- Token {
- kind: Eof,
- raw: "",
- },
- ],
- },
+ [
+ Eof,
+ ],
Div(
Div(
Value(
diff --git a/third_party/rust/winnow/src/_topic/why.rs b/third_party/rust/winnow/src/_topic/why.rs
@@ -4,7 +4,7 @@
//!
//! <div class="warning">
//!
-//! **Note:** This will focus on principles and priorities. For a deeper and wider wider
+//! **Note:** This will focus on principles and priorities. For a deeper and wider
//! comparison with other Rust parser libraries, see
//! [parse-rosetta-rs](https://github.com/rosetta-rs/parse-rosetta-rs).
//!
diff --git a/third_party/rust/winnow/src/combinator/debug/internals.rs b/third_party/rust/winnow/src/combinator/debug/internals.rs
@@ -149,7 +149,7 @@ pub(crate) fn start<I: Stream>(
// The debug version of `slice` might be wider, either due to rendering one byte as two nibbles or
// escaping in strings.
- let mut debug_slice = format!("{:#?}", input.raw());
+ let mut debug_slice = format!("{:?}", crate::util::from_fn(|f| input.trace(f)));
let (debug_slice, eof) = if let Some(debug_offset) = debug_slice
.char_indices()
.enumerate()
@@ -289,7 +289,7 @@ fn term_width() -> usize {
}
fn query_width() -> Option<usize> {
- use is_terminal::IsTerminal;
+ use is_terminal_polyfill::IsTerminal;
if std::io::stderr().is_terminal() {
terminal_size::terminal_size().map(|(w, _h)| w.0.into())
} else {
diff --git a/third_party/rust/winnow/src/lib.rs b/third_party/rust/winnow/src/lib.rs
@@ -7,7 +7,7 @@
//! - [Tutorial][_tutorial::chapter_0]
//! - [Special Topics][_topic]
//! - [Discussions](https://github.com/winnow-rs/winnow/discussions)
-//! - [CHANGELOG](https://github.com/winnow-rs/winnow/blob/v0.7.9/CHANGELOG.md) (includes major version migration
+//! - [CHANGELOG](https://github.com/winnow-rs/winnow/blob/v0.7.13/CHANGELOG.md) (includes major version migration
//! guides)
//!
//! ## Aspirations
@@ -103,6 +103,37 @@ pub(crate) mod lib {
}
}
+pub(crate) mod util {
+ #[allow(dead_code)]
+ pub(crate) fn from_fn<F: Fn(&mut core::fmt::Formatter<'_>) -> core::fmt::Result>(
+ f: F,
+ ) -> FromFn<F> {
+ FromFn(f)
+ }
+
+ pub(crate) struct FromFn<F>(F)
+ where
+ F: Fn(&mut core::fmt::Formatter<'_>) -> core::fmt::Result;
+
+ impl<F> core::fmt::Debug for FromFn<F>
+ where
+ F: Fn(&mut core::fmt::Formatter<'_>) -> core::fmt::Result,
+ {
+ fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
+ (self.0)(f)
+ }
+ }
+
+ impl<F> core::fmt::Display for FromFn<F>
+ where
+ F: Fn(&mut core::fmt::Formatter<'_>) -> core::fmt::Result,
+ {
+ fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
+ (self.0)(f)
+ }
+ }
+}
+
#[macro_use]
mod macros;
diff --git a/third_party/rust/winnow/src/macros/mod.rs b/third_party/rust/winnow/src/macros/mod.rs
@@ -15,7 +15,7 @@ macro_rules! impl_partial_eq {
impl<'a> PartialEq<$rhs> for $lhs {
#[inline]
fn eq(&self, other: &$rhs) -> bool {
- let l = self.as_ref();
+ let l = self;
let r: &Self = other.as_ref();
PartialEq::eq(l, r)
}
@@ -37,7 +37,7 @@ macro_rules! impl_partial_ord {
impl<'a> PartialOrd<$rhs> for $lhs {
#[inline]
fn partial_cmp(&self, other: &$rhs) -> Option<Ordering> {
- let l = self.as_ref();
+ let l = self;
let r: &Self = other.as_ref();
PartialOrd::partial_cmp(l, r)
}
diff --git a/third_party/rust/winnow/src/parser.rs b/third_party/rust/winnow/src/parser.rs
@@ -47,6 +47,18 @@ use crate::stream::{Recover, Recoverable};
/// - `&[u8]` and `&str`, see [`winnow::token::literal`][crate::token::literal]
pub trait Parser<I, O, E> {
/// Parse all of `input`, generating `O` from it
+ ///
+ /// This is intended for integrating your parser into the rest of your application.
+ ///
+ /// For one [`Parser`] to drive another [`Parser`] forward or for
+ /// [incremental parsing][StreamIsPartial], see instead [`Parser::parse_next`].
+ ///
+ /// This assumes the [`Parser`] intends to read all of `input` and will return an
+ /// [`eof`][crate::combinator::eof] error if it does not
+ /// To ignore trailing `input`, combine your parser with a [`rest`][crate::token::rest]
+ /// (e.g. `(parser, rest).parse(input)`).
+ ///
+ /// See also the [tutorial][crate::_tutorial::chapter_6].
#[inline]
fn parse(&mut self, mut input: I) -> Result<O, ParseError<I, <E as ParserError<I>>::Inner>>
where
@@ -76,9 +88,12 @@ pub trait Parser<I, O, E> {
/// Take tokens from the [`Stream`], turning it into the output
///
- /// This includes advancing the [`Stream`] to the next location.
+ /// This includes advancing the input [`Stream`] to the next location.
///
/// On error, `input` will be left pointing at the error location.
+ ///
+ /// This is intended for a [`Parser`] to drive another [`Parser`] forward or for
+ /// [incremental parsing][StreamIsPartial]
fn parse_next(&mut self, input: &mut I) -> Result<O, E>;
/// Take tokens from the [`Stream`], turning it into the output
diff --git a/third_party/rust/winnow/src/stream/bstr.rs b/third_party/rust/winnow/src/stream/bstr.rs
@@ -432,6 +432,67 @@ impl_partial_ord!(BStr, &'a [u8]);
impl_partial_ord!(BStr, str);
impl_partial_ord!(BStr, &'a str);
+#[cfg(test)]
+mod test {
+ use crate::stream::BStr;
+
+ #[test]
+ fn partial_eq_bstr_byte_slice() {
+ let input = b"foo".as_slice();
+ let actual = BStr::new(input);
+ assert!(actual == input);
+ }
+
+ #[test]
+ fn partial_eq_byte_slice_bstr() {
+ let input = b"foo".as_slice();
+ let actual = BStr::new(input);
+ assert!(input == actual);
+ }
+
+ #[test]
+ fn partial_eq_bstr_str() {
+ let input = "foo";
+ let actual = BStr::new(input);
+ assert!(actual == input);
+ }
+
+ #[test]
+ fn partial_eq_str_bstr() {
+ let input = "foo";
+ let actual = BStr::new(input);
+ assert!(input == actual);
+ }
+
+ #[test]
+ fn partial_ord_bstr_byte_slice() {
+ let input = b"foo".as_slice();
+ let actual = BStr::new(input);
+ assert!(actual.partial_cmp(input) == Some(core::cmp::Ordering::Equal));
+ }
+
+ #[test]
+ fn partial_ord_byte_slice_bstr() {
+ let input = b"foo".as_slice();
+ let actual = BStr::new(input);
+ assert!(input.partial_cmp(actual) == Some(core::cmp::Ordering::Equal));
+ }
+
+ #[test]
+ fn partial_ord_bstr_str() {
+ let input = "foo";
+ let actual = BStr::new(input);
+ assert!(actual.partial_cmp(input) == Some(core::cmp::Ordering::Equal));
+ }
+
+ #[test]
+ fn partial_ord_str_bstr() {
+ let input = "foo";
+ let actual = BStr::new(input);
+ assert!(input.partial_cmp(actual) == Some(core::cmp::Ordering::Equal));
+ }
+}
+
#[cfg(all(test, feature = "std"))]
mod display {
use crate::stream::BStr;
@@ -446,22 +507,35 @@ mod display {
#[cfg(all(test, feature = "std"))]
mod debug {
use crate::stream::BStr;
+ use crate::stream::Stream as _;
+ use snapbox::assert_data_eq;
+ use snapbox::str;
#[test]
fn test_debug() {
- assert_eq!(&format!("{:?}", BStr::new(b"abc")), "\"abc\"");
-
- assert_eq!(
- "\"\\0\\0\\0 ftypisom\\0\\0\\u{2}\\0isomiso2avc1mp\"",
- format!(
- "{:?}",
- BStr::new(b"\0\0\0 ftypisom\0\0\x02\0isomiso2avc1mp")
- ),
- );
+ let input = BStr::new(b"abc");
+ let expected = str![[r#""abc""#]];
+ assert_data_eq!(&format!("{input:?}"), expected);
+
+ let input = BStr::new(b"\0\0\0 ftypisom\0\0\x02\0isomiso2avc1mp");
+ let expected = str![[r#""/0/0/0 ftypisom/0/0/u{2}/0isomiso2avc1mp""#]];
+ assert_data_eq!(&format!("{input:?}"), expected);
}
#[test]
fn test_pretty_debug() {
- assert_eq!(&format!("{:#?}", BStr::new(b"abc")), "abc");
+ let input = BStr::new(b"abc");
+ let expected = str!["abc"];
+ assert_data_eq!(&format!("{input:#?}"), expected);
+ }
+
+ #[test]
+ fn test_trace() {
+ let input = BStr::new(b"abc");
+ let expected = str!["abc"];
+ assert_data_eq!(
+ crate::util::from_fn(|f| input.trace(f)).to_string(),
+ expected
+ );
}
}
diff --git a/third_party/rust/winnow/src/stream/bytes.rs b/third_party/rust/winnow/src/stream/bytes.rs
@@ -447,6 +447,67 @@ impl_partial_ord!(Bytes, &'a [u8]);
impl_partial_ord!(Bytes, str);
impl_partial_ord!(Bytes, &'a str);
+#[cfg(test)]
+mod test {
+ use crate::stream::Bytes;
+
+ #[test]
+ fn partial_eq_bytes_byte_slice() {
+ let input = b"foo".as_slice();
+ let actual = Bytes::new(input);
+ assert!(actual == input);
+ }
+
+ #[test]
+ fn partial_eq_byte_slice_bytes() {
+ let input = b"foo".as_slice();
+ let actual = Bytes::new(input);
+ assert!(input == actual);
+ }
+
+ #[test]
+ fn partial_eq_bytes_str() {
+ let input = "foo";
+ let actual = Bytes::new(input);
+ assert!(actual == input);
+ }
+
+ #[test]
+ fn partial_eq_str_bytes() {
+ let input = "foo";
+ let actual = Bytes::new(input);
+ assert!(input == actual);
+ }
+
+ #[test]
+ fn partial_ord_bytes_byte_slice() {
+ let input = b"foo".as_slice();
+ let actual = Bytes::new(input);
+ assert!(actual.partial_cmp(input) == Some(core::cmp::Ordering::Equal));
+ }
+
+ #[test]
+ fn partial_ord_byte_slice_bytes() {
+ let input = b"foo".as_slice();
+ let actual = Bytes::new(input);
+ assert!(input.partial_cmp(actual) == Some(core::cmp::Ordering::Equal));
+ }
+
+ #[test]
+ fn partial_ord_bytes_str() {
+ let input = "foo";
+ let actual = Bytes::new(input);
+ assert!(actual.partial_cmp(input) == Some(core::cmp::Ordering::Equal));
+ }
+
+ #[test]
+ fn partial_ord_str_bytes() {
+ let input = "foo";
+ let actual = Bytes::new(input);
+ assert!(input.partial_cmp(actual) == Some(core::cmp::Ordering::Equal));
+ }
+}
+
#[cfg(all(test, feature = "std"))]
mod display {
use crate::stream::Bytes;
@@ -461,24 +522,33 @@ mod display {
#[cfg(all(test, feature = "std"))]
mod debug {
use crate::stream::Bytes;
+ use crate::stream::Stream as _;
+ use snapbox::assert_data_eq;
+ use snapbox::str;
#[test]
fn test_debug() {
- assert_eq!(
- "000000206674797069736F6D0000020069736F6D69736F32617663316D70",
- format!(
- "{:?}",
- Bytes::new(b"\0\0\0 ftypisom\0\0\x02\0isomiso2avc1mp")
- ),
- );
+ let input = Bytes::new(b"\0\0\0 ftypisom\0\0\x02\0isomiso2avc1mp");
+ let expected = str!["000000206674797069736F6D0000020069736F6D69736F32617663316D70"];
+ assert_data_eq!(&format!("{input:?}"), expected);
}
#[test]
fn test_pretty_debug() {
- // Output can change from run-to-run
- let _ = format!(
- "{:#?}",
- Bytes::new(b"\0\0\0 ftypisom\0\0\x02\0isomiso2avc1mp")
+ let input = Bytes::new(b"\0\0\0 ftypisom\0\0\x02\0isomiso2avc1mp");
+ let expected = str!["000000206674797069736F6D0000020069736F6D69736F32617663316D70"];
+ assert_data_eq!(&format!("{input:#?}").replace('_', ""), expected);
+ }
+
+ #[test]
+ fn test_trace() {
+ let input = Bytes::new(b"\0\0\0 ftypisom\0\0\x02\0isomiso2avc1mp");
+ let expected = str!["000000206674797069736F6D0000020069736F6D69736F32617663316D70"];
+ assert_data_eq!(
+ crate::util::from_fn(|f| input.trace(f))
+ .to_string()
+ .replace('_', ""),
+ expected
);
}
diff --git a/third_party/rust/winnow/src/stream/locating.rs b/third_party/rust/winnow/src/stream/locating.rs
@@ -25,7 +25,7 @@ use crate::stream::UpdateSlice;
/// byte offsets to line numbers.
///
/// See [`Parser::span`][crate::Parser::span] and [`Parser::with_span`][crate::Parser::with_span] for more details
-#[derive(Copy, Clone, Default, Debug, PartialEq, Eq, PartialOrd, Ord)]
+#[derive(Copy, Clone, Default, PartialEq, Eq, PartialOrd, Ord)]
#[doc(alias = "LocatingSliceSpan")]
#[doc(alias = "Located")]
pub struct LocatingSlice<I> {
@@ -81,6 +81,12 @@ impl<I> AsRef<I> for LocatingSlice<I> {
}
}
+impl<I: core::fmt::Debug> core::fmt::Debug for LocatingSlice<I> {
+ fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
+ self.input.fmt(f)
+ }
+}
+
impl<I> crate::lib::std::ops::Deref for LocatingSlice<I> {
type Target = I;
@@ -174,8 +180,13 @@ impl<I: Stream> Stream for LocatingSlice<I> {
#[inline(always)]
fn raw(&self) -> &dyn crate::lib::std::fmt::Debug {
+ #![allow(deprecated)]
self.input.raw()
}
+
+ fn trace(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
+ self.input.trace(f)
+ }
}
impl<I> Location for LocatingSlice<I>
diff --git a/third_party/rust/winnow/src/stream/mod.rs b/third_party/rust/winnow/src/stream/mod.rs
@@ -25,6 +25,8 @@ use crate::lib::std::str::FromStr;
use crate::error::ErrMode;
#[cfg(feature = "alloc")]
+use crate::lib::std::borrow::Cow;
+#[cfg(feature = "alloc")]
use crate::lib::std::collections::BTreeMap;
#[cfg(feature = "alloc")]
use crate::lib::std::collections::BTreeSet;
@@ -33,6 +35,8 @@ use crate::lib::std::collections::HashMap;
#[cfg(feature = "std")]
use crate::lib::std::collections::HashSet;
#[cfg(feature = "alloc")]
+use crate::lib::std::collections::VecDeque;
+#[cfg(feature = "alloc")]
use crate::lib::std::string::String;
#[cfg(feature = "alloc")]
use crate::lib::std::vec::Vec;
@@ -252,8 +256,15 @@ pub trait Stream: Offset<<Self as Stream>::Checkpoint> + crate::lib::std::fmt::D
/// May panic if an invalid [`Self::Checkpoint`] is provided
fn reset(&mut self, checkpoint: &Self::Checkpoint);
- /// Return the inner-most stream
+ /// Deprecated for callers as of 0.7.10, instead call [`Stream::trace`]
+ #[deprecated(since = "0.7.10", note = "Replaced with `Stream::trace`")]
fn raw(&self) -> &dyn crate::lib::std::fmt::Debug;
+
+ /// Write out a single-line summary of the current parse location
+ fn trace(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
+ #![allow(deprecated)]
+ write!(f, "{:#?}", self.raw())
+ }
}
impl<'i, T> Stream for &'i [T]
@@ -352,6 +363,10 @@ where
fn raw(&self) -> &dyn crate::lib::std::fmt::Debug {
self
}
+
+ fn trace(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
+ write!(f, "{self:?}")
+ }
}
impl<'i> Stream for &'i str {
@@ -373,9 +388,9 @@ impl<'i> Stream for &'i str {
#[inline(always)]
fn next_token(&mut self) -> Option<Self::Token> {
- let c = self.chars().next()?;
- let offset = c.len();
- *self = &self[offset..];
+ let mut iter = self.chars();
+ let c = iter.next()?;
+ *self = iter.as_str();
Some(c)
}
@@ -1467,6 +1482,36 @@ impl<'i> Accumulate<&'i str> for String {
}
#[cfg(feature = "alloc")]
+impl<'i> Accumulate<Cow<'i, str>> for String {
+ #[inline(always)]
+ fn initial(capacity: Option<usize>) -> Self {
+ match capacity {
+ Some(capacity) => String::with_capacity(clamp_capacity::<char>(capacity)),
+ None => String::new(),
+ }
+ }
+ #[inline(always)]
+ fn accumulate(&mut self, acc: Cow<'i, str>) {
+ self.push_str(&acc);
+ }
+}
+
+#[cfg(feature = "alloc")]
+impl Accumulate<String> for String {
+ #[inline(always)]
+ fn initial(capacity: Option<usize>) -> Self {
+ match capacity {
+ Some(capacity) => String::with_capacity(clamp_capacity::<char>(capacity)),
+ None => String::new(),
+ }
+ }
+ #[inline(always)]
+ fn accumulate(&mut self, acc: String) {
+ self.push_str(&acc);
+ }
+}
+
+#[cfg(feature = "alloc")]
impl<K, V> Accumulate<(K, V)> for BTreeMap<K, V>
where
K: crate::lib::std::cmp::Ord,
@@ -1539,6 +1584,21 @@ where
}
#[cfg(feature = "alloc")]
+impl<'i, T: Clone> Accumulate<&'i [T]> for VecDeque<T> {
+ #[inline(always)]
+ fn initial(capacity: Option<usize>) -> Self {
+ match capacity {
+ Some(capacity) => VecDeque::with_capacity(clamp_capacity::<T>(capacity)),
+ None => VecDeque::new(),
+ }
+ }
+ #[inline(always)]
+ fn accumulate(&mut self, acc: &'i [T]) {
+ self.extend(acc.iter().cloned());
+ }
+}
+
+#[cfg(feature = "alloc")]
#[inline]
pub(crate) fn clamp_capacity<T>(capacity: usize) -> usize {
/// Don't pre-allocate more than 64KiB when calling `Vec::with_capacity`.
diff --git a/third_party/rust/winnow/src/stream/stateful.rs b/third_party/rust/winnow/src/stream/stateful.rs
@@ -162,8 +162,13 @@ impl<I: Stream, S: crate::lib::std::fmt::Debug> Stream for Stateful<I, S> {
#[inline(always)]
fn raw(&self) -> &dyn crate::lib::std::fmt::Debug {
+ #![allow(deprecated)]
self.input.raw()
}
+
+ fn trace(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
+ self.input.trace(f)
+ }
}
impl<I, S> Location for Stateful<I, S>
diff --git a/third_party/rust/winnow/src/stream/token.rs b/third_party/rust/winnow/src/stream/token.rs
@@ -24,7 +24,7 @@ use crate::stream::UpdateSlice;
/// - [`Location`] for `T` to extract spans from tokens
///
/// See also [Lexing and Parsing][crate::_topic::lexing].
-#[derive(Debug, Copy, Clone, PartialEq, Eq)]
+#[derive(Copy, Clone, PartialEq, Eq)]
pub struct TokenSlice<'t, T> {
initial: &'t [T],
input: &'t [T],
@@ -100,6 +100,12 @@ impl<T> crate::lib::std::ops::Deref for TokenSlice<'_, T> {
}
}
+impl<T: core::fmt::Debug> core::fmt::Debug for TokenSlice<'_, T> {
+ fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
+ self.input.fmt(f)
+ }
+}
+
impl<T> SliceLen for TokenSlice<'_, T> {
#[inline(always)]
fn slice_len(&self) -> usize {
@@ -180,7 +186,12 @@ where
#[inline(always)]
fn raw(&self) -> &dyn crate::lib::std::fmt::Debug {
- &self.input
+ #![allow(deprecated)]
+ self.input.raw()
+ }
+
+ fn trace(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
+ self.input.trace(f)
}
}