diff --git a/README.md b/README.md index 751d211bc..35533ca87 100644 --- a/README.md +++ b/README.md @@ -73,6 +73,7 @@ c = Xdrgen::Compilation.new( namespace: "MyProgram::XDR", options: { rust_types_custom_str_impl: [], + rust_types_custom_jsonschema_impl: [], }, ) diff --git a/lib/xdrgen/cli.rb b/lib/xdrgen/cli.rb index f818c51f5..f01e09c0f 100644 --- a/lib/xdrgen/cli.rb +++ b/lib/xdrgen/cli.rb @@ -10,6 +10,7 @@ def self.run(args) on 'l', 'language=', 'The output language', default: 'ruby' on 'n', 'namespace=', '"namespace" to generate code within (language-specific)' on 'rust-types-custom-str-impl=', 'Rust types that should not have str implementations generated as they will be provided via custom implementations (rust-specific)' + on 'rust-types-custom-jsonschema-impl=', 'Rust types that should not have jsonschema implementations generated as they will be provided via custom implementations (rust-specific)' end fail(opts) if args.blank? @@ -22,6 +23,7 @@ def self.run(args) namespace: opts[:namespace], options: { rust_types_custom_str_impl: opts[:"rust-types-custom-str-impl"]&.split(',') || [], + rust_types_custom_jsonschema_impl: opts[:"rust-types-custom-jsonschema-impl"]&.split(',') || [], }, ) compilation.compile diff --git a/lib/xdrgen/generators/rust.rb b/lib/xdrgen/generators/rust.rb index e43077308..13120d835 100644 --- a/lib/xdrgen/generators/rust.rb +++ b/lib/xdrgen/generators/rust.rb @@ -102,6 +102,7 @@ def render_enum_of_all_types(out, types) derive(serde::Serialize, serde::Deserialize), serde(rename_all = "snake_case") )] + #[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] pub enum TypeVariant { #{types.map { |t| "#{t}," }.join("\n")} } @@ -123,6 +124,15 @@ def render_enum_of_all_types(out, types) pub const fn variants() -> [TypeVariant; #{types.count}] { Self::VARIANTS } + + #[cfg(feature = "schemars")] + #[must_use] + #[allow(clippy::too_many_lines)] + pub fn json_schema(&self, gen: schemars::gen::SchemaGenerator) -> schemars::schema::RootSchema { + match self { + #{types.map { |t| "Self::#{t} => gen.into_root_schema_for::<#{t}>()," }.join("\n")} + } + } } impl Name for TypeVariant { @@ -156,6 +166,7 @@ def render_enum_of_all_types(out, types) serde(rename_all = "snake_case"), serde(untagged), )] + #[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] pub enum Type { #{types.map { |t| "#{t}(Box<#{t}>)," }.join("\n")} } @@ -369,6 +380,9 @@ def render_struct(out, struct) else out.puts %{#[cfg_attr(all(feature = "serde", feature = "alloc"), derive(serde::Serialize, serde::Deserialize), serde(rename_all = "snake_case"))]} end + if !@options[:rust_types_custom_jsonschema_impl].include?(name struct) + out.puts %{#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]} + end out.puts "pub struct #{name struct} {" out.indent do struct.members.each do |m| @@ -415,6 +429,9 @@ def render_enum(out, enum) else out.puts %{#[cfg_attr(all(feature = "serde", feature = "alloc"), derive(serde::Serialize, serde::Deserialize), serde(rename_all = "snake_case"))]} end + if !@options[:rust_types_custom_jsonschema_impl].include?(name enum) + out.puts %{#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]} + end out.puts "#[repr(i32)]" out.puts "pub enum #{name enum} {" out.indent do @@ -544,6 +561,9 @@ def render_union(out, union) else out.puts %{#[cfg_attr(all(feature = "serde", feature = "alloc"), derive(serde::Serialize, serde::Deserialize), serde(rename_all = "snake_case"))]} end + if !@options[:rust_types_custom_jsonschema_impl].include?(name union) + 
out.puts %{#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]} + end out.puts "#[allow(clippy::large_enum_variant)]" out.puts "pub enum #{name union} {" union_case_count = 0 @@ -678,6 +698,9 @@ def render_typedef(out, typedef) else out.puts %{#[cfg_attr(all(feature = "serde", feature = "alloc"), derive(serde::Serialize, serde::Deserialize), serde(rename_all = "snake_case"))]} end + if !is_fixed_array_opaque(typedef.type) && !@options[:rust_types_custom_jsonschema_impl].include?(name typedef) + out.puts %{#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]} + end if !is_fixed_array_opaque(typedef.type) out.puts "#[derive(Debug)]" end @@ -719,6 +742,43 @@ def render_typedef(out, typedef) } EOS end + if is_fixed_array_opaque(typedef.type) && !@options[:rust_types_custom_jsonschema_impl].include?(name typedef) + out.puts <<-EOS.strip_heredoc + #[cfg(feature = "schemars")] + impl schemars::JsonSchema for #{name typedef} { + fn schema_name() -> String { + "#{name typedef}".to_string() + } + + fn is_referenceable() -> bool { + false + } + + fn json_schema(gen: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema { + let schema = String::json_schema(gen); + if let schemars::schema::Schema::Object(mut schema) = schema { + schema.extensions.insert( + "contentEncoding".to_owned(), + serde_json::Value::String("hex".to_string()), + ); + schema.extensions.insert( + "contentMediaType".to_owned(), + serde_json::Value::String("application/binary".to_string()), + ); + let string = *schema.string.unwrap_or_default().clone(); + schema.string = Some(Box::new(schemars::schema::StringValidation { + max_length: #{typedef.type.size}_u32.checked_mul(2).map(Some).unwrap_or_default(), + min_length: #{typedef.type.size}_u32.checked_mul(2).map(Some).unwrap_or_default(), + ..string + })); + schema.into() + } else { + schema + } + } + } + EOS + end out.puts <<-EOS.strip_heredoc impl From<#{name typedef}> for #{reference(typedef, typedef.type)} { #[must_use] @@ -931,6 +991,12 @@ def base_reference(type) end end + def array_size(type) + is_named, size = type.array_size + size = name @top.find_definition(size) if is_named + size + end + def reference(parent, type) base_ref = base_reference type diff --git a/lib/xdrgen/generators/rust/src/types.rs b/lib/xdrgen/generators/rust/src/types.rs index bc05694ad..13a36c004 100644 --- a/lib/xdrgen/generators/rust/src/types.rs +++ b/lib/xdrgen/generators/rust/src/types.rs @@ -888,6 +888,32 @@ impl<T, const MAX: u32> Default for VecM<T, MAX> { } } +#[cfg(feature = "schemars")] +impl<T: schemars::JsonSchema, const MAX: u32> schemars::JsonSchema for VecM<T, MAX> { + fn schema_name() -> String { + format!("VecM<{}, {}>", T::schema_name(), MAX) + } + + fn is_referenceable() -> bool { + false + } + + fn json_schema(gen: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema { + let schema = Vec::<T>::json_schema(gen); + if let schemars::schema::Schema::Object(mut schema) = schema { + if let Some(array) = schema.array.clone() { + schema.array = Some(Box::new(schemars::schema::ArrayValidation { + max_items: Some(MAX), + ..*array + })); + } + schema.into() + } else { + schema + } + } +} + impl<T, const MAX: u32> VecM<T, MAX> { pub const MAX_LEN: usize = { MAX as usize }; @@ -1323,6 +1349,40 @@ impl<const MAX: u32> Deref for BytesM<MAX> { } } +#[cfg(feature = "schemars")] +impl<const MAX: u32> schemars::JsonSchema for BytesM<MAX> { + fn schema_name() -> String { + format!("BytesM<{MAX}>") + } + + fn is_referenceable() -> bool { + false + } + + fn json_schema(gen: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema { + let schema = String::json_schema(gen); + if let
schemars::schema::Schema::Object(mut schema) = schema { + schema.extensions.insert( + "contentEncoding".to_owned(), + serde_json::Value::String("hex".to_string()), + ); + schema.extensions.insert( + "contentMediaType".to_owned(), + serde_json::Value::String("application/binary".to_string()), + ); + let string = *schema.string.unwrap_or_default().clone(); + schema.string = Some(Box::new(schemars::schema::StringValidation { + max_length: MAX.checked_mul(2).map(Some).unwrap_or_default(), + min_length: None, + ..string + })); + schema.into() + } else { + schema + } + } +} + impl Default for BytesM { fn default() -> Self { Self(Vec::default()) @@ -1711,6 +1771,27 @@ impl Default for StringM { } } +#[cfg(feature = "schemars")] +impl schemars::JsonSchema for StringM { + fn schema_name() -> String { + format!("StringM<{MAX}>") + } + + fn json_schema(gen: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema { + let schema = String::json_schema(gen); + if let schemars::schema::Schema::Object(mut schema) = schema { + let string = *schema.string.unwrap_or_default().clone(); + schema.string = Some(Box::new(schemars::schema::StringValidation { + max_length: Some(MAX), + ..string + })); + schema.into() + } else { + schema + } + } +} + impl StringM { pub const MAX_LEN: usize = { MAX as usize }; @@ -2027,6 +2108,17 @@ pub struct Frame(pub T) where T: ReadXdr; +#[cfg(feature = "schemars")] +impl schemars::JsonSchema for Frame { + fn schema_name() -> String { + format!("Frame<{}>", T::schema_name()) + } + + fn json_schema(gen: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema { + T::json_schema(gen) + } +} + impl ReadXdr for Frame where T: ReadXdr, diff --git a/lib/xdrgen/output.rb b/lib/xdrgen/output.rb index 412f7247c..e2af53533 100644 --- a/lib/xdrgen/output.rb +++ b/lib/xdrgen/output.rb @@ -13,6 +13,16 @@ def initialize(source_paths, output_dir) @files = {} end + def inputs_hash + Digest::SHA256.hexdigest( + [ + Digest::SHA256.hexdigest(relative_source_paths.map { |p| Digest::SHA256.file(p).hexdigest }.join), + Digest::SHA256.hexdigest(relative_source_paths.map { |p| Digest::SHA256.hexdigest(p) }.join), + Digest::SHA256.hexdigest(@output_dir), + ].join + ) + end + def relative_source_paths @source_paths.map { |p| Pathname.new(p).expand_path.relative_path_from(Dir.pwd).to_s }.sort end @@ -21,6 +31,10 @@ def relative_source_path_sha256_hashes relative_source_paths.map { |p| [p, Digest::SHA256.file(p).hexdigest] }.to_h end + def relative_source_path_sha256_hash + Digest::SHA256.hexdigest(relative_source_paths.map { |p| Digest::SHA256.file(p).hexdigest }.join) + end + def open(child_path) if @files.has_key?(child_path) raise Xdrgen::DuplicateFileError, "Cannot open #{child_path} twice" diff --git a/spec/lib/xdrgen/rust_spec.rb b/spec/lib/xdrgen/rust_spec.rb index 1aa158b17..34fa37dc4 100644 --- a/spec/lib/xdrgen/rust_spec.rb +++ b/spec/lib/xdrgen/rust_spec.rb @@ -18,11 +18,28 @@ "MyStruct", "LotsOfMyStructs", ], + rust_types_custom_jsonschema_impl: [], + } + end + + it "can generate #{File.basename path} with custom jsonschema impls" do + c = generate path, "_custom_jsonschema_impls", { + rust_types_custom_str_impl: [], + rust_types_custom_jsonschema_impl: [ + "Foo", + "TestArray", + "Color2", + "UnionKey", + "MyUnion", + "HasOptions", + "MyStruct", + "LotsOfMyStructs", + ], } end end - def generate(path, output_sub_path, options = {rust_types_custom_str_impl: []}) + def generate(path, output_sub_path, options = {rust_types_custom_str_impl: [], rust_types_custom_jsonschema_impl: 
[]}) compilation = Xdrgen::Compilation.new( [path], output_dir: "#{SPEC_ROOT}/output/generator_spec_rust#{output_sub_path}/#{File.basename path}", diff --git a/spec/output/generator_spec_rust/block_comments.x/MyXDR.rs b/spec/output/generator_spec_rust/block_comments.x/MyXDR.rs index 70fa480fb..c7f9cf95b 100644 --- a/spec/output/generator_spec_rust/block_comments.x/MyXDR.rs +++ b/spec/output/generator_spec_rust/block_comments.x/MyXDR.rs @@ -898,6 +898,32 @@ impl Default for VecM { } } +#[cfg(feature = "schemars")] +impl schemars::JsonSchema for VecM { + fn schema_name() -> String { + format!("VecM<{}, {}>", T::schema_name(), MAX) + } + + fn is_referenceable() -> bool { + false + } + + fn json_schema(gen: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema { + let schema = Vec::::json_schema(gen); + if let schemars::schema::Schema::Object(mut schema) = schema { + if let Some(array) = schema.array.clone() { + schema.array = Some(Box::new(schemars::schema::ArrayValidation { + max_items: Some(MAX), + ..*array + })); + } + schema.into() + } else { + schema + } + } +} + impl VecM { pub const MAX_LEN: usize = { MAX as usize }; @@ -1333,6 +1359,40 @@ impl Deref for BytesM { } } +#[cfg(feature = "schemars")] +impl schemars::JsonSchema for BytesM { + fn schema_name() -> String { + format!("BytesM<{MAX}>") + } + + fn is_referenceable() -> bool { + false + } + + fn json_schema(gen: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema { + let schema = String::json_schema(gen); + if let schemars::schema::Schema::Object(mut schema) = schema { + schema.extensions.insert( + "contentEncoding".to_owned(), + serde_json::Value::String("hex".to_string()), + ); + schema.extensions.insert( + "contentMediaType".to_owned(), + serde_json::Value::String("application/binary".to_string()), + ); + let string = *schema.string.unwrap_or_default().clone(); + schema.string = Some(Box::new(schemars::schema::StringValidation { + max_length: MAX.checked_mul(2).map(Some).unwrap_or_default(), + min_length: None, + ..string + })); + schema.into() + } else { + schema + } + } +} + impl Default for BytesM { fn default() -> Self { Self(Vec::default()) @@ -1721,6 +1781,27 @@ impl Default for StringM { } } +#[cfg(feature = "schemars")] +impl schemars::JsonSchema for StringM { + fn schema_name() -> String { + format!("StringM<{MAX}>") + } + + fn json_schema(gen: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema { + let schema = String::json_schema(gen); + if let schemars::schema::Schema::Object(mut schema) = schema { + let string = *schema.string.unwrap_or_default().clone(); + schema.string = Some(Box::new(schemars::schema::StringValidation { + max_length: Some(MAX), + ..string + })); + schema.into() + } else { + schema + } + } +} + impl StringM { pub const MAX_LEN: usize = { MAX as usize }; @@ -2037,6 +2118,17 @@ pub struct Frame(pub T) where T: ReadXdr; +#[cfg(feature = "schemars")] +impl schemars::JsonSchema for Frame { + fn schema_name() -> String { + format!("Frame<{}>", T::schema_name()) + } + + fn json_schema(gen: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema { + T::json_schema(gen) + } +} + impl ReadXdr for Frame where T: ReadXdr, @@ -2699,6 +2791,7 @@ mod test { #[derive(Clone, Copy, Debug, Hash, PartialEq, Eq, PartialOrd, Ord)] #[cfg_attr(feature = "arbitrary", derive(Arbitrary))] #[cfg_attr(all(feature = "serde", feature = "alloc"), derive(serde::Serialize, serde::Deserialize), serde(rename_all = "snake_case"))] +#[cfg_attr(feature = "schemars", 
derive(schemars::JsonSchema))] #[repr(i32)] pub enum AccountFlags { AuthRequiredFlag = 1, @@ -2789,6 +2882,7 @@ impl WriteXdr for AccountFlags { derive(serde::Serialize, serde::Deserialize), serde(rename_all = "snake_case") )] +#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] pub enum TypeVariant { AccountFlags, } @@ -2810,6 +2904,15 @@ impl TypeVariant { pub const fn variants() -> [TypeVariant; 1] { Self::VARIANTS } + + #[cfg(feature = "schemars")] + #[must_use] + #[allow(clippy::too_many_lines)] + pub fn json_schema(&self, gen: schemars::gen::SchemaGenerator) -> schemars::schema::RootSchema { + match self { + Self::AccountFlags => gen.into_root_schema_for::(), + } + } } impl Name for TypeVariant { @@ -2843,6 +2946,7 @@ impl core::str::FromStr for TypeVariant { serde(rename_all = "snake_case"), serde(untagged), )] +#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] pub enum Type { AccountFlags(Box), } diff --git a/spec/output/generator_spec_rust/const.x/MyXDR.rs b/spec/output/generator_spec_rust/const.x/MyXDR.rs index 459077778..80670d3ce 100644 --- a/spec/output/generator_spec_rust/const.x/MyXDR.rs +++ b/spec/output/generator_spec_rust/const.x/MyXDR.rs @@ -898,6 +898,32 @@ impl Default for VecM { } } +#[cfg(feature = "schemars")] +impl schemars::JsonSchema for VecM { + fn schema_name() -> String { + format!("VecM<{}, {}>", T::schema_name(), MAX) + } + + fn is_referenceable() -> bool { + false + } + + fn json_schema(gen: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema { + let schema = Vec::::json_schema(gen); + if let schemars::schema::Schema::Object(mut schema) = schema { + if let Some(array) = schema.array.clone() { + schema.array = Some(Box::new(schemars::schema::ArrayValidation { + max_items: Some(MAX), + ..*array + })); + } + schema.into() + } else { + schema + } + } +} + impl VecM { pub const MAX_LEN: usize = { MAX as usize }; @@ -1333,6 +1359,40 @@ impl Deref for BytesM { } } +#[cfg(feature = "schemars")] +impl schemars::JsonSchema for BytesM { + fn schema_name() -> String { + format!("BytesM<{MAX}>") + } + + fn is_referenceable() -> bool { + false + } + + fn json_schema(gen: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema { + let schema = String::json_schema(gen); + if let schemars::schema::Schema::Object(mut schema) = schema { + schema.extensions.insert( + "contentEncoding".to_owned(), + serde_json::Value::String("hex".to_string()), + ); + schema.extensions.insert( + "contentMediaType".to_owned(), + serde_json::Value::String("application/binary".to_string()), + ); + let string = *schema.string.unwrap_or_default().clone(); + schema.string = Some(Box::new(schemars::schema::StringValidation { + max_length: MAX.checked_mul(2).map(Some).unwrap_or_default(), + min_length: None, + ..string + })); + schema.into() + } else { + schema + } + } +} + impl Default for BytesM { fn default() -> Self { Self(Vec::default()) @@ -1721,6 +1781,27 @@ impl Default for StringM { } } +#[cfg(feature = "schemars")] +impl schemars::JsonSchema for StringM { + fn schema_name() -> String { + format!("StringM<{MAX}>") + } + + fn json_schema(gen: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema { + let schema = String::json_schema(gen); + if let schemars::schema::Schema::Object(mut schema) = schema { + let string = *schema.string.unwrap_or_default().clone(); + schema.string = Some(Box::new(schemars::schema::StringValidation { + max_length: Some(MAX), + ..string + })); + schema.into() + } else { + schema + } + } +} + impl StringM { pub 
const MAX_LEN: usize = { MAX as usize }; @@ -2037,6 +2118,17 @@ pub struct Frame(pub T) where T: ReadXdr; +#[cfg(feature = "schemars")] +impl schemars::JsonSchema for Frame { + fn schema_name() -> String { + format!("Frame<{}>", T::schema_name()) + } + + fn json_schema(gen: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema { + T::json_schema(gen) + } +} + impl ReadXdr for Frame where T: ReadXdr, @@ -2716,6 +2808,7 @@ pub type TestArray2 = VecM::; derive(serde::Serialize, serde::Deserialize), serde(rename_all = "snake_case") )] + #[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] pub enum TypeVariant { TestArray, TestArray2, @@ -2741,6 +2834,16 @@ Self::TestArray2 => "TestArray2", pub const fn variants() -> [TypeVariant; 2] { Self::VARIANTS } + + #[cfg(feature = "schemars")] + #[must_use] + #[allow(clippy::too_many_lines)] + pub fn json_schema(&self, gen: schemars::gen::SchemaGenerator) -> schemars::schema::RootSchema { + match self { + Self::TestArray => gen.into_root_schema_for::(), +Self::TestArray2 => gen.into_root_schema_for::(), + } + } } impl Name for TypeVariant { @@ -2775,6 +2878,7 @@ Self::TestArray2 => "TestArray2", serde(rename_all = "snake_case"), serde(untagged), )] + #[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] pub enum Type { TestArray(Box), TestArray2(Box), diff --git a/spec/output/generator_spec_rust/enum.x/MyXDR.rs b/spec/output/generator_spec_rust/enum.x/MyXDR.rs index 80740add2..276d6dca4 100644 --- a/spec/output/generator_spec_rust/enum.x/MyXDR.rs +++ b/spec/output/generator_spec_rust/enum.x/MyXDR.rs @@ -898,6 +898,32 @@ impl Default for VecM { } } +#[cfg(feature = "schemars")] +impl schemars::JsonSchema for VecM { + fn schema_name() -> String { + format!("VecM<{}, {}>", T::schema_name(), MAX) + } + + fn is_referenceable() -> bool { + false + } + + fn json_schema(gen: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema { + let schema = Vec::::json_schema(gen); + if let schemars::schema::Schema::Object(mut schema) = schema { + if let Some(array) = schema.array.clone() { + schema.array = Some(Box::new(schemars::schema::ArrayValidation { + max_items: Some(MAX), + ..*array + })); + } + schema.into() + } else { + schema + } + } +} + impl VecM { pub const MAX_LEN: usize = { MAX as usize }; @@ -1333,6 +1359,40 @@ impl Deref for BytesM { } } +#[cfg(feature = "schemars")] +impl schemars::JsonSchema for BytesM { + fn schema_name() -> String { + format!("BytesM<{MAX}>") + } + + fn is_referenceable() -> bool { + false + } + + fn json_schema(gen: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema { + let schema = String::json_schema(gen); + if let schemars::schema::Schema::Object(mut schema) = schema { + schema.extensions.insert( + "contentEncoding".to_owned(), + serde_json::Value::String("hex".to_string()), + ); + schema.extensions.insert( + "contentMediaType".to_owned(), + serde_json::Value::String("application/binary".to_string()), + ); + let string = *schema.string.unwrap_or_default().clone(); + schema.string = Some(Box::new(schemars::schema::StringValidation { + max_length: MAX.checked_mul(2).map(Some).unwrap_or_default(), + min_length: None, + ..string + })); + schema.into() + } else { + schema + } + } +} + impl Default for BytesM { fn default() -> Self { Self(Vec::default()) @@ -1721,6 +1781,27 @@ impl Default for StringM { } } +#[cfg(feature = "schemars")] +impl schemars::JsonSchema for StringM { + fn schema_name() -> String { + format!("StringM<{MAX}>") + } + + fn json_schema(gen: &mut 
schemars::gen::SchemaGenerator) -> schemars::schema::Schema { + let schema = String::json_schema(gen); + if let schemars::schema::Schema::Object(mut schema) = schema { + let string = *schema.string.unwrap_or_default().clone(); + schema.string = Some(Box::new(schemars::schema::StringValidation { + max_length: Some(MAX), + ..string + })); + schema.into() + } else { + schema + } + } +} + impl StringM { pub const MAX_LEN: usize = { MAX as usize }; @@ -2037,6 +2118,17 @@ pub struct Frame(pub T) where T: ReadXdr; +#[cfg(feature = "schemars")] +impl schemars::JsonSchema for Frame { + fn schema_name() -> String { + format!("Frame<{}>", T::schema_name()) + } + + fn json_schema(gen: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema { + T::json_schema(gen) + } +} + impl ReadXdr for Frame where T: ReadXdr, @@ -2718,6 +2810,7 @@ mod test { #[derive(Clone, Copy, Debug, Hash, PartialEq, Eq, PartialOrd, Ord)] #[cfg_attr(feature = "arbitrary", derive(Arbitrary))] #[cfg_attr(all(feature = "serde", feature = "alloc"), derive(serde::Serialize, serde::Deserialize), serde(rename_all = "snake_case"))] +#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] #[repr(i32)] pub enum MessageType { ErrorMsg = 0, @@ -2881,6 +2974,7 @@ Self::FbaMessage => "FbaMessage", #[derive(Clone, Copy, Debug, Hash, PartialEq, Eq, PartialOrd, Ord)] #[cfg_attr(feature = "arbitrary", derive(Arbitrary))] #[cfg_attr(all(feature = "serde", feature = "alloc"), derive(serde::Serialize, serde::Deserialize), serde(rename_all = "snake_case"))] +#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] #[repr(i32)] pub enum Color { Red = 0, @@ -2989,6 +3083,7 @@ Self::Blue => "Blue", #[derive(Clone, Copy, Debug, Hash, PartialEq, Eq, PartialOrd, Ord)] #[cfg_attr(feature = "arbitrary", derive(Arbitrary))] #[cfg_attr(all(feature = "serde", feature = "alloc"), derive(serde::Serialize, serde::Deserialize), serde(rename_all = "snake_case"))] +#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] #[repr(i32)] pub enum Color2 { Red2 = 0, @@ -3089,6 +3184,7 @@ Self::Blue2 => "Blue2", derive(serde::Serialize, serde::Deserialize), serde(rename_all = "snake_case") )] + #[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] pub enum TypeVariant { MessageType, Color, @@ -3118,6 +3214,17 @@ Self::Color2 => "Color2", pub const fn variants() -> [TypeVariant; 3] { Self::VARIANTS } + + #[cfg(feature = "schemars")] + #[must_use] + #[allow(clippy::too_many_lines)] + pub fn json_schema(&self, gen: schemars::gen::SchemaGenerator) -> schemars::schema::RootSchema { + match self { + Self::MessageType => gen.into_root_schema_for::(), +Self::Color => gen.into_root_schema_for::(), +Self::Color2 => gen.into_root_schema_for::(), + } + } } impl Name for TypeVariant { @@ -3153,6 +3260,7 @@ Self::Color2 => "Color2", serde(rename_all = "snake_case"), serde(untagged), )] + #[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] pub enum Type { MessageType(Box), Color(Box), diff --git a/spec/output/generator_spec_rust/nesting.x/MyXDR.rs b/spec/output/generator_spec_rust/nesting.x/MyXDR.rs index 677ed00aa..e08690046 100644 --- a/spec/output/generator_spec_rust/nesting.x/MyXDR.rs +++ b/spec/output/generator_spec_rust/nesting.x/MyXDR.rs @@ -898,6 +898,32 @@ impl Default for VecM { } } +#[cfg(feature = "schemars")] +impl schemars::JsonSchema for VecM { + fn schema_name() -> String { + format!("VecM<{}, {}>", T::schema_name(), MAX) + } + + fn is_referenceable() -> bool { + false + } + + fn json_schema(gen: &mut 
schemars::gen::SchemaGenerator) -> schemars::schema::Schema { + let schema = Vec::::json_schema(gen); + if let schemars::schema::Schema::Object(mut schema) = schema { + if let Some(array) = schema.array.clone() { + schema.array = Some(Box::new(schemars::schema::ArrayValidation { + max_items: Some(MAX), + ..*array + })); + } + schema.into() + } else { + schema + } + } +} + impl VecM { pub const MAX_LEN: usize = { MAX as usize }; @@ -1333,6 +1359,40 @@ impl Deref for BytesM { } } +#[cfg(feature = "schemars")] +impl schemars::JsonSchema for BytesM { + fn schema_name() -> String { + format!("BytesM<{MAX}>") + } + + fn is_referenceable() -> bool { + false + } + + fn json_schema(gen: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema { + let schema = String::json_schema(gen); + if let schemars::schema::Schema::Object(mut schema) = schema { + schema.extensions.insert( + "contentEncoding".to_owned(), + serde_json::Value::String("hex".to_string()), + ); + schema.extensions.insert( + "contentMediaType".to_owned(), + serde_json::Value::String("application/binary".to_string()), + ); + let string = *schema.string.unwrap_or_default().clone(); + schema.string = Some(Box::new(schemars::schema::StringValidation { + max_length: MAX.checked_mul(2).map(Some).unwrap_or_default(), + min_length: None, + ..string + })); + schema.into() + } else { + schema + } + } +} + impl Default for BytesM { fn default() -> Self { Self(Vec::default()) @@ -1721,6 +1781,27 @@ impl Default for StringM { } } +#[cfg(feature = "schemars")] +impl schemars::JsonSchema for StringM { + fn schema_name() -> String { + format!("StringM<{MAX}>") + } + + fn json_schema(gen: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema { + let schema = String::json_schema(gen); + if let schemars::schema::Schema::Object(mut schema) = schema { + let string = *schema.string.unwrap_or_default().clone(); + schema.string = Some(Box::new(schemars::schema::StringValidation { + max_length: Some(MAX), + ..string + })); + schema.into() + } else { + schema + } + } +} + impl StringM { pub const MAX_LEN: usize = { MAX as usize }; @@ -2037,6 +2118,17 @@ pub struct Frame(pub T) where T: ReadXdr; +#[cfg(feature = "schemars")] +impl schemars::JsonSchema for Frame { + fn schema_name() -> String { + format!("Frame<{}>", T::schema_name()) + } + + fn json_schema(gen: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema { + T::json_schema(gen) + } +} + impl ReadXdr for Frame where T: ReadXdr, @@ -2700,6 +2792,7 @@ mod test { #[derive(Clone, Copy, Debug, Hash, PartialEq, Eq, PartialOrd, Ord)] #[cfg_attr(feature = "arbitrary", derive(Arbitrary))] #[cfg_attr(all(feature = "serde", feature = "alloc"), derive(serde::Serialize, serde::Deserialize), serde(rename_all = "snake_case"))] +#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] #[repr(i32)] pub enum UnionKey { One = 1, @@ -2813,6 +2906,7 @@ pub type Foo = i32; #[derive(Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord)] #[cfg_attr(feature = "arbitrary", derive(Arbitrary))] #[cfg_attr(all(feature = "serde", feature = "alloc"), derive(serde::Serialize, serde::Deserialize), serde(rename_all = "snake_case"))] +#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] pub struct MyUnionOne { pub some_int: i32, } @@ -2850,6 +2944,7 @@ impl WriteXdr for MyUnionOne { #[derive(Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord)] #[cfg_attr(feature = "arbitrary", derive(Arbitrary))] #[cfg_attr(all(feature = "serde", feature = "alloc"), derive(serde::Serialize, 
serde::Deserialize), serde(rename_all = "snake_case"))] +#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] pub struct MyUnionTwo { pub some_int: i32, pub foo: i32, @@ -2903,6 +2998,7 @@ self.foo.write_xdr(w)?; #[derive(Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord)] #[cfg_attr(feature = "arbitrary", derive(Arbitrary))] #[cfg_attr(all(feature = "serde", feature = "alloc"), derive(serde::Serialize, serde::Deserialize), serde(rename_all = "snake_case"))] +#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] #[allow(clippy::large_enum_variant)] pub enum MyUnion { One(MyUnionOne), @@ -3009,6 +3105,7 @@ Self::Offer => ().write_xdr(w)?, derive(serde::Serialize, serde::Deserialize), serde(rename_all = "snake_case") )] + #[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] pub enum TypeVariant { UnionKey, Foo, @@ -3046,6 +3143,19 @@ Self::MyUnionTwo => "MyUnionTwo", pub const fn variants() -> [TypeVariant; 5] { Self::VARIANTS } + + #[cfg(feature = "schemars")] + #[must_use] + #[allow(clippy::too_many_lines)] + pub fn json_schema(&self, gen: schemars::gen::SchemaGenerator) -> schemars::schema::RootSchema { + match self { + Self::UnionKey => gen.into_root_schema_for::(), +Self::Foo => gen.into_root_schema_for::(), +Self::MyUnion => gen.into_root_schema_for::(), +Self::MyUnionOne => gen.into_root_schema_for::(), +Self::MyUnionTwo => gen.into_root_schema_for::(), + } + } } impl Name for TypeVariant { @@ -3083,6 +3193,7 @@ Self::MyUnionTwo => "MyUnionTwo", serde(rename_all = "snake_case"), serde(untagged), )] + #[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] pub enum Type { UnionKey(Box), Foo(Box), diff --git a/spec/output/generator_spec_rust/optional.x/MyXDR.rs b/spec/output/generator_spec_rust/optional.x/MyXDR.rs index c38ee2890..0f29a0a45 100644 --- a/spec/output/generator_spec_rust/optional.x/MyXDR.rs +++ b/spec/output/generator_spec_rust/optional.x/MyXDR.rs @@ -898,6 +898,32 @@ impl Default for VecM { } } +#[cfg(feature = "schemars")] +impl schemars::JsonSchema for VecM { + fn schema_name() -> String { + format!("VecM<{}, {}>", T::schema_name(), MAX) + } + + fn is_referenceable() -> bool { + false + } + + fn json_schema(gen: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema { + let schema = Vec::::json_schema(gen); + if let schemars::schema::Schema::Object(mut schema) = schema { + if let Some(array) = schema.array.clone() { + schema.array = Some(Box::new(schemars::schema::ArrayValidation { + max_items: Some(MAX), + ..*array + })); + } + schema.into() + } else { + schema + } + } +} + impl VecM { pub const MAX_LEN: usize = { MAX as usize }; @@ -1333,6 +1359,40 @@ impl Deref for BytesM { } } +#[cfg(feature = "schemars")] +impl schemars::JsonSchema for BytesM { + fn schema_name() -> String { + format!("BytesM<{MAX}>") + } + + fn is_referenceable() -> bool { + false + } + + fn json_schema(gen: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema { + let schema = String::json_schema(gen); + if let schemars::schema::Schema::Object(mut schema) = schema { + schema.extensions.insert( + "contentEncoding".to_owned(), + serde_json::Value::String("hex".to_string()), + ); + schema.extensions.insert( + "contentMediaType".to_owned(), + serde_json::Value::String("application/binary".to_string()), + ); + let string = *schema.string.unwrap_or_default().clone(); + schema.string = Some(Box::new(schemars::schema::StringValidation { + max_length: MAX.checked_mul(2).map(Some).unwrap_or_default(), + min_length: None, + ..string + })); + 
schema.into() + } else { + schema + } + } +} + impl Default for BytesM { fn default() -> Self { Self(Vec::default()) @@ -1721,6 +1781,27 @@ impl Default for StringM { } } +#[cfg(feature = "schemars")] +impl schemars::JsonSchema for StringM { + fn schema_name() -> String { + format!("StringM<{MAX}>") + } + + fn json_schema(gen: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema { + let schema = String::json_schema(gen); + if let schemars::schema::Schema::Object(mut schema) = schema { + let string = *schema.string.unwrap_or_default().clone(); + schema.string = Some(Box::new(schemars::schema::StringValidation { + max_length: Some(MAX), + ..string + })); + schema.into() + } else { + schema + } + } +} + impl StringM { pub const MAX_LEN: usize = { MAX as usize }; @@ -2037,6 +2118,17 @@ pub struct Frame(pub T) where T: ReadXdr; +#[cfg(feature = "schemars")] +impl schemars::JsonSchema for Frame { + fn schema_name() -> String { + format!("Frame<{}>", T::schema_name()) + } + + fn json_schema(gen: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema { + T::json_schema(gen) + } +} + impl ReadXdr for Frame where T: ReadXdr, @@ -2708,6 +2800,7 @@ pub type Arr = [i32; 2]; #[derive(Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord)] #[cfg_attr(feature = "arbitrary", derive(Arbitrary))] #[cfg_attr(all(feature = "serde", feature = "alloc"), derive(serde::Serialize, serde::Deserialize), serde(rename_all = "snake_case"))] +#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] pub struct HasOptions { pub first_option: Option, pub second_option: Option, @@ -2745,6 +2838,7 @@ self.third_option.write_xdr(w)?; derive(serde::Serialize, serde::Deserialize), serde(rename_all = "snake_case") )] + #[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] pub enum TypeVariant { Arr, HasOptions, @@ -2770,6 +2864,16 @@ Self::HasOptions => "HasOptions", pub const fn variants() -> [TypeVariant; 2] { Self::VARIANTS } + + #[cfg(feature = "schemars")] + #[must_use] + #[allow(clippy::too_many_lines)] + pub fn json_schema(&self, gen: schemars::gen::SchemaGenerator) -> schemars::schema::RootSchema { + match self { + Self::Arr => gen.into_root_schema_for::(), +Self::HasOptions => gen.into_root_schema_for::(), + } + } } impl Name for TypeVariant { @@ -2804,6 +2908,7 @@ Self::HasOptions => "HasOptions", serde(rename_all = "snake_case"), serde(untagged), )] + #[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] pub enum Type { Arr(Box), HasOptions(Box), diff --git a/spec/output/generator_spec_rust/struct.x/MyXDR.rs b/spec/output/generator_spec_rust/struct.x/MyXDR.rs index 1c59b5e8c..1e6a82b74 100644 --- a/spec/output/generator_spec_rust/struct.x/MyXDR.rs +++ b/spec/output/generator_spec_rust/struct.x/MyXDR.rs @@ -898,6 +898,32 @@ impl Default for VecM { } } +#[cfg(feature = "schemars")] +impl schemars::JsonSchema for VecM { + fn schema_name() -> String { + format!("VecM<{}, {}>", T::schema_name(), MAX) + } + + fn is_referenceable() -> bool { + false + } + + fn json_schema(gen: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema { + let schema = Vec::::json_schema(gen); + if let schemars::schema::Schema::Object(mut schema) = schema { + if let Some(array) = schema.array.clone() { + schema.array = Some(Box::new(schemars::schema::ArrayValidation { + max_items: Some(MAX), + ..*array + })); + } + schema.into() + } else { + schema + } + } +} + impl VecM { pub const MAX_LEN: usize = { MAX as usize }; @@ -1333,6 +1359,40 @@ impl Deref for BytesM { } } +#[cfg(feature = 
"schemars")] +impl schemars::JsonSchema for BytesM { + fn schema_name() -> String { + format!("BytesM<{MAX}>") + } + + fn is_referenceable() -> bool { + false + } + + fn json_schema(gen: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema { + let schema = String::json_schema(gen); + if let schemars::schema::Schema::Object(mut schema) = schema { + schema.extensions.insert( + "contentEncoding".to_owned(), + serde_json::Value::String("hex".to_string()), + ); + schema.extensions.insert( + "contentMediaType".to_owned(), + serde_json::Value::String("application/binary".to_string()), + ); + let string = *schema.string.unwrap_or_default().clone(); + schema.string = Some(Box::new(schemars::schema::StringValidation { + max_length: MAX.checked_mul(2).map(Some).unwrap_or_default(), + min_length: None, + ..string + })); + schema.into() + } else { + schema + } + } +} + impl Default for BytesM { fn default() -> Self { Self(Vec::default()) @@ -1721,6 +1781,27 @@ impl Default for StringM { } } +#[cfg(feature = "schemars")] +impl schemars::JsonSchema for StringM { + fn schema_name() -> String { + format!("StringM<{MAX}>") + } + + fn json_schema(gen: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema { + let schema = String::json_schema(gen); + if let schemars::schema::Schema::Object(mut schema) = schema { + let string = *schema.string.unwrap_or_default().clone(); + schema.string = Some(Box::new(schemars::schema::StringValidation { + max_length: Some(MAX), + ..string + })); + schema.into() + } else { + schema + } + } +} + impl StringM { pub const MAX_LEN: usize = { MAX as usize }; @@ -2037,6 +2118,17 @@ pub struct Frame(pub T) where T: ReadXdr; +#[cfg(feature = "schemars")] +impl schemars::JsonSchema for Frame { + fn schema_name() -> String { + format!("Frame<{}>", T::schema_name()) + } + + fn json_schema(gen: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema { + T::json_schema(gen) + } +} + impl ReadXdr for Frame where T: ReadXdr, @@ -2710,6 +2802,7 @@ pub type Int64 = i64; #[derive(Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord)] #[cfg_attr(feature = "arbitrary", derive(Arbitrary))] #[cfg_attr(all(feature = "serde", feature = "alloc"), derive(serde::Serialize, serde::Deserialize), serde(rename_all = "snake_case"))] +#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] pub struct MyStruct { pub some_int: i32, pub a_big_int: i64, @@ -2753,6 +2846,7 @@ self.max_string.write_xdr(w)?; derive(serde::Serialize, serde::Deserialize), serde(rename_all = "snake_case") )] + #[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] pub enum TypeVariant { Int64, MyStruct, @@ -2778,6 +2872,16 @@ Self::MyStruct => "MyStruct", pub const fn variants() -> [TypeVariant; 2] { Self::VARIANTS } + + #[cfg(feature = "schemars")] + #[must_use] + #[allow(clippy::too_many_lines)] + pub fn json_schema(&self, gen: schemars::gen::SchemaGenerator) -> schemars::schema::RootSchema { + match self { + Self::Int64 => gen.into_root_schema_for::(), +Self::MyStruct => gen.into_root_schema_for::(), + } + } } impl Name for TypeVariant { @@ -2812,6 +2916,7 @@ Self::MyStruct => "MyStruct", serde(rename_all = "snake_case"), serde(untagged), )] + #[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] pub enum Type { Int64(Box), MyStruct(Box), diff --git a/spec/output/generator_spec_rust/test.x/MyXDR.rs b/spec/output/generator_spec_rust/test.x/MyXDR.rs index bd9f912c2..4bfffad0c 100644 --- a/spec/output/generator_spec_rust/test.x/MyXDR.rs +++ 
b/spec/output/generator_spec_rust/test.x/MyXDR.rs @@ -898,6 +898,32 @@ impl Default for VecM { } } +#[cfg(feature = "schemars")] +impl schemars::JsonSchema for VecM { + fn schema_name() -> String { + format!("VecM<{}, {}>", T::schema_name(), MAX) + } + + fn is_referenceable() -> bool { + false + } + + fn json_schema(gen: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema { + let schema = Vec::::json_schema(gen); + if let schemars::schema::Schema::Object(mut schema) = schema { + if let Some(array) = schema.array.clone() { + schema.array = Some(Box::new(schemars::schema::ArrayValidation { + max_items: Some(MAX), + ..*array + })); + } + schema.into() + } else { + schema + } + } +} + impl VecM { pub const MAX_LEN: usize = { MAX as usize }; @@ -1333,6 +1359,40 @@ impl Deref for BytesM { } } +#[cfg(feature = "schemars")] +impl schemars::JsonSchema for BytesM { + fn schema_name() -> String { + format!("BytesM<{MAX}>") + } + + fn is_referenceable() -> bool { + false + } + + fn json_schema(gen: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema { + let schema = String::json_schema(gen); + if let schemars::schema::Schema::Object(mut schema) = schema { + schema.extensions.insert( + "contentEncoding".to_owned(), + serde_json::Value::String("hex".to_string()), + ); + schema.extensions.insert( + "contentMediaType".to_owned(), + serde_json::Value::String("application/binary".to_string()), + ); + let string = *schema.string.unwrap_or_default().clone(); + schema.string = Some(Box::new(schemars::schema::StringValidation { + max_length: MAX.checked_mul(2).map(Some).unwrap_or_default(), + min_length: None, + ..string + })); + schema.into() + } else { + schema + } + } +} + impl Default for BytesM { fn default() -> Self { Self(Vec::default()) @@ -1721,6 +1781,27 @@ impl Default for StringM { } } +#[cfg(feature = "schemars")] +impl schemars::JsonSchema for StringM { + fn schema_name() -> String { + format!("StringM<{MAX}>") + } + + fn json_schema(gen: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema { + let schema = String::json_schema(gen); + if let schemars::schema::Schema::Object(mut schema) = schema { + let string = *schema.string.unwrap_or_default().clone(); + schema.string = Some(Box::new(schemars::schema::StringValidation { + max_length: Some(MAX), + ..string + })); + schema.into() + } else { + schema + } + } +} + impl StringM { pub const MAX_LEN: usize = { MAX as usize }; @@ -2037,6 +2118,17 @@ pub struct Frame(pub T) where T: ReadXdr; +#[cfg(feature = "schemars")] +impl schemars::JsonSchema for Frame { + fn schema_name() -> String { + format!("Frame<{}>", T::schema_name()) + } + + fn json_schema(gen: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema { + T::json_schema(gen) + } +} + impl ReadXdr for Frame where T: ReadXdr, @@ -2725,6 +2817,39 @@ impl core::str::FromStr for Uint512 { hex::decode(s).map_err(|_| Error::InvalidHex)?.try_into() } } +#[cfg(feature = "schemars")] +impl schemars::JsonSchema for Uint512 { + fn schema_name() -> String { + "Uint512".to_string() + } + + fn is_referenceable() -> bool { + false + } + + fn json_schema(gen: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema { + let schema = String::json_schema(gen); + if let schemars::schema::Schema::Object(mut schema) = schema { + schema.extensions.insert( + "contentEncoding".to_owned(), + serde_json::Value::String("hex".to_string()), + ); + schema.extensions.insert( + "contentMediaType".to_owned(), + serde_json::Value::String("application/binary".to_string()), + 
); + let string = *schema.string.unwrap_or_default().clone(); + schema.string = Some(Box::new(schemars::schema::StringValidation { + max_length: 64_u32.checked_mul(2).map(Some).unwrap_or_default(), + min_length: 64_u32.checked_mul(2).map(Some).unwrap_or_default(), + ..string + })); + schema.into() + } else { + schema + } + } +} impl From for [u8; 64] { #[must_use] fn from(x: Uint512) -> Self { @@ -2811,6 +2936,7 @@ impl AsRef<[u8]> for Uint512 { #[cfg_attr(feature = "arbitrary", derive(Arbitrary))] #[derive(Default)] #[cfg_attr(all(feature = "serde", feature = "alloc"), derive(serde::Serialize, serde::Deserialize), serde(rename_all = "snake_case"))] +#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] #[derive(Debug)] pub struct Uint513(pub BytesM::<64>); @@ -2912,6 +3038,7 @@ impl AsRef<[u8]> for Uint513 { #[cfg_attr(feature = "arbitrary", derive(Arbitrary))] #[derive(Default)] #[cfg_attr(all(feature = "serde", feature = "alloc"), derive(serde::Serialize, serde::Deserialize), serde(rename_all = "snake_case"))] +#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] #[derive(Debug)] pub struct Uint514(pub BytesM); @@ -3013,6 +3140,7 @@ impl AsRef<[u8]> for Uint514 { #[cfg_attr(feature = "arbitrary", derive(Arbitrary))] #[derive(Default)] #[cfg_attr(all(feature = "serde", feature = "alloc"), derive(serde::Serialize, serde::Deserialize), serde(rename_all = "snake_case"))] +#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] #[derive(Debug)] pub struct Str(pub StringM::<64>); @@ -3114,6 +3242,7 @@ impl AsRef<[u8]> for Str { #[cfg_attr(feature = "arbitrary", derive(Arbitrary))] #[derive(Default)] #[cfg_attr(all(feature = "serde", feature = "alloc"), derive(serde::Serialize, serde::Deserialize), serde(rename_all = "snake_case"))] +#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] #[derive(Debug)] pub struct Str2(pub StringM); @@ -3244,6 +3373,39 @@ impl core::str::FromStr for Hash { hex::decode(s).map_err(|_| Error::InvalidHex)?.try_into() } } +#[cfg(feature = "schemars")] +impl schemars::JsonSchema for Hash { + fn schema_name() -> String { + "Hash".to_string() + } + + fn is_referenceable() -> bool { + false + } + + fn json_schema(gen: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema { + let schema = String::json_schema(gen); + if let schemars::schema::Schema::Object(mut schema) = schema { + schema.extensions.insert( + "contentEncoding".to_owned(), + serde_json::Value::String("hex".to_string()), + ); + schema.extensions.insert( + "contentMediaType".to_owned(), + serde_json::Value::String("application/binary".to_string()), + ); + let string = *schema.string.unwrap_or_default().clone(); + schema.string = Some(Box::new(schemars::schema::StringValidation { + max_length: 32_u32.checked_mul(2).map(Some).unwrap_or_default(), + min_length: 32_u32.checked_mul(2).map(Some).unwrap_or_default(), + ..string + })); + schema.into() + } else { + schema + } + } +} impl From for [u8; 32] { #[must_use] fn from(x: Hash) -> Self { @@ -3329,6 +3491,7 @@ impl AsRef<[u8]> for Hash { #[derive(Clone, Hash, PartialEq, Eq, PartialOrd, Ord)] #[cfg_attr(feature = "arbitrary", derive(Arbitrary))] #[cfg_attr(all(feature = "serde", feature = "alloc"), derive(serde::Serialize, serde::Deserialize), serde(rename_all = "snake_case"))] +#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] #[derive(Debug)] pub struct Hashes1(pub [Hash; 12]); @@ -3418,6 +3581,7 @@ impl AsRef<[Hash]> for Hashes1 { #[cfg_attr(feature = "arbitrary", derive(Arbitrary))] 
#[derive(Default)] #[cfg_attr(all(feature = "serde", feature = "alloc"), derive(serde::Serialize, serde::Deserialize), serde(rename_all = "snake_case"))] +#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] #[derive(Debug)] pub struct Hashes2(pub VecM::); @@ -3519,6 +3683,7 @@ impl AsRef<[Hash]> for Hashes2 { #[cfg_attr(feature = "arbitrary", derive(Arbitrary))] #[derive(Default)] #[cfg_attr(all(feature = "serde", feature = "alloc"), derive(serde::Serialize, serde::Deserialize), serde(rename_all = "snake_case"))] +#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] #[derive(Debug)] pub struct Hashes3(pub VecM::); @@ -3619,6 +3784,7 @@ impl AsRef<[Hash]> for Hashes3 { #[derive(Clone, Hash, PartialEq, Eq, PartialOrd, Ord)] #[cfg_attr(feature = "arbitrary", derive(Arbitrary))] #[cfg_attr(all(feature = "serde", feature = "alloc"), derive(serde::Serialize, serde::Deserialize), serde(rename_all = "snake_case"))] +#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] #[derive(Debug)] pub struct OptHash1(pub Option); @@ -3670,6 +3836,7 @@ impl WriteXdr for OptHash1 { #[derive(Clone, Hash, PartialEq, Eq, PartialOrd, Ord)] #[cfg_attr(feature = "arbitrary", derive(Arbitrary))] #[cfg_attr(all(feature = "serde", feature = "alloc"), derive(serde::Serialize, serde::Deserialize), serde(rename_all = "snake_case"))] +#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] #[derive(Debug)] pub struct OptHash2(pub Option); @@ -3762,6 +3929,7 @@ pub type Int4 = u64; #[derive(Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord)] #[cfg_attr(feature = "arbitrary", derive(Arbitrary))] #[cfg_attr(all(feature = "serde", feature = "alloc"), derive(serde::Serialize, serde::Deserialize), serde(rename_all = "snake_case"))] +#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] pub struct MyStruct { pub field1: Uint512, pub field2: OptHash1, @@ -3817,6 +3985,7 @@ self.field7.write_xdr(w)?; #[derive(Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord)] #[cfg_attr(feature = "arbitrary", derive(Arbitrary))] #[cfg_attr(all(feature = "serde", feature = "alloc"), derive(serde::Serialize, serde::Deserialize), serde(rename_all = "snake_case"))] +#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] pub struct LotsOfMyStructs { pub members: VecM::, } @@ -3854,6 +4023,7 @@ impl WriteXdr for LotsOfMyStructs { #[derive(Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord)] #[cfg_attr(feature = "arbitrary", derive(Arbitrary))] #[cfg_attr(all(feature = "serde", feature = "alloc"), derive(serde::Serialize, serde::Deserialize), serde(rename_all = "snake_case"))] +#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] pub struct HasStuff { pub data: LotsOfMyStructs, } @@ -3893,6 +4063,7 @@ impl WriteXdr for HasStuff { #[derive(Clone, Copy, Debug, Hash, PartialEq, Eq, PartialOrd, Ord)] #[cfg_attr(feature = "arbitrary", derive(Arbitrary))] #[cfg_attr(all(feature = "serde", feature = "alloc"), derive(serde::Serialize, serde::Deserialize), serde(rename_all = "snake_case"))] +#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] #[repr(i32)] pub enum Color { Red = 0, @@ -4016,6 +4187,7 @@ pub const BAR: u64 = FOO; #[derive(Clone, Copy, Debug, Hash, PartialEq, Eq, PartialOrd, Ord)] #[cfg_attr(feature = "arbitrary", derive(Arbitrary))] #[cfg_attr(all(feature = "serde", feature = "alloc"), derive(serde::Serialize, serde::Deserialize), serde(rename_all = "snake_case"))] +#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] #[repr(i32)] pub enum 
NesterNestedEnum { 1 = 0, @@ -4116,6 +4288,7 @@ Self::2 => "2", #[derive(Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord)] #[cfg_attr(feature = "arbitrary", derive(Arbitrary))] #[cfg_attr(all(feature = "serde", feature = "alloc"), derive(serde::Serialize, serde::Deserialize), serde(rename_all = "snake_case"))] +#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] pub struct NesterNestedStruct { pub blah: i32, } @@ -4156,6 +4329,7 @@ impl WriteXdr for NesterNestedStruct { #[derive(Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord)] #[cfg_attr(feature = "arbitrary", derive(Arbitrary))] #[cfg_attr(all(feature = "serde", feature = "alloc"), derive(serde::Serialize, serde::Deserialize), serde(rename_all = "snake_case"))] +#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] #[allow(clippy::large_enum_variant)] pub enum NesterNestedUnion { Red, @@ -4270,6 +4444,7 @@ impl WriteXdr for NesterNestedUnion { #[derive(Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord)] #[cfg_attr(feature = "arbitrary", derive(Arbitrary))] #[cfg_attr(all(feature = "serde", feature = "alloc"), derive(serde::Serialize, serde::Deserialize), serde(rename_all = "snake_case"))] +#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] pub struct Nester { pub nested_enum: NesterNestedEnum, pub nested_struct: NesterNestedStruct, @@ -4307,6 +4482,7 @@ self.nested_union.write_xdr(w)?; derive(serde::Serialize, serde::Deserialize), serde(rename_all = "snake_case") )] + #[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] pub enum TypeVariant { Uint512, Uint513, @@ -4416,6 +4592,37 @@ Self::NesterNestedUnion => "NesterNestedUnion", pub const fn variants() -> [TypeVariant; 23] { Self::VARIANTS } + + #[cfg(feature = "schemars")] + #[must_use] + #[allow(clippy::too_many_lines)] + pub fn json_schema(&self, gen: schemars::gen::SchemaGenerator) -> schemars::schema::RootSchema { + match self { + Self::Uint512 => gen.into_root_schema_for::(), +Self::Uint513 => gen.into_root_schema_for::(), +Self::Uint514 => gen.into_root_schema_for::(), +Self::Str => gen.into_root_schema_for::(), +Self::Str2 => gen.into_root_schema_for::(), +Self::Hash => gen.into_root_schema_for::(), +Self::Hashes1 => gen.into_root_schema_for::(), +Self::Hashes2 => gen.into_root_schema_for::(), +Self::Hashes3 => gen.into_root_schema_for::(), +Self::OptHash1 => gen.into_root_schema_for::(), +Self::OptHash2 => gen.into_root_schema_for::(), +Self::Int1 => gen.into_root_schema_for::(), +Self::Int2 => gen.into_root_schema_for::(), +Self::Int3 => gen.into_root_schema_for::(), +Self::Int4 => gen.into_root_schema_for::(), +Self::MyStruct => gen.into_root_schema_for::(), +Self::LotsOfMyStructs => gen.into_root_schema_for::(), +Self::HasStuff => gen.into_root_schema_for::(), +Self::Color => gen.into_root_schema_for::(), +Self::Nester => gen.into_root_schema_for::(), +Self::NesterNestedEnum => gen.into_root_schema_for::(), +Self::NesterNestedStruct => gen.into_root_schema_for::(), +Self::NesterNestedUnion => gen.into_root_schema_for::(), + } + } } impl Name for TypeVariant { @@ -4471,6 +4678,7 @@ Self::NesterNestedUnion => "NesterNestedUnion", serde(rename_all = "snake_case"), serde(untagged), )] + #[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] pub enum Type { Uint512(Box), Uint513(Box), diff --git a/spec/output/generator_spec_rust/union.x/MyXDR.rs b/spec/output/generator_spec_rust/union.x/MyXDR.rs index 9c601f042..eb96312c0 100644 --- a/spec/output/generator_spec_rust/union.x/MyXDR.rs +++ 
b/spec/output/generator_spec_rust/union.x/MyXDR.rs @@ -898,6 +898,32 @@ impl Default for VecM { } } +#[cfg(feature = "schemars")] +impl schemars::JsonSchema for VecM { + fn schema_name() -> String { + format!("VecM<{}, {}>", T::schema_name(), MAX) + } + + fn is_referenceable() -> bool { + false + } + + fn json_schema(gen: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema { + let schema = Vec::::json_schema(gen); + if let schemars::schema::Schema::Object(mut schema) = schema { + if let Some(array) = schema.array.clone() { + schema.array = Some(Box::new(schemars::schema::ArrayValidation { + max_items: Some(MAX), + ..*array + })); + } + schema.into() + } else { + schema + } + } +} + impl VecM { pub const MAX_LEN: usize = { MAX as usize }; @@ -1333,6 +1359,40 @@ impl Deref for BytesM { } } +#[cfg(feature = "schemars")] +impl schemars::JsonSchema for BytesM { + fn schema_name() -> String { + format!("BytesM<{MAX}>") + } + + fn is_referenceable() -> bool { + false + } + + fn json_schema(gen: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema { + let schema = String::json_schema(gen); + if let schemars::schema::Schema::Object(mut schema) = schema { + schema.extensions.insert( + "contentEncoding".to_owned(), + serde_json::Value::String("hex".to_string()), + ); + schema.extensions.insert( + "contentMediaType".to_owned(), + serde_json::Value::String("application/binary".to_string()), + ); + let string = *schema.string.unwrap_or_default().clone(); + schema.string = Some(Box::new(schemars::schema::StringValidation { + max_length: MAX.checked_mul(2).map(Some).unwrap_or_default(), + min_length: None, + ..string + })); + schema.into() + } else { + schema + } + } +} + impl Default for BytesM { fn default() -> Self { Self(Vec::default()) @@ -1721,6 +1781,27 @@ impl Default for StringM { } } +#[cfg(feature = "schemars")] +impl schemars::JsonSchema for StringM { + fn schema_name() -> String { + format!("StringM<{MAX}>") + } + + fn json_schema(gen: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema { + let schema = String::json_schema(gen); + if let schemars::schema::Schema::Object(mut schema) = schema { + let string = *schema.string.unwrap_or_default().clone(); + schema.string = Some(Box::new(schemars::schema::StringValidation { + max_length: Some(MAX), + ..string + })); + schema.into() + } else { + schema + } + } +} + impl StringM { pub const MAX_LEN: usize = { MAX as usize }; @@ -2037,6 +2118,17 @@ pub struct Frame(pub T) where T: ReadXdr; +#[cfg(feature = "schemars")] +impl schemars::JsonSchema for Frame { + fn schema_name() -> String { + format!("Frame<{}>", T::schema_name()) + } + + fn json_schema(gen: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema { + T::json_schema(gen) + } +} + impl ReadXdr for Frame where T: ReadXdr, @@ -2715,6 +2807,7 @@ pub type Multi = i32; #[derive(Clone, Copy, Debug, Hash, PartialEq, Eq, PartialOrd, Ord)] #[cfg_attr(feature = "arbitrary", derive(Arbitrary))] #[cfg_attr(all(feature = "serde", feature = "alloc"), derive(serde::Serialize, serde::Deserialize), serde(rename_all = "snake_case"))] +#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] #[repr(i32)] pub enum UnionKey { Error = 0, @@ -2822,6 +2915,7 @@ Self::Multi => "Multi", #[derive(Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord)] #[cfg_attr(feature = "arbitrary", derive(Arbitrary))] #[cfg_attr(all(feature = "serde", feature = "alloc"), derive(serde::Serialize, serde::Deserialize), serde(rename_all = "snake_case"))] +#[cfg_attr(feature = 
"schemars", derive(schemars::JsonSchema))] #[allow(clippy::large_enum_variant)] pub enum MyUnion { Error(i32), @@ -2932,6 +3026,7 @@ Self::Multi(v) => v.write_xdr(w)?, #[derive(Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord)] #[cfg_attr(feature = "arbitrary", derive(Arbitrary))] #[cfg_attr(all(feature = "serde", feature = "alloc"), derive(serde::Serialize, serde::Deserialize), serde(rename_all = "snake_case"))] +#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] #[allow(clippy::large_enum_variant)] pub enum IntUnion { V0(i32), @@ -3034,6 +3129,7 @@ Self::V1(v) => v.write_xdr(w)?, #[derive(Clone, Hash, PartialEq, Eq, PartialOrd, Ord)] #[cfg_attr(feature = "arbitrary", derive(Arbitrary))] #[cfg_attr(all(feature = "serde", feature = "alloc"), derive(serde::Serialize, serde::Deserialize), serde(rename_all = "snake_case"))] +#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] #[derive(Debug)] pub struct IntUnion2(pub IntUnion); @@ -3082,6 +3178,7 @@ impl WriteXdr for IntUnion2 { derive(serde::Serialize, serde::Deserialize), serde(rename_all = "snake_case") )] + #[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] pub enum TypeVariant { SError, Multi, @@ -3123,6 +3220,20 @@ Self::IntUnion2 => "IntUnion2", pub const fn variants() -> [TypeVariant; 6] { Self::VARIANTS } + + #[cfg(feature = "schemars")] + #[must_use] + #[allow(clippy::too_many_lines)] + pub fn json_schema(&self, gen: schemars::gen::SchemaGenerator) -> schemars::schema::RootSchema { + match self { + Self::SError => gen.into_root_schema_for::(), +Self::Multi => gen.into_root_schema_for::(), +Self::UnionKey => gen.into_root_schema_for::(), +Self::MyUnion => gen.into_root_schema_for::(), +Self::IntUnion => gen.into_root_schema_for::(), +Self::IntUnion2 => gen.into_root_schema_for::(), + } + } } impl Name for TypeVariant { @@ -3161,6 +3272,7 @@ Self::IntUnion2 => "IntUnion2", serde(rename_all = "snake_case"), serde(untagged), )] + #[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] pub enum Type { SError(Box), Multi(Box), diff --git a/spec/output/generator_spec_rust_custom_jsonschema_impls/block_comments.x/MyXDR.rs b/spec/output/generator_spec_rust_custom_jsonschema_impls/block_comments.x/MyXDR.rs new file mode 100644 index 000000000..c7f9cf95b --- /dev/null +++ b/spec/output/generator_spec_rust_custom_jsonschema_impls/block_comments.x/MyXDR.rs @@ -0,0 +1,3094 @@ +// Module is generated from: +// spec/fixtures/generator/block_comments.x + +#![allow(clippy::missing_errors_doc, clippy::unreadable_literal)] + +/// `XDR_FILES_SHA256` is a list of pairs of source files and their SHA256 hashes. +pub const XDR_FILES_SHA256: [(&str, &str); 1] = [ + ("spec/fixtures/generator/block_comments.x", "e13131bc4134f38da17b9d5e9f67d2695a69ef98e3ef272833f4c18d0cc88a30") +]; + +use core::{array::TryFromSliceError, fmt, fmt::Debug, marker::Sized, ops::Deref, slice}; + +#[cfg(feature = "std")] +use core::marker::PhantomData; + +// When feature alloc is turned off use static lifetime Box and Vec types. +#[cfg(not(feature = "alloc"))] +mod noalloc { + pub mod boxed { + pub type Box = &'static T; + } + pub mod vec { + pub type Vec = &'static [T]; + } +} +#[cfg(not(feature = "alloc"))] +use noalloc::{boxed::Box, vec::Vec}; + +// When feature std is turned off, but feature alloc is turned on import the +// alloc crate and use its Box and Vec types. 
+#[cfg(all(not(feature = "std"), feature = "alloc"))] +extern crate alloc; +#[cfg(all(not(feature = "std"), feature = "alloc"))] +use alloc::{ + borrow::ToOwned, + boxed::Box, + string::{FromUtf8Error, String}, + vec::Vec, +}; +#[cfg(feature = "std")] +use std::string::FromUtf8Error; + +#[cfg(feature = "arbitrary")] +use arbitrary::Arbitrary; + +// TODO: Add support for read/write xdr fns when std not available. + +#[cfg(feature = "std")] +use std::{ + error, io, + io::{BufRead, BufReader, Cursor, Read, Write}, +}; + +/// Error contains all errors returned by functions in this crate. It can be +/// compared via `PartialEq`, however any contained IO errors will only be +/// compared on their `ErrorKind`. +#[derive(Debug)] +pub enum Error { + Invalid, + Unsupported, + LengthExceedsMax, + LengthMismatch, + NonZeroPadding, + Utf8Error(core::str::Utf8Error), + #[cfg(feature = "alloc")] + InvalidHex, + #[cfg(feature = "std")] + Io(io::Error), + DepthLimitExceeded, + #[cfg(feature = "serde_json")] + Json(serde_json::Error), + LengthLimitExceeded, +} + +impl PartialEq for Error { + fn eq(&self, other: &Self) -> bool { + match (self, other) { + (Self::Utf8Error(l), Self::Utf8Error(r)) => l == r, + // IO errors cannot be compared, but in the absence of any more + // meaningful way to compare the errors we compare the kind of error + // and ignore the embedded source error or OS error. The main use + // case for comparing errors outputted by the XDR library is for + // error case testing, and a lack of the ability to compare has a + // detrimental affect on failure testing, so this is a tradeoff. + #[cfg(feature = "std")] + (Self::Io(l), Self::Io(r)) => l.kind() == r.kind(), + _ => core::mem::discriminant(self) == core::mem::discriminant(other), + } + } +} + +#[cfg(feature = "std")] +impl error::Error for Error { + #[must_use] + fn source(&self) -> Option<&(dyn error::Error + 'static)> { + match self { + Self::Io(e) => Some(e), + #[cfg(feature = "serde_json")] + Self::Json(e) => Some(e), + _ => None, + } + } +} + +impl fmt::Display for Error { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match self { + Error::Invalid => write!(f, "xdr value invalid"), + Error::Unsupported => write!(f, "xdr value unsupported"), + Error::LengthExceedsMax => write!(f, "xdr value max length exceeded"), + Error::LengthMismatch => write!(f, "xdr value length does not match"), + Error::NonZeroPadding => write!(f, "xdr padding contains non-zero bytes"), + Error::Utf8Error(e) => write!(f, "{e}"), + #[cfg(feature = "alloc")] + Error::InvalidHex => write!(f, "hex invalid"), + #[cfg(feature = "std")] + Error::Io(e) => write!(f, "{e}"), + Error::DepthLimitExceeded => write!(f, "depth limit exceeded"), + #[cfg(feature = "serde_json")] + Error::Json(e) => write!(f, "{e}"), + Error::LengthLimitExceeded => write!(f, "length limit exceeded"), + } + } +} + +impl From for Error { + fn from(_: TryFromSliceError) -> Error { + Error::LengthMismatch + } +} + +impl From for Error { + #[must_use] + fn from(e: core::str::Utf8Error) -> Self { + Error::Utf8Error(e) + } +} + +#[cfg(feature = "alloc")] +impl From for Error { + #[must_use] + fn from(e: FromUtf8Error) -> Self { + Error::Utf8Error(e.utf8_error()) + } +} + +#[cfg(feature = "std")] +impl From for Error { + #[must_use] + fn from(e: io::Error) -> Self { + Error::Io(e) + } +} + +#[cfg(feature = "serde_json")] +impl From for Error { + #[must_use] + fn from(e: serde_json::Error) -> Self { + Error::Json(e) + } +} + +impl From for () { + fn from(_: Error) {} +} + 
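// Editor's sketch, not part of the generated output: a minimal illustration of the
// PartialEq behaviour documented on the generated `Error` type above, assuming the
// "std" feature is enabled so the `Io` variant exists. IO errors are compared by
// `ErrorKind` only, so two errors with the same kind but different messages are equal.
#[cfg(all(test, feature = "std"))]
#[test]
fn io_errors_compare_by_kind() {
    use std::io;
    let a = Error::Io(io::Error::new(io::ErrorKind::UnexpectedEof, "first"));
    let b = Error::Io(io::Error::new(io::ErrorKind::UnexpectedEof, "second"));
    // Equal despite differing messages, because only io::ErrorKind is compared.
    assert_eq!(a, b);
}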
+#[allow(dead_code)] +type Result = core::result::Result; + +/// Name defines types that assign a static name to their value, such as the +/// name given to an identifier in an XDR enum, or the name given to the case in +/// a union. +pub trait Name { + fn name(&self) -> &'static str; +} + +/// Discriminant defines types that may contain a one-of value determined +/// according to the discriminant, and exposes the value of the discriminant for +/// that type, such as in an XDR union. +pub trait Discriminant { + fn discriminant(&self) -> D; +} + +/// Iter defines types that have variants that can be iterated. +pub trait Variants { + fn variants() -> slice::Iter<'static, V> + where + V: Sized; +} + +// Enum defines a type that is represented as an XDR enumeration when encoded. +pub trait Enum: Name + Variants + Sized {} + +// Union defines a type that is represented as an XDR union when encoded. +pub trait Union: Name + Discriminant + Variants +where + D: Sized, +{ +} + +/// `Limits` contains the limits that a limited reader or writer will be +/// constrained to. +#[cfg(feature = "std")] +#[derive(Debug, Clone, Hash, PartialEq, Eq, PartialOrd, Ord)] +pub struct Limits { + /// Defines the maximum depth for recursive calls in `Read/WriteXdr` to + /// prevent stack overflow. + /// + /// The depth limit is akin to limiting stack depth. Its purpose is to + /// prevent the program from hitting the maximum stack size allowed by Rust, + /// which would result in an unrecoverable `SIGABRT`. For more information + /// about Rust's stack size limit, refer to the [Rust + /// documentation](https://doc.rust-lang.org/std/thread/#stack-size). + pub depth: u32, + + /// Defines the maximum number of bytes that will be read or written. + pub len: usize, +} + +#[cfg(feature = "std")] +impl Limits { + #[must_use] + pub fn none() -> Self { + Self { + depth: u32::MAX, + len: usize::MAX, + } + } + + #[must_use] + pub fn depth(depth: u32) -> Self { + Limits { + depth, + ..Limits::none() + } + } + + #[must_use] + pub fn len(len: usize) -> Self { + Limits { + len, + ..Limits::none() + } + } +} + +/// `Limited` wraps an object and provides functions for enforcing limits. +/// +/// Intended for use with readers and writers and limiting their reads and +/// writes. +#[cfg(feature = "std")] +pub struct Limited { + pub inner: L, + pub(crate) limits: Limits, +} + +#[cfg(feature = "std")] +impl Limited { + /// Constructs a new `Limited`. + /// + /// - `inner`: The value being limited. + /// - `limits`: The limits to enforce. + pub fn new(inner: L, limits: Limits) -> Self { + Limited { inner, limits } + } + + /// Consume the given length from the internal remaining length limit. + /// + /// ### Errors + /// + /// If the length would consume more length than the remaining length limit + /// allows. + pub(crate) fn consume_len(&mut self, len: usize) -> Result<()> { + if let Some(len) = self.limits.len.checked_sub(len) { + self.limits.len = len; + Ok(()) + } else { + Err(Error::LengthLimitExceeded) + } + } + + /// Consumes a single depth for the duration of the given function. + /// + /// ### Errors + /// + /// If the depth limit is already exhausted. 
+ pub(crate) fn with_limited_depth(&mut self, f: F) -> Result + where + F: FnOnce(&mut Self) -> Result, + { + if let Some(depth) = self.limits.depth.checked_sub(1) { + self.limits.depth = depth; + let res = f(self); + self.limits.depth = self.limits.depth.saturating_add(1); + res + } else { + Err(Error::DepthLimitExceeded) + } + } +} + +#[cfg(feature = "std")] +impl Read for Limited { + /// Forwards the read operation to the wrapped object. + fn read(&mut self, buf: &mut [u8]) -> std::io::Result { + self.inner.read(buf) + } +} + +#[cfg(feature = "std")] +impl BufRead for Limited { + /// Forwards the read operation to the wrapped object. + fn fill_buf(&mut self) -> std::io::Result<&[u8]> { + self.inner.fill_buf() + } + + /// Forwards the read operation to the wrapped object. + fn consume(&mut self, amt: usize) { + self.inner.consume(amt); + } +} + +#[cfg(feature = "std")] +impl Write for Limited { + /// Forwards the write operation to the wrapped object. + fn write(&mut self, buf: &[u8]) -> std::io::Result { + self.inner.write(buf) + } + + /// Forwards the flush operation to the wrapped object. + fn flush(&mut self) -> std::io::Result<()> { + self.inner.flush() + } +} + +#[cfg(feature = "std")] +pub struct ReadXdrIter { + reader: Limited>, + _s: PhantomData, +} + +#[cfg(feature = "std")] +impl ReadXdrIter { + fn new(r: R, limits: Limits) -> Self { + Self { + reader: Limited { + inner: BufReader::new(r), + limits, + }, + _s: PhantomData, + } + } +} + +#[cfg(feature = "std")] +impl Iterator for ReadXdrIter { + type Item = Result; + + // Next reads the internal reader and XDR decodes it into the Self type. If + // the EOF is reached without reading any new bytes `None` is returned. If + // EOF is reached after reading some bytes a truncated entry is assumed an + // an `Error::Io` containing an `UnexpectedEof`. If any other IO error + // occurs it is returned. Iteration of this iterator stops naturally when + // `None` is returned, but not when a `Some(Err(...))` is returned. The + // caller is responsible for checking each Result. + fn next(&mut self) -> Option { + // Try to fill the buffer to see if the EOF has been reached or not. + // This happens to effectively peek to see if the stream has finished + // and there are no more items. It is necessary to do this because the + // xdr types in this crate heavily use the `std::io::Read::read_exact` + // method that doesn't distinguish between an EOF at the beginning of a + // read and an EOF after a partial fill of a read_exact. + match self.reader.fill_buf() { + // If the reader has no more data and is unable to fill any new data + // into its internal buf, then the EOF has been reached. + Ok([]) => return None, + // If an error occurs filling the buffer, treat that as an error and stop. + Err(e) => return Some(Err(Error::Io(e))), + // If there is data in the buf available for reading, continue. + Ok([..]) => (), + }; + // Read the buf into the type. + let r = self.reader.with_limited_depth(|dlr| S::read_xdr(dlr)); + match r { + Ok(s) => Some(Ok(s)), + Err(e) => Some(Err(e)), + } + } +} + +pub trait ReadXdr +where + Self: Sized, +{ + /// Read the XDR and construct the type. + /// + /// Read bytes from the given read implementation, decoding the bytes as + /// XDR, and construct the type implementing this interface from those + /// bytes. + /// + /// Just enough bytes are read from the read implementation to construct the + /// type. Any residual bytes remain in the read implementation. 
+ /// + /// All implementations should continue if the read implementation returns + /// [`ErrorKind::Interrupted`](std::io::ErrorKind::Interrupted). + /// + /// Use [`ReadXdR: Read_xdr_to_end`] when the intent is for all bytes in the + /// read implementation to be consumed by the read. + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result; + + /// Construct the type from the XDR bytes base64 encoded. + /// + /// An error is returned if the bytes are not completely consumed by the + /// deserialization. + #[cfg(feature = "base64")] + fn read_xdr_base64(r: &mut Limited) -> Result { + let mut dec = Limited::new( + base64::read::DecoderReader::new(&mut r.inner, base64::STANDARD), + r.limits.clone(), + ); + let t = Self::read_xdr(&mut dec)?; + Ok(t) + } + + /// Read the XDR and construct the type, and consider it an error if the + /// read does not completely consume the read implementation. + /// + /// Read bytes from the given read implementation, decoding the bytes as + /// XDR, and construct the type implementing this interface from those + /// bytes. + /// + /// Just enough bytes are read from the read implementation to construct the + /// type, and then confirm that no further bytes remain. To confirm no + /// further bytes remain additional bytes are attempted to be read from the + /// read implementation. If it is possible to read any residual bytes from + /// the read implementation an error is returned. The read implementation + /// may not be exhaustively read if there are residual bytes, and it is + /// considered undefined how many residual bytes or how much of the residual + /// buffer are consumed in this case. + /// + /// All implementations should continue if the read implementation returns + /// [`ErrorKind::Interrupted`](std::io::ErrorKind::Interrupted). + #[cfg(feature = "std")] + fn read_xdr_to_end(r: &mut Limited) -> Result { + let s = Self::read_xdr(r)?; + // Check that any further reads, such as this read of one byte, read no + // data, indicating EOF. If a byte is read the data is invalid. + if r.read(&mut [0u8; 1])? == 0 { + Ok(s) + } else { + Err(Error::Invalid) + } + } + + /// Construct the type from the XDR bytes base64 encoded. + /// + /// An error is returned if the bytes are not completely consumed by the + /// deserialization. + #[cfg(feature = "base64")] + fn read_xdr_base64_to_end(r: &mut Limited) -> Result { + let mut dec = Limited::new( + base64::read::DecoderReader::new(&mut r.inner, base64::STANDARD), + r.limits.clone(), + ); + let t = Self::read_xdr_to_end(&mut dec)?; + Ok(t) + } + + /// Read the XDR and construct the type. + /// + /// Read bytes from the given read implementation, decoding the bytes as + /// XDR, and construct the type implementing this interface from those + /// bytes. + /// + /// Just enough bytes are read from the read implementation to construct the + /// type. Any residual bytes remain in the read implementation. + /// + /// All implementations should continue if the read implementation returns + /// [`ErrorKind::Interrupted`](std::io::ErrorKind::Interrupted). + /// + /// Use [`ReadXdR: Read_xdr_into_to_end`] when the intent is for all bytes + /// in the read implementation to be consumed by the read. + #[cfg(feature = "std")] + fn read_xdr_into(&mut self, r: &mut Limited) -> Result<()> { + *self = Self::read_xdr(r)?; + Ok(()) + } + + /// Read the XDR into the existing value, and consider it an error if the + /// read does not completely consume the read implementation. 
+ /// + /// Read bytes from the given read implementation, decoding the bytes as + /// XDR, and construct the type implementing this interface from those + /// bytes. + /// + /// Just enough bytes are read from the read implementation to construct the + /// type, and then confirm that no further bytes remain. To confirm no + /// further bytes remain additional bytes are attempted to be read from the + /// read implementation. If it is possible to read any residual bytes from + /// the read implementation an error is returned. The read implementation + /// may not be exhaustively read if there are residual bytes, and it is + /// considered undefined how many residual bytes or how much of the residual + /// buffer are consumed in this case. + /// + /// All implementations should continue if the read implementation returns + /// [`ErrorKind::Interrupted`](std::io::ErrorKind::Interrupted). + #[cfg(feature = "std")] + fn read_xdr_into_to_end(&mut self, r: &mut Limited) -> Result<()> { + Self::read_xdr_into(self, r)?; + // Check that any further reads, such as this read of one byte, read no + // data, indicating EOF. If a byte is read the data is invalid. + if r.read(&mut [0u8; 1])? == 0 { + Ok(()) + } else { + Err(Error::Invalid) + } + } + + /// Create an iterator that reads the read implementation as a stream of + /// values that are read into the implementing type. + /// + /// Read bytes from the given read implementation, decoding the bytes as + /// XDR, and construct the type implementing this interface from those + /// bytes. + /// + /// Just enough bytes are read from the read implementation to construct the + /// type, and then confirm that no further bytes remain. To confirm no + /// further bytes remain additional bytes are attempted to be read from the + /// read implementation. If it is possible to read any residual bytes from + /// the read implementation an error is returned. The read implementation + /// may not be exhaustively read if there are residual bytes, and it is + /// considered undefined how many residual bytes or how much of the residual + /// buffer are consumed in this case. + /// + /// All implementations should continue if the read implementation returns + /// [`ErrorKind::Interrupted`](std::io::ErrorKind::Interrupted). + #[cfg(feature = "std")] + fn read_xdr_iter(r: &mut Limited) -> ReadXdrIter<&mut R, Self> { + ReadXdrIter::new(&mut r.inner, r.limits.clone()) + } + + /// Create an iterator that reads the read implementation as a stream of + /// values that are read into the implementing type. + #[cfg(feature = "base64")] + fn read_xdr_base64_iter( + r: &mut Limited, + ) -> ReadXdrIter, Self> { + let dec = base64::read::DecoderReader::new(&mut r.inner, base64::STANDARD); + ReadXdrIter::new(dec, r.limits.clone()) + } + + /// Construct the type from the XDR bytes. + /// + /// An error is returned if the bytes are not completely consumed by the + /// deserialization. + #[cfg(feature = "std")] + fn from_xdr(bytes: impl AsRef<[u8]>, limits: Limits) -> Result { + let mut cursor = Limited::new(Cursor::new(bytes.as_ref()), limits); + let t = Self::read_xdr_to_end(&mut cursor)?; + Ok(t) + } + + /// Construct the type from the XDR bytes base64 encoded. + /// + /// An error is returned if the bytes are not completely consumed by the + /// deserialization. 
+ #[cfg(feature = "base64")] + fn from_xdr_base64(b64: impl AsRef<[u8]>, limits: Limits) -> Result { + let mut b64_reader = Cursor::new(b64); + let mut dec = Limited::new( + base64::read::DecoderReader::new(&mut b64_reader, base64::STANDARD), + limits, + ); + let t = Self::read_xdr_to_end(&mut dec)?; + Ok(t) + } +} + +pub trait WriteXdr { + #[cfg(feature = "std")] + fn write_xdr(&self, w: &mut Limited) -> Result<()>; + + #[cfg(feature = "std")] + fn to_xdr(&self, limits: Limits) -> Result> { + let mut cursor = Limited::new(Cursor::new(vec![]), limits); + self.write_xdr(&mut cursor)?; + let bytes = cursor.inner.into_inner(); + Ok(bytes) + } + + #[cfg(feature = "base64")] + fn to_xdr_base64(&self, limits: Limits) -> Result { + let mut enc = Limited::new( + base64::write::EncoderStringWriter::new(base64::STANDARD), + limits, + ); + self.write_xdr(&mut enc)?; + let b64 = enc.inner.into_inner(); + Ok(b64) + } +} + +/// `Pad_len` returns the number of bytes to pad an XDR value of the given +/// length to make the final serialized size a multiple of 4. +#[cfg(feature = "std")] +fn pad_len(len: usize) -> usize { + (4 - (len % 4)) % 4 +} + +impl ReadXdr for i32 { + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result { + let mut b = [0u8; 4]; + r.with_limited_depth(|r| { + r.consume_len(b.len())?; + r.read_exact(&mut b)?; + Ok(i32::from_be_bytes(b)) + }) + } +} + +impl WriteXdr for i32 { + #[cfg(feature = "std")] + fn write_xdr(&self, w: &mut Limited) -> Result<()> { + let b: [u8; 4] = self.to_be_bytes(); + w.with_limited_depth(|w| { + w.consume_len(b.len())?; + Ok(w.write_all(&b)?) + }) + } +} + +impl ReadXdr for u32 { + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result { + let mut b = [0u8; 4]; + r.with_limited_depth(|r| { + r.consume_len(b.len())?; + r.read_exact(&mut b)?; + Ok(u32::from_be_bytes(b)) + }) + } +} + +impl WriteXdr for u32 { + #[cfg(feature = "std")] + fn write_xdr(&self, w: &mut Limited) -> Result<()> { + let b: [u8; 4] = self.to_be_bytes(); + w.with_limited_depth(|w| { + w.consume_len(b.len())?; + Ok(w.write_all(&b)?) + }) + } +} + +impl ReadXdr for i64 { + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result { + let mut b = [0u8; 8]; + r.with_limited_depth(|r| { + r.consume_len(b.len())?; + r.read_exact(&mut b)?; + Ok(i64::from_be_bytes(b)) + }) + } +} + +impl WriteXdr for i64 { + #[cfg(feature = "std")] + fn write_xdr(&self, w: &mut Limited) -> Result<()> { + let b: [u8; 8] = self.to_be_bytes(); + w.with_limited_depth(|w| { + w.consume_len(b.len())?; + Ok(w.write_all(&b)?) + }) + } +} + +impl ReadXdr for u64 { + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result { + let mut b = [0u8; 8]; + r.with_limited_depth(|r| { + r.consume_len(b.len())?; + r.read_exact(&mut b)?; + Ok(u64::from_be_bytes(b)) + }) + } +} + +impl WriteXdr for u64 { + #[cfg(feature = "std")] + fn write_xdr(&self, w: &mut Limited) -> Result<()> { + let b: [u8; 8] = self.to_be_bytes(); + w.with_limited_depth(|w| { + w.consume_len(b.len())?; + Ok(w.write_all(&b)?) 
+ }) + } +} + +impl ReadXdr for f32 { + #[cfg(feature = "std")] + fn read_xdr(_r: &mut Limited) -> Result { + todo!() + } +} + +impl WriteXdr for f32 { + #[cfg(feature = "std")] + fn write_xdr(&self, _w: &mut Limited) -> Result<()> { + todo!() + } +} + +impl ReadXdr for f64 { + #[cfg(feature = "std")] + fn read_xdr(_r: &mut Limited) -> Result { + todo!() + } +} + +impl WriteXdr for f64 { + #[cfg(feature = "std")] + fn write_xdr(&self, _w: &mut Limited) -> Result<()> { + todo!() + } +} + +impl ReadXdr for bool { + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result { + r.with_limited_depth(|r| { + let i = u32::read_xdr(r)?; + let b = i == 1; + Ok(b) + }) + } +} + +impl WriteXdr for bool { + #[cfg(feature = "std")] + fn write_xdr(&self, w: &mut Limited) -> Result<()> { + w.with_limited_depth(|w| { + let i = u32::from(*self); // true = 1, false = 0 + i.write_xdr(w) + }) + } +} + +impl ReadXdr for Option { + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result { + r.with_limited_depth(|r| { + let i = u32::read_xdr(r)?; + match i { + 0 => Ok(None), + 1 => { + let t = T::read_xdr(r)?; + Ok(Some(t)) + } + _ => Err(Error::Invalid), + } + }) + } +} + +impl WriteXdr for Option { + #[cfg(feature = "std")] + fn write_xdr(&self, w: &mut Limited) -> Result<()> { + w.with_limited_depth(|w| { + if let Some(t) = self { + 1u32.write_xdr(w)?; + t.write_xdr(w)?; + } else { + 0u32.write_xdr(w)?; + } + Ok(()) + }) + } +} + +impl ReadXdr for Box { + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result { + r.with_limited_depth(|r| Ok(Box::new(T::read_xdr(r)?))) + } +} + +impl WriteXdr for Box { + #[cfg(feature = "std")] + fn write_xdr(&self, w: &mut Limited) -> Result<()> { + w.with_limited_depth(|w| T::write_xdr(self, w)) + } +} + +impl ReadXdr for () { + #[cfg(feature = "std")] + fn read_xdr(_r: &mut Limited) -> Result { + Ok(()) + } +} + +impl WriteXdr for () { + #[cfg(feature = "std")] + fn write_xdr(&self, _w: &mut Limited) -> Result<()> { + Ok(()) + } +} + +impl ReadXdr for [u8; N] { + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result { + r.with_limited_depth(|r| { + r.consume_len(N)?; + let padding = pad_len(N); + r.consume_len(padding)?; + let mut arr = [0u8; N]; + r.read_exact(&mut arr)?; + let pad = &mut [0u8; 3][..padding]; + r.read_exact(pad)?; + if pad.iter().any(|b| *b != 0) { + return Err(Error::NonZeroPadding); + } + Ok(arr) + }) + } +} + +impl WriteXdr for [u8; N] { + #[cfg(feature = "std")] + fn write_xdr(&self, w: &mut Limited) -> Result<()> { + w.with_limited_depth(|w| { + w.consume_len(N)?; + let padding = pad_len(N); + w.consume_len(padding)?; + w.write_all(self)?; + w.write_all(&[0u8; 3][..padding])?; + Ok(()) + }) + } +} + +impl ReadXdr for [T; N] { + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result { + r.with_limited_depth(|r| { + let mut vec = Vec::with_capacity(N); + for _ in 0..N { + let t = T::read_xdr(r)?; + vec.push(t); + } + let arr: [T; N] = vec.try_into().unwrap_or_else(|_: Vec| unreachable!()); + Ok(arr) + }) + } +} + +impl WriteXdr for [T; N] { + #[cfg(feature = "std")] + fn write_xdr(&self, w: &mut Limited) -> Result<()> { + w.with_limited_depth(|w| { + for t in self { + t.write_xdr(w)?; + } + Ok(()) + }) + } +} + +// VecM ------------------------------------------------------------------------ + +#[cfg(feature = "alloc")] +#[derive(Debug, Clone, Hash, PartialEq, Eq, PartialOrd, Ord)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +#[cfg_attr(feature = "arbitrary", 
derive(Arbitrary))] +pub struct VecM(Vec); + +#[cfg(not(feature = "alloc"))] +#[derive(Debug, Clone, Hash, PartialEq, Eq, PartialOrd, Ord)] +#[cfg_attr(feature = "arbitrary", derive(Arbitrary))] +pub struct VecM(Vec) +where + T: 'static; + +impl Deref for VecM { + type Target = Vec; + + fn deref(&self) -> &Self::Target { + &self.0 + } +} + +impl Default for VecM { + fn default() -> Self { + Self(Vec::default()) + } +} + +#[cfg(feature = "schemars")] +impl schemars::JsonSchema for VecM { + fn schema_name() -> String { + format!("VecM<{}, {}>", T::schema_name(), MAX) + } + + fn is_referenceable() -> bool { + false + } + + fn json_schema(gen: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema { + let schema = Vec::::json_schema(gen); + if let schemars::schema::Schema::Object(mut schema) = schema { + if let Some(array) = schema.array.clone() { + schema.array = Some(Box::new(schemars::schema::ArrayValidation { + max_items: Some(MAX), + ..*array + })); + } + schema.into() + } else { + schema + } + } +} + +impl VecM { + pub const MAX_LEN: usize = { MAX as usize }; + + #[must_use] + #[allow(clippy::unused_self)] + pub fn max_len(&self) -> usize { + Self::MAX_LEN + } + + #[must_use] + pub fn as_vec(&self) -> &Vec { + self.as_ref() + } +} + +impl VecM { + #[must_use] + #[cfg(feature = "alloc")] + pub fn to_vec(&self) -> Vec { + self.into() + } + + #[must_use] + pub fn into_vec(self) -> Vec { + self.into() + } +} + +impl VecM { + #[cfg(feature = "alloc")] + pub fn to_string(&self) -> Result { + self.try_into() + } + + #[cfg(feature = "alloc")] + pub fn into_string(self) -> Result { + self.try_into() + } + + #[cfg(feature = "alloc")] + #[must_use] + pub fn to_string_lossy(&self) -> String { + String::from_utf8_lossy(&self.0).into_owned() + } + + #[cfg(feature = "alloc")] + #[must_use] + pub fn into_string_lossy(self) -> String { + String::from_utf8_lossy(&self.0).into_owned() + } +} + +impl VecM { + #[must_use] + pub fn to_option(&self) -> Option { + if self.len() > 0 { + Some(self.0[0].clone()) + } else { + None + } + } +} + +#[cfg(not(feature = "alloc"))] +impl From> for Option { + #[must_use] + fn from(v: VecM) -> Self { + v.to_option() + } +} + +#[cfg(feature = "alloc")] +impl VecM { + #[must_use] + pub fn into_option(mut self) -> Option { + self.0.drain(..).next() + } +} + +#[cfg(feature = "alloc")] +impl From> for Option { + #[must_use] + fn from(v: VecM) -> Self { + v.into_option() + } +} + +impl TryFrom> for VecM { + type Error = Error; + + fn try_from(v: Vec) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(VecM(v)) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +impl From> for Vec { + #[must_use] + fn from(v: VecM) -> Self { + v.0 + } +} + +#[cfg(feature = "alloc")] +impl From<&VecM> for Vec { + #[must_use] + fn from(v: &VecM) -> Self { + v.0.clone() + } +} + +impl AsRef> for VecM { + #[must_use] + fn as_ref(&self) -> &Vec { + &self.0 + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&Vec> for VecM { + type Error = Error; + + fn try_from(v: &Vec) -> Result { + v.as_slice().try_into() + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&[T]> for VecM { + type Error = Error; + + fn try_from(v: &[T]) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(VecM(v.to_vec())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +impl AsRef<[T]> for VecM { + #[cfg(feature = "alloc")] + #[must_use] + fn as_ref(&self) -> &[T] { + self.0.as_ref() + } + 
#[cfg(not(feature = "alloc"))] + #[must_use] + fn as_ref(&self) -> &[T] { + self.0 + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<[T; N]> for VecM { + type Error = Error; + + fn try_from(v: [T; N]) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(VecM(v.to_vec())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +#[cfg(feature = "alloc")] +impl TryFrom> for [T; N] { + type Error = VecM; + + fn try_from(v: VecM) -> core::result::Result { + let s: [T; N] = v.0.try_into().map_err(|v: Vec| VecM::(v))?; + Ok(s) + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&[T; N]> for VecM { + type Error = Error; + + fn try_from(v: &[T; N]) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(VecM(v.to_vec())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +#[cfg(not(feature = "alloc"))] +impl TryFrom<&'static [T; N]> for VecM { + type Error = Error; + + fn try_from(v: &'static [T; N]) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(VecM(v)) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&String> for VecM { + type Error = Error; + + fn try_from(v: &String) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(VecM(v.as_bytes().to_vec())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +#[cfg(feature = "alloc")] +impl TryFrom for VecM { + type Error = Error; + + fn try_from(v: String) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(VecM(v.into())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +#[cfg(feature = "alloc")] +impl TryFrom> for String { + type Error = Error; + + fn try_from(v: VecM) -> Result { + Ok(String::from_utf8(v.0)?) + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&VecM> for String { + type Error = Error; + + fn try_from(v: &VecM) -> Result { + Ok(core::str::from_utf8(v.as_ref())?.to_owned()) + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&str> for VecM { + type Error = Error; + + fn try_from(v: &str) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(VecM(v.into())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +#[cfg(not(feature = "alloc"))] +impl TryFrom<&'static str> for VecM { + type Error = Error; + + fn try_from(v: &'static str) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(VecM(v.as_bytes())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +impl<'a, const MAX: u32> TryFrom<&'a VecM> for &'a str { + type Error = Error; + + fn try_from(v: &'a VecM) -> Result { + Ok(core::str::from_utf8(v.as_ref())?) 
+ } +} + +impl ReadXdr for VecM { + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result { + r.with_limited_depth(|r| { + let len: u32 = u32::read_xdr(r)?; + if len > MAX { + return Err(Error::LengthExceedsMax); + } + + r.consume_len(len as usize)?; + let padding = pad_len(len as usize); + r.consume_len(padding)?; + + let mut vec = vec![0u8; len as usize]; + r.read_exact(&mut vec)?; + + let pad = &mut [0u8; 3][..padding]; + r.read_exact(pad)?; + if pad.iter().any(|b| *b != 0) { + return Err(Error::NonZeroPadding); + } + + Ok(VecM(vec)) + }) + } +} + +impl WriteXdr for VecM { + #[cfg(feature = "std")] + fn write_xdr(&self, w: &mut Limited) -> Result<()> { + w.with_limited_depth(|w| { + let len: u32 = self.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + len.write_xdr(w)?; + + w.consume_len(self.len())?; + let padding = pad_len(self.len()); + w.consume_len(padding)?; + + w.write_all(&self.0)?; + + w.write_all(&[0u8; 3][..padding])?; + + Ok(()) + }) + } +} + +impl ReadXdr for VecM { + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result { + r.with_limited_depth(|r| { + let len = u32::read_xdr(r)?; + if len > MAX { + return Err(Error::LengthExceedsMax); + } + + let mut vec = Vec::new(); + for _ in 0..len { + let t = T::read_xdr(r)?; + vec.push(t); + } + + Ok(VecM(vec)) + }) + } +} + +impl WriteXdr for VecM { + #[cfg(feature = "std")] + fn write_xdr(&self, w: &mut Limited) -> Result<()> { + w.with_limited_depth(|w| { + let len: u32 = self.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + len.write_xdr(w)?; + + for t in &self.0 { + t.write_xdr(w)?; + } + + Ok(()) + }) + } +} + +// BytesM ------------------------------------------------------------------------ + +#[cfg(feature = "alloc")] +#[derive(Clone, Hash, PartialEq, Eq, PartialOrd, Ord)] +#[cfg_attr( + feature = "serde", + derive(serde_with::SerializeDisplay, serde_with::DeserializeFromStr) +)] +#[cfg_attr(feature = "arbitrary", derive(Arbitrary))] +pub struct BytesM(Vec); + +#[cfg(not(feature = "alloc"))] +#[derive(Clone, Hash, PartialEq, Eq, PartialOrd, Ord)] +#[cfg_attr(feature = "arbitrary", derive(Arbitrary))] +pub struct BytesM(Vec); + +impl core::fmt::Display for BytesM { + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + #[cfg(feature = "alloc")] + let v = &self.0; + #[cfg(not(feature = "alloc"))] + let v = self.0; + for b in v { + write!(f, "{b:02x}")?; + } + Ok(()) + } +} + +impl core::fmt::Debug for BytesM { + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + #[cfg(feature = "alloc")] + let v = &self.0; + #[cfg(not(feature = "alloc"))] + let v = self.0; + write!(f, "BytesM(")?; + for b in v { + write!(f, "{b:02x}")?; + } + write!(f, ")")?; + Ok(()) + } +} + +#[cfg(feature = "alloc")] +impl core::str::FromStr for BytesM { + type Err = Error; + fn from_str(s: &str) -> core::result::Result { + hex::decode(s).map_err(|_| Error::InvalidHex)?.try_into() + } +} + +impl Deref for BytesM { + type Target = Vec; + + fn deref(&self) -> &Self::Target { + &self.0 + } +} + +#[cfg(feature = "schemars")] +impl schemars::JsonSchema for BytesM { + fn schema_name() -> String { + format!("BytesM<{MAX}>") + } + + fn is_referenceable() -> bool { + false + } + + fn json_schema(gen: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema { + let schema = String::json_schema(gen); + if let schemars::schema::Schema::Object(mut schema) = schema { + schema.extensions.insert( + "contentEncoding".to_owned(), + serde_json::Value::String("hex".to_string()), + ); + 
schema.extensions.insert( + "contentMediaType".to_owned(), + serde_json::Value::String("application/binary".to_string()), + ); + let string = *schema.string.unwrap_or_default().clone(); + schema.string = Some(Box::new(schemars::schema::StringValidation { + max_length: MAX.checked_mul(2).map(Some).unwrap_or_default(), + min_length: None, + ..string + })); + schema.into() + } else { + schema + } + } +} + +impl Default for BytesM { + fn default() -> Self { + Self(Vec::default()) + } +} + +impl BytesM { + pub const MAX_LEN: usize = { MAX as usize }; + + #[must_use] + #[allow(clippy::unused_self)] + pub fn max_len(&self) -> usize { + Self::MAX_LEN + } + + #[must_use] + pub fn as_vec(&self) -> &Vec { + self.as_ref() + } +} + +impl BytesM { + #[must_use] + #[cfg(feature = "alloc")] + pub fn to_vec(&self) -> Vec { + self.into() + } + + #[must_use] + pub fn into_vec(self) -> Vec { + self.into() + } +} + +impl BytesM { + #[cfg(feature = "alloc")] + pub fn to_string(&self) -> Result { + self.try_into() + } + + #[cfg(feature = "alloc")] + pub fn into_string(self) -> Result { + self.try_into() + } + + #[cfg(feature = "alloc")] + #[must_use] + pub fn to_string_lossy(&self) -> String { + String::from_utf8_lossy(&self.0).into_owned() + } + + #[cfg(feature = "alloc")] + #[must_use] + pub fn into_string_lossy(self) -> String { + String::from_utf8_lossy(&self.0).into_owned() + } +} + +impl TryFrom> for BytesM { + type Error = Error; + + fn try_from(v: Vec) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(BytesM(v)) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +impl From> for Vec { + #[must_use] + fn from(v: BytesM) -> Self { + v.0 + } +} + +#[cfg(feature = "alloc")] +impl From<&BytesM> for Vec { + #[must_use] + fn from(v: &BytesM) -> Self { + v.0.clone() + } +} + +impl AsRef> for BytesM { + #[must_use] + fn as_ref(&self) -> &Vec { + &self.0 + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&Vec> for BytesM { + type Error = Error; + + fn try_from(v: &Vec) -> Result { + v.as_slice().try_into() + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&[u8]> for BytesM { + type Error = Error; + + fn try_from(v: &[u8]) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(BytesM(v.to_vec())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +impl AsRef<[u8]> for BytesM { + #[cfg(feature = "alloc")] + #[must_use] + fn as_ref(&self) -> &[u8] { + self.0.as_ref() + } + #[cfg(not(feature = "alloc"))] + #[must_use] + fn as_ref(&self) -> &[u8] { + self.0 + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<[u8; N]> for BytesM { + type Error = Error; + + fn try_from(v: [u8; N]) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(BytesM(v.to_vec())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +#[cfg(feature = "alloc")] +impl TryFrom> for [u8; N] { + type Error = BytesM; + + fn try_from(v: BytesM) -> core::result::Result { + let s: [u8; N] = v.0.try_into().map_err(BytesM::)?; + Ok(s) + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&[u8; N]> for BytesM { + type Error = Error; + + fn try_from(v: &[u8; N]) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(BytesM(v.to_vec())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +#[cfg(not(feature = "alloc"))] +impl TryFrom<&'static [u8; N]> for BytesM { + type Error = Error; + + fn try_from(v: &'static [u8; N]) -> Result { + 
let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(BytesM(v)) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&String> for BytesM { + type Error = Error; + + fn try_from(v: &String) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(BytesM(v.as_bytes().to_vec())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +#[cfg(feature = "alloc")] +impl TryFrom for BytesM { + type Error = Error; + + fn try_from(v: String) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(BytesM(v.into())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +#[cfg(feature = "alloc")] +impl TryFrom> for String { + type Error = Error; + + fn try_from(v: BytesM) -> Result { + Ok(String::from_utf8(v.0)?) + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&BytesM> for String { + type Error = Error; + + fn try_from(v: &BytesM) -> Result { + Ok(core::str::from_utf8(v.as_ref())?.to_owned()) + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&str> for BytesM { + type Error = Error; + + fn try_from(v: &str) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(BytesM(v.into())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +#[cfg(not(feature = "alloc"))] +impl TryFrom<&'static str> for BytesM { + type Error = Error; + + fn try_from(v: &'static str) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(BytesM(v.as_bytes())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +impl<'a, const MAX: u32> TryFrom<&'a BytesM> for &'a str { + type Error = Error; + + fn try_from(v: &'a BytesM) -> Result { + Ok(core::str::from_utf8(v.as_ref())?) + } +} + +impl ReadXdr for BytesM { + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result { + r.with_limited_depth(|r| { + let len: u32 = u32::read_xdr(r)?; + if len > MAX { + return Err(Error::LengthExceedsMax); + } + + r.consume_len(len as usize)?; + let padding = pad_len(len as usize); + r.consume_len(padding)?; + + let mut vec = vec![0u8; len as usize]; + r.read_exact(&mut vec)?; + + let pad = &mut [0u8; 3][..padding]; + r.read_exact(pad)?; + if pad.iter().any(|b| *b != 0) { + return Err(Error::NonZeroPadding); + } + + Ok(BytesM(vec)) + }) + } +} + +impl WriteXdr for BytesM { + #[cfg(feature = "std")] + fn write_xdr(&self, w: &mut Limited) -> Result<()> { + w.with_limited_depth(|w| { + let len: u32 = self.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + len.write_xdr(w)?; + + w.consume_len(self.len())?; + let padding = pad_len(self.len()); + w.consume_len(padding)?; + + w.write_all(&self.0)?; + + w.write_all(&[0u8; 3][..pad_len(len as usize)])?; + + Ok(()) + }) + } +} + +// StringM ------------------------------------------------------------------------ + +/// A string type that contains arbitrary bytes. +/// +/// Convertible, fallibly, to/from a Rust UTF-8 String using +/// [`TryFrom`]/[`TryInto`]/[`StringM::to_utf8_string`]. +/// +/// Convertible, lossyly, to a Rust UTF-8 String using +/// [`StringM::to_utf8_string_lossy`]. +/// +/// Convertible to/from escaped printable-ASCII using +/// [`Display`]/[`ToString`]/[`FromStr`]. 
+ +#[cfg(feature = "alloc")] +#[derive(Clone, Hash, PartialEq, Eq, PartialOrd, Ord)] +#[cfg_attr( + feature = "serde", + derive(serde_with::SerializeDisplay, serde_with::DeserializeFromStr) +)] +#[cfg_attr(feature = "arbitrary", derive(Arbitrary))] +pub struct StringM(Vec); + +#[cfg(not(feature = "alloc"))] +#[derive(Clone, Hash, PartialEq, Eq, PartialOrd, Ord)] +#[cfg_attr(feature = "arbitrary", derive(Arbitrary))] +pub struct StringM(Vec); + +impl core::fmt::Display for StringM { + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + #[cfg(feature = "alloc")] + let v = &self.0; + #[cfg(not(feature = "alloc"))] + let v = self.0; + for b in escape_bytes::Escape::new(v) { + write!(f, "{}", b as char)?; + } + Ok(()) + } +} + +impl core::fmt::Debug for StringM { + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + #[cfg(feature = "alloc")] + let v = &self.0; + #[cfg(not(feature = "alloc"))] + let v = self.0; + write!(f, "StringM(")?; + for b in escape_bytes::Escape::new(v) { + write!(f, "{}", b as char)?; + } + write!(f, ")")?; + Ok(()) + } +} + +#[cfg(feature = "alloc")] +impl core::str::FromStr for StringM { + type Err = Error; + fn from_str(s: &str) -> core::result::Result { + let b = escape_bytes::unescape(s.as_bytes()).map_err(|_| Error::Invalid)?; + Ok(Self(b)) + } +} + +impl Deref for StringM { + type Target = Vec; + + fn deref(&self) -> &Self::Target { + &self.0 + } +} + +impl Default for StringM { + fn default() -> Self { + Self(Vec::default()) + } +} + +#[cfg(feature = "schemars")] +impl schemars::JsonSchema for StringM { + fn schema_name() -> String { + format!("StringM<{MAX}>") + } + + fn json_schema(gen: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema { + let schema = String::json_schema(gen); + if let schemars::schema::Schema::Object(mut schema) = schema { + let string = *schema.string.unwrap_or_default().clone(); + schema.string = Some(Box::new(schemars::schema::StringValidation { + max_length: Some(MAX), + ..string + })); + schema.into() + } else { + schema + } + } +} + +impl StringM { + pub const MAX_LEN: usize = { MAX as usize }; + + #[must_use] + #[allow(clippy::unused_self)] + pub fn max_len(&self) -> usize { + Self::MAX_LEN + } + + #[must_use] + pub fn as_vec(&self) -> &Vec { + self.as_ref() + } +} + +impl StringM { + #[must_use] + #[cfg(feature = "alloc")] + pub fn to_vec(&self) -> Vec { + self.into() + } + + #[must_use] + pub fn into_vec(self) -> Vec { + self.into() + } +} + +impl StringM { + #[cfg(feature = "alloc")] + pub fn to_utf8_string(&self) -> Result { + self.try_into() + } + + #[cfg(feature = "alloc")] + pub fn into_utf8_string(self) -> Result { + self.try_into() + } + + #[cfg(feature = "alloc")] + #[must_use] + pub fn to_utf8_string_lossy(&self) -> String { + String::from_utf8_lossy(&self.0).into_owned() + } + + #[cfg(feature = "alloc")] + #[must_use] + pub fn into_utf8_string_lossy(self) -> String { + String::from_utf8_lossy(&self.0).into_owned() + } +} + +impl TryFrom> for StringM { + type Error = Error; + + fn try_from(v: Vec) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(StringM(v)) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +impl From> for Vec { + #[must_use] + fn from(v: StringM) -> Self { + v.0 + } +} + +#[cfg(feature = "alloc")] +impl From<&StringM> for Vec { + #[must_use] + fn from(v: &StringM) -> Self { + v.0.clone() + } +} + +impl AsRef> for StringM { + #[must_use] + fn as_ref(&self) -> &Vec { + &self.0 + } +} + 
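// Editor's sketch, not part of the generated output: shows the bounded-string
// behaviour described in the StringM docs above, assuming the "alloc" feature is
// enabled. The 32-byte bound is an arbitrary example value.
#[cfg(all(test, feature = "alloc"))]
#[test]
fn string_m_round_trip() {
    // Construction is fallible: the byte length is checked against the MAX parameter.
    let s: StringM<32> = "hello world".try_into().unwrap();
    assert_eq!(s.max_len(), 32);
    // The contained bytes need not be UTF-8, so conversion to String is fallible too.
    assert_eq!(s.to_utf8_string().unwrap(), "hello world");
    // Values longer than MAX are rejected.
    assert!(StringM::<4>::try_from("too long for four bytes").is_err());
}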
+#[cfg(feature = "alloc")] +impl TryFrom<&Vec> for StringM { + type Error = Error; + + fn try_from(v: &Vec) -> Result { + v.as_slice().try_into() + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&[u8]> for StringM { + type Error = Error; + + fn try_from(v: &[u8]) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(StringM(v.to_vec())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +impl AsRef<[u8]> for StringM { + #[cfg(feature = "alloc")] + #[must_use] + fn as_ref(&self) -> &[u8] { + self.0.as_ref() + } + #[cfg(not(feature = "alloc"))] + #[must_use] + fn as_ref(&self) -> &[u8] { + self.0 + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<[u8; N]> for StringM { + type Error = Error; + + fn try_from(v: [u8; N]) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(StringM(v.to_vec())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +#[cfg(feature = "alloc")] +impl TryFrom> for [u8; N] { + type Error = StringM; + + fn try_from(v: StringM) -> core::result::Result { + let s: [u8; N] = v.0.try_into().map_err(StringM::)?; + Ok(s) + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&[u8; N]> for StringM { + type Error = Error; + + fn try_from(v: &[u8; N]) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(StringM(v.to_vec())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +#[cfg(not(feature = "alloc"))] +impl TryFrom<&'static [u8; N]> for StringM { + type Error = Error; + + fn try_from(v: &'static [u8; N]) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(StringM(v)) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&String> for StringM { + type Error = Error; + + fn try_from(v: &String) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(StringM(v.as_bytes().to_vec())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +#[cfg(feature = "alloc")] +impl TryFrom for StringM { + type Error = Error; + + fn try_from(v: String) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(StringM(v.into())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +#[cfg(feature = "alloc")] +impl TryFrom> for String { + type Error = Error; + + fn try_from(v: StringM) -> Result { + Ok(String::from_utf8(v.0)?) + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&StringM> for String { + type Error = Error; + + fn try_from(v: &StringM) -> Result { + Ok(core::str::from_utf8(v.as_ref())?.to_owned()) + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&str> for StringM { + type Error = Error; + + fn try_from(v: &str) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(StringM(v.into())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +#[cfg(not(feature = "alloc"))] +impl TryFrom<&'static str> for StringM { + type Error = Error; + + fn try_from(v: &'static str) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(StringM(v.as_bytes())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +impl<'a, const MAX: u32> TryFrom<&'a StringM> for &'a str { + type Error = Error; + + fn try_from(v: &'a StringM) -> Result { + Ok(core::str::from_utf8(v.as_ref())?) 
+ } +} + +impl ReadXdr for StringM { + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result { + r.with_limited_depth(|r| { + let len: u32 = u32::read_xdr(r)?; + if len > MAX { + return Err(Error::LengthExceedsMax); + } + + r.consume_len(len as usize)?; + let padding = pad_len(len as usize); + r.consume_len(padding)?; + + let mut vec = vec![0u8; len as usize]; + r.read_exact(&mut vec)?; + + let pad = &mut [0u8; 3][..padding]; + r.read_exact(pad)?; + if pad.iter().any(|b| *b != 0) { + return Err(Error::NonZeroPadding); + } + + Ok(StringM(vec)) + }) + } +} + +impl WriteXdr for StringM { + #[cfg(feature = "std")] + fn write_xdr(&self, w: &mut Limited) -> Result<()> { + w.with_limited_depth(|w| { + let len: u32 = self.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + len.write_xdr(w)?; + + w.consume_len(self.len())?; + let padding = pad_len(self.len()); + w.consume_len(padding)?; + + w.write_all(&self.0)?; + + w.write_all(&[0u8; 3][..padding])?; + + Ok(()) + }) + } +} + +// Frame ------------------------------------------------------------------------ + +#[derive(Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord)] +#[cfg_attr( + all(feature = "serde", feature = "alloc"), + derive(serde::Serialize, serde::Deserialize), + serde(rename_all = "snake_case") +)] +pub struct Frame(pub T) +where + T: ReadXdr; + +#[cfg(feature = "schemars")] +impl schemars::JsonSchema for Frame { + fn schema_name() -> String { + format!("Frame<{}>", T::schema_name()) + } + + fn json_schema(gen: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema { + T::json_schema(gen) + } +} + +impl ReadXdr for Frame +where + T: ReadXdr, +{ + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result { + // Read the frame header value that contains 1 flag-bit and a 33-bit length. + // - The 1 flag bit is 0 when there are more frames for the same record. + // - The 31-bit length is the length of the bytes within the frame that + // follow the frame header. + let header = u32::read_xdr(r)?; + // TODO: Use the length and cap the length we'll read from `r`. + let last_record = header >> 31 == 1; + if last_record { + // Read the record in the frame. + Ok(Self(T::read_xdr(r)?)) + } else { + // TODO: Support reading those additional frames for the same + // record. 
+ Err(Error::Unsupported) + } + } +} + +#[cfg(all(test, feature = "std"))] +mod tests { + use std::io::Cursor; + + use super::*; + + #[test] + pub fn vec_u8_read_without_padding() { + let buf = Cursor::new(vec![0, 0, 0, 4, 2, 2, 2, 2]); + let v = VecM::::read_xdr(&mut Limited::new(buf, Limits::none())).unwrap(); + assert_eq!(v.to_vec(), vec![2, 2, 2, 2]); + } + + #[test] + pub fn vec_u8_read_with_padding() { + let buf = Cursor::new(vec![0, 0, 0, 1, 2, 0, 0, 0]); + let v = VecM::::read_xdr(&mut Limited::new(buf, Limits::none())).unwrap(); + assert_eq!(v.to_vec(), vec![2]); + } + + #[test] + pub fn vec_u8_read_with_insufficient_padding() { + let buf = Cursor::new(vec![0, 0, 0, 1, 2, 0, 0]); + let res = VecM::::read_xdr(&mut Limited::new(buf, Limits::none())); + match res { + Err(Error::Io(_)) => (), + _ => panic!("expected IO error got {res:?}"), + } + } + + #[test] + pub fn vec_u8_read_with_non_zero_padding() { + let buf = Cursor::new(vec![0, 0, 0, 1, 2, 3, 0, 0]); + let res = VecM::::read_xdr(&mut Limited::new(buf, Limits::none())); + match res { + Err(Error::NonZeroPadding) => (), + _ => panic!("expected NonZeroPadding got {res:?}"), + } + } + + #[test] + pub fn vec_u8_write_without_padding() { + let mut buf = vec![]; + let v: VecM = vec![2, 2, 2, 2].try_into().unwrap(); + + v.write_xdr(&mut Limited::new(Cursor::new(&mut buf), Limits::none())) + .unwrap(); + assert_eq!(buf, vec![0, 0, 0, 4, 2, 2, 2, 2]); + } + + #[test] + pub fn vec_u8_write_with_padding() { + let mut buf = vec![]; + let v: VecM = vec![2].try_into().unwrap(); + v.write_xdr(&mut Limited::new(Cursor::new(&mut buf), Limits::none())) + .unwrap(); + assert_eq!(buf, vec![0, 0, 0, 1, 2, 0, 0, 0]); + } + + #[test] + pub fn arr_u8_read_without_padding() { + let buf = Cursor::new(vec![2, 2, 2, 2]); + let v = <[u8; 4]>::read_xdr(&mut Limited::new(buf, Limits::none())).unwrap(); + assert_eq!(v, [2, 2, 2, 2]); + } + + #[test] + pub fn arr_u8_read_with_padding() { + let buf = Cursor::new(vec![2, 0, 0, 0]); + let v = <[u8; 1]>::read_xdr(&mut Limited::new(buf, Limits::none())).unwrap(); + assert_eq!(v, [2]); + } + + #[test] + pub fn arr_u8_read_with_insufficient_padding() { + let buf = Cursor::new(vec![2, 0, 0]); + let res = <[u8; 1]>::read_xdr(&mut Limited::new(buf, Limits::none())); + match res { + Err(Error::Io(_)) => (), + _ => panic!("expected IO error got {res:?}"), + } + } + + #[test] + pub fn arr_u8_read_with_non_zero_padding() { + let buf = Cursor::new(vec![2, 3, 0, 0]); + let res = <[u8; 1]>::read_xdr(&mut Limited::new(buf, Limits::none())); + match res { + Err(Error::NonZeroPadding) => (), + _ => panic!("expected NonZeroPadding got {res:?}"), + } + } + + #[test] + pub fn arr_u8_write_without_padding() { + let mut buf = vec![]; + [2u8, 2, 2, 2] + .write_xdr(&mut Limited::new(Cursor::new(&mut buf), Limits::none())) + .unwrap(); + assert_eq!(buf, vec![2, 2, 2, 2]); + } + + #[test] + pub fn arr_u8_write_with_padding() { + let mut buf = vec![]; + [2u8] + .write_xdr(&mut Limited::new(Cursor::new(&mut buf), Limits::none())) + .unwrap(); + assert_eq!(buf, vec![2, 0, 0, 0]); + } +} + +#[cfg(all(test, feature = "std"))] +mod test { + use super::*; + + #[test] + fn into_option_none() { + let v: VecM = vec![].try_into().unwrap(); + assert_eq!(v.into_option(), None); + } + + #[test] + fn into_option_some() { + let v: VecM<_, 1> = vec![1].try_into().unwrap(); + assert_eq!(v.into_option(), Some(1)); + } + + #[test] + fn to_option_none() { + let v: VecM = vec![].try_into().unwrap(); + assert_eq!(v.to_option(), None); + } + + #[test] + fn 
to_option_some() { + let v: VecM<_, 1> = vec![1].try_into().unwrap(); + assert_eq!(v.to_option(), Some(1)); + } + + #[test] + fn depth_limited_read_write_under_the_limit_success() { + let a: Option>> = Some(Some(Some(5))); + let mut buf = Limited::new(Vec::new(), Limits::depth(4)); + a.write_xdr(&mut buf).unwrap(); + + let mut dlr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::depth(4)); + let a_back: Option>> = ReadXdr::read_xdr(&mut dlr).unwrap(); + assert_eq!(a, a_back); + } + + #[test] + fn write_over_depth_limit_fail() { + let a: Option>> = Some(Some(Some(5))); + let mut buf = Limited::new(Vec::new(), Limits::depth(3)); + let res = a.write_xdr(&mut buf); + match res { + Err(Error::DepthLimitExceeded) => (), + _ => panic!("expected DepthLimitExceeded got {res:?}"), + } + } + + #[test] + fn read_over_depth_limit_fail() { + let read_limits = Limits::depth(3); + let write_limits = Limits::depth(5); + let a: Option>> = Some(Some(Some(5))); + let mut buf = Limited::new(Vec::new(), write_limits); + a.write_xdr(&mut buf).unwrap(); + + let mut dlr = Limited::new(Cursor::new(buf.inner.as_slice()), read_limits); + let res: Result>>> = ReadXdr::read_xdr(&mut dlr); + match res { + Err(Error::DepthLimitExceeded) => (), + _ => panic!("expected DepthLimitExceeded got {res:?}"), + } + } + + #[test] + fn length_limited_read_write_i32() { + // Exact limit, success + let v = 123i32; + let mut buf = Limited::new(Vec::new(), Limits::len(4)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(4)); + let v_back: i32 = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 0); + assert_eq!(v, v_back); + + // Over limit, success + let v = 123i32; + let mut buf = Limited::new(Vec::new(), Limits::len(5)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 1); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(5)); + let v_back: i32 = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 1); + assert_eq!(v, v_back); + + // Write under limit, failure + let v = 123i32; + let mut buf = Limited::new(Vec::new(), Limits::len(3)); + assert_eq!(v.write_xdr(&mut buf), Err(Error::LengthLimitExceeded)); + + // Read under limit, failure + let v = 123i32; + let mut buf = Limited::new(Vec::new(), Limits::len(4)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(3)); + assert_eq!( + ::read_xdr(&mut lr), + Err(Error::LengthLimitExceeded) + ); + } + + #[test] + fn length_limited_read_write_u32() { + // Exact limit, success + let v = 123u32; + let mut buf = Limited::new(Vec::new(), Limits::len(4)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(4)); + let v_back: u32 = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 0); + assert_eq!(v, v_back); + + // Over limit, success + let v = 123u32; + let mut buf = Limited::new(Vec::new(), Limits::len(5)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 1); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(5)); + let v_back: u32 = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 1); + assert_eq!(v, v_back); + + // Write under limit, failure + let v = 123u32; + let mut buf = Limited::new(Vec::new(), Limits::len(3)); + assert_eq!(v.write_xdr(&mut buf), 
Err(Error::LengthLimitExceeded)); + + // Read under limit, failure + let v = 123u32; + let mut buf = Limited::new(Vec::new(), Limits::len(4)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(3)); + assert_eq!( + ::read_xdr(&mut lr), + Err(Error::LengthLimitExceeded) + ); + } + + #[test] + fn length_limited_read_write_i64() { + // Exact limit, success + let v = 123i64; + let mut buf = Limited::new(Vec::new(), Limits::len(8)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(8)); + let v_back: i64 = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 0); + assert_eq!(v, v_back); + + // Over limit, success + let v = 123i64; + let mut buf = Limited::new(Vec::new(), Limits::len(9)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 1); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(9)); + let v_back: i64 = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 1); + assert_eq!(v, v_back); + + // Write under limit, failure + let v = 123i64; + let mut buf = Limited::new(Vec::new(), Limits::len(7)); + assert_eq!(v.write_xdr(&mut buf), Err(Error::LengthLimitExceeded)); + + // Read under limit, failure + let v = 123i64; + let mut buf = Limited::new(Vec::new(), Limits::len(8)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(7)); + assert_eq!( + ::read_xdr(&mut lr), + Err(Error::LengthLimitExceeded) + ); + } + + #[test] + fn length_limited_read_write_u64() { + // Exact limit, success + let v = 123u64; + let mut buf = Limited::new(Vec::new(), Limits::len(8)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(8)); + let v_back: u64 = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 0); + assert_eq!(v, v_back); + + // Over limit, success + let v = 123u64; + let mut buf = Limited::new(Vec::new(), Limits::len(9)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 1); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(9)); + let v_back: u64 = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 1); + assert_eq!(v, v_back); + + // Write under limit, failure + let v = 123u64; + let mut buf = Limited::new(Vec::new(), Limits::len(7)); + assert_eq!(v.write_xdr(&mut buf), Err(Error::LengthLimitExceeded)); + + // Read under limit, failure + let v = 123u64; + let mut buf = Limited::new(Vec::new(), Limits::len(8)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(7)); + assert_eq!( + ::read_xdr(&mut lr), + Err(Error::LengthLimitExceeded) + ); + } + + #[test] + fn length_limited_read_write_bool() { + // Exact limit, success + let v = true; + let mut buf = Limited::new(Vec::new(), Limits::len(4)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(4)); + let v_back: bool = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 0); + assert_eq!(v, v_back); + + // Over limit, success + let v = true; + let mut buf = Limited::new(Vec::new(), Limits::len(5)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 1); + let mut 
lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(5)); + let v_back: bool = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 1); + assert_eq!(v, v_back); + + // Write under limit, failure + let v = true; + let mut buf = Limited::new(Vec::new(), Limits::len(3)); + assert_eq!(v.write_xdr(&mut buf), Err(Error::LengthLimitExceeded)); + + // Read under limit, failure + let v = true; + let mut buf = Limited::new(Vec::new(), Limits::len(4)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(3)); + assert_eq!( + ::read_xdr(&mut lr), + Err(Error::LengthLimitExceeded) + ); + } + + #[test] + fn length_limited_read_write_option() { + // Exact limit, success + let v = Some(true); + let mut buf = Limited::new(Vec::new(), Limits::len(8)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(8)); + let v_back: Option = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 0); + assert_eq!(v, v_back); + + // Over limit, success + let v = Some(true); + let mut buf = Limited::new(Vec::new(), Limits::len(9)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 1); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(9)); + let v_back: Option = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 1); + assert_eq!(v, v_back); + + // Write under limit, failure + let v = Some(true); + let mut buf = Limited::new(Vec::new(), Limits::len(7)); + assert_eq!(v.write_xdr(&mut buf), Err(Error::LengthLimitExceeded)); + + // Read under limit, failure + let v = Some(true); + let mut buf = Limited::new(Vec::new(), Limits::len(8)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(7)); + assert_eq!( + as ReadXdr>::read_xdr(&mut lr), + Err(Error::LengthLimitExceeded) + ); + } + + #[test] + fn length_limited_read_write_array_u8() { + // Exact limit, success + let v = [1u8, 2, 3]; + let mut buf = Limited::new(Vec::new(), Limits::len(4)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(4)); + let v_back: [u8; 3] = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 0); + assert_eq!(v, v_back); + + // Over limit, success + let v = [1u8, 2, 3]; + let mut buf = Limited::new(Vec::new(), Limits::len(5)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 1); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(5)); + let v_back: [u8; 3] = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 1); + assert_eq!(v, v_back); + + // Write under limit, failure + let v = [1u8, 2, 3]; + let mut buf = Limited::new(Vec::new(), Limits::len(3)); + assert_eq!(v.write_xdr(&mut buf), Err(Error::LengthLimitExceeded)); + + // Read under limit, failure + let v = [1u8, 2, 3]; + let mut buf = Limited::new(Vec::new(), Limits::len(4)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(3)); + assert_eq!( + <[u8; 3] as ReadXdr>::read_xdr(&mut lr), + Err(Error::LengthLimitExceeded) + ); + } + + #[test] + fn length_limited_read_write_array_type() { + // Exact limit, success + let v = [true, false, true]; + let mut buf = Limited::new(Vec::new(), Limits::len(12)); + 
v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(12)); + let v_back: [bool; 3] = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 0); + assert_eq!(v, v_back); + + // Over limit, success + let v = [true, false, true]; + let mut buf = Limited::new(Vec::new(), Limits::len(13)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 1); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(13)); + let v_back: [bool; 3] = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 1); + assert_eq!(v, v_back); + + // Write under limit, failure + let v = [true, false, true]; + let mut buf = Limited::new(Vec::new(), Limits::len(11)); + assert_eq!(v.write_xdr(&mut buf), Err(Error::LengthLimitExceeded)); + + // Read under limit, failure + let v = [true, false, true]; + let mut buf = Limited::new(Vec::new(), Limits::len(12)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(11)); + assert_eq!( + <[bool; 3] as ReadXdr>::read_xdr(&mut lr), + Err(Error::LengthLimitExceeded) + ); + } + + #[test] + fn length_limited_read_write_vec() { + // Exact limit, success + let v = VecM::::try_from([1i32, 2, 3]).unwrap(); + let mut buf = Limited::new(Vec::new(), Limits::len(16)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(16)); + let v_back: VecM = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 0); + assert_eq!(v, v_back); + + // Over limit, success + let v = VecM::::try_from([1i32, 2, 3]).unwrap(); + let mut buf = Limited::new(Vec::new(), Limits::len(17)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 1); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(17)); + let v_back: VecM = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 1); + assert_eq!(v, v_back); + + // Write under limit, failure + let v = VecM::::try_from([1i32, 2, 3]).unwrap(); + let mut buf = Limited::new(Vec::new(), Limits::len(15)); + assert_eq!(v.write_xdr(&mut buf), Err(Error::LengthLimitExceeded)); + + // Read under limit, failure + let v = VecM::::try_from([1i32, 2, 3]).unwrap(); + let mut buf = Limited::new(Vec::new(), Limits::len(16)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(15)); + assert_eq!( + as ReadXdr>::read_xdr(&mut lr), + Err(Error::LengthLimitExceeded) + ); + } + + #[test] + fn length_limited_read_write_bytes() { + // Exact limit, success + let v = BytesM::<3>::try_from([1u8, 2, 3]).unwrap(); + let mut buf = Limited::new(Vec::new(), Limits::len(8)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(8)); + let v_back: BytesM<3> = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 0); + assert_eq!(v, v_back); + + // Over limit, success + let v = BytesM::<3>::try_from([1u8, 2, 3]).unwrap(); + let mut buf = Limited::new(Vec::new(), Limits::len(9)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 1); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(9)); + let v_back: BytesM<3> = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 1); + assert_eq!(v, v_back); + + // Write under 
limit, failure + let v = BytesM::<3>::try_from([1u8, 2, 3]).unwrap(); + let mut buf = Limited::new(Vec::new(), Limits::len(7)); + assert_eq!(v.write_xdr(&mut buf), Err(Error::LengthLimitExceeded)); + + // Read under limit, failure + let v = BytesM::<3>::try_from([1u8, 2, 3]).unwrap(); + let mut buf = Limited::new(Vec::new(), Limits::len(8)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(7)); + assert_eq!( + as ReadXdr>::read_xdr(&mut lr), + Err(Error::LengthLimitExceeded) + ); + } + + #[test] + fn length_limited_read_write_string() { + // Exact limit, success + let v = StringM::<3>::try_from("123").unwrap(); + let mut buf = Limited::new(Vec::new(), Limits::len(8)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(8)); + let v_back: StringM<3> = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 0); + assert_eq!(v, v_back); + + // Over limit, success + let v = StringM::<3>::try_from("123").unwrap(); + let mut buf = Limited::new(Vec::new(), Limits::len(9)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 1); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(9)); + let v_back: StringM<3> = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 1); + assert_eq!(v, v_back); + + // Write under limit, failure + let v = StringM::<3>::try_from("123").unwrap(); + let mut buf = Limited::new(Vec::new(), Limits::len(7)); + assert_eq!(v.write_xdr(&mut buf), Err(Error::LengthLimitExceeded)); + + // Read under limit, failure + let v = StringM::<3>::try_from("123").unwrap(); + let mut buf = Limited::new(Vec::new(), Limits::len(8)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(7)); + assert_eq!( + as ReadXdr>::read_xdr(&mut lr), + Err(Error::LengthLimitExceeded) + ); + } +} + +#[cfg(all(test, not(feature = "alloc")))] +mod test { + use super::VecM; + + #[test] + fn to_option_none() { + let v: VecM = (&[]).try_into().unwrap(); + assert_eq!(v.to_option(), None); + } + + #[test] + fn to_option_some() { + let v: VecM<_, 1> = (&[1]).try_into().unwrap(); + assert_eq!(v.to_option(), Some(1)); + } +} + +/// AccountFlags is an XDR Enum defines as: +/// +/// ```text +/// enum AccountFlags +/// { // masks for each flag +/// AUTH_REQUIRED_FLAG = 0x1 +/// }; +/// ``` +/// +// enum +#[derive(Clone, Copy, Debug, Hash, PartialEq, Eq, PartialOrd, Ord)] +#[cfg_attr(feature = "arbitrary", derive(Arbitrary))] +#[cfg_attr(all(feature = "serde", feature = "alloc"), derive(serde::Serialize, serde::Deserialize), serde(rename_all = "snake_case"))] +#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] +#[repr(i32)] +pub enum AccountFlags { + AuthRequiredFlag = 1, +} + +impl AccountFlags { + pub const VARIANTS: [AccountFlags; 1] = [ AccountFlags::AuthRequiredFlag, ]; + pub const VARIANTS_STR: [&'static str; 1] = [ "AuthRequiredFlag", ]; + + #[must_use] + pub const fn name(&self) -> &'static str { + match self { + Self::AuthRequiredFlag => "AuthRequiredFlag", + } + } + + #[must_use] + pub const fn variants() -> [AccountFlags; 1] { + Self::VARIANTS + } +} + +impl Name for AccountFlags { + #[must_use] + fn name(&self) -> &'static str { + Self::name(self) + } +} + +impl Variants for AccountFlags { + fn variants() -> slice::Iter<'static, AccountFlags> { + Self::VARIANTS.iter() + } 
+} + +impl Enum for AccountFlags {} + +impl fmt::Display for AccountFlags { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.write_str(self.name()) + } +} + +impl TryFrom for AccountFlags { + type Error = Error; + + fn try_from(i: i32) -> Result { + let e = match i { + 1 => AccountFlags::AuthRequiredFlag, + #[allow(unreachable_patterns)] + _ => return Err(Error::Invalid), + }; + Ok(e) + } +} + +impl From for i32 { + #[must_use] + fn from(e: AccountFlags) -> Self { + e as Self + } +} + +impl ReadXdr for AccountFlags { + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result { + r.with_limited_depth(|r| { + let e = i32::read_xdr(r)?; + let v: Self = e.try_into()?; + Ok(v) + }) + } +} + +impl WriteXdr for AccountFlags { + #[cfg(feature = "std")] + fn write_xdr(&self, w: &mut Limited) -> Result<()> { + w.with_limited_depth(|w| { + let i: i32 = (*self).into(); + i.write_xdr(w) + }) + } +} + +#[derive(Clone, Copy, Debug, Hash, PartialEq, Eq, PartialOrd, Ord)] +#[cfg_attr( + all(feature = "serde", feature = "alloc"), + derive(serde::Serialize, serde::Deserialize), + serde(rename_all = "snake_case") +)] +#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] +pub enum TypeVariant { + AccountFlags, +} + +impl TypeVariant { + pub const VARIANTS: [TypeVariant; 1] = [ TypeVariant::AccountFlags, ]; + pub const VARIANTS_STR: [&'static str; 1] = [ "AccountFlags", ]; + + #[must_use] + #[allow(clippy::too_many_lines)] + pub const fn name(&self) -> &'static str { + match self { + Self::AccountFlags => "AccountFlags", + } + } + + #[must_use] + #[allow(clippy::too_many_lines)] + pub const fn variants() -> [TypeVariant; 1] { + Self::VARIANTS + } + + #[cfg(feature = "schemars")] + #[must_use] + #[allow(clippy::too_many_lines)] + pub fn json_schema(&self, gen: schemars::gen::SchemaGenerator) -> schemars::schema::RootSchema { + match self { + Self::AccountFlags => gen.into_root_schema_for::(), + } + } +} + +impl Name for TypeVariant { + #[must_use] + fn name(&self) -> &'static str { + Self::name(self) + } +} + +impl Variants for TypeVariant { + fn variants() -> slice::Iter<'static, TypeVariant> { + Self::VARIANTS.iter() + } +} + +impl core::str::FromStr for TypeVariant { + type Err = Error; + #[allow(clippy::too_many_lines)] + fn from_str(s: &str) -> Result { + match s { + "AccountFlags" => Ok(Self::AccountFlags), + _ => Err(Error::Invalid), + } + } +} + +#[derive(Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord)] +#[cfg_attr( + all(feature = "serde", feature = "alloc"), + derive(serde::Serialize, serde::Deserialize), + serde(rename_all = "snake_case"), + serde(untagged), +)] +#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] +pub enum Type { + AccountFlags(Box), +} + +impl Type { + pub const VARIANTS: [TypeVariant; 1] = [ TypeVariant::AccountFlags, ]; + pub const VARIANTS_STR: [&'static str; 1] = [ "AccountFlags", ]; + + #[cfg(feature = "std")] + #[allow(clippy::too_many_lines)] + pub fn read_xdr(v: TypeVariant, r: &mut Limited) -> Result { + match v { + TypeVariant::AccountFlags => r.with_limited_depth(|r| Ok(Self::AccountFlags(Box::new(AccountFlags::read_xdr(r)?)))), + } + } + + #[cfg(feature = "base64")] + pub fn read_xdr_base64(v: TypeVariant, r: &mut Limited) -> Result { + let mut dec = Limited::new(base64::read::DecoderReader::new(&mut r.inner, base64::STANDARD), r.limits.clone()); + let t = Self::read_xdr(v, &mut dec)?; + Ok(t) + } + + #[cfg(feature = "std")] + pub fn read_xdr_to_end(v: TypeVariant, r: &mut Limited) -> Result { + let s = Self::read_xdr(v, 
r)?; + // Check that any further reads, such as this read of one byte, read no + // data, indicating EOF. If a byte is read the data is invalid. + if r.read(&mut [0u8; 1])? == 0 { + Ok(s) + } else { + Err(Error::Invalid) + } + } + + #[cfg(feature = "base64")] + pub fn read_xdr_base64_to_end(v: TypeVariant, r: &mut Limited) -> Result { + let mut dec = Limited::new(base64::read::DecoderReader::new(&mut r.inner, base64::STANDARD), r.limits.clone()); + let t = Self::read_xdr_to_end(v, &mut dec)?; + Ok(t) + } + + #[cfg(feature = "std")] + #[allow(clippy::too_many_lines)] + pub fn read_xdr_iter(v: TypeVariant, r: &mut Limited) -> Box> + '_> { + match v { + TypeVariant::AccountFlags => Box::new(ReadXdrIter::<_, AccountFlags>::new(&mut r.inner, r.limits.clone()).map(|r| r.map(|t| Self::AccountFlags(Box::new(t))))), + } + } + + #[cfg(feature = "std")] + #[allow(clippy::too_many_lines)] + pub fn read_xdr_framed_iter(v: TypeVariant, r: &mut Limited) -> Box> + '_> { + match v { + TypeVariant::AccountFlags => Box::new(ReadXdrIter::<_, Frame>::new(&mut r.inner, r.limits.clone()).map(|r| r.map(|t| Self::AccountFlags(Box::new(t.0))))), + } + } + + #[cfg(feature = "base64")] + #[allow(clippy::too_many_lines)] + pub fn read_xdr_base64_iter(v: TypeVariant, r: &mut Limited) -> Box> + '_> { + let dec = base64::read::DecoderReader::new(&mut r.inner, base64::STANDARD); + match v { + TypeVariant::AccountFlags => Box::new(ReadXdrIter::<_, AccountFlags>::new(dec, r.limits.clone()).map(|r| r.map(|t| Self::AccountFlags(Box::new(t))))), + } + } + + #[cfg(feature = "std")] + pub fn from_xdr>(v: TypeVariant, bytes: B, limits: Limits) -> Result { + let mut cursor = Limited::new(Cursor::new(bytes.as_ref()), limits); + let t = Self::read_xdr_to_end(v, &mut cursor)?; + Ok(t) + } + + #[cfg(feature = "base64")] + pub fn from_xdr_base64(v: TypeVariant, b64: impl AsRef<[u8]>, limits: Limits) -> Result { + let mut b64_reader = Cursor::new(b64); + let mut dec = Limited::new(base64::read::DecoderReader::new(&mut b64_reader, base64::STANDARD), limits); + let t = Self::read_xdr_to_end(v, &mut dec)?; + Ok(t) + } + + #[cfg(all(feature = "std", feature = "serde_json"))] + #[allow(clippy::too_many_lines)] + pub fn read_json(v: TypeVariant, r: impl Read) -> Result { + match v { + TypeVariant::AccountFlags => Ok(Self::AccountFlags(Box::new(serde_json::from_reader(r)?))), + } + } + + #[cfg(feature = "alloc")] + #[must_use] + #[allow(clippy::too_many_lines)] + pub fn value(&self) -> &dyn core::any::Any { + #[allow(clippy::match_same_arms)] + match self { + Self::AccountFlags(ref v) => v.as_ref(), + } + } + + #[must_use] + #[allow(clippy::too_many_lines)] + pub const fn name(&self) -> &'static str { + match self { + Self::AccountFlags(_) => "AccountFlags", + } + } + + #[must_use] + #[allow(clippy::too_many_lines)] + pub const fn variants() -> [TypeVariant; 1] { + Self::VARIANTS + } + + #[must_use] + #[allow(clippy::too_many_lines)] + pub const fn variant(&self) -> TypeVariant { + match self { + Self::AccountFlags(_) => TypeVariant::AccountFlags, + } + } +} + +impl Name for Type { + #[must_use] + fn name(&self) -> &'static str { + Self::name(self) + } +} + +impl Variants for Type { + fn variants() -> slice::Iter<'static, TypeVariant> { + Self::VARIANTS.iter() + } +} + +impl WriteXdr for Type { + #[cfg(feature = "std")] + #[allow(clippy::too_many_lines)] + fn write_xdr(&self, w: &mut Limited) -> Result<()> { + match self { + Self::AccountFlags(v) => v.write_xdr(w), + } + } +} diff --git 
a/spec/output/generator_spec_rust_custom_jsonschema_impls/const.x/MyXDR.rs b/spec/output/generator_spec_rust_custom_jsonschema_impls/const.x/MyXDR.rs new file mode 100644 index 000000000..80670d3ce --- /dev/null +++ b/spec/output/generator_spec_rust_custom_jsonschema_impls/const.x/MyXDR.rs @@ -0,0 +1,3038 @@ +// Module is generated from: +// spec/fixtures/generator/const.x + +#![allow(clippy::missing_errors_doc, clippy::unreadable_literal)] + +/// `XDR_FILES_SHA256` is a list of pairs of source files and their SHA256 hashes. +pub const XDR_FILES_SHA256: [(&str, &str); 1] = [ + ("spec/fixtures/generator/const.x", "0bff3b37592fcc16cad2fe10b9a72f5d39d033a114917c24e86a9ebd9cda9c37") +]; + +use core::{array::TryFromSliceError, fmt, fmt::Debug, marker::Sized, ops::Deref, slice}; + +#[cfg(feature = "std")] +use core::marker::PhantomData; + +// When feature alloc is turned off use static lifetime Box and Vec types. +#[cfg(not(feature = "alloc"))] +mod noalloc { + pub mod boxed { + pub type Box = &'static T; + } + pub mod vec { + pub type Vec = &'static [T]; + } +} +#[cfg(not(feature = "alloc"))] +use noalloc::{boxed::Box, vec::Vec}; + +// When feature std is turned off, but feature alloc is turned on import the +// alloc crate and use its Box and Vec types. +#[cfg(all(not(feature = "std"), feature = "alloc"))] +extern crate alloc; +#[cfg(all(not(feature = "std"), feature = "alloc"))] +use alloc::{ + borrow::ToOwned, + boxed::Box, + string::{FromUtf8Error, String}, + vec::Vec, +}; +#[cfg(feature = "std")] +use std::string::FromUtf8Error; + +#[cfg(feature = "arbitrary")] +use arbitrary::Arbitrary; + +// TODO: Add support for read/write xdr fns when std not available. + +#[cfg(feature = "std")] +use std::{ + error, io, + io::{BufRead, BufReader, Cursor, Read, Write}, +}; + +/// Error contains all errors returned by functions in this crate. It can be +/// compared via `PartialEq`, however any contained IO errors will only be +/// compared on their `ErrorKind`. +#[derive(Debug)] +pub enum Error { + Invalid, + Unsupported, + LengthExceedsMax, + LengthMismatch, + NonZeroPadding, + Utf8Error(core::str::Utf8Error), + #[cfg(feature = "alloc")] + InvalidHex, + #[cfg(feature = "std")] + Io(io::Error), + DepthLimitExceeded, + #[cfg(feature = "serde_json")] + Json(serde_json::Error), + LengthLimitExceeded, +} + +impl PartialEq for Error { + fn eq(&self, other: &Self) -> bool { + match (self, other) { + (Self::Utf8Error(l), Self::Utf8Error(r)) => l == r, + // IO errors cannot be compared, but in the absence of any more + // meaningful way to compare the errors we compare the kind of error + // and ignore the embedded source error or OS error. The main use + // case for comparing errors outputted by the XDR library is for + // error case testing, and a lack of the ability to compare has a + // detrimental affect on failure testing, so this is a tradeoff. 
+ #[cfg(feature = "std")] + (Self::Io(l), Self::Io(r)) => l.kind() == r.kind(), + _ => core::mem::discriminant(self) == core::mem::discriminant(other), + } + } +} + +#[cfg(feature = "std")] +impl error::Error for Error { + #[must_use] + fn source(&self) -> Option<&(dyn error::Error + 'static)> { + match self { + Self::Io(e) => Some(e), + #[cfg(feature = "serde_json")] + Self::Json(e) => Some(e), + _ => None, + } + } +} + +impl fmt::Display for Error { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match self { + Error::Invalid => write!(f, "xdr value invalid"), + Error::Unsupported => write!(f, "xdr value unsupported"), + Error::LengthExceedsMax => write!(f, "xdr value max length exceeded"), + Error::LengthMismatch => write!(f, "xdr value length does not match"), + Error::NonZeroPadding => write!(f, "xdr padding contains non-zero bytes"), + Error::Utf8Error(e) => write!(f, "{e}"), + #[cfg(feature = "alloc")] + Error::InvalidHex => write!(f, "hex invalid"), + #[cfg(feature = "std")] + Error::Io(e) => write!(f, "{e}"), + Error::DepthLimitExceeded => write!(f, "depth limit exceeded"), + #[cfg(feature = "serde_json")] + Error::Json(e) => write!(f, "{e}"), + Error::LengthLimitExceeded => write!(f, "length limit exceeded"), + } + } +} + +impl From for Error { + fn from(_: TryFromSliceError) -> Error { + Error::LengthMismatch + } +} + +impl From for Error { + #[must_use] + fn from(e: core::str::Utf8Error) -> Self { + Error::Utf8Error(e) + } +} + +#[cfg(feature = "alloc")] +impl From for Error { + #[must_use] + fn from(e: FromUtf8Error) -> Self { + Error::Utf8Error(e.utf8_error()) + } +} + +#[cfg(feature = "std")] +impl From for Error { + #[must_use] + fn from(e: io::Error) -> Self { + Error::Io(e) + } +} + +#[cfg(feature = "serde_json")] +impl From for Error { + #[must_use] + fn from(e: serde_json::Error) -> Self { + Error::Json(e) + } +} + +impl From for () { + fn from(_: Error) {} +} + +#[allow(dead_code)] +type Result = core::result::Result; + +/// Name defines types that assign a static name to their value, such as the +/// name given to an identifier in an XDR enum, or the name given to the case in +/// a union. +pub trait Name { + fn name(&self) -> &'static str; +} + +/// Discriminant defines types that may contain a one-of value determined +/// according to the discriminant, and exposes the value of the discriminant for +/// that type, such as in an XDR union. +pub trait Discriminant { + fn discriminant(&self) -> D; +} + +/// Iter defines types that have variants that can be iterated. +pub trait Variants { + fn variants() -> slice::Iter<'static, V> + where + V: Sized; +} + +// Enum defines a type that is represented as an XDR enumeration when encoded. +pub trait Enum: Name + Variants + Sized {} + +// Union defines a type that is represented as an XDR union when encoded. +pub trait Union: Name + Discriminant + Variants +where + D: Sized, +{ +} + +/// `Limits` contains the limits that a limited reader or writer will be +/// constrained to. +#[cfg(feature = "std")] +#[derive(Debug, Clone, Hash, PartialEq, Eq, PartialOrd, Ord)] +pub struct Limits { + /// Defines the maximum depth for recursive calls in `Read/WriteXdr` to + /// prevent stack overflow. + /// + /// The depth limit is akin to limiting stack depth. Its purpose is to + /// prevent the program from hitting the maximum stack size allowed by Rust, + /// which would result in an unrecoverable `SIGABRT`. 
For more information + /// about Rust's stack size limit, refer to the [Rust + /// documentation](https://doc.rust-lang.org/std/thread/#stack-size). + pub depth: u32, + + /// Defines the maximum number of bytes that will be read or written. + pub len: usize, +} + +#[cfg(feature = "std")] +impl Limits { + #[must_use] + pub fn none() -> Self { + Self { + depth: u32::MAX, + len: usize::MAX, + } + } + + #[must_use] + pub fn depth(depth: u32) -> Self { + Limits { + depth, + ..Limits::none() + } + } + + #[must_use] + pub fn len(len: usize) -> Self { + Limits { + len, + ..Limits::none() + } + } +} + +/// `Limited` wraps an object and provides functions for enforcing limits. +/// +/// Intended for use with readers and writers and limiting their reads and +/// writes. +#[cfg(feature = "std")] +pub struct Limited { + pub inner: L, + pub(crate) limits: Limits, +} + +#[cfg(feature = "std")] +impl Limited { + /// Constructs a new `Limited`. + /// + /// - `inner`: The value being limited. + /// - `limits`: The limits to enforce. + pub fn new(inner: L, limits: Limits) -> Self { + Limited { inner, limits } + } + + /// Consume the given length from the internal remaining length limit. + /// + /// ### Errors + /// + /// If the length would consume more length than the remaining length limit + /// allows. + pub(crate) fn consume_len(&mut self, len: usize) -> Result<()> { + if let Some(len) = self.limits.len.checked_sub(len) { + self.limits.len = len; + Ok(()) + } else { + Err(Error::LengthLimitExceeded) + } + } + + /// Consumes a single depth for the duration of the given function. + /// + /// ### Errors + /// + /// If the depth limit is already exhausted. + pub(crate) fn with_limited_depth(&mut self, f: F) -> Result + where + F: FnOnce(&mut Self) -> Result, + { + if let Some(depth) = self.limits.depth.checked_sub(1) { + self.limits.depth = depth; + let res = f(self); + self.limits.depth = self.limits.depth.saturating_add(1); + res + } else { + Err(Error::DepthLimitExceeded) + } + } +} + +#[cfg(feature = "std")] +impl Read for Limited { + /// Forwards the read operation to the wrapped object. + fn read(&mut self, buf: &mut [u8]) -> std::io::Result { + self.inner.read(buf) + } +} + +#[cfg(feature = "std")] +impl BufRead for Limited { + /// Forwards the read operation to the wrapped object. + fn fill_buf(&mut self) -> std::io::Result<&[u8]> { + self.inner.fill_buf() + } + + /// Forwards the read operation to the wrapped object. + fn consume(&mut self, amt: usize) { + self.inner.consume(amt); + } +} + +#[cfg(feature = "std")] +impl Write for Limited { + /// Forwards the write operation to the wrapped object. + fn write(&mut self, buf: &[u8]) -> std::io::Result { + self.inner.write(buf) + } + + /// Forwards the flush operation to the wrapped object. + fn flush(&mut self) -> std::io::Result<()> { + self.inner.flush() + } +} + +#[cfg(feature = "std")] +pub struct ReadXdrIter { + reader: Limited>, + _s: PhantomData, +} + +#[cfg(feature = "std")] +impl ReadXdrIter { + fn new(r: R, limits: Limits) -> Self { + Self { + reader: Limited { + inner: BufReader::new(r), + limits, + }, + _s: PhantomData, + } + } +} + +#[cfg(feature = "std")] +impl Iterator for ReadXdrIter { + type Item = Result; + + // Next reads the internal reader and XDR decodes it into the Self type. If + // the EOF is reached without reading any new bytes `None` is returned. If + // EOF is reached after reading some bytes a truncated entry is assumed an + // an `Error::Io` containing an `UnexpectedEof`. If any other IO error + // occurs it is returned. 
Iteration of this iterator stops naturally when + // `None` is returned, but not when a `Some(Err(...))` is returned. The + // caller is responsible for checking each Result. + fn next(&mut self) -> Option { + // Try to fill the buffer to see if the EOF has been reached or not. + // This happens to effectively peek to see if the stream has finished + // and there are no more items. It is necessary to do this because the + // xdr types in this crate heavily use the `std::io::Read::read_exact` + // method that doesn't distinguish between an EOF at the beginning of a + // read and an EOF after a partial fill of a read_exact. + match self.reader.fill_buf() { + // If the reader has no more data and is unable to fill any new data + // into its internal buf, then the EOF has been reached. + Ok([]) => return None, + // If an error occurs filling the buffer, treat that as an error and stop. + Err(e) => return Some(Err(Error::Io(e))), + // If there is data in the buf available for reading, continue. + Ok([..]) => (), + }; + // Read the buf into the type. + let r = self.reader.with_limited_depth(|dlr| S::read_xdr(dlr)); + match r { + Ok(s) => Some(Ok(s)), + Err(e) => Some(Err(e)), + } + } +} + +pub trait ReadXdr +where + Self: Sized, +{ + /// Read the XDR and construct the type. + /// + /// Read bytes from the given read implementation, decoding the bytes as + /// XDR, and construct the type implementing this interface from those + /// bytes. + /// + /// Just enough bytes are read from the read implementation to construct the + /// type. Any residual bytes remain in the read implementation. + /// + /// All implementations should continue if the read implementation returns + /// [`ErrorKind::Interrupted`](std::io::ErrorKind::Interrupted). + /// + /// Use [`ReadXdR: Read_xdr_to_end`] when the intent is for all bytes in the + /// read implementation to be consumed by the read. + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result; + + /// Construct the type from the XDR bytes base64 encoded. + /// + /// An error is returned if the bytes are not completely consumed by the + /// deserialization. + #[cfg(feature = "base64")] + fn read_xdr_base64(r: &mut Limited) -> Result { + let mut dec = Limited::new( + base64::read::DecoderReader::new(&mut r.inner, base64::STANDARD), + r.limits.clone(), + ); + let t = Self::read_xdr(&mut dec)?; + Ok(t) + } + + /// Read the XDR and construct the type, and consider it an error if the + /// read does not completely consume the read implementation. + /// + /// Read bytes from the given read implementation, decoding the bytes as + /// XDR, and construct the type implementing this interface from those + /// bytes. + /// + /// Just enough bytes are read from the read implementation to construct the + /// type, and then confirm that no further bytes remain. To confirm no + /// further bytes remain additional bytes are attempted to be read from the + /// read implementation. If it is possible to read any residual bytes from + /// the read implementation an error is returned. The read implementation + /// may not be exhaustively read if there are residual bytes, and it is + /// considered undefined how many residual bytes or how much of the residual + /// buffer are consumed in this case. + /// + /// All implementations should continue if the read implementation returns + /// [`ErrorKind::Interrupted`](std::io::ErrorKind::Interrupted). 
+ #[cfg(feature = "std")] + fn read_xdr_to_end(r: &mut Limited) -> Result { + let s = Self::read_xdr(r)?; + // Check that any further reads, such as this read of one byte, read no + // data, indicating EOF. If a byte is read the data is invalid. + if r.read(&mut [0u8; 1])? == 0 { + Ok(s) + } else { + Err(Error::Invalid) + } + } + + /// Construct the type from the XDR bytes base64 encoded. + /// + /// An error is returned if the bytes are not completely consumed by the + /// deserialization. + #[cfg(feature = "base64")] + fn read_xdr_base64_to_end(r: &mut Limited) -> Result { + let mut dec = Limited::new( + base64::read::DecoderReader::new(&mut r.inner, base64::STANDARD), + r.limits.clone(), + ); + let t = Self::read_xdr_to_end(&mut dec)?; + Ok(t) + } + + /// Read the XDR and construct the type. + /// + /// Read bytes from the given read implementation, decoding the bytes as + /// XDR, and construct the type implementing this interface from those + /// bytes. + /// + /// Just enough bytes are read from the read implementation to construct the + /// type. Any residual bytes remain in the read implementation. + /// + /// All implementations should continue if the read implementation returns + /// [`ErrorKind::Interrupted`](std::io::ErrorKind::Interrupted). + /// + /// Use [`ReadXdR: Read_xdr_into_to_end`] when the intent is for all bytes + /// in the read implementation to be consumed by the read. + #[cfg(feature = "std")] + fn read_xdr_into(&mut self, r: &mut Limited) -> Result<()> { + *self = Self::read_xdr(r)?; + Ok(()) + } + + /// Read the XDR into the existing value, and consider it an error if the + /// read does not completely consume the read implementation. + /// + /// Read bytes from the given read implementation, decoding the bytes as + /// XDR, and construct the type implementing this interface from those + /// bytes. + /// + /// Just enough bytes are read from the read implementation to construct the + /// type, and then confirm that no further bytes remain. To confirm no + /// further bytes remain additional bytes are attempted to be read from the + /// read implementation. If it is possible to read any residual bytes from + /// the read implementation an error is returned. The read implementation + /// may not be exhaustively read if there are residual bytes, and it is + /// considered undefined how many residual bytes or how much of the residual + /// buffer are consumed in this case. + /// + /// All implementations should continue if the read implementation returns + /// [`ErrorKind::Interrupted`](std::io::ErrorKind::Interrupted). + #[cfg(feature = "std")] + fn read_xdr_into_to_end(&mut self, r: &mut Limited) -> Result<()> { + Self::read_xdr_into(self, r)?; + // Check that any further reads, such as this read of one byte, read no + // data, indicating EOF. If a byte is read the data is invalid. + if r.read(&mut [0u8; 1])? == 0 { + Ok(()) + } else { + Err(Error::Invalid) + } + } + + /// Create an iterator that reads the read implementation as a stream of + /// values that are read into the implementing type. + /// + /// Read bytes from the given read implementation, decoding the bytes as + /// XDR, and construct the type implementing this interface from those + /// bytes. + /// + /// Just enough bytes are read from the read implementation to construct the + /// type, and then confirm that no further bytes remain. To confirm no + /// further bytes remain additional bytes are attempted to be read from the + /// read implementation. 
If it is possible to read any residual bytes from + /// the read implementation an error is returned. The read implementation + /// may not be exhaustively read if there are residual bytes, and it is + /// considered undefined how many residual bytes or how much of the residual + /// buffer are consumed in this case. + /// + /// All implementations should continue if the read implementation returns + /// [`ErrorKind::Interrupted`](std::io::ErrorKind::Interrupted). + #[cfg(feature = "std")] + fn read_xdr_iter(r: &mut Limited) -> ReadXdrIter<&mut R, Self> { + ReadXdrIter::new(&mut r.inner, r.limits.clone()) + } + + /// Create an iterator that reads the read implementation as a stream of + /// values that are read into the implementing type. + #[cfg(feature = "base64")] + fn read_xdr_base64_iter( + r: &mut Limited, + ) -> ReadXdrIter, Self> { + let dec = base64::read::DecoderReader::new(&mut r.inner, base64::STANDARD); + ReadXdrIter::new(dec, r.limits.clone()) + } + + /// Construct the type from the XDR bytes. + /// + /// An error is returned if the bytes are not completely consumed by the + /// deserialization. + #[cfg(feature = "std")] + fn from_xdr(bytes: impl AsRef<[u8]>, limits: Limits) -> Result { + let mut cursor = Limited::new(Cursor::new(bytes.as_ref()), limits); + let t = Self::read_xdr_to_end(&mut cursor)?; + Ok(t) + } + + /// Construct the type from the XDR bytes base64 encoded. + /// + /// An error is returned if the bytes are not completely consumed by the + /// deserialization. + #[cfg(feature = "base64")] + fn from_xdr_base64(b64: impl AsRef<[u8]>, limits: Limits) -> Result { + let mut b64_reader = Cursor::new(b64); + let mut dec = Limited::new( + base64::read::DecoderReader::new(&mut b64_reader, base64::STANDARD), + limits, + ); + let t = Self::read_xdr_to_end(&mut dec)?; + Ok(t) + } +} + +pub trait WriteXdr { + #[cfg(feature = "std")] + fn write_xdr(&self, w: &mut Limited) -> Result<()>; + + #[cfg(feature = "std")] + fn to_xdr(&self, limits: Limits) -> Result> { + let mut cursor = Limited::new(Cursor::new(vec![]), limits); + self.write_xdr(&mut cursor)?; + let bytes = cursor.inner.into_inner(); + Ok(bytes) + } + + #[cfg(feature = "base64")] + fn to_xdr_base64(&self, limits: Limits) -> Result { + let mut enc = Limited::new( + base64::write::EncoderStringWriter::new(base64::STANDARD), + limits, + ); + self.write_xdr(&mut enc)?; + let b64 = enc.inner.into_inner(); + Ok(b64) + } +} + +/// `Pad_len` returns the number of bytes to pad an XDR value of the given +/// length to make the final serialized size a multiple of 4. +#[cfg(feature = "std")] +fn pad_len(len: usize) -> usize { + (4 - (len % 4)) % 4 +} + +impl ReadXdr for i32 { + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result { + let mut b = [0u8; 4]; + r.with_limited_depth(|r| { + r.consume_len(b.len())?; + r.read_exact(&mut b)?; + Ok(i32::from_be_bytes(b)) + }) + } +} + +impl WriteXdr for i32 { + #[cfg(feature = "std")] + fn write_xdr(&self, w: &mut Limited) -> Result<()> { + let b: [u8; 4] = self.to_be_bytes(); + w.with_limited_depth(|w| { + w.consume_len(b.len())?; + Ok(w.write_all(&b)?) 
+ }) + } +} + +impl ReadXdr for u32 { + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result { + let mut b = [0u8; 4]; + r.with_limited_depth(|r| { + r.consume_len(b.len())?; + r.read_exact(&mut b)?; + Ok(u32::from_be_bytes(b)) + }) + } +} + +impl WriteXdr for u32 { + #[cfg(feature = "std")] + fn write_xdr(&self, w: &mut Limited) -> Result<()> { + let b: [u8; 4] = self.to_be_bytes(); + w.with_limited_depth(|w| { + w.consume_len(b.len())?; + Ok(w.write_all(&b)?) + }) + } +} + +impl ReadXdr for i64 { + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result { + let mut b = [0u8; 8]; + r.with_limited_depth(|r| { + r.consume_len(b.len())?; + r.read_exact(&mut b)?; + Ok(i64::from_be_bytes(b)) + }) + } +} + +impl WriteXdr for i64 { + #[cfg(feature = "std")] + fn write_xdr(&self, w: &mut Limited) -> Result<()> { + let b: [u8; 8] = self.to_be_bytes(); + w.with_limited_depth(|w| { + w.consume_len(b.len())?; + Ok(w.write_all(&b)?) + }) + } +} + +impl ReadXdr for u64 { + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result { + let mut b = [0u8; 8]; + r.with_limited_depth(|r| { + r.consume_len(b.len())?; + r.read_exact(&mut b)?; + Ok(u64::from_be_bytes(b)) + }) + } +} + +impl WriteXdr for u64 { + #[cfg(feature = "std")] + fn write_xdr(&self, w: &mut Limited) -> Result<()> { + let b: [u8; 8] = self.to_be_bytes(); + w.with_limited_depth(|w| { + w.consume_len(b.len())?; + Ok(w.write_all(&b)?) + }) + } +} + +impl ReadXdr for f32 { + #[cfg(feature = "std")] + fn read_xdr(_r: &mut Limited) -> Result { + todo!() + } +} + +impl WriteXdr for f32 { + #[cfg(feature = "std")] + fn write_xdr(&self, _w: &mut Limited) -> Result<()> { + todo!() + } +} + +impl ReadXdr for f64 { + #[cfg(feature = "std")] + fn read_xdr(_r: &mut Limited) -> Result { + todo!() + } +} + +impl WriteXdr for f64 { + #[cfg(feature = "std")] + fn write_xdr(&self, _w: &mut Limited) -> Result<()> { + todo!() + } +} + +impl ReadXdr for bool { + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result { + r.with_limited_depth(|r| { + let i = u32::read_xdr(r)?; + let b = i == 1; + Ok(b) + }) + } +} + +impl WriteXdr for bool { + #[cfg(feature = "std")] + fn write_xdr(&self, w: &mut Limited) -> Result<()> { + w.with_limited_depth(|w| { + let i = u32::from(*self); // true = 1, false = 0 + i.write_xdr(w) + }) + } +} + +impl ReadXdr for Option { + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result { + r.with_limited_depth(|r| { + let i = u32::read_xdr(r)?; + match i { + 0 => Ok(None), + 1 => { + let t = T::read_xdr(r)?; + Ok(Some(t)) + } + _ => Err(Error::Invalid), + } + }) + } +} + +impl WriteXdr for Option { + #[cfg(feature = "std")] + fn write_xdr(&self, w: &mut Limited) -> Result<()> { + w.with_limited_depth(|w| { + if let Some(t) = self { + 1u32.write_xdr(w)?; + t.write_xdr(w)?; + } else { + 0u32.write_xdr(w)?; + } + Ok(()) + }) + } +} + +impl ReadXdr for Box { + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result { + r.with_limited_depth(|r| Ok(Box::new(T::read_xdr(r)?))) + } +} + +impl WriteXdr for Box { + #[cfg(feature = "std")] + fn write_xdr(&self, w: &mut Limited) -> Result<()> { + w.with_limited_depth(|w| T::write_xdr(self, w)) + } +} + +impl ReadXdr for () { + #[cfg(feature = "std")] + fn read_xdr(_r: &mut Limited) -> Result { + Ok(()) + } +} + +impl WriteXdr for () { + #[cfg(feature = "std")] + fn write_xdr(&self, _w: &mut Limited) -> Result<()> { + Ok(()) + } +} + +impl ReadXdr for [u8; N] { + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result { + 
r.with_limited_depth(|r| { + r.consume_len(N)?; + let padding = pad_len(N); + r.consume_len(padding)?; + let mut arr = [0u8; N]; + r.read_exact(&mut arr)?; + let pad = &mut [0u8; 3][..padding]; + r.read_exact(pad)?; + if pad.iter().any(|b| *b != 0) { + return Err(Error::NonZeroPadding); + } + Ok(arr) + }) + } +} + +impl WriteXdr for [u8; N] { + #[cfg(feature = "std")] + fn write_xdr(&self, w: &mut Limited) -> Result<()> { + w.with_limited_depth(|w| { + w.consume_len(N)?; + let padding = pad_len(N); + w.consume_len(padding)?; + w.write_all(self)?; + w.write_all(&[0u8; 3][..padding])?; + Ok(()) + }) + } +} + +impl ReadXdr for [T; N] { + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result { + r.with_limited_depth(|r| { + let mut vec = Vec::with_capacity(N); + for _ in 0..N { + let t = T::read_xdr(r)?; + vec.push(t); + } + let arr: [T; N] = vec.try_into().unwrap_or_else(|_: Vec| unreachable!()); + Ok(arr) + }) + } +} + +impl WriteXdr for [T; N] { + #[cfg(feature = "std")] + fn write_xdr(&self, w: &mut Limited) -> Result<()> { + w.with_limited_depth(|w| { + for t in self { + t.write_xdr(w)?; + } + Ok(()) + }) + } +} + +// VecM ------------------------------------------------------------------------ + +#[cfg(feature = "alloc")] +#[derive(Debug, Clone, Hash, PartialEq, Eq, PartialOrd, Ord)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +#[cfg_attr(feature = "arbitrary", derive(Arbitrary))] +pub struct VecM(Vec); + +#[cfg(not(feature = "alloc"))] +#[derive(Debug, Clone, Hash, PartialEq, Eq, PartialOrd, Ord)] +#[cfg_attr(feature = "arbitrary", derive(Arbitrary))] +pub struct VecM(Vec) +where + T: 'static; + +impl Deref for VecM { + type Target = Vec; + + fn deref(&self) -> &Self::Target { + &self.0 + } +} + +impl Default for VecM { + fn default() -> Self { + Self(Vec::default()) + } +} + +#[cfg(feature = "schemars")] +impl schemars::JsonSchema for VecM { + fn schema_name() -> String { + format!("VecM<{}, {}>", T::schema_name(), MAX) + } + + fn is_referenceable() -> bool { + false + } + + fn json_schema(gen: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema { + let schema = Vec::::json_schema(gen); + if let schemars::schema::Schema::Object(mut schema) = schema { + if let Some(array) = schema.array.clone() { + schema.array = Some(Box::new(schemars::schema::ArrayValidation { + max_items: Some(MAX), + ..*array + })); + } + schema.into() + } else { + schema + } + } +} + +impl VecM { + pub const MAX_LEN: usize = { MAX as usize }; + + #[must_use] + #[allow(clippy::unused_self)] + pub fn max_len(&self) -> usize { + Self::MAX_LEN + } + + #[must_use] + pub fn as_vec(&self) -> &Vec { + self.as_ref() + } +} + +impl VecM { + #[must_use] + #[cfg(feature = "alloc")] + pub fn to_vec(&self) -> Vec { + self.into() + } + + #[must_use] + pub fn into_vec(self) -> Vec { + self.into() + } +} + +impl VecM { + #[cfg(feature = "alloc")] + pub fn to_string(&self) -> Result { + self.try_into() + } + + #[cfg(feature = "alloc")] + pub fn into_string(self) -> Result { + self.try_into() + } + + #[cfg(feature = "alloc")] + #[must_use] + pub fn to_string_lossy(&self) -> String { + String::from_utf8_lossy(&self.0).into_owned() + } + + #[cfg(feature = "alloc")] + #[must_use] + pub fn into_string_lossy(self) -> String { + String::from_utf8_lossy(&self.0).into_owned() + } +} + +impl VecM { + #[must_use] + pub fn to_option(&self) -> Option { + if self.len() > 0 { + Some(self.0[0].clone()) + } else { + None + } + } +} + +#[cfg(not(feature = "alloc"))] +impl From> for Option { + 
#[must_use] + fn from(v: VecM) -> Self { + v.to_option() + } +} + +#[cfg(feature = "alloc")] +impl VecM { + #[must_use] + pub fn into_option(mut self) -> Option { + self.0.drain(..).next() + } +} + +#[cfg(feature = "alloc")] +impl From> for Option { + #[must_use] + fn from(v: VecM) -> Self { + v.into_option() + } +} + +impl TryFrom> for VecM { + type Error = Error; + + fn try_from(v: Vec) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(VecM(v)) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +impl From> for Vec { + #[must_use] + fn from(v: VecM) -> Self { + v.0 + } +} + +#[cfg(feature = "alloc")] +impl From<&VecM> for Vec { + #[must_use] + fn from(v: &VecM) -> Self { + v.0.clone() + } +} + +impl AsRef> for VecM { + #[must_use] + fn as_ref(&self) -> &Vec { + &self.0 + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&Vec> for VecM { + type Error = Error; + + fn try_from(v: &Vec) -> Result { + v.as_slice().try_into() + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&[T]> for VecM { + type Error = Error; + + fn try_from(v: &[T]) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(VecM(v.to_vec())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +impl AsRef<[T]> for VecM { + #[cfg(feature = "alloc")] + #[must_use] + fn as_ref(&self) -> &[T] { + self.0.as_ref() + } + #[cfg(not(feature = "alloc"))] + #[must_use] + fn as_ref(&self) -> &[T] { + self.0 + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<[T; N]> for VecM { + type Error = Error; + + fn try_from(v: [T; N]) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(VecM(v.to_vec())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +#[cfg(feature = "alloc")] +impl TryFrom> for [T; N] { + type Error = VecM; + + fn try_from(v: VecM) -> core::result::Result { + let s: [T; N] = v.0.try_into().map_err(|v: Vec| VecM::(v))?; + Ok(s) + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&[T; N]> for VecM { + type Error = Error; + + fn try_from(v: &[T; N]) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(VecM(v.to_vec())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +#[cfg(not(feature = "alloc"))] +impl TryFrom<&'static [T; N]> for VecM { + type Error = Error; + + fn try_from(v: &'static [T; N]) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(VecM(v)) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&String> for VecM { + type Error = Error; + + fn try_from(v: &String) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(VecM(v.as_bytes().to_vec())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +#[cfg(feature = "alloc")] +impl TryFrom for VecM { + type Error = Error; + + fn try_from(v: String) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(VecM(v.into())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +#[cfg(feature = "alloc")] +impl TryFrom> for String { + type Error = Error; + + fn try_from(v: VecM) -> Result { + Ok(String::from_utf8(v.0)?) 
+ } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&VecM> for String { + type Error = Error; + + fn try_from(v: &VecM) -> Result { + Ok(core::str::from_utf8(v.as_ref())?.to_owned()) + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&str> for VecM { + type Error = Error; + + fn try_from(v: &str) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(VecM(v.into())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +#[cfg(not(feature = "alloc"))] +impl TryFrom<&'static str> for VecM { + type Error = Error; + + fn try_from(v: &'static str) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(VecM(v.as_bytes())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +impl<'a, const MAX: u32> TryFrom<&'a VecM> for &'a str { + type Error = Error; + + fn try_from(v: &'a VecM) -> Result { + Ok(core::str::from_utf8(v.as_ref())?) + } +} + +impl ReadXdr for VecM { + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result { + r.with_limited_depth(|r| { + let len: u32 = u32::read_xdr(r)?; + if len > MAX { + return Err(Error::LengthExceedsMax); + } + + r.consume_len(len as usize)?; + let padding = pad_len(len as usize); + r.consume_len(padding)?; + + let mut vec = vec![0u8; len as usize]; + r.read_exact(&mut vec)?; + + let pad = &mut [0u8; 3][..padding]; + r.read_exact(pad)?; + if pad.iter().any(|b| *b != 0) { + return Err(Error::NonZeroPadding); + } + + Ok(VecM(vec)) + }) + } +} + +impl WriteXdr for VecM { + #[cfg(feature = "std")] + fn write_xdr(&self, w: &mut Limited) -> Result<()> { + w.with_limited_depth(|w| { + let len: u32 = self.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + len.write_xdr(w)?; + + w.consume_len(self.len())?; + let padding = pad_len(self.len()); + w.consume_len(padding)?; + + w.write_all(&self.0)?; + + w.write_all(&[0u8; 3][..padding])?; + + Ok(()) + }) + } +} + +impl ReadXdr for VecM { + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result { + r.with_limited_depth(|r| { + let len = u32::read_xdr(r)?; + if len > MAX { + return Err(Error::LengthExceedsMax); + } + + let mut vec = Vec::new(); + for _ in 0..len { + let t = T::read_xdr(r)?; + vec.push(t); + } + + Ok(VecM(vec)) + }) + } +} + +impl WriteXdr for VecM { + #[cfg(feature = "std")] + fn write_xdr(&self, w: &mut Limited) -> Result<()> { + w.with_limited_depth(|w| { + let len: u32 = self.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + len.write_xdr(w)?; + + for t in &self.0 { + t.write_xdr(w)?; + } + + Ok(()) + }) + } +} + +// BytesM ------------------------------------------------------------------------ + +#[cfg(feature = "alloc")] +#[derive(Clone, Hash, PartialEq, Eq, PartialOrd, Ord)] +#[cfg_attr( + feature = "serde", + derive(serde_with::SerializeDisplay, serde_with::DeserializeFromStr) +)] +#[cfg_attr(feature = "arbitrary", derive(Arbitrary))] +pub struct BytesM(Vec); + +#[cfg(not(feature = "alloc"))] +#[derive(Clone, Hash, PartialEq, Eq, PartialOrd, Ord)] +#[cfg_attr(feature = "arbitrary", derive(Arbitrary))] +pub struct BytesM(Vec); + +impl core::fmt::Display for BytesM { + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + #[cfg(feature = "alloc")] + let v = &self.0; + #[cfg(not(feature = "alloc"))] + let v = self.0; + for b in v { + write!(f, "{b:02x}")?; + } + Ok(()) + } +} + +impl core::fmt::Debug for BytesM { + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + #[cfg(feature = "alloc")] + let v = &self.0; + 
#[cfg(not(feature = "alloc"))] + let v = self.0; + write!(f, "BytesM(")?; + for b in v { + write!(f, "{b:02x}")?; + } + write!(f, ")")?; + Ok(()) + } +} + +#[cfg(feature = "alloc")] +impl core::str::FromStr for BytesM { + type Err = Error; + fn from_str(s: &str) -> core::result::Result { + hex::decode(s).map_err(|_| Error::InvalidHex)?.try_into() + } +} + +impl Deref for BytesM { + type Target = Vec; + + fn deref(&self) -> &Self::Target { + &self.0 + } +} + +#[cfg(feature = "schemars")] +impl schemars::JsonSchema for BytesM { + fn schema_name() -> String { + format!("BytesM<{MAX}>") + } + + fn is_referenceable() -> bool { + false + } + + fn json_schema(gen: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema { + let schema = String::json_schema(gen); + if let schemars::schema::Schema::Object(mut schema) = schema { + schema.extensions.insert( + "contentEncoding".to_owned(), + serde_json::Value::String("hex".to_string()), + ); + schema.extensions.insert( + "contentMediaType".to_owned(), + serde_json::Value::String("application/binary".to_string()), + ); + let string = *schema.string.unwrap_or_default().clone(); + schema.string = Some(Box::new(schemars::schema::StringValidation { + max_length: MAX.checked_mul(2).map(Some).unwrap_or_default(), + min_length: None, + ..string + })); + schema.into() + } else { + schema + } + } +} + +impl Default for BytesM { + fn default() -> Self { + Self(Vec::default()) + } +} + +impl BytesM { + pub const MAX_LEN: usize = { MAX as usize }; + + #[must_use] + #[allow(clippy::unused_self)] + pub fn max_len(&self) -> usize { + Self::MAX_LEN + } + + #[must_use] + pub fn as_vec(&self) -> &Vec { + self.as_ref() + } +} + +impl BytesM { + #[must_use] + #[cfg(feature = "alloc")] + pub fn to_vec(&self) -> Vec { + self.into() + } + + #[must_use] + pub fn into_vec(self) -> Vec { + self.into() + } +} + +impl BytesM { + #[cfg(feature = "alloc")] + pub fn to_string(&self) -> Result { + self.try_into() + } + + #[cfg(feature = "alloc")] + pub fn into_string(self) -> Result { + self.try_into() + } + + #[cfg(feature = "alloc")] + #[must_use] + pub fn to_string_lossy(&self) -> String { + String::from_utf8_lossy(&self.0).into_owned() + } + + #[cfg(feature = "alloc")] + #[must_use] + pub fn into_string_lossy(self) -> String { + String::from_utf8_lossy(&self.0).into_owned() + } +} + +impl TryFrom> for BytesM { + type Error = Error; + + fn try_from(v: Vec) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(BytesM(v)) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +impl From> for Vec { + #[must_use] + fn from(v: BytesM) -> Self { + v.0 + } +} + +#[cfg(feature = "alloc")] +impl From<&BytesM> for Vec { + #[must_use] + fn from(v: &BytesM) -> Self { + v.0.clone() + } +} + +impl AsRef> for BytesM { + #[must_use] + fn as_ref(&self) -> &Vec { + &self.0 + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&Vec> for BytesM { + type Error = Error; + + fn try_from(v: &Vec) -> Result { + v.as_slice().try_into() + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&[u8]> for BytesM { + type Error = Error; + + fn try_from(v: &[u8]) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(BytesM(v.to_vec())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +impl AsRef<[u8]> for BytesM { + #[cfg(feature = "alloc")] + #[must_use] + fn as_ref(&self) -> &[u8] { + self.0.as_ref() + } + #[cfg(not(feature = "alloc"))] + #[must_use] + fn as_ref(&self) -> &[u8] { + self.0 + } +} 
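// Illustrative usage sketch, not part of the generated module: it shows how the
// BytesM conversions and XDR round-trip defined in this file fit together. The
// module name `bytes_m_usage_sketch`, the MAX bound of 3, and the sample bytes
// are arbitrary choices for the example; it assumes the "std" (and "alloc")
// features are enabled, matching the other test modules above.
#[cfg(all(test, feature = "std"))]
mod bytes_m_usage_sketch {
    use super::*;

    #[test]
    fn bytes_m_round_trip() {
        // Fallible construction enforces the MAX bound at conversion time.
        let b = BytesM::<3>::try_from(vec![1u8, 2, 3]).unwrap();
        // Serialize: a 4-byte big-endian length prefix, the payload, then zero
        // padding up to a multiple of 4 bytes (here one padding byte).
        let xdr = b.to_xdr(Limits::none()).unwrap();
        assert_eq!(xdr, vec![0, 0, 0, 3, 1, 2, 3, 0]);
        // Deserialize back and compare with the original value.
        let b_back = BytesM::<3>::from_xdr(xdr, Limits::none()).unwrap();
        assert_eq!(b, b_back);
    }
}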
+ +#[cfg(feature = "alloc")] +impl TryFrom<[u8; N]> for BytesM { + type Error = Error; + + fn try_from(v: [u8; N]) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(BytesM(v.to_vec())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +#[cfg(feature = "alloc")] +impl TryFrom> for [u8; N] { + type Error = BytesM; + + fn try_from(v: BytesM) -> core::result::Result { + let s: [u8; N] = v.0.try_into().map_err(BytesM::)?; + Ok(s) + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&[u8; N]> for BytesM { + type Error = Error; + + fn try_from(v: &[u8; N]) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(BytesM(v.to_vec())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +#[cfg(not(feature = "alloc"))] +impl TryFrom<&'static [u8; N]> for BytesM { + type Error = Error; + + fn try_from(v: &'static [u8; N]) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(BytesM(v)) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&String> for BytesM { + type Error = Error; + + fn try_from(v: &String) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(BytesM(v.as_bytes().to_vec())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +#[cfg(feature = "alloc")] +impl TryFrom for BytesM { + type Error = Error; + + fn try_from(v: String) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(BytesM(v.into())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +#[cfg(feature = "alloc")] +impl TryFrom> for String { + type Error = Error; + + fn try_from(v: BytesM) -> Result { + Ok(String::from_utf8(v.0)?) + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&BytesM> for String { + type Error = Error; + + fn try_from(v: &BytesM) -> Result { + Ok(core::str::from_utf8(v.as_ref())?.to_owned()) + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&str> for BytesM { + type Error = Error; + + fn try_from(v: &str) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(BytesM(v.into())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +#[cfg(not(feature = "alloc"))] +impl TryFrom<&'static str> for BytesM { + type Error = Error; + + fn try_from(v: &'static str) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(BytesM(v.as_bytes())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +impl<'a, const MAX: u32> TryFrom<&'a BytesM> for &'a str { + type Error = Error; + + fn try_from(v: &'a BytesM) -> Result { + Ok(core::str::from_utf8(v.as_ref())?) 
+    }
+}
+
+impl<const MAX: u32> ReadXdr for BytesM<MAX> {
+    #[cfg(feature = "std")]
+    fn read_xdr<R: Read>(r: &mut Limited<R>) -> Result<Self> {
+        r.with_limited_depth(|r| {
+            let len: u32 = u32::read_xdr(r)?;
+            if len > MAX {
+                return Err(Error::LengthExceedsMax);
+            }
+
+            r.consume_len(len as usize)?;
+            let padding = pad_len(len as usize);
+            r.consume_len(padding)?;
+
+            let mut vec = vec![0u8; len as usize];
+            r.read_exact(&mut vec)?;
+
+            let pad = &mut [0u8; 3][..padding];
+            r.read_exact(pad)?;
+            if pad.iter().any(|b| *b != 0) {
+                return Err(Error::NonZeroPadding);
+            }
+
+            Ok(BytesM(vec))
+        })
+    }
+}
+
+impl<const MAX: u32> WriteXdr for BytesM<MAX> {
+    #[cfg(feature = "std")]
+    fn write_xdr<W: Write>(&self, w: &mut Limited<W>) -> Result<()> {
+        w.with_limited_depth(|w| {
+            let len: u32 = self.len().try_into().map_err(|_| Error::LengthExceedsMax)?;
+            len.write_xdr(w)?;
+
+            w.consume_len(self.len())?;
+            let padding = pad_len(self.len());
+            w.consume_len(padding)?;
+
+            w.write_all(&self.0)?;
+
+            w.write_all(&[0u8; 3][..pad_len(len as usize)])?;
+
+            Ok(())
+        })
+    }
+}
+
+// StringM ------------------------------------------------------------------------
+
+/// A string type that contains arbitrary bytes.
+///
+/// Convertible, fallibly, to/from a Rust UTF-8 String using
+/// [`TryFrom`]/[`TryInto`]/[`StringM::to_utf8_string`].
+///
+/// Convertible, lossyly, to a Rust UTF-8 String using
+/// [`StringM::to_utf8_string_lossy`].
+///
+/// Convertible to/from escaped printable-ASCII using
+/// [`Display`]/[`ToString`]/[`FromStr`].
+
+#[cfg(feature = "alloc")]
+#[derive(Clone, Hash, PartialEq, Eq, PartialOrd, Ord)]
+#[cfg_attr(
+    feature = "serde",
+    derive(serde_with::SerializeDisplay, serde_with::DeserializeFromStr)
+)]
+#[cfg_attr(feature = "arbitrary", derive(Arbitrary))]
+pub struct StringM<const MAX: u32 = { u32::MAX }>(Vec<u8>);
+
+#[cfg(not(feature = "alloc"))]
+#[derive(Clone, Hash, PartialEq, Eq, PartialOrd, Ord)]
+#[cfg_attr(feature = "arbitrary", derive(Arbitrary))]
+pub struct StringM<const MAX: u32 = { u32::MAX }>(Vec<u8>);
+
+impl<const MAX: u32> core::fmt::Display for StringM<MAX> {
+    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
+        #[cfg(feature = "alloc")]
+        let v = &self.0;
+        #[cfg(not(feature = "alloc"))]
+        let v = self.0;
+        for b in escape_bytes::Escape::new(v) {
+            write!(f, "{}", b as char)?;
+        }
+        Ok(())
+    }
+}
+
+impl<const MAX: u32> core::fmt::Debug for StringM<MAX> {
+    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
+        #[cfg(feature = "alloc")]
+        let v = &self.0;
+        #[cfg(not(feature = "alloc"))]
+        let v = self.0;
+        write!(f, "StringM(")?;
+        for b in escape_bytes::Escape::new(v) {
+            write!(f, "{}", b as char)?;
+        }
+        write!(f, ")")?;
+        Ok(())
+    }
+}
+
+#[cfg(feature = "alloc")]
+impl<const MAX: u32> core::str::FromStr for StringM<MAX> {
+    type Err = Error;
+    fn from_str(s: &str) -> core::result::Result<Self, Self::Err> {
+        let b = escape_bytes::unescape(s.as_bytes()).map_err(|_| Error::Invalid)?;
+        Ok(Self(b))
+    }
+}
+
+impl<const MAX: u32> Deref for StringM<MAX> {
+    type Target = Vec<u8>;
+
+    fn deref(&self) -> &Self::Target {
+        &self.0
+    }
+}
+
+impl<const MAX: u32> Default for StringM<MAX> {
+    fn default() -> Self {
+        Self(Vec::default())
+    }
+}
+
+#[cfg(feature = "schemars")]
+impl<const MAX: u32> schemars::JsonSchema for StringM<MAX> {
+    fn schema_name() -> String {
+        format!("StringM<{MAX}>")
+    }
+
+    fn json_schema(gen: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema {
+        let schema = String::json_schema(gen);
+        if let schemars::schema::Schema::Object(mut schema) = schema {
+            let string = *schema.string.unwrap_or_default().clone();
+            schema.string = Some(Box::new(schemars::schema::StringValidation {
+                max_length: Some(MAX),
+                ..string
+            }));
+            schema.into()
+        } else {
+            schema
+        }
+    }
+}
+
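As the doc comment above notes, `StringM` holds arbitrary bytes: conversion to a Rust `String` is fallible, while `Display`/`FromStr` work on an escaped printable-ASCII form, and the `JsonSchema` impl caps `maxLength` at `MAX`. A small sketch of the escaped round trip, again assuming a placeholder `myxdr` module built with the `alloc` feature:

```rust
// Illustrative only, not part of the generated file. `myxdr` is a placeholder
// for wherever the generated module is mounted in a consuming crate.
use core::str::FromStr;
use myxdr::StringM;

fn main() {
    // Arbitrary bytes are allowed, including a non-printable newline.
    let s = StringM::<32>::try_from("hello\nworld").unwrap();

    // Display/ToString escape non-printable bytes, so the value is safe to print.
    let escaped = s.to_string();

    // FromStr unescapes, round-tripping back to the original bytes.
    let back = StringM::<32>::from_str(&escaped).unwrap();
    assert_eq!(s, back);

    // The UTF-8 conversion is separate and fallible.
    assert_eq!(s.to_utf8_string().unwrap(), "hello\nworld");
}
```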
+impl StringM { + pub const MAX_LEN: usize = { MAX as usize }; + + #[must_use] + #[allow(clippy::unused_self)] + pub fn max_len(&self) -> usize { + Self::MAX_LEN + } + + #[must_use] + pub fn as_vec(&self) -> &Vec { + self.as_ref() + } +} + +impl StringM { + #[must_use] + #[cfg(feature = "alloc")] + pub fn to_vec(&self) -> Vec { + self.into() + } + + #[must_use] + pub fn into_vec(self) -> Vec { + self.into() + } +} + +impl StringM { + #[cfg(feature = "alloc")] + pub fn to_utf8_string(&self) -> Result { + self.try_into() + } + + #[cfg(feature = "alloc")] + pub fn into_utf8_string(self) -> Result { + self.try_into() + } + + #[cfg(feature = "alloc")] + #[must_use] + pub fn to_utf8_string_lossy(&self) -> String { + String::from_utf8_lossy(&self.0).into_owned() + } + + #[cfg(feature = "alloc")] + #[must_use] + pub fn into_utf8_string_lossy(self) -> String { + String::from_utf8_lossy(&self.0).into_owned() + } +} + +impl TryFrom> for StringM { + type Error = Error; + + fn try_from(v: Vec) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(StringM(v)) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +impl From> for Vec { + #[must_use] + fn from(v: StringM) -> Self { + v.0 + } +} + +#[cfg(feature = "alloc")] +impl From<&StringM> for Vec { + #[must_use] + fn from(v: &StringM) -> Self { + v.0.clone() + } +} + +impl AsRef> for StringM { + #[must_use] + fn as_ref(&self) -> &Vec { + &self.0 + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&Vec> for StringM { + type Error = Error; + + fn try_from(v: &Vec) -> Result { + v.as_slice().try_into() + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&[u8]> for StringM { + type Error = Error; + + fn try_from(v: &[u8]) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(StringM(v.to_vec())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +impl AsRef<[u8]> for StringM { + #[cfg(feature = "alloc")] + #[must_use] + fn as_ref(&self) -> &[u8] { + self.0.as_ref() + } + #[cfg(not(feature = "alloc"))] + #[must_use] + fn as_ref(&self) -> &[u8] { + self.0 + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<[u8; N]> for StringM { + type Error = Error; + + fn try_from(v: [u8; N]) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(StringM(v.to_vec())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +#[cfg(feature = "alloc")] +impl TryFrom> for [u8; N] { + type Error = StringM; + + fn try_from(v: StringM) -> core::result::Result { + let s: [u8; N] = v.0.try_into().map_err(StringM::)?; + Ok(s) + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&[u8; N]> for StringM { + type Error = Error; + + fn try_from(v: &[u8; N]) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(StringM(v.to_vec())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +#[cfg(not(feature = "alloc"))] +impl TryFrom<&'static [u8; N]> for StringM { + type Error = Error; + + fn try_from(v: &'static [u8; N]) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(StringM(v)) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&String> for StringM { + type Error = Error; + + fn try_from(v: &String) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(StringM(v.as_bytes().to_vec())) + } else { + 
Err(Error::LengthExceedsMax) + } + } +} + +#[cfg(feature = "alloc")] +impl TryFrom for StringM { + type Error = Error; + + fn try_from(v: String) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(StringM(v.into())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +#[cfg(feature = "alloc")] +impl TryFrom> for String { + type Error = Error; + + fn try_from(v: StringM) -> Result { + Ok(String::from_utf8(v.0)?) + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&StringM> for String { + type Error = Error; + + fn try_from(v: &StringM) -> Result { + Ok(core::str::from_utf8(v.as_ref())?.to_owned()) + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&str> for StringM { + type Error = Error; + + fn try_from(v: &str) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(StringM(v.into())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +#[cfg(not(feature = "alloc"))] +impl TryFrom<&'static str> for StringM { + type Error = Error; + + fn try_from(v: &'static str) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(StringM(v.as_bytes())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +impl<'a, const MAX: u32> TryFrom<&'a StringM> for &'a str { + type Error = Error; + + fn try_from(v: &'a StringM) -> Result { + Ok(core::str::from_utf8(v.as_ref())?) + } +} + +impl ReadXdr for StringM { + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result { + r.with_limited_depth(|r| { + let len: u32 = u32::read_xdr(r)?; + if len > MAX { + return Err(Error::LengthExceedsMax); + } + + r.consume_len(len as usize)?; + let padding = pad_len(len as usize); + r.consume_len(padding)?; + + let mut vec = vec![0u8; len as usize]; + r.read_exact(&mut vec)?; + + let pad = &mut [0u8; 3][..padding]; + r.read_exact(pad)?; + if pad.iter().any(|b| *b != 0) { + return Err(Error::NonZeroPadding); + } + + Ok(StringM(vec)) + }) + } +} + +impl WriteXdr for StringM { + #[cfg(feature = "std")] + fn write_xdr(&self, w: &mut Limited) -> Result<()> { + w.with_limited_depth(|w| { + let len: u32 = self.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + len.write_xdr(w)?; + + w.consume_len(self.len())?; + let padding = pad_len(self.len()); + w.consume_len(padding)?; + + w.write_all(&self.0)?; + + w.write_all(&[0u8; 3][..padding])?; + + Ok(()) + }) + } +} + +// Frame ------------------------------------------------------------------------ + +#[derive(Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord)] +#[cfg_attr( + all(feature = "serde", feature = "alloc"), + derive(serde::Serialize, serde::Deserialize), + serde(rename_all = "snake_case") +)] +pub struct Frame(pub T) +where + T: ReadXdr; + +#[cfg(feature = "schemars")] +impl schemars::JsonSchema for Frame { + fn schema_name() -> String { + format!("Frame<{}>", T::schema_name()) + } + + fn json_schema(gen: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema { + T::json_schema(gen) + } +} + +impl ReadXdr for Frame +where + T: ReadXdr, +{ + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result { + // Read the frame header value that contains 1 flag-bit and a 33-bit length. + // - The 1 flag bit is 0 when there are more frames for the same record. + // - The 31-bit length is the length of the bytes within the frame that + // follow the frame header. + let header = u32::read_xdr(r)?; + // TODO: Use the length and cap the length we'll read from `r`. 
+ let last_record = header >> 31 == 1; + if last_record { + // Read the record in the frame. + Ok(Self(T::read_xdr(r)?)) + } else { + // TODO: Support reading those additional frames for the same + // record. + Err(Error::Unsupported) + } + } +} + +#[cfg(all(test, feature = "std"))] +mod tests { + use std::io::Cursor; + + use super::*; + + #[test] + pub fn vec_u8_read_without_padding() { + let buf = Cursor::new(vec![0, 0, 0, 4, 2, 2, 2, 2]); + let v = VecM::::read_xdr(&mut Limited::new(buf, Limits::none())).unwrap(); + assert_eq!(v.to_vec(), vec![2, 2, 2, 2]); + } + + #[test] + pub fn vec_u8_read_with_padding() { + let buf = Cursor::new(vec![0, 0, 0, 1, 2, 0, 0, 0]); + let v = VecM::::read_xdr(&mut Limited::new(buf, Limits::none())).unwrap(); + assert_eq!(v.to_vec(), vec![2]); + } + + #[test] + pub fn vec_u8_read_with_insufficient_padding() { + let buf = Cursor::new(vec![0, 0, 0, 1, 2, 0, 0]); + let res = VecM::::read_xdr(&mut Limited::new(buf, Limits::none())); + match res { + Err(Error::Io(_)) => (), + _ => panic!("expected IO error got {res:?}"), + } + } + + #[test] + pub fn vec_u8_read_with_non_zero_padding() { + let buf = Cursor::new(vec![0, 0, 0, 1, 2, 3, 0, 0]); + let res = VecM::::read_xdr(&mut Limited::new(buf, Limits::none())); + match res { + Err(Error::NonZeroPadding) => (), + _ => panic!("expected NonZeroPadding got {res:?}"), + } + } + + #[test] + pub fn vec_u8_write_without_padding() { + let mut buf = vec![]; + let v: VecM = vec![2, 2, 2, 2].try_into().unwrap(); + + v.write_xdr(&mut Limited::new(Cursor::new(&mut buf), Limits::none())) + .unwrap(); + assert_eq!(buf, vec![0, 0, 0, 4, 2, 2, 2, 2]); + } + + #[test] + pub fn vec_u8_write_with_padding() { + let mut buf = vec![]; + let v: VecM = vec![2].try_into().unwrap(); + v.write_xdr(&mut Limited::new(Cursor::new(&mut buf), Limits::none())) + .unwrap(); + assert_eq!(buf, vec![0, 0, 0, 1, 2, 0, 0, 0]); + } + + #[test] + pub fn arr_u8_read_without_padding() { + let buf = Cursor::new(vec![2, 2, 2, 2]); + let v = <[u8; 4]>::read_xdr(&mut Limited::new(buf, Limits::none())).unwrap(); + assert_eq!(v, [2, 2, 2, 2]); + } + + #[test] + pub fn arr_u8_read_with_padding() { + let buf = Cursor::new(vec![2, 0, 0, 0]); + let v = <[u8; 1]>::read_xdr(&mut Limited::new(buf, Limits::none())).unwrap(); + assert_eq!(v, [2]); + } + + #[test] + pub fn arr_u8_read_with_insufficient_padding() { + let buf = Cursor::new(vec![2, 0, 0]); + let res = <[u8; 1]>::read_xdr(&mut Limited::new(buf, Limits::none())); + match res { + Err(Error::Io(_)) => (), + _ => panic!("expected IO error got {res:?}"), + } + } + + #[test] + pub fn arr_u8_read_with_non_zero_padding() { + let buf = Cursor::new(vec![2, 3, 0, 0]); + let res = <[u8; 1]>::read_xdr(&mut Limited::new(buf, Limits::none())); + match res { + Err(Error::NonZeroPadding) => (), + _ => panic!("expected NonZeroPadding got {res:?}"), + } + } + + #[test] + pub fn arr_u8_write_without_padding() { + let mut buf = vec![]; + [2u8, 2, 2, 2] + .write_xdr(&mut Limited::new(Cursor::new(&mut buf), Limits::none())) + .unwrap(); + assert_eq!(buf, vec![2, 2, 2, 2]); + } + + #[test] + pub fn arr_u8_write_with_padding() { + let mut buf = vec![]; + [2u8] + .write_xdr(&mut Limited::new(Cursor::new(&mut buf), Limits::none())) + .unwrap(); + assert_eq!(buf, vec![2, 0, 0, 0]); + } +} + +#[cfg(all(test, feature = "std"))] +mod test { + use super::*; + + #[test] + fn into_option_none() { + let v: VecM = vec![].try_into().unwrap(); + assert_eq!(v.into_option(), None); + } + + #[test] + fn into_option_some() { + let v: VecM<_, 1> = 
vec![1].try_into().unwrap(); + assert_eq!(v.into_option(), Some(1)); + } + + #[test] + fn to_option_none() { + let v: VecM = vec![].try_into().unwrap(); + assert_eq!(v.to_option(), None); + } + + #[test] + fn to_option_some() { + let v: VecM<_, 1> = vec![1].try_into().unwrap(); + assert_eq!(v.to_option(), Some(1)); + } + + #[test] + fn depth_limited_read_write_under_the_limit_success() { + let a: Option>> = Some(Some(Some(5))); + let mut buf = Limited::new(Vec::new(), Limits::depth(4)); + a.write_xdr(&mut buf).unwrap(); + + let mut dlr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::depth(4)); + let a_back: Option>> = ReadXdr::read_xdr(&mut dlr).unwrap(); + assert_eq!(a, a_back); + } + + #[test] + fn write_over_depth_limit_fail() { + let a: Option>> = Some(Some(Some(5))); + let mut buf = Limited::new(Vec::new(), Limits::depth(3)); + let res = a.write_xdr(&mut buf); + match res { + Err(Error::DepthLimitExceeded) => (), + _ => panic!("expected DepthLimitExceeded got {res:?}"), + } + } + + #[test] + fn read_over_depth_limit_fail() { + let read_limits = Limits::depth(3); + let write_limits = Limits::depth(5); + let a: Option>> = Some(Some(Some(5))); + let mut buf = Limited::new(Vec::new(), write_limits); + a.write_xdr(&mut buf).unwrap(); + + let mut dlr = Limited::new(Cursor::new(buf.inner.as_slice()), read_limits); + let res: Result>>> = ReadXdr::read_xdr(&mut dlr); + match res { + Err(Error::DepthLimitExceeded) => (), + _ => panic!("expected DepthLimitExceeded got {res:?}"), + } + } + + #[test] + fn length_limited_read_write_i32() { + // Exact limit, success + let v = 123i32; + let mut buf = Limited::new(Vec::new(), Limits::len(4)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(4)); + let v_back: i32 = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 0); + assert_eq!(v, v_back); + + // Over limit, success + let v = 123i32; + let mut buf = Limited::new(Vec::new(), Limits::len(5)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 1); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(5)); + let v_back: i32 = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 1); + assert_eq!(v, v_back); + + // Write under limit, failure + let v = 123i32; + let mut buf = Limited::new(Vec::new(), Limits::len(3)); + assert_eq!(v.write_xdr(&mut buf), Err(Error::LengthLimitExceeded)); + + // Read under limit, failure + let v = 123i32; + let mut buf = Limited::new(Vec::new(), Limits::len(4)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(3)); + assert_eq!( + ::read_xdr(&mut lr), + Err(Error::LengthLimitExceeded) + ); + } + + #[test] + fn length_limited_read_write_u32() { + // Exact limit, success + let v = 123u32; + let mut buf = Limited::new(Vec::new(), Limits::len(4)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(4)); + let v_back: u32 = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 0); + assert_eq!(v, v_back); + + // Over limit, success + let v = 123u32; + let mut buf = Limited::new(Vec::new(), Limits::len(5)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 1); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(5)); + let v_back: u32 = ReadXdr::read_xdr(&mut lr).unwrap(); + 
assert_eq!(buf.limits.len, 1); + assert_eq!(v, v_back); + + // Write under limit, failure + let v = 123u32; + let mut buf = Limited::new(Vec::new(), Limits::len(3)); + assert_eq!(v.write_xdr(&mut buf), Err(Error::LengthLimitExceeded)); + + // Read under limit, failure + let v = 123u32; + let mut buf = Limited::new(Vec::new(), Limits::len(4)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(3)); + assert_eq!( + ::read_xdr(&mut lr), + Err(Error::LengthLimitExceeded) + ); + } + + #[test] + fn length_limited_read_write_i64() { + // Exact limit, success + let v = 123i64; + let mut buf = Limited::new(Vec::new(), Limits::len(8)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(8)); + let v_back: i64 = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 0); + assert_eq!(v, v_back); + + // Over limit, success + let v = 123i64; + let mut buf = Limited::new(Vec::new(), Limits::len(9)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 1); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(9)); + let v_back: i64 = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 1); + assert_eq!(v, v_back); + + // Write under limit, failure + let v = 123i64; + let mut buf = Limited::new(Vec::new(), Limits::len(7)); + assert_eq!(v.write_xdr(&mut buf), Err(Error::LengthLimitExceeded)); + + // Read under limit, failure + let v = 123i64; + let mut buf = Limited::new(Vec::new(), Limits::len(8)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(7)); + assert_eq!( + ::read_xdr(&mut lr), + Err(Error::LengthLimitExceeded) + ); + } + + #[test] + fn length_limited_read_write_u64() { + // Exact limit, success + let v = 123u64; + let mut buf = Limited::new(Vec::new(), Limits::len(8)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(8)); + let v_back: u64 = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 0); + assert_eq!(v, v_back); + + // Over limit, success + let v = 123u64; + let mut buf = Limited::new(Vec::new(), Limits::len(9)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 1); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(9)); + let v_back: u64 = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 1); + assert_eq!(v, v_back); + + // Write under limit, failure + let v = 123u64; + let mut buf = Limited::new(Vec::new(), Limits::len(7)); + assert_eq!(v.write_xdr(&mut buf), Err(Error::LengthLimitExceeded)); + + // Read under limit, failure + let v = 123u64; + let mut buf = Limited::new(Vec::new(), Limits::len(8)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(7)); + assert_eq!( + ::read_xdr(&mut lr), + Err(Error::LengthLimitExceeded) + ); + } + + #[test] + fn length_limited_read_write_bool() { + // Exact limit, success + let v = true; + let mut buf = Limited::new(Vec::new(), Limits::len(4)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(4)); + let v_back: bool = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 0); 
+ assert_eq!(v, v_back); + + // Over limit, success + let v = true; + let mut buf = Limited::new(Vec::new(), Limits::len(5)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 1); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(5)); + let v_back: bool = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 1); + assert_eq!(v, v_back); + + // Write under limit, failure + let v = true; + let mut buf = Limited::new(Vec::new(), Limits::len(3)); + assert_eq!(v.write_xdr(&mut buf), Err(Error::LengthLimitExceeded)); + + // Read under limit, failure + let v = true; + let mut buf = Limited::new(Vec::new(), Limits::len(4)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(3)); + assert_eq!( + ::read_xdr(&mut lr), + Err(Error::LengthLimitExceeded) + ); + } + + #[test] + fn length_limited_read_write_option() { + // Exact limit, success + let v = Some(true); + let mut buf = Limited::new(Vec::new(), Limits::len(8)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(8)); + let v_back: Option = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 0); + assert_eq!(v, v_back); + + // Over limit, success + let v = Some(true); + let mut buf = Limited::new(Vec::new(), Limits::len(9)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 1); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(9)); + let v_back: Option = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 1); + assert_eq!(v, v_back); + + // Write under limit, failure + let v = Some(true); + let mut buf = Limited::new(Vec::new(), Limits::len(7)); + assert_eq!(v.write_xdr(&mut buf), Err(Error::LengthLimitExceeded)); + + // Read under limit, failure + let v = Some(true); + let mut buf = Limited::new(Vec::new(), Limits::len(8)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(7)); + assert_eq!( + as ReadXdr>::read_xdr(&mut lr), + Err(Error::LengthLimitExceeded) + ); + } + + #[test] + fn length_limited_read_write_array_u8() { + // Exact limit, success + let v = [1u8, 2, 3]; + let mut buf = Limited::new(Vec::new(), Limits::len(4)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(4)); + let v_back: [u8; 3] = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 0); + assert_eq!(v, v_back); + + // Over limit, success + let v = [1u8, 2, 3]; + let mut buf = Limited::new(Vec::new(), Limits::len(5)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 1); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(5)); + let v_back: [u8; 3] = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 1); + assert_eq!(v, v_back); + + // Write under limit, failure + let v = [1u8, 2, 3]; + let mut buf = Limited::new(Vec::new(), Limits::len(3)); + assert_eq!(v.write_xdr(&mut buf), Err(Error::LengthLimitExceeded)); + + // Read under limit, failure + let v = [1u8, 2, 3]; + let mut buf = Limited::new(Vec::new(), Limits::len(4)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(3)); + assert_eq!( + <[u8; 3] as ReadXdr>::read_xdr(&mut lr), + 
Err(Error::LengthLimitExceeded) + ); + } + + #[test] + fn length_limited_read_write_array_type() { + // Exact limit, success + let v = [true, false, true]; + let mut buf = Limited::new(Vec::new(), Limits::len(12)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(12)); + let v_back: [bool; 3] = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 0); + assert_eq!(v, v_back); + + // Over limit, success + let v = [true, false, true]; + let mut buf = Limited::new(Vec::new(), Limits::len(13)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 1); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(13)); + let v_back: [bool; 3] = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 1); + assert_eq!(v, v_back); + + // Write under limit, failure + let v = [true, false, true]; + let mut buf = Limited::new(Vec::new(), Limits::len(11)); + assert_eq!(v.write_xdr(&mut buf), Err(Error::LengthLimitExceeded)); + + // Read under limit, failure + let v = [true, false, true]; + let mut buf = Limited::new(Vec::new(), Limits::len(12)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(11)); + assert_eq!( + <[bool; 3] as ReadXdr>::read_xdr(&mut lr), + Err(Error::LengthLimitExceeded) + ); + } + + #[test] + fn length_limited_read_write_vec() { + // Exact limit, success + let v = VecM::::try_from([1i32, 2, 3]).unwrap(); + let mut buf = Limited::new(Vec::new(), Limits::len(16)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(16)); + let v_back: VecM = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 0); + assert_eq!(v, v_back); + + // Over limit, success + let v = VecM::::try_from([1i32, 2, 3]).unwrap(); + let mut buf = Limited::new(Vec::new(), Limits::len(17)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 1); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(17)); + let v_back: VecM = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 1); + assert_eq!(v, v_back); + + // Write under limit, failure + let v = VecM::::try_from([1i32, 2, 3]).unwrap(); + let mut buf = Limited::new(Vec::new(), Limits::len(15)); + assert_eq!(v.write_xdr(&mut buf), Err(Error::LengthLimitExceeded)); + + // Read under limit, failure + let v = VecM::::try_from([1i32, 2, 3]).unwrap(); + let mut buf = Limited::new(Vec::new(), Limits::len(16)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(15)); + assert_eq!( + as ReadXdr>::read_xdr(&mut lr), + Err(Error::LengthLimitExceeded) + ); + } + + #[test] + fn length_limited_read_write_bytes() { + // Exact limit, success + let v = BytesM::<3>::try_from([1u8, 2, 3]).unwrap(); + let mut buf = Limited::new(Vec::new(), Limits::len(8)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(8)); + let v_back: BytesM<3> = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 0); + assert_eq!(v, v_back); + + // Over limit, success + let v = BytesM::<3>::try_from([1u8, 2, 3]).unwrap(); + let mut buf = Limited::new(Vec::new(), Limits::len(9)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 1); + 
let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(9)); + let v_back: BytesM<3> = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 1); + assert_eq!(v, v_back); + + // Write under limit, failure + let v = BytesM::<3>::try_from([1u8, 2, 3]).unwrap(); + let mut buf = Limited::new(Vec::new(), Limits::len(7)); + assert_eq!(v.write_xdr(&mut buf), Err(Error::LengthLimitExceeded)); + + // Read under limit, failure + let v = BytesM::<3>::try_from([1u8, 2, 3]).unwrap(); + let mut buf = Limited::new(Vec::new(), Limits::len(8)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(7)); + assert_eq!( + as ReadXdr>::read_xdr(&mut lr), + Err(Error::LengthLimitExceeded) + ); + } + + #[test] + fn length_limited_read_write_string() { + // Exact limit, success + let v = StringM::<3>::try_from("123").unwrap(); + let mut buf = Limited::new(Vec::new(), Limits::len(8)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(8)); + let v_back: StringM<3> = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 0); + assert_eq!(v, v_back); + + // Over limit, success + let v = StringM::<3>::try_from("123").unwrap(); + let mut buf = Limited::new(Vec::new(), Limits::len(9)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 1); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(9)); + let v_back: StringM<3> = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 1); + assert_eq!(v, v_back); + + // Write under limit, failure + let v = StringM::<3>::try_from("123").unwrap(); + let mut buf = Limited::new(Vec::new(), Limits::len(7)); + assert_eq!(v.write_xdr(&mut buf), Err(Error::LengthLimitExceeded)); + + // Read under limit, failure + let v = StringM::<3>::try_from("123").unwrap(); + let mut buf = Limited::new(Vec::new(), Limits::len(8)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(7)); + assert_eq!( + as ReadXdr>::read_xdr(&mut lr), + Err(Error::LengthLimitExceeded) + ); + } +} + +#[cfg(all(test, not(feature = "alloc")))] +mod test { + use super::VecM; + + #[test] + fn to_option_none() { + let v: VecM = (&[]).try_into().unwrap(); + assert_eq!(v.to_option(), None); + } + + #[test] + fn to_option_some() { + let v: VecM<_, 1> = (&[1]).try_into().unwrap(); + assert_eq!(v.to_option(), Some(1)); + } +} + +/// Foo is an XDR Const defines as: +/// +/// ```text +/// const FOO = 1; +/// ``` +/// +pub const FOO: u64 = 1; + +/// TestArray is an XDR Typedef defines as: +/// +/// ```text +/// typedef int TestArray[FOO]; +/// ``` +/// +pub type TestArray = [i32; Foo]; + +/// TestArray2 is an XDR Typedef defines as: +/// +/// ```text +/// typedef int TestArray2; +/// ``` +/// +pub type TestArray2 = VecM::; + + #[derive(Clone, Copy, Debug, Hash, PartialEq, Eq, PartialOrd, Ord)] + #[cfg_attr( + all(feature = "serde", feature = "alloc"), + derive(serde::Serialize, serde::Deserialize), + serde(rename_all = "snake_case") + )] + #[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] + pub enum TypeVariant { + TestArray, +TestArray2, + } + + impl TypeVariant { + pub const VARIANTS: [TypeVariant; 2] = [ TypeVariant::TestArray, +TypeVariant::TestArray2, ]; + pub const VARIANTS_STR: [&'static str; 2] = [ "TestArray", +"TestArray2", ]; + + #[must_use] + 
#[allow(clippy::too_many_lines)] + pub const fn name(&self) -> &'static str { + match self { + Self::TestArray => "TestArray", +Self::TestArray2 => "TestArray2", + } + } + + #[must_use] + #[allow(clippy::too_many_lines)] + pub const fn variants() -> [TypeVariant; 2] { + Self::VARIANTS + } + + #[cfg(feature = "schemars")] + #[must_use] + #[allow(clippy::too_many_lines)] + pub fn json_schema(&self, gen: schemars::gen::SchemaGenerator) -> schemars::schema::RootSchema { + match self { + Self::TestArray => gen.into_root_schema_for::(), +Self::TestArray2 => gen.into_root_schema_for::(), + } + } + } + + impl Name for TypeVariant { + #[must_use] + fn name(&self) -> &'static str { + Self::name(self) + } + } + + impl Variants for TypeVariant { + fn variants() -> slice::Iter<'static, TypeVariant> { + Self::VARIANTS.iter() + } + } + + impl core::str::FromStr for TypeVariant { + type Err = Error; + #[allow(clippy::too_many_lines)] + fn from_str(s: &str) -> Result { + match s { + "TestArray" => Ok(Self::TestArray), +"TestArray2" => Ok(Self::TestArray2), + _ => Err(Error::Invalid), + } + } + } + + #[derive(Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord)] + #[cfg_attr( + all(feature = "serde", feature = "alloc"), + derive(serde::Serialize, serde::Deserialize), + serde(rename_all = "snake_case"), + serde(untagged), + )] + #[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] + pub enum Type { + TestArray(Box), +TestArray2(Box), + } + + impl Type { + pub const VARIANTS: [TypeVariant; 2] = [ TypeVariant::TestArray, +TypeVariant::TestArray2, ]; + pub const VARIANTS_STR: [&'static str; 2] = [ "TestArray", +"TestArray2", ]; + + #[cfg(feature = "std")] + #[allow(clippy::too_many_lines)] + pub fn read_xdr(v: TypeVariant, r: &mut Limited) -> Result { + match v { + TypeVariant::TestArray => r.with_limited_depth(|r| Ok(Self::TestArray(Box::new(TestArray::read_xdr(r)?)))), +TypeVariant::TestArray2 => r.with_limited_depth(|r| Ok(Self::TestArray2(Box::new(TestArray2::read_xdr(r)?)))), + } + } + + #[cfg(feature = "base64")] + pub fn read_xdr_base64(v: TypeVariant, r: &mut Limited) -> Result { + let mut dec = Limited::new(base64::read::DecoderReader::new(&mut r.inner, base64::STANDARD), r.limits.clone()); + let t = Self::read_xdr(v, &mut dec)?; + Ok(t) + } + + #[cfg(feature = "std")] + pub fn read_xdr_to_end(v: TypeVariant, r: &mut Limited) -> Result { + let s = Self::read_xdr(v, r)?; + // Check that any further reads, such as this read of one byte, read no + // data, indicating EOF. If a byte is read the data is invalid. + if r.read(&mut [0u8; 1])? 
== 0 { + Ok(s) + } else { + Err(Error::Invalid) + } + } + + #[cfg(feature = "base64")] + pub fn read_xdr_base64_to_end(v: TypeVariant, r: &mut Limited) -> Result { + let mut dec = Limited::new(base64::read::DecoderReader::new(&mut r.inner, base64::STANDARD), r.limits.clone()); + let t = Self::read_xdr_to_end(v, &mut dec)?; + Ok(t) + } + + #[cfg(feature = "std")] + #[allow(clippy::too_many_lines)] + pub fn read_xdr_iter(v: TypeVariant, r: &mut Limited) -> Box> + '_> { + match v { + TypeVariant::TestArray => Box::new(ReadXdrIter::<_, TestArray>::new(&mut r.inner, r.limits.clone()).map(|r| r.map(|t| Self::TestArray(Box::new(t))))), +TypeVariant::TestArray2 => Box::new(ReadXdrIter::<_, TestArray2>::new(&mut r.inner, r.limits.clone()).map(|r| r.map(|t| Self::TestArray2(Box::new(t))))), + } + } + + #[cfg(feature = "std")] + #[allow(clippy::too_many_lines)] + pub fn read_xdr_framed_iter(v: TypeVariant, r: &mut Limited) -> Box> + '_> { + match v { + TypeVariant::TestArray => Box::new(ReadXdrIter::<_, Frame>::new(&mut r.inner, r.limits.clone()).map(|r| r.map(|t| Self::TestArray(Box::new(t.0))))), +TypeVariant::TestArray2 => Box::new(ReadXdrIter::<_, Frame>::new(&mut r.inner, r.limits.clone()).map(|r| r.map(|t| Self::TestArray2(Box::new(t.0))))), + } + } + + #[cfg(feature = "base64")] + #[allow(clippy::too_many_lines)] + pub fn read_xdr_base64_iter(v: TypeVariant, r: &mut Limited) -> Box> + '_> { + let dec = base64::read::DecoderReader::new(&mut r.inner, base64::STANDARD); + match v { + TypeVariant::TestArray => Box::new(ReadXdrIter::<_, TestArray>::new(dec, r.limits.clone()).map(|r| r.map(|t| Self::TestArray(Box::new(t))))), +TypeVariant::TestArray2 => Box::new(ReadXdrIter::<_, TestArray2>::new(dec, r.limits.clone()).map(|r| r.map(|t| Self::TestArray2(Box::new(t))))), + } + } + + #[cfg(feature = "std")] + pub fn from_xdr>(v: TypeVariant, bytes: B, limits: Limits) -> Result { + let mut cursor = Limited::new(Cursor::new(bytes.as_ref()), limits); + let t = Self::read_xdr_to_end(v, &mut cursor)?; + Ok(t) + } + + #[cfg(feature = "base64")] + pub fn from_xdr_base64(v: TypeVariant, b64: impl AsRef<[u8]>, limits: Limits) -> Result { + let mut b64_reader = Cursor::new(b64); + let mut dec = Limited::new(base64::read::DecoderReader::new(&mut b64_reader, base64::STANDARD), limits); + let t = Self::read_xdr_to_end(v, &mut dec)?; + Ok(t) + } + + #[cfg(all(feature = "std", feature = "serde_json"))] + #[allow(clippy::too_many_lines)] + pub fn read_json(v: TypeVariant, r: impl Read) -> Result { + match v { + TypeVariant::TestArray => Ok(Self::TestArray(Box::new(serde_json::from_reader(r)?))), +TypeVariant::TestArray2 => Ok(Self::TestArray2(Box::new(serde_json::from_reader(r)?))), + } + } + + #[cfg(feature = "alloc")] + #[must_use] + #[allow(clippy::too_many_lines)] + pub fn value(&self) -> &dyn core::any::Any { + #[allow(clippy::match_same_arms)] + match self { + Self::TestArray(ref v) => v.as_ref(), +Self::TestArray2(ref v) => v.as_ref(), + } + } + + #[must_use] + #[allow(clippy::too_many_lines)] + pub const fn name(&self) -> &'static str { + match self { + Self::TestArray(_) => "TestArray", +Self::TestArray2(_) => "TestArray2", + } + } + + #[must_use] + #[allow(clippy::too_many_lines)] + pub const fn variants() -> [TypeVariant; 2] { + Self::VARIANTS + } + + #[must_use] + #[allow(clippy::too_many_lines)] + pub const fn variant(&self) -> TypeVariant { + match self { + Self::TestArray(_) => TypeVariant::TestArray, +Self::TestArray2(_) => TypeVariant::TestArray2, + } + } + } + + impl Name for Type { + #[must_use] + 
fn name(&self) -> &'static str { + Self::name(self) + } + } + + impl Variants for Type { + fn variants() -> slice::Iter<'static, TypeVariant> { + Self::VARIANTS.iter() + } + } + + impl WriteXdr for Type { + #[cfg(feature = "std")] + #[allow(clippy::too_many_lines)] + fn write_xdr(&self, w: &mut Limited) -> Result<()> { + match self { + Self::TestArray(v) => v.write_xdr(w), +Self::TestArray2(v) => v.write_xdr(w), + } + } + } diff --git a/spec/output/generator_spec_rust_custom_jsonschema_impls/enum.x/MyXDR.rs b/spec/output/generator_spec_rust_custom_jsonschema_impls/enum.x/MyXDR.rs new file mode 100644 index 000000000..f85776cdd --- /dev/null +++ b/spec/output/generator_spec_rust_custom_jsonschema_impls/enum.x/MyXDR.rs @@ -0,0 +1,3431 @@ +// Module is generated from: +// spec/fixtures/generator/enum.x + +#![allow(clippy::missing_errors_doc, clippy::unreadable_literal)] + +/// `XDR_FILES_SHA256` is a list of pairs of source files and their SHA256 hashes. +pub const XDR_FILES_SHA256: [(&str, &str); 1] = [ + ("spec/fixtures/generator/enum.x", "35cf5e97e2057039640ed260e8b38bb2733a3c3ca8529c93877bdec02a999d7f") +]; + +use core::{array::TryFromSliceError, fmt, fmt::Debug, marker::Sized, ops::Deref, slice}; + +#[cfg(feature = "std")] +use core::marker::PhantomData; + +// When feature alloc is turned off use static lifetime Box and Vec types. +#[cfg(not(feature = "alloc"))] +mod noalloc { + pub mod boxed { + pub type Box = &'static T; + } + pub mod vec { + pub type Vec = &'static [T]; + } +} +#[cfg(not(feature = "alloc"))] +use noalloc::{boxed::Box, vec::Vec}; + +// When feature std is turned off, but feature alloc is turned on import the +// alloc crate and use its Box and Vec types. +#[cfg(all(not(feature = "std"), feature = "alloc"))] +extern crate alloc; +#[cfg(all(not(feature = "std"), feature = "alloc"))] +use alloc::{ + borrow::ToOwned, + boxed::Box, + string::{FromUtf8Error, String}, + vec::Vec, +}; +#[cfg(feature = "std")] +use std::string::FromUtf8Error; + +#[cfg(feature = "arbitrary")] +use arbitrary::Arbitrary; + +// TODO: Add support for read/write xdr fns when std not available. + +#[cfg(feature = "std")] +use std::{ + error, io, + io::{BufRead, BufReader, Cursor, Read, Write}, +}; + +/// Error contains all errors returned by functions in this crate. It can be +/// compared via `PartialEq`, however any contained IO errors will only be +/// compared on their `ErrorKind`. +#[derive(Debug)] +pub enum Error { + Invalid, + Unsupported, + LengthExceedsMax, + LengthMismatch, + NonZeroPadding, + Utf8Error(core::str::Utf8Error), + #[cfg(feature = "alloc")] + InvalidHex, + #[cfg(feature = "std")] + Io(io::Error), + DepthLimitExceeded, + #[cfg(feature = "serde_json")] + Json(serde_json::Error), + LengthLimitExceeded, +} + +impl PartialEq for Error { + fn eq(&self, other: &Self) -> bool { + match (self, other) { + (Self::Utf8Error(l), Self::Utf8Error(r)) => l == r, + // IO errors cannot be compared, but in the absence of any more + // meaningful way to compare the errors we compare the kind of error + // and ignore the embedded source error or OS error. The main use + // case for comparing errors outputted by the XDR library is for + // error case testing, and a lack of the ability to compare has a + // detrimental affect on failure testing, so this is a tradeoff. 
+ #[cfg(feature = "std")] + (Self::Io(l), Self::Io(r)) => l.kind() == r.kind(), + _ => core::mem::discriminant(self) == core::mem::discriminant(other), + } + } +} + +#[cfg(feature = "std")] +impl error::Error for Error { + #[must_use] + fn source(&self) -> Option<&(dyn error::Error + 'static)> { + match self { + Self::Io(e) => Some(e), + #[cfg(feature = "serde_json")] + Self::Json(e) => Some(e), + _ => None, + } + } +} + +impl fmt::Display for Error { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match self { + Error::Invalid => write!(f, "xdr value invalid"), + Error::Unsupported => write!(f, "xdr value unsupported"), + Error::LengthExceedsMax => write!(f, "xdr value max length exceeded"), + Error::LengthMismatch => write!(f, "xdr value length does not match"), + Error::NonZeroPadding => write!(f, "xdr padding contains non-zero bytes"), + Error::Utf8Error(e) => write!(f, "{e}"), + #[cfg(feature = "alloc")] + Error::InvalidHex => write!(f, "hex invalid"), + #[cfg(feature = "std")] + Error::Io(e) => write!(f, "{e}"), + Error::DepthLimitExceeded => write!(f, "depth limit exceeded"), + #[cfg(feature = "serde_json")] + Error::Json(e) => write!(f, "{e}"), + Error::LengthLimitExceeded => write!(f, "length limit exceeded"), + } + } +} + +impl From for Error { + fn from(_: TryFromSliceError) -> Error { + Error::LengthMismatch + } +} + +impl From for Error { + #[must_use] + fn from(e: core::str::Utf8Error) -> Self { + Error::Utf8Error(e) + } +} + +#[cfg(feature = "alloc")] +impl From for Error { + #[must_use] + fn from(e: FromUtf8Error) -> Self { + Error::Utf8Error(e.utf8_error()) + } +} + +#[cfg(feature = "std")] +impl From for Error { + #[must_use] + fn from(e: io::Error) -> Self { + Error::Io(e) + } +} + +#[cfg(feature = "serde_json")] +impl From for Error { + #[must_use] + fn from(e: serde_json::Error) -> Self { + Error::Json(e) + } +} + +impl From for () { + fn from(_: Error) {} +} + +#[allow(dead_code)] +type Result = core::result::Result; + +/// Name defines types that assign a static name to their value, such as the +/// name given to an identifier in an XDR enum, or the name given to the case in +/// a union. +pub trait Name { + fn name(&self) -> &'static str; +} + +/// Discriminant defines types that may contain a one-of value determined +/// according to the discriminant, and exposes the value of the discriminant for +/// that type, such as in an XDR union. +pub trait Discriminant { + fn discriminant(&self) -> D; +} + +/// Iter defines types that have variants that can be iterated. +pub trait Variants { + fn variants() -> slice::Iter<'static, V> + where + V: Sized; +} + +// Enum defines a type that is represented as an XDR enumeration when encoded. +pub trait Enum: Name + Variants + Sized {} + +// Union defines a type that is represented as an XDR union when encoded. +pub trait Union: Name + Discriminant + Variants +where + D: Sized, +{ +} + +/// `Limits` contains the limits that a limited reader or writer will be +/// constrained to. +#[cfg(feature = "std")] +#[derive(Debug, Clone, Hash, PartialEq, Eq, PartialOrd, Ord)] +pub struct Limits { + /// Defines the maximum depth for recursive calls in `Read/WriteXdr` to + /// prevent stack overflow. + /// + /// The depth limit is akin to limiting stack depth. Its purpose is to + /// prevent the program from hitting the maximum stack size allowed by Rust, + /// which would result in an unrecoverable `SIGABRT`. 
For more information + /// about Rust's stack size limit, refer to the [Rust + /// documentation](https://doc.rust-lang.org/std/thread/#stack-size). + pub depth: u32, + + /// Defines the maximum number of bytes that will be read or written. + pub len: usize, +} + +#[cfg(feature = "std")] +impl Limits { + #[must_use] + pub fn none() -> Self { + Self { + depth: u32::MAX, + len: usize::MAX, + } + } + + #[must_use] + pub fn depth(depth: u32) -> Self { + Limits { + depth, + ..Limits::none() + } + } + + #[must_use] + pub fn len(len: usize) -> Self { + Limits { + len, + ..Limits::none() + } + } +} + +/// `Limited` wraps an object and provides functions for enforcing limits. +/// +/// Intended for use with readers and writers and limiting their reads and +/// writes. +#[cfg(feature = "std")] +pub struct Limited { + pub inner: L, + pub(crate) limits: Limits, +} + +#[cfg(feature = "std")] +impl Limited { + /// Constructs a new `Limited`. + /// + /// - `inner`: The value being limited. + /// - `limits`: The limits to enforce. + pub fn new(inner: L, limits: Limits) -> Self { + Limited { inner, limits } + } + + /// Consume the given length from the internal remaining length limit. + /// + /// ### Errors + /// + /// If the length would consume more length than the remaining length limit + /// allows. + pub(crate) fn consume_len(&mut self, len: usize) -> Result<()> { + if let Some(len) = self.limits.len.checked_sub(len) { + self.limits.len = len; + Ok(()) + } else { + Err(Error::LengthLimitExceeded) + } + } + + /// Consumes a single depth for the duration of the given function. + /// + /// ### Errors + /// + /// If the depth limit is already exhausted. + pub(crate) fn with_limited_depth(&mut self, f: F) -> Result + where + F: FnOnce(&mut Self) -> Result, + { + if let Some(depth) = self.limits.depth.checked_sub(1) { + self.limits.depth = depth; + let res = f(self); + self.limits.depth = self.limits.depth.saturating_add(1); + res + } else { + Err(Error::DepthLimitExceeded) + } + } +} + +#[cfg(feature = "std")] +impl Read for Limited { + /// Forwards the read operation to the wrapped object. + fn read(&mut self, buf: &mut [u8]) -> std::io::Result { + self.inner.read(buf) + } +} + +#[cfg(feature = "std")] +impl BufRead for Limited { + /// Forwards the read operation to the wrapped object. + fn fill_buf(&mut self) -> std::io::Result<&[u8]> { + self.inner.fill_buf() + } + + /// Forwards the read operation to the wrapped object. + fn consume(&mut self, amt: usize) { + self.inner.consume(amt); + } +} + +#[cfg(feature = "std")] +impl Write for Limited { + /// Forwards the write operation to the wrapped object. + fn write(&mut self, buf: &[u8]) -> std::io::Result { + self.inner.write(buf) + } + + /// Forwards the flush operation to the wrapped object. + fn flush(&mut self) -> std::io::Result<()> { + self.inner.flush() + } +} + +#[cfg(feature = "std")] +pub struct ReadXdrIter { + reader: Limited>, + _s: PhantomData, +} + +#[cfg(feature = "std")] +impl ReadXdrIter { + fn new(r: R, limits: Limits) -> Self { + Self { + reader: Limited { + inner: BufReader::new(r), + limits, + }, + _s: PhantomData, + } + } +} + +#[cfg(feature = "std")] +impl Iterator for ReadXdrIter { + type Item = Result; + + // Next reads the internal reader and XDR decodes it into the Self type. If + // the EOF is reached without reading any new bytes `None` is returned. If + // EOF is reached after reading some bytes a truncated entry is assumed an + // an `Error::Io` containing an `UnexpectedEof`. If any other IO error + // occurs it is returned. 
Iteration of this iterator stops naturally when + // `None` is returned, but not when a `Some(Err(...))` is returned. The + // caller is responsible for checking each Result. + fn next(&mut self) -> Option { + // Try to fill the buffer to see if the EOF has been reached or not. + // This happens to effectively peek to see if the stream has finished + // and there are no more items. It is necessary to do this because the + // xdr types in this crate heavily use the `std::io::Read::read_exact` + // method that doesn't distinguish between an EOF at the beginning of a + // read and an EOF after a partial fill of a read_exact. + match self.reader.fill_buf() { + // If the reader has no more data and is unable to fill any new data + // into its internal buf, then the EOF has been reached. + Ok([]) => return None, + // If an error occurs filling the buffer, treat that as an error and stop. + Err(e) => return Some(Err(Error::Io(e))), + // If there is data in the buf available for reading, continue. + Ok([..]) => (), + }; + // Read the buf into the type. + let r = self.reader.with_limited_depth(|dlr| S::read_xdr(dlr)); + match r { + Ok(s) => Some(Ok(s)), + Err(e) => Some(Err(e)), + } + } +} + +pub trait ReadXdr +where + Self: Sized, +{ + /// Read the XDR and construct the type. + /// + /// Read bytes from the given read implementation, decoding the bytes as + /// XDR, and construct the type implementing this interface from those + /// bytes. + /// + /// Just enough bytes are read from the read implementation to construct the + /// type. Any residual bytes remain in the read implementation. + /// + /// All implementations should continue if the read implementation returns + /// [`ErrorKind::Interrupted`](std::io::ErrorKind::Interrupted). + /// + /// Use [`ReadXdR: Read_xdr_to_end`] when the intent is for all bytes in the + /// read implementation to be consumed by the read. + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result; + + /// Construct the type from the XDR bytes base64 encoded. + /// + /// An error is returned if the bytes are not completely consumed by the + /// deserialization. + #[cfg(feature = "base64")] + fn read_xdr_base64(r: &mut Limited) -> Result { + let mut dec = Limited::new( + base64::read::DecoderReader::new(&mut r.inner, base64::STANDARD), + r.limits.clone(), + ); + let t = Self::read_xdr(&mut dec)?; + Ok(t) + } + + /// Read the XDR and construct the type, and consider it an error if the + /// read does not completely consume the read implementation. + /// + /// Read bytes from the given read implementation, decoding the bytes as + /// XDR, and construct the type implementing this interface from those + /// bytes. + /// + /// Just enough bytes are read from the read implementation to construct the + /// type, and then confirm that no further bytes remain. To confirm no + /// further bytes remain additional bytes are attempted to be read from the + /// read implementation. If it is possible to read any residual bytes from + /// the read implementation an error is returned. The read implementation + /// may not be exhaustively read if there are residual bytes, and it is + /// considered undefined how many residual bytes or how much of the residual + /// buffer are consumed in this case. + /// + /// All implementations should continue if the read implementation returns + /// [`ErrorKind::Interrupted`](std::io::ErrorKind::Interrupted). 
+ #[cfg(feature = "std")] + fn read_xdr_to_end(r: &mut Limited) -> Result { + let s = Self::read_xdr(r)?; + // Check that any further reads, such as this read of one byte, read no + // data, indicating EOF. If a byte is read the data is invalid. + if r.read(&mut [0u8; 1])? == 0 { + Ok(s) + } else { + Err(Error::Invalid) + } + } + + /// Construct the type from the XDR bytes base64 encoded. + /// + /// An error is returned if the bytes are not completely consumed by the + /// deserialization. + #[cfg(feature = "base64")] + fn read_xdr_base64_to_end(r: &mut Limited) -> Result { + let mut dec = Limited::new( + base64::read::DecoderReader::new(&mut r.inner, base64::STANDARD), + r.limits.clone(), + ); + let t = Self::read_xdr_to_end(&mut dec)?; + Ok(t) + } + + /// Read the XDR and construct the type. + /// + /// Read bytes from the given read implementation, decoding the bytes as + /// XDR, and construct the type implementing this interface from those + /// bytes. + /// + /// Just enough bytes are read from the read implementation to construct the + /// type. Any residual bytes remain in the read implementation. + /// + /// All implementations should continue if the read implementation returns + /// [`ErrorKind::Interrupted`](std::io::ErrorKind::Interrupted). + /// + /// Use [`ReadXdR: Read_xdr_into_to_end`] when the intent is for all bytes + /// in the read implementation to be consumed by the read. + #[cfg(feature = "std")] + fn read_xdr_into(&mut self, r: &mut Limited) -> Result<()> { + *self = Self::read_xdr(r)?; + Ok(()) + } + + /// Read the XDR into the existing value, and consider it an error if the + /// read does not completely consume the read implementation. + /// + /// Read bytes from the given read implementation, decoding the bytes as + /// XDR, and construct the type implementing this interface from those + /// bytes. + /// + /// Just enough bytes are read from the read implementation to construct the + /// type, and then confirm that no further bytes remain. To confirm no + /// further bytes remain additional bytes are attempted to be read from the + /// read implementation. If it is possible to read any residual bytes from + /// the read implementation an error is returned. The read implementation + /// may not be exhaustively read if there are residual bytes, and it is + /// considered undefined how many residual bytes or how much of the residual + /// buffer are consumed in this case. + /// + /// All implementations should continue if the read implementation returns + /// [`ErrorKind::Interrupted`](std::io::ErrorKind::Interrupted). + #[cfg(feature = "std")] + fn read_xdr_into_to_end(&mut self, r: &mut Limited) -> Result<()> { + Self::read_xdr_into(self, r)?; + // Check that any further reads, such as this read of one byte, read no + // data, indicating EOF. If a byte is read the data is invalid. + if r.read(&mut [0u8; 1])? == 0 { + Ok(()) + } else { + Err(Error::Invalid) + } + } + + /// Create an iterator that reads the read implementation as a stream of + /// values that are read into the implementing type. + /// + /// Read bytes from the given read implementation, decoding the bytes as + /// XDR, and construct the type implementing this interface from those + /// bytes. + /// + /// Just enough bytes are read from the read implementation to construct the + /// type, and then confirm that no further bytes remain. To confirm no + /// further bytes remain additional bytes are attempted to be read from the + /// read implementation. 
If it is possible to read any residual bytes from + /// the read implementation an error is returned. The read implementation + /// may not be exhaustively read if there are residual bytes, and it is + /// considered undefined how many residual bytes or how much of the residual + /// buffer are consumed in this case. + /// + /// All implementations should continue if the read implementation returns + /// [`ErrorKind::Interrupted`](std::io::ErrorKind::Interrupted). + #[cfg(feature = "std")] + fn read_xdr_iter(r: &mut Limited) -> ReadXdrIter<&mut R, Self> { + ReadXdrIter::new(&mut r.inner, r.limits.clone()) + } + + /// Create an iterator that reads the read implementation as a stream of + /// values that are read into the implementing type. + #[cfg(feature = "base64")] + fn read_xdr_base64_iter( + r: &mut Limited, + ) -> ReadXdrIter, Self> { + let dec = base64::read::DecoderReader::new(&mut r.inner, base64::STANDARD); + ReadXdrIter::new(dec, r.limits.clone()) + } + + /// Construct the type from the XDR bytes. + /// + /// An error is returned if the bytes are not completely consumed by the + /// deserialization. + #[cfg(feature = "std")] + fn from_xdr(bytes: impl AsRef<[u8]>, limits: Limits) -> Result { + let mut cursor = Limited::new(Cursor::new(bytes.as_ref()), limits); + let t = Self::read_xdr_to_end(&mut cursor)?; + Ok(t) + } + + /// Construct the type from the XDR bytes base64 encoded. + /// + /// An error is returned if the bytes are not completely consumed by the + /// deserialization. + #[cfg(feature = "base64")] + fn from_xdr_base64(b64: impl AsRef<[u8]>, limits: Limits) -> Result { + let mut b64_reader = Cursor::new(b64); + let mut dec = Limited::new( + base64::read::DecoderReader::new(&mut b64_reader, base64::STANDARD), + limits, + ); + let t = Self::read_xdr_to_end(&mut dec)?; + Ok(t) + } +} + +pub trait WriteXdr { + #[cfg(feature = "std")] + fn write_xdr(&self, w: &mut Limited) -> Result<()>; + + #[cfg(feature = "std")] + fn to_xdr(&self, limits: Limits) -> Result> { + let mut cursor = Limited::new(Cursor::new(vec![]), limits); + self.write_xdr(&mut cursor)?; + let bytes = cursor.inner.into_inner(); + Ok(bytes) + } + + #[cfg(feature = "base64")] + fn to_xdr_base64(&self, limits: Limits) -> Result { + let mut enc = Limited::new( + base64::write::EncoderStringWriter::new(base64::STANDARD), + limits, + ); + self.write_xdr(&mut enc)?; + let b64 = enc.inner.into_inner(); + Ok(b64) + } +} + +/// `Pad_len` returns the number of bytes to pad an XDR value of the given +/// length to make the final serialized size a multiple of 4. +#[cfg(feature = "std")] +fn pad_len(len: usize) -> usize { + (4 - (len % 4)) % 4 +} + +impl ReadXdr for i32 { + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result { + let mut b = [0u8; 4]; + r.with_limited_depth(|r| { + r.consume_len(b.len())?; + r.read_exact(&mut b)?; + Ok(i32::from_be_bytes(b)) + }) + } +} + +impl WriteXdr for i32 { + #[cfg(feature = "std")] + fn write_xdr(&self, w: &mut Limited) -> Result<()> { + let b: [u8; 4] = self.to_be_bytes(); + w.with_limited_depth(|w| { + w.consume_len(b.len())?; + Ok(w.write_all(&b)?) 
+ }) + } +} + +impl ReadXdr for u32 { + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result { + let mut b = [0u8; 4]; + r.with_limited_depth(|r| { + r.consume_len(b.len())?; + r.read_exact(&mut b)?; + Ok(u32::from_be_bytes(b)) + }) + } +} + +impl WriteXdr for u32 { + #[cfg(feature = "std")] + fn write_xdr(&self, w: &mut Limited) -> Result<()> { + let b: [u8; 4] = self.to_be_bytes(); + w.with_limited_depth(|w| { + w.consume_len(b.len())?; + Ok(w.write_all(&b)?) + }) + } +} + +impl ReadXdr for i64 { + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result { + let mut b = [0u8; 8]; + r.with_limited_depth(|r| { + r.consume_len(b.len())?; + r.read_exact(&mut b)?; + Ok(i64::from_be_bytes(b)) + }) + } +} + +impl WriteXdr for i64 { + #[cfg(feature = "std")] + fn write_xdr(&self, w: &mut Limited) -> Result<()> { + let b: [u8; 8] = self.to_be_bytes(); + w.with_limited_depth(|w| { + w.consume_len(b.len())?; + Ok(w.write_all(&b)?) + }) + } +} + +impl ReadXdr for u64 { + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result { + let mut b = [0u8; 8]; + r.with_limited_depth(|r| { + r.consume_len(b.len())?; + r.read_exact(&mut b)?; + Ok(u64::from_be_bytes(b)) + }) + } +} + +impl WriteXdr for u64 { + #[cfg(feature = "std")] + fn write_xdr(&self, w: &mut Limited) -> Result<()> { + let b: [u8; 8] = self.to_be_bytes(); + w.with_limited_depth(|w| { + w.consume_len(b.len())?; + Ok(w.write_all(&b)?) + }) + } +} + +impl ReadXdr for f32 { + #[cfg(feature = "std")] + fn read_xdr(_r: &mut Limited) -> Result { + todo!() + } +} + +impl WriteXdr for f32 { + #[cfg(feature = "std")] + fn write_xdr(&self, _w: &mut Limited) -> Result<()> { + todo!() + } +} + +impl ReadXdr for f64 { + #[cfg(feature = "std")] + fn read_xdr(_r: &mut Limited) -> Result { + todo!() + } +} + +impl WriteXdr for f64 { + #[cfg(feature = "std")] + fn write_xdr(&self, _w: &mut Limited) -> Result<()> { + todo!() + } +} + +impl ReadXdr for bool { + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result { + r.with_limited_depth(|r| { + let i = u32::read_xdr(r)?; + let b = i == 1; + Ok(b) + }) + } +} + +impl WriteXdr for bool { + #[cfg(feature = "std")] + fn write_xdr(&self, w: &mut Limited) -> Result<()> { + w.with_limited_depth(|w| { + let i = u32::from(*self); // true = 1, false = 0 + i.write_xdr(w) + }) + } +} + +impl ReadXdr for Option { + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result { + r.with_limited_depth(|r| { + let i = u32::read_xdr(r)?; + match i { + 0 => Ok(None), + 1 => { + let t = T::read_xdr(r)?; + Ok(Some(t)) + } + _ => Err(Error::Invalid), + } + }) + } +} + +impl WriteXdr for Option { + #[cfg(feature = "std")] + fn write_xdr(&self, w: &mut Limited) -> Result<()> { + w.with_limited_depth(|w| { + if let Some(t) = self { + 1u32.write_xdr(w)?; + t.write_xdr(w)?; + } else { + 0u32.write_xdr(w)?; + } + Ok(()) + }) + } +} + +impl ReadXdr for Box { + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result { + r.with_limited_depth(|r| Ok(Box::new(T::read_xdr(r)?))) + } +} + +impl WriteXdr for Box { + #[cfg(feature = "std")] + fn write_xdr(&self, w: &mut Limited) -> Result<()> { + w.with_limited_depth(|w| T::write_xdr(self, w)) + } +} + +impl ReadXdr for () { + #[cfg(feature = "std")] + fn read_xdr(_r: &mut Limited) -> Result { + Ok(()) + } +} + +impl WriteXdr for () { + #[cfg(feature = "std")] + fn write_xdr(&self, _w: &mut Limited) -> Result<()> { + Ok(()) + } +} + +impl ReadXdr for [u8; N] { + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result { + 
r.with_limited_depth(|r| { + r.consume_len(N)?; + let padding = pad_len(N); + r.consume_len(padding)?; + let mut arr = [0u8; N]; + r.read_exact(&mut arr)?; + let pad = &mut [0u8; 3][..padding]; + r.read_exact(pad)?; + if pad.iter().any(|b| *b != 0) { + return Err(Error::NonZeroPadding); + } + Ok(arr) + }) + } +} + +impl WriteXdr for [u8; N] { + #[cfg(feature = "std")] + fn write_xdr(&self, w: &mut Limited) -> Result<()> { + w.with_limited_depth(|w| { + w.consume_len(N)?; + let padding = pad_len(N); + w.consume_len(padding)?; + w.write_all(self)?; + w.write_all(&[0u8; 3][..padding])?; + Ok(()) + }) + } +} + +impl ReadXdr for [T; N] { + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result { + r.with_limited_depth(|r| { + let mut vec = Vec::with_capacity(N); + for _ in 0..N { + let t = T::read_xdr(r)?; + vec.push(t); + } + let arr: [T; N] = vec.try_into().unwrap_or_else(|_: Vec| unreachable!()); + Ok(arr) + }) + } +} + +impl WriteXdr for [T; N] { + #[cfg(feature = "std")] + fn write_xdr(&self, w: &mut Limited) -> Result<()> { + w.with_limited_depth(|w| { + for t in self { + t.write_xdr(w)?; + } + Ok(()) + }) + } +} + +// VecM ------------------------------------------------------------------------ + +#[cfg(feature = "alloc")] +#[derive(Debug, Clone, Hash, PartialEq, Eq, PartialOrd, Ord)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +#[cfg_attr(feature = "arbitrary", derive(Arbitrary))] +pub struct VecM(Vec); + +#[cfg(not(feature = "alloc"))] +#[derive(Debug, Clone, Hash, PartialEq, Eq, PartialOrd, Ord)] +#[cfg_attr(feature = "arbitrary", derive(Arbitrary))] +pub struct VecM(Vec) +where + T: 'static; + +impl Deref for VecM { + type Target = Vec; + + fn deref(&self) -> &Self::Target { + &self.0 + } +} + +impl Default for VecM { + fn default() -> Self { + Self(Vec::default()) + } +} + +#[cfg(feature = "schemars")] +impl schemars::JsonSchema for VecM { + fn schema_name() -> String { + format!("VecM<{}, {}>", T::schema_name(), MAX) + } + + fn is_referenceable() -> bool { + false + } + + fn json_schema(gen: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema { + let schema = Vec::::json_schema(gen); + if let schemars::schema::Schema::Object(mut schema) = schema { + if let Some(array) = schema.array.clone() { + schema.array = Some(Box::new(schemars::schema::ArrayValidation { + max_items: Some(MAX), + ..*array + })); + } + schema.into() + } else { + schema + } + } +} + +impl VecM { + pub const MAX_LEN: usize = { MAX as usize }; + + #[must_use] + #[allow(clippy::unused_self)] + pub fn max_len(&self) -> usize { + Self::MAX_LEN + } + + #[must_use] + pub fn as_vec(&self) -> &Vec { + self.as_ref() + } +} + +impl VecM { + #[must_use] + #[cfg(feature = "alloc")] + pub fn to_vec(&self) -> Vec { + self.into() + } + + #[must_use] + pub fn into_vec(self) -> Vec { + self.into() + } +} + +impl VecM { + #[cfg(feature = "alloc")] + pub fn to_string(&self) -> Result { + self.try_into() + } + + #[cfg(feature = "alloc")] + pub fn into_string(self) -> Result { + self.try_into() + } + + #[cfg(feature = "alloc")] + #[must_use] + pub fn to_string_lossy(&self) -> String { + String::from_utf8_lossy(&self.0).into_owned() + } + + #[cfg(feature = "alloc")] + #[must_use] + pub fn into_string_lossy(self) -> String { + String::from_utf8_lossy(&self.0).into_owned() + } +} + +impl VecM { + #[must_use] + pub fn to_option(&self) -> Option { + if self.len() > 0 { + Some(self.0[0].clone()) + } else { + None + } + } +} + +#[cfg(not(feature = "alloc"))] +impl From> for Option { + 
#[must_use] + fn from(v: VecM) -> Self { + v.to_option() + } +} + +#[cfg(feature = "alloc")] +impl VecM { + #[must_use] + pub fn into_option(mut self) -> Option { + self.0.drain(..).next() + } +} + +#[cfg(feature = "alloc")] +impl From> for Option { + #[must_use] + fn from(v: VecM) -> Self { + v.into_option() + } +} + +impl TryFrom> for VecM { + type Error = Error; + + fn try_from(v: Vec) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(VecM(v)) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +impl From> for Vec { + #[must_use] + fn from(v: VecM) -> Self { + v.0 + } +} + +#[cfg(feature = "alloc")] +impl From<&VecM> for Vec { + #[must_use] + fn from(v: &VecM) -> Self { + v.0.clone() + } +} + +impl AsRef> for VecM { + #[must_use] + fn as_ref(&self) -> &Vec { + &self.0 + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&Vec> for VecM { + type Error = Error; + + fn try_from(v: &Vec) -> Result { + v.as_slice().try_into() + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&[T]> for VecM { + type Error = Error; + + fn try_from(v: &[T]) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(VecM(v.to_vec())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +impl AsRef<[T]> for VecM { + #[cfg(feature = "alloc")] + #[must_use] + fn as_ref(&self) -> &[T] { + self.0.as_ref() + } + #[cfg(not(feature = "alloc"))] + #[must_use] + fn as_ref(&self) -> &[T] { + self.0 + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<[T; N]> for VecM { + type Error = Error; + + fn try_from(v: [T; N]) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(VecM(v.to_vec())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +#[cfg(feature = "alloc")] +impl TryFrom> for [T; N] { + type Error = VecM; + + fn try_from(v: VecM) -> core::result::Result { + let s: [T; N] = v.0.try_into().map_err(|v: Vec| VecM::(v))?; + Ok(s) + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&[T; N]> for VecM { + type Error = Error; + + fn try_from(v: &[T; N]) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(VecM(v.to_vec())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +#[cfg(not(feature = "alloc"))] +impl TryFrom<&'static [T; N]> for VecM { + type Error = Error; + + fn try_from(v: &'static [T; N]) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(VecM(v)) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&String> for VecM { + type Error = Error; + + fn try_from(v: &String) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(VecM(v.as_bytes().to_vec())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +#[cfg(feature = "alloc")] +impl TryFrom for VecM { + type Error = Error; + + fn try_from(v: String) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(VecM(v.into())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +#[cfg(feature = "alloc")] +impl TryFrom> for String { + type Error = Error; + + fn try_from(v: VecM) -> Result { + Ok(String::from_utf8(v.0)?) 
+ } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&VecM> for String { + type Error = Error; + + fn try_from(v: &VecM) -> Result { + Ok(core::str::from_utf8(v.as_ref())?.to_owned()) + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&str> for VecM { + type Error = Error; + + fn try_from(v: &str) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(VecM(v.into())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +#[cfg(not(feature = "alloc"))] +impl TryFrom<&'static str> for VecM { + type Error = Error; + + fn try_from(v: &'static str) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(VecM(v.as_bytes())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +impl<'a, const MAX: u32> TryFrom<&'a VecM> for &'a str { + type Error = Error; + + fn try_from(v: &'a VecM) -> Result { + Ok(core::str::from_utf8(v.as_ref())?) + } +} + +impl ReadXdr for VecM { + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result { + r.with_limited_depth(|r| { + let len: u32 = u32::read_xdr(r)?; + if len > MAX { + return Err(Error::LengthExceedsMax); + } + + r.consume_len(len as usize)?; + let padding = pad_len(len as usize); + r.consume_len(padding)?; + + let mut vec = vec![0u8; len as usize]; + r.read_exact(&mut vec)?; + + let pad = &mut [0u8; 3][..padding]; + r.read_exact(pad)?; + if pad.iter().any(|b| *b != 0) { + return Err(Error::NonZeroPadding); + } + + Ok(VecM(vec)) + }) + } +} + +impl WriteXdr for VecM { + #[cfg(feature = "std")] + fn write_xdr(&self, w: &mut Limited) -> Result<()> { + w.with_limited_depth(|w| { + let len: u32 = self.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + len.write_xdr(w)?; + + w.consume_len(self.len())?; + let padding = pad_len(self.len()); + w.consume_len(padding)?; + + w.write_all(&self.0)?; + + w.write_all(&[0u8; 3][..padding])?; + + Ok(()) + }) + } +} + +impl ReadXdr for VecM { + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result { + r.with_limited_depth(|r| { + let len = u32::read_xdr(r)?; + if len > MAX { + return Err(Error::LengthExceedsMax); + } + + let mut vec = Vec::new(); + for _ in 0..len { + let t = T::read_xdr(r)?; + vec.push(t); + } + + Ok(VecM(vec)) + }) + } +} + +impl WriteXdr for VecM { + #[cfg(feature = "std")] + fn write_xdr(&self, w: &mut Limited) -> Result<()> { + w.with_limited_depth(|w| { + let len: u32 = self.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + len.write_xdr(w)?; + + for t in &self.0 { + t.write_xdr(w)?; + } + + Ok(()) + }) + } +} + +// BytesM ------------------------------------------------------------------------ + +#[cfg(feature = "alloc")] +#[derive(Clone, Hash, PartialEq, Eq, PartialOrd, Ord)] +#[cfg_attr( + feature = "serde", + derive(serde_with::SerializeDisplay, serde_with::DeserializeFromStr) +)] +#[cfg_attr(feature = "arbitrary", derive(Arbitrary))] +pub struct BytesM(Vec); + +#[cfg(not(feature = "alloc"))] +#[derive(Clone, Hash, PartialEq, Eq, PartialOrd, Ord)] +#[cfg_attr(feature = "arbitrary", derive(Arbitrary))] +pub struct BytesM(Vec); + +impl core::fmt::Display for BytesM { + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + #[cfg(feature = "alloc")] + let v = &self.0; + #[cfg(not(feature = "alloc"))] + let v = self.0; + for b in v { + write!(f, "{b:02x}")?; + } + Ok(()) + } +} + +impl core::fmt::Debug for BytesM { + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + #[cfg(feature = "alloc")] + let v = &self.0; + 
#[cfg(not(feature = "alloc"))] + let v = self.0; + write!(f, "BytesM(")?; + for b in v { + write!(f, "{b:02x}")?; + } + write!(f, ")")?; + Ok(()) + } +} + +#[cfg(feature = "alloc")] +impl core::str::FromStr for BytesM { + type Err = Error; + fn from_str(s: &str) -> core::result::Result { + hex::decode(s).map_err(|_| Error::InvalidHex)?.try_into() + } +} + +impl Deref for BytesM { + type Target = Vec; + + fn deref(&self) -> &Self::Target { + &self.0 + } +} + +#[cfg(feature = "schemars")] +impl schemars::JsonSchema for BytesM { + fn schema_name() -> String { + format!("BytesM<{MAX}>") + } + + fn is_referenceable() -> bool { + false + } + + fn json_schema(gen: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema { + let schema = String::json_schema(gen); + if let schemars::schema::Schema::Object(mut schema) = schema { + schema.extensions.insert( + "contentEncoding".to_owned(), + serde_json::Value::String("hex".to_string()), + ); + schema.extensions.insert( + "contentMediaType".to_owned(), + serde_json::Value::String("application/binary".to_string()), + ); + let string = *schema.string.unwrap_or_default().clone(); + schema.string = Some(Box::new(schemars::schema::StringValidation { + max_length: MAX.checked_mul(2).map(Some).unwrap_or_default(), + min_length: None, + ..string + })); + schema.into() + } else { + schema + } + } +} + +impl Default for BytesM { + fn default() -> Self { + Self(Vec::default()) + } +} + +impl BytesM { + pub const MAX_LEN: usize = { MAX as usize }; + + #[must_use] + #[allow(clippy::unused_self)] + pub fn max_len(&self) -> usize { + Self::MAX_LEN + } + + #[must_use] + pub fn as_vec(&self) -> &Vec { + self.as_ref() + } +} + +impl BytesM { + #[must_use] + #[cfg(feature = "alloc")] + pub fn to_vec(&self) -> Vec { + self.into() + } + + #[must_use] + pub fn into_vec(self) -> Vec { + self.into() + } +} + +impl BytesM { + #[cfg(feature = "alloc")] + pub fn to_string(&self) -> Result { + self.try_into() + } + + #[cfg(feature = "alloc")] + pub fn into_string(self) -> Result { + self.try_into() + } + + #[cfg(feature = "alloc")] + #[must_use] + pub fn to_string_lossy(&self) -> String { + String::from_utf8_lossy(&self.0).into_owned() + } + + #[cfg(feature = "alloc")] + #[must_use] + pub fn into_string_lossy(self) -> String { + String::from_utf8_lossy(&self.0).into_owned() + } +} + +impl TryFrom> for BytesM { + type Error = Error; + + fn try_from(v: Vec) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(BytesM(v)) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +impl From> for Vec { + #[must_use] + fn from(v: BytesM) -> Self { + v.0 + } +} + +#[cfg(feature = "alloc")] +impl From<&BytesM> for Vec { + #[must_use] + fn from(v: &BytesM) -> Self { + v.0.clone() + } +} + +impl AsRef> for BytesM { + #[must_use] + fn as_ref(&self) -> &Vec { + &self.0 + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&Vec> for BytesM { + type Error = Error; + + fn try_from(v: &Vec) -> Result { + v.as_slice().try_into() + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&[u8]> for BytesM { + type Error = Error; + + fn try_from(v: &[u8]) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(BytesM(v.to_vec())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +impl AsRef<[u8]> for BytesM { + #[cfg(feature = "alloc")] + #[must_use] + fn as_ref(&self) -> &[u8] { + self.0.as_ref() + } + #[cfg(not(feature = "alloc"))] + #[must_use] + fn as_ref(&self) -> &[u8] { + self.0 + } +} 
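// Editorial illustration (not part of the generated output): a minimal sketch
// of how the `BytesM` conversions and hex `Display`/`FromStr` defined above
// can be used. The module/test names and the bound `4` are arbitrary, and the
// guard assumes the `std` feature also enables `alloc`.
#[cfg(all(test, feature = "std"))]
mod bytesm_hex_example {
    use super::*;

    #[test]
    fn hex_display_round_trip() {
        // Build a bounded byte string from a Vec<u8>.
        let b: BytesM<4> = vec![0xde, 0xad, 0xbe, 0xef].try_into().unwrap();
        // `Display` renders the contents as lowercase hex.
        assert_eq!(format!("{b}"), "deadbeef");
        // `FromStr` decodes the hex form back into an equal value.
        let parsed: BytesM<4> = "deadbeef".parse().unwrap();
        assert_eq!(parsed, b);
        // Values longer than the bound are rejected.
        assert!(BytesM::<4>::try_from(vec![0u8; 5]).is_err());
    }
}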
+ +#[cfg(feature = "alloc")] +impl TryFrom<[u8; N]> for BytesM { + type Error = Error; + + fn try_from(v: [u8; N]) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(BytesM(v.to_vec())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +#[cfg(feature = "alloc")] +impl TryFrom> for [u8; N] { + type Error = BytesM; + + fn try_from(v: BytesM) -> core::result::Result { + let s: [u8; N] = v.0.try_into().map_err(BytesM::)?; + Ok(s) + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&[u8; N]> for BytesM { + type Error = Error; + + fn try_from(v: &[u8; N]) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(BytesM(v.to_vec())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +#[cfg(not(feature = "alloc"))] +impl TryFrom<&'static [u8; N]> for BytesM { + type Error = Error; + + fn try_from(v: &'static [u8; N]) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(BytesM(v)) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&String> for BytesM { + type Error = Error; + + fn try_from(v: &String) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(BytesM(v.as_bytes().to_vec())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +#[cfg(feature = "alloc")] +impl TryFrom for BytesM { + type Error = Error; + + fn try_from(v: String) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(BytesM(v.into())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +#[cfg(feature = "alloc")] +impl TryFrom> for String { + type Error = Error; + + fn try_from(v: BytesM) -> Result { + Ok(String::from_utf8(v.0)?) + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&BytesM> for String { + type Error = Error; + + fn try_from(v: &BytesM) -> Result { + Ok(core::str::from_utf8(v.as_ref())?.to_owned()) + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&str> for BytesM { + type Error = Error; + + fn try_from(v: &str) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(BytesM(v.into())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +#[cfg(not(feature = "alloc"))] +impl TryFrom<&'static str> for BytesM { + type Error = Error; + + fn try_from(v: &'static str) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(BytesM(v.as_bytes())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +impl<'a, const MAX: u32> TryFrom<&'a BytesM> for &'a str { + type Error = Error; + + fn try_from(v: &'a BytesM) -> Result { + Ok(core::str::from_utf8(v.as_ref())?) 
+ } +} + +impl ReadXdr for BytesM { + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result { + r.with_limited_depth(|r| { + let len: u32 = u32::read_xdr(r)?; + if len > MAX { + return Err(Error::LengthExceedsMax); + } + + r.consume_len(len as usize)?; + let padding = pad_len(len as usize); + r.consume_len(padding)?; + + let mut vec = vec![0u8; len as usize]; + r.read_exact(&mut vec)?; + + let pad = &mut [0u8; 3][..padding]; + r.read_exact(pad)?; + if pad.iter().any(|b| *b != 0) { + return Err(Error::NonZeroPadding); + } + + Ok(BytesM(vec)) + }) + } +} + +impl WriteXdr for BytesM { + #[cfg(feature = "std")] + fn write_xdr(&self, w: &mut Limited) -> Result<()> { + w.with_limited_depth(|w| { + let len: u32 = self.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + len.write_xdr(w)?; + + w.consume_len(self.len())?; + let padding = pad_len(self.len()); + w.consume_len(padding)?; + + w.write_all(&self.0)?; + + w.write_all(&[0u8; 3][..pad_len(len as usize)])?; + + Ok(()) + }) + } +} + +// StringM ------------------------------------------------------------------------ + +/// A string type that contains arbitrary bytes. +/// +/// Convertible, fallibly, to/from a Rust UTF-8 String using +/// [`TryFrom`]/[`TryInto`]/[`StringM::to_utf8_string`]. +/// +/// Convertible, lossyly, to a Rust UTF-8 String using +/// [`StringM::to_utf8_string_lossy`]. +/// +/// Convertible to/from escaped printable-ASCII using +/// [`Display`]/[`ToString`]/[`FromStr`]. + +#[cfg(feature = "alloc")] +#[derive(Clone, Hash, PartialEq, Eq, PartialOrd, Ord)] +#[cfg_attr( + feature = "serde", + derive(serde_with::SerializeDisplay, serde_with::DeserializeFromStr) +)] +#[cfg_attr(feature = "arbitrary", derive(Arbitrary))] +pub struct StringM(Vec); + +#[cfg(not(feature = "alloc"))] +#[derive(Clone, Hash, PartialEq, Eq, PartialOrd, Ord)] +#[cfg_attr(feature = "arbitrary", derive(Arbitrary))] +pub struct StringM(Vec); + +impl core::fmt::Display for StringM { + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + #[cfg(feature = "alloc")] + let v = &self.0; + #[cfg(not(feature = "alloc"))] + let v = self.0; + for b in escape_bytes::Escape::new(v) { + write!(f, "{}", b as char)?; + } + Ok(()) + } +} + +impl core::fmt::Debug for StringM { + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + #[cfg(feature = "alloc")] + let v = &self.0; + #[cfg(not(feature = "alloc"))] + let v = self.0; + write!(f, "StringM(")?; + for b in escape_bytes::Escape::new(v) { + write!(f, "{}", b as char)?; + } + write!(f, ")")?; + Ok(()) + } +} + +#[cfg(feature = "alloc")] +impl core::str::FromStr for StringM { + type Err = Error; + fn from_str(s: &str) -> core::result::Result { + let b = escape_bytes::unescape(s.as_bytes()).map_err(|_| Error::Invalid)?; + Ok(Self(b)) + } +} + +impl Deref for StringM { + type Target = Vec; + + fn deref(&self) -> &Self::Target { + &self.0 + } +} + +impl Default for StringM { + fn default() -> Self { + Self(Vec::default()) + } +} + +#[cfg(feature = "schemars")] +impl schemars::JsonSchema for StringM { + fn schema_name() -> String { + format!("StringM<{MAX}>") + } + + fn json_schema(gen: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema { + let schema = String::json_schema(gen); + if let schemars::schema::Schema::Object(mut schema) = schema { + let string = *schema.string.unwrap_or_default().clone(); + schema.string = Some(Box::new(schemars::schema::StringValidation { + max_length: Some(MAX), + ..string + })); + schema.into() + } else { + schema + } + } +} + 
+impl StringM { + pub const MAX_LEN: usize = { MAX as usize }; + + #[must_use] + #[allow(clippy::unused_self)] + pub fn max_len(&self) -> usize { + Self::MAX_LEN + } + + #[must_use] + pub fn as_vec(&self) -> &Vec { + self.as_ref() + } +} + +impl StringM { + #[must_use] + #[cfg(feature = "alloc")] + pub fn to_vec(&self) -> Vec { + self.into() + } + + #[must_use] + pub fn into_vec(self) -> Vec { + self.into() + } +} + +impl StringM { + #[cfg(feature = "alloc")] + pub fn to_utf8_string(&self) -> Result { + self.try_into() + } + + #[cfg(feature = "alloc")] + pub fn into_utf8_string(self) -> Result { + self.try_into() + } + + #[cfg(feature = "alloc")] + #[must_use] + pub fn to_utf8_string_lossy(&self) -> String { + String::from_utf8_lossy(&self.0).into_owned() + } + + #[cfg(feature = "alloc")] + #[must_use] + pub fn into_utf8_string_lossy(self) -> String { + String::from_utf8_lossy(&self.0).into_owned() + } +} + +impl TryFrom> for StringM { + type Error = Error; + + fn try_from(v: Vec) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(StringM(v)) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +impl From> for Vec { + #[must_use] + fn from(v: StringM) -> Self { + v.0 + } +} + +#[cfg(feature = "alloc")] +impl From<&StringM> for Vec { + #[must_use] + fn from(v: &StringM) -> Self { + v.0.clone() + } +} + +impl AsRef> for StringM { + #[must_use] + fn as_ref(&self) -> &Vec { + &self.0 + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&Vec> for StringM { + type Error = Error; + + fn try_from(v: &Vec) -> Result { + v.as_slice().try_into() + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&[u8]> for StringM { + type Error = Error; + + fn try_from(v: &[u8]) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(StringM(v.to_vec())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +impl AsRef<[u8]> for StringM { + #[cfg(feature = "alloc")] + #[must_use] + fn as_ref(&self) -> &[u8] { + self.0.as_ref() + } + #[cfg(not(feature = "alloc"))] + #[must_use] + fn as_ref(&self) -> &[u8] { + self.0 + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<[u8; N]> for StringM { + type Error = Error; + + fn try_from(v: [u8; N]) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(StringM(v.to_vec())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +#[cfg(feature = "alloc")] +impl TryFrom> for [u8; N] { + type Error = StringM; + + fn try_from(v: StringM) -> core::result::Result { + let s: [u8; N] = v.0.try_into().map_err(StringM::)?; + Ok(s) + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&[u8; N]> for StringM { + type Error = Error; + + fn try_from(v: &[u8; N]) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(StringM(v.to_vec())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +#[cfg(not(feature = "alloc"))] +impl TryFrom<&'static [u8; N]> for StringM { + type Error = Error; + + fn try_from(v: &'static [u8; N]) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(StringM(v)) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&String> for StringM { + type Error = Error; + + fn try_from(v: &String) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(StringM(v.as_bytes().to_vec())) + } else { + 
Err(Error::LengthExceedsMax) + } + } +} + +#[cfg(feature = "alloc")] +impl TryFrom for StringM { + type Error = Error; + + fn try_from(v: String) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(StringM(v.into())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +#[cfg(feature = "alloc")] +impl TryFrom> for String { + type Error = Error; + + fn try_from(v: StringM) -> Result { + Ok(String::from_utf8(v.0)?) + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&StringM> for String { + type Error = Error; + + fn try_from(v: &StringM) -> Result { + Ok(core::str::from_utf8(v.as_ref())?.to_owned()) + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&str> for StringM { + type Error = Error; + + fn try_from(v: &str) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(StringM(v.into())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +#[cfg(not(feature = "alloc"))] +impl TryFrom<&'static str> for StringM { + type Error = Error; + + fn try_from(v: &'static str) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(StringM(v.as_bytes())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +impl<'a, const MAX: u32> TryFrom<&'a StringM> for &'a str { + type Error = Error; + + fn try_from(v: &'a StringM) -> Result { + Ok(core::str::from_utf8(v.as_ref())?) + } +} + +impl ReadXdr for StringM { + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result { + r.with_limited_depth(|r| { + let len: u32 = u32::read_xdr(r)?; + if len > MAX { + return Err(Error::LengthExceedsMax); + } + + r.consume_len(len as usize)?; + let padding = pad_len(len as usize); + r.consume_len(padding)?; + + let mut vec = vec![0u8; len as usize]; + r.read_exact(&mut vec)?; + + let pad = &mut [0u8; 3][..padding]; + r.read_exact(pad)?; + if pad.iter().any(|b| *b != 0) { + return Err(Error::NonZeroPadding); + } + + Ok(StringM(vec)) + }) + } +} + +impl WriteXdr for StringM { + #[cfg(feature = "std")] + fn write_xdr(&self, w: &mut Limited) -> Result<()> { + w.with_limited_depth(|w| { + let len: u32 = self.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + len.write_xdr(w)?; + + w.consume_len(self.len())?; + let padding = pad_len(self.len()); + w.consume_len(padding)?; + + w.write_all(&self.0)?; + + w.write_all(&[0u8; 3][..padding])?; + + Ok(()) + }) + } +} + +// Frame ------------------------------------------------------------------------ + +#[derive(Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord)] +#[cfg_attr( + all(feature = "serde", feature = "alloc"), + derive(serde::Serialize, serde::Deserialize), + serde(rename_all = "snake_case") +)] +pub struct Frame(pub T) +where + T: ReadXdr; + +#[cfg(feature = "schemars")] +impl schemars::JsonSchema for Frame { + fn schema_name() -> String { + format!("Frame<{}>", T::schema_name()) + } + + fn json_schema(gen: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema { + T::json_schema(gen) + } +} + +impl ReadXdr for Frame +where + T: ReadXdr, +{ + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result { + // Read the frame header value that contains 1 flag-bit and a 33-bit length. + // - The 1 flag bit is 0 when there are more frames for the same record. + // - The 31-bit length is the length of the bytes within the frame that + // follow the frame header. + let header = u32::read_xdr(r)?; + // TODO: Use the length and cap the length we'll read from `r`. 
+ let last_record = header >> 31 == 1; + if last_record { + // Read the record in the frame. + Ok(Self(T::read_xdr(r)?)) + } else { + // TODO: Support reading those additional frames for the same + // record. + Err(Error::Unsupported) + } + } +} + +#[cfg(all(test, feature = "std"))] +mod tests { + use std::io::Cursor; + + use super::*; + + #[test] + pub fn vec_u8_read_without_padding() { + let buf = Cursor::new(vec![0, 0, 0, 4, 2, 2, 2, 2]); + let v = VecM::::read_xdr(&mut Limited::new(buf, Limits::none())).unwrap(); + assert_eq!(v.to_vec(), vec![2, 2, 2, 2]); + } + + #[test] + pub fn vec_u8_read_with_padding() { + let buf = Cursor::new(vec![0, 0, 0, 1, 2, 0, 0, 0]); + let v = VecM::::read_xdr(&mut Limited::new(buf, Limits::none())).unwrap(); + assert_eq!(v.to_vec(), vec![2]); + } + + #[test] + pub fn vec_u8_read_with_insufficient_padding() { + let buf = Cursor::new(vec![0, 0, 0, 1, 2, 0, 0]); + let res = VecM::::read_xdr(&mut Limited::new(buf, Limits::none())); + match res { + Err(Error::Io(_)) => (), + _ => panic!("expected IO error got {res:?}"), + } + } + + #[test] + pub fn vec_u8_read_with_non_zero_padding() { + let buf = Cursor::new(vec![0, 0, 0, 1, 2, 3, 0, 0]); + let res = VecM::::read_xdr(&mut Limited::new(buf, Limits::none())); + match res { + Err(Error::NonZeroPadding) => (), + _ => panic!("expected NonZeroPadding got {res:?}"), + } + } + + #[test] + pub fn vec_u8_write_without_padding() { + let mut buf = vec![]; + let v: VecM = vec![2, 2, 2, 2].try_into().unwrap(); + + v.write_xdr(&mut Limited::new(Cursor::new(&mut buf), Limits::none())) + .unwrap(); + assert_eq!(buf, vec![0, 0, 0, 4, 2, 2, 2, 2]); + } + + #[test] + pub fn vec_u8_write_with_padding() { + let mut buf = vec![]; + let v: VecM = vec![2].try_into().unwrap(); + v.write_xdr(&mut Limited::new(Cursor::new(&mut buf), Limits::none())) + .unwrap(); + assert_eq!(buf, vec![0, 0, 0, 1, 2, 0, 0, 0]); + } + + #[test] + pub fn arr_u8_read_without_padding() { + let buf = Cursor::new(vec![2, 2, 2, 2]); + let v = <[u8; 4]>::read_xdr(&mut Limited::new(buf, Limits::none())).unwrap(); + assert_eq!(v, [2, 2, 2, 2]); + } + + #[test] + pub fn arr_u8_read_with_padding() { + let buf = Cursor::new(vec![2, 0, 0, 0]); + let v = <[u8; 1]>::read_xdr(&mut Limited::new(buf, Limits::none())).unwrap(); + assert_eq!(v, [2]); + } + + #[test] + pub fn arr_u8_read_with_insufficient_padding() { + let buf = Cursor::new(vec![2, 0, 0]); + let res = <[u8; 1]>::read_xdr(&mut Limited::new(buf, Limits::none())); + match res { + Err(Error::Io(_)) => (), + _ => panic!("expected IO error got {res:?}"), + } + } + + #[test] + pub fn arr_u8_read_with_non_zero_padding() { + let buf = Cursor::new(vec![2, 3, 0, 0]); + let res = <[u8; 1]>::read_xdr(&mut Limited::new(buf, Limits::none())); + match res { + Err(Error::NonZeroPadding) => (), + _ => panic!("expected NonZeroPadding got {res:?}"), + } + } + + #[test] + pub fn arr_u8_write_without_padding() { + let mut buf = vec![]; + [2u8, 2, 2, 2] + .write_xdr(&mut Limited::new(Cursor::new(&mut buf), Limits::none())) + .unwrap(); + assert_eq!(buf, vec![2, 2, 2, 2]); + } + + #[test] + pub fn arr_u8_write_with_padding() { + let mut buf = vec![]; + [2u8] + .write_xdr(&mut Limited::new(Cursor::new(&mut buf), Limits::none())) + .unwrap(); + assert_eq!(buf, vec![2, 0, 0, 0]); + } +} + +#[cfg(all(test, feature = "std"))] +mod test { + use super::*; + + #[test] + fn into_option_none() { + let v: VecM = vec![].try_into().unwrap(); + assert_eq!(v.into_option(), None); + } + + #[test] + fn into_option_some() { + let v: VecM<_, 1> = 
vec![1].try_into().unwrap(); + assert_eq!(v.into_option(), Some(1)); + } + + #[test] + fn to_option_none() { + let v: VecM = vec![].try_into().unwrap(); + assert_eq!(v.to_option(), None); + } + + #[test] + fn to_option_some() { + let v: VecM<_, 1> = vec![1].try_into().unwrap(); + assert_eq!(v.to_option(), Some(1)); + } + + #[test] + fn depth_limited_read_write_under_the_limit_success() { + let a: Option>> = Some(Some(Some(5))); + let mut buf = Limited::new(Vec::new(), Limits::depth(4)); + a.write_xdr(&mut buf).unwrap(); + + let mut dlr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::depth(4)); + let a_back: Option>> = ReadXdr::read_xdr(&mut dlr).unwrap(); + assert_eq!(a, a_back); + } + + #[test] + fn write_over_depth_limit_fail() { + let a: Option>> = Some(Some(Some(5))); + let mut buf = Limited::new(Vec::new(), Limits::depth(3)); + let res = a.write_xdr(&mut buf); + match res { + Err(Error::DepthLimitExceeded) => (), + _ => panic!("expected DepthLimitExceeded got {res:?}"), + } + } + + #[test] + fn read_over_depth_limit_fail() { + let read_limits = Limits::depth(3); + let write_limits = Limits::depth(5); + let a: Option>> = Some(Some(Some(5))); + let mut buf = Limited::new(Vec::new(), write_limits); + a.write_xdr(&mut buf).unwrap(); + + let mut dlr = Limited::new(Cursor::new(buf.inner.as_slice()), read_limits); + let res: Result>>> = ReadXdr::read_xdr(&mut dlr); + match res { + Err(Error::DepthLimitExceeded) => (), + _ => panic!("expected DepthLimitExceeded got {res:?}"), + } + } + + #[test] + fn length_limited_read_write_i32() { + // Exact limit, success + let v = 123i32; + let mut buf = Limited::new(Vec::new(), Limits::len(4)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(4)); + let v_back: i32 = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 0); + assert_eq!(v, v_back); + + // Over limit, success + let v = 123i32; + let mut buf = Limited::new(Vec::new(), Limits::len(5)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 1); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(5)); + let v_back: i32 = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 1); + assert_eq!(v, v_back); + + // Write under limit, failure + let v = 123i32; + let mut buf = Limited::new(Vec::new(), Limits::len(3)); + assert_eq!(v.write_xdr(&mut buf), Err(Error::LengthLimitExceeded)); + + // Read under limit, failure + let v = 123i32; + let mut buf = Limited::new(Vec::new(), Limits::len(4)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(3)); + assert_eq!( + ::read_xdr(&mut lr), + Err(Error::LengthLimitExceeded) + ); + } + + #[test] + fn length_limited_read_write_u32() { + // Exact limit, success + let v = 123u32; + let mut buf = Limited::new(Vec::new(), Limits::len(4)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(4)); + let v_back: u32 = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 0); + assert_eq!(v, v_back); + + // Over limit, success + let v = 123u32; + let mut buf = Limited::new(Vec::new(), Limits::len(5)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 1); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(5)); + let v_back: u32 = ReadXdr::read_xdr(&mut lr).unwrap(); + 
assert_eq!(buf.limits.len, 1); + assert_eq!(v, v_back); + + // Write under limit, failure + let v = 123u32; + let mut buf = Limited::new(Vec::new(), Limits::len(3)); + assert_eq!(v.write_xdr(&mut buf), Err(Error::LengthLimitExceeded)); + + // Read under limit, failure + let v = 123u32; + let mut buf = Limited::new(Vec::new(), Limits::len(4)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(3)); + assert_eq!( + ::read_xdr(&mut lr), + Err(Error::LengthLimitExceeded) + ); + } + + #[test] + fn length_limited_read_write_i64() { + // Exact limit, success + let v = 123i64; + let mut buf = Limited::new(Vec::new(), Limits::len(8)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(8)); + let v_back: i64 = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 0); + assert_eq!(v, v_back); + + // Over limit, success + let v = 123i64; + let mut buf = Limited::new(Vec::new(), Limits::len(9)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 1); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(9)); + let v_back: i64 = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 1); + assert_eq!(v, v_back); + + // Write under limit, failure + let v = 123i64; + let mut buf = Limited::new(Vec::new(), Limits::len(7)); + assert_eq!(v.write_xdr(&mut buf), Err(Error::LengthLimitExceeded)); + + // Read under limit, failure + let v = 123i64; + let mut buf = Limited::new(Vec::new(), Limits::len(8)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(7)); + assert_eq!( + ::read_xdr(&mut lr), + Err(Error::LengthLimitExceeded) + ); + } + + #[test] + fn length_limited_read_write_u64() { + // Exact limit, success + let v = 123u64; + let mut buf = Limited::new(Vec::new(), Limits::len(8)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(8)); + let v_back: u64 = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 0); + assert_eq!(v, v_back); + + // Over limit, success + let v = 123u64; + let mut buf = Limited::new(Vec::new(), Limits::len(9)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 1); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(9)); + let v_back: u64 = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 1); + assert_eq!(v, v_back); + + // Write under limit, failure + let v = 123u64; + let mut buf = Limited::new(Vec::new(), Limits::len(7)); + assert_eq!(v.write_xdr(&mut buf), Err(Error::LengthLimitExceeded)); + + // Read under limit, failure + let v = 123u64; + let mut buf = Limited::new(Vec::new(), Limits::len(8)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(7)); + assert_eq!( + ::read_xdr(&mut lr), + Err(Error::LengthLimitExceeded) + ); + } + + #[test] + fn length_limited_read_write_bool() { + // Exact limit, success + let v = true; + let mut buf = Limited::new(Vec::new(), Limits::len(4)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(4)); + let v_back: bool = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 0); 
+ assert_eq!(v, v_back); + + // Over limit, success + let v = true; + let mut buf = Limited::new(Vec::new(), Limits::len(5)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 1); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(5)); + let v_back: bool = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 1); + assert_eq!(v, v_back); + + // Write under limit, failure + let v = true; + let mut buf = Limited::new(Vec::new(), Limits::len(3)); + assert_eq!(v.write_xdr(&mut buf), Err(Error::LengthLimitExceeded)); + + // Read under limit, failure + let v = true; + let mut buf = Limited::new(Vec::new(), Limits::len(4)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(3)); + assert_eq!( + ::read_xdr(&mut lr), + Err(Error::LengthLimitExceeded) + ); + } + + #[test] + fn length_limited_read_write_option() { + // Exact limit, success + let v = Some(true); + let mut buf = Limited::new(Vec::new(), Limits::len(8)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(8)); + let v_back: Option = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 0); + assert_eq!(v, v_back); + + // Over limit, success + let v = Some(true); + let mut buf = Limited::new(Vec::new(), Limits::len(9)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 1); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(9)); + let v_back: Option = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 1); + assert_eq!(v, v_back); + + // Write under limit, failure + let v = Some(true); + let mut buf = Limited::new(Vec::new(), Limits::len(7)); + assert_eq!(v.write_xdr(&mut buf), Err(Error::LengthLimitExceeded)); + + // Read under limit, failure + let v = Some(true); + let mut buf = Limited::new(Vec::new(), Limits::len(8)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(7)); + assert_eq!( + as ReadXdr>::read_xdr(&mut lr), + Err(Error::LengthLimitExceeded) + ); + } + + #[test] + fn length_limited_read_write_array_u8() { + // Exact limit, success + let v = [1u8, 2, 3]; + let mut buf = Limited::new(Vec::new(), Limits::len(4)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(4)); + let v_back: [u8; 3] = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 0); + assert_eq!(v, v_back); + + // Over limit, success + let v = [1u8, 2, 3]; + let mut buf = Limited::new(Vec::new(), Limits::len(5)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 1); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(5)); + let v_back: [u8; 3] = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 1); + assert_eq!(v, v_back); + + // Write under limit, failure + let v = [1u8, 2, 3]; + let mut buf = Limited::new(Vec::new(), Limits::len(3)); + assert_eq!(v.write_xdr(&mut buf), Err(Error::LengthLimitExceeded)); + + // Read under limit, failure + let v = [1u8, 2, 3]; + let mut buf = Limited::new(Vec::new(), Limits::len(4)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(3)); + assert_eq!( + <[u8; 3] as ReadXdr>::read_xdr(&mut lr), + 
Err(Error::LengthLimitExceeded) + ); + } + + #[test] + fn length_limited_read_write_array_type() { + // Exact limit, success + let v = [true, false, true]; + let mut buf = Limited::new(Vec::new(), Limits::len(12)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(12)); + let v_back: [bool; 3] = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 0); + assert_eq!(v, v_back); + + // Over limit, success + let v = [true, false, true]; + let mut buf = Limited::new(Vec::new(), Limits::len(13)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 1); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(13)); + let v_back: [bool; 3] = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 1); + assert_eq!(v, v_back); + + // Write under limit, failure + let v = [true, false, true]; + let mut buf = Limited::new(Vec::new(), Limits::len(11)); + assert_eq!(v.write_xdr(&mut buf), Err(Error::LengthLimitExceeded)); + + // Read under limit, failure + let v = [true, false, true]; + let mut buf = Limited::new(Vec::new(), Limits::len(12)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(11)); + assert_eq!( + <[bool; 3] as ReadXdr>::read_xdr(&mut lr), + Err(Error::LengthLimitExceeded) + ); + } + + #[test] + fn length_limited_read_write_vec() { + // Exact limit, success + let v = VecM::::try_from([1i32, 2, 3]).unwrap(); + let mut buf = Limited::new(Vec::new(), Limits::len(16)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(16)); + let v_back: VecM = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 0); + assert_eq!(v, v_back); + + // Over limit, success + let v = VecM::::try_from([1i32, 2, 3]).unwrap(); + let mut buf = Limited::new(Vec::new(), Limits::len(17)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 1); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(17)); + let v_back: VecM = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 1); + assert_eq!(v, v_back); + + // Write under limit, failure + let v = VecM::::try_from([1i32, 2, 3]).unwrap(); + let mut buf = Limited::new(Vec::new(), Limits::len(15)); + assert_eq!(v.write_xdr(&mut buf), Err(Error::LengthLimitExceeded)); + + // Read under limit, failure + let v = VecM::::try_from([1i32, 2, 3]).unwrap(); + let mut buf = Limited::new(Vec::new(), Limits::len(16)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(15)); + assert_eq!( + as ReadXdr>::read_xdr(&mut lr), + Err(Error::LengthLimitExceeded) + ); + } + + #[test] + fn length_limited_read_write_bytes() { + // Exact limit, success + let v = BytesM::<3>::try_from([1u8, 2, 3]).unwrap(); + let mut buf = Limited::new(Vec::new(), Limits::len(8)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(8)); + let v_back: BytesM<3> = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 0); + assert_eq!(v, v_back); + + // Over limit, success + let v = BytesM::<3>::try_from([1u8, 2, 3]).unwrap(); + let mut buf = Limited::new(Vec::new(), Limits::len(9)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 1); + 
let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(9)); + let v_back: BytesM<3> = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 1); + assert_eq!(v, v_back); + + // Write under limit, failure + let v = BytesM::<3>::try_from([1u8, 2, 3]).unwrap(); + let mut buf = Limited::new(Vec::new(), Limits::len(7)); + assert_eq!(v.write_xdr(&mut buf), Err(Error::LengthLimitExceeded)); + + // Read under limit, failure + let v = BytesM::<3>::try_from([1u8, 2, 3]).unwrap(); + let mut buf = Limited::new(Vec::new(), Limits::len(8)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(7)); + assert_eq!( + as ReadXdr>::read_xdr(&mut lr), + Err(Error::LengthLimitExceeded) + ); + } + + #[test] + fn length_limited_read_write_string() { + // Exact limit, success + let v = StringM::<3>::try_from("123").unwrap(); + let mut buf = Limited::new(Vec::new(), Limits::len(8)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(8)); + let v_back: StringM<3> = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 0); + assert_eq!(v, v_back); + + // Over limit, success + let v = StringM::<3>::try_from("123").unwrap(); + let mut buf = Limited::new(Vec::new(), Limits::len(9)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 1); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(9)); + let v_back: StringM<3> = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 1); + assert_eq!(v, v_back); + + // Write under limit, failure + let v = StringM::<3>::try_from("123").unwrap(); + let mut buf = Limited::new(Vec::new(), Limits::len(7)); + assert_eq!(v.write_xdr(&mut buf), Err(Error::LengthLimitExceeded)); + + // Read under limit, failure + let v = StringM::<3>::try_from("123").unwrap(); + let mut buf = Limited::new(Vec::new(), Limits::len(8)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(7)); + assert_eq!( + as ReadXdr>::read_xdr(&mut lr), + Err(Error::LengthLimitExceeded) + ); + } +} + +#[cfg(all(test, not(feature = "alloc")))] +mod test { + use super::VecM; + + #[test] + fn to_option_none() { + let v: VecM = (&[]).try_into().unwrap(); + assert_eq!(v.to_option(), None); + } + + #[test] + fn to_option_some() { + let v: VecM<_, 1> = (&[1]).try_into().unwrap(); + assert_eq!(v.to_option(), Some(1)); + } +} + +/// MessageType is an XDR Enum defines as: +/// +/// ```text +/// enum MessageType +/// { +/// ERROR_MSG, +/// HELLO, +/// DONT_HAVE, +/// +/// GET_PEERS, // gets a list of peers this guy knows about +/// PEERS, +/// +/// GET_TX_SET, // gets a particular txset by hash +/// TX_SET, +/// +/// GET_VALIDATIONS, // gets validations for a given ledger hash +/// VALIDATIONS, +/// +/// TRANSACTION, //pass on a tx you have heard about +/// JSON_TRANSACTION, +/// +/// // FBA +/// GET_FBA_QUORUMSET, +/// FBA_QUORUMSET, +/// FBA_MESSAGE +/// }; +/// ``` +/// +// enum +#[derive(Clone, Copy, Debug, Hash, PartialEq, Eq, PartialOrd, Ord)] +#[cfg_attr(feature = "arbitrary", derive(Arbitrary))] +#[cfg_attr(all(feature = "serde", feature = "alloc"), derive(serde::Serialize, serde::Deserialize), serde(rename_all = "snake_case"))] +#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] +#[repr(i32)] +pub enum MessageType { + ErrorMsg = 0, + Hello = 1, + DontHave = 2, + 
GetPeers = 3, + Peers = 4, + GetTxSet = 5, + TxSet = 6, + GetValidations = 7, + Validations = 8, + Transaction = 9, + JsonTransaction = 10, + GetFbaQuorumset = 11, + FbaQuorumset = 12, + FbaMessage = 13, +} + + impl MessageType { + pub const VARIANTS: [MessageType; 14] = [ MessageType::ErrorMsg, +MessageType::Hello, +MessageType::DontHave, +MessageType::GetPeers, +MessageType::Peers, +MessageType::GetTxSet, +MessageType::TxSet, +MessageType::GetValidations, +MessageType::Validations, +MessageType::Transaction, +MessageType::JsonTransaction, +MessageType::GetFbaQuorumset, +MessageType::FbaQuorumset, +MessageType::FbaMessage, ]; + pub const VARIANTS_STR: [&'static str; 14] = [ "ErrorMsg", +"Hello", +"DontHave", +"GetPeers", +"Peers", +"GetTxSet", +"TxSet", +"GetValidations", +"Validations", +"Transaction", +"JsonTransaction", +"GetFbaQuorumset", +"FbaQuorumset", +"FbaMessage", ]; + + #[must_use] + pub const fn name(&self) -> &'static str { + match self { + Self::ErrorMsg => "ErrorMsg", +Self::Hello => "Hello", +Self::DontHave => "DontHave", +Self::GetPeers => "GetPeers", +Self::Peers => "Peers", +Self::GetTxSet => "GetTxSet", +Self::TxSet => "TxSet", +Self::GetValidations => "GetValidations", +Self::Validations => "Validations", +Self::Transaction => "Transaction", +Self::JsonTransaction => "JsonTransaction", +Self::GetFbaQuorumset => "GetFbaQuorumset", +Self::FbaQuorumset => "FbaQuorumset", +Self::FbaMessage => "FbaMessage", + } + } + + #[must_use] + pub const fn variants() -> [MessageType; 14] { + Self::VARIANTS + } + } + + impl Name for MessageType { + #[must_use] + fn name(&self) -> &'static str { + Self::name(self) + } + } + + impl Variants for MessageType { + fn variants() -> slice::Iter<'static, MessageType> { + Self::VARIANTS.iter() + } + } + + impl Enum for MessageType {} + + impl fmt::Display for MessageType { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.write_str(self.name()) + } + } + + impl TryFrom for MessageType { + type Error = Error; + + fn try_from(i: i32) -> Result { + let e = match i { + 0 => MessageType::ErrorMsg, +1 => MessageType::Hello, +2 => MessageType::DontHave, +3 => MessageType::GetPeers, +4 => MessageType::Peers, +5 => MessageType::GetTxSet, +6 => MessageType::TxSet, +7 => MessageType::GetValidations, +8 => MessageType::Validations, +9 => MessageType::Transaction, +10 => MessageType::JsonTransaction, +11 => MessageType::GetFbaQuorumset, +12 => MessageType::FbaQuorumset, +13 => MessageType::FbaMessage, + #[allow(unreachable_patterns)] + _ => return Err(Error::Invalid), + }; + Ok(e) + } + } + + impl From for i32 { + #[must_use] + fn from(e: MessageType) -> Self { + e as Self + } + } + + impl ReadXdr for MessageType { + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result { + r.with_limited_depth(|r| { + let e = i32::read_xdr(r)?; + let v: Self = e.try_into()?; + Ok(v) + }) + } + } + + impl WriteXdr for MessageType { + #[cfg(feature = "std")] + fn write_xdr(&self, w: &mut Limited) -> Result<()> { + w.with_limited_depth(|w| { + let i: i32 = (*self).into(); + i.write_xdr(w) + }) + } + } + +/// Color is an XDR Enum defines as: +/// +/// ```text +/// enum Color { +/// RED=0, +/// GREEN=1, +/// BLUE=2 +/// }; +/// ``` +/// +// enum +#[derive(Clone, Copy, Debug, Hash, PartialEq, Eq, PartialOrd, Ord)] +#[cfg_attr(feature = "arbitrary", derive(Arbitrary))] +#[cfg_attr(all(feature = "serde", feature = "alloc"), derive(serde::Serialize, serde::Deserialize), serde(rename_all = "snake_case"))] +#[cfg_attr(feature = "schemars", 
derive(schemars::JsonSchema))] +#[repr(i32)] +pub enum Color { + Red = 0, + Green = 1, + Blue = 2, +} + + impl Color { + pub const VARIANTS: [Color; 3] = [ Color::Red, +Color::Green, +Color::Blue, ]; + pub const VARIANTS_STR: [&'static str; 3] = [ "Red", +"Green", +"Blue", ]; + + #[must_use] + pub const fn name(&self) -> &'static str { + match self { + Self::Red => "Red", +Self::Green => "Green", +Self::Blue => "Blue", + } + } + + #[must_use] + pub const fn variants() -> [Color; 3] { + Self::VARIANTS + } + } + + impl Name for Color { + #[must_use] + fn name(&self) -> &'static str { + Self::name(self) + } + } + + impl Variants for Color { + fn variants() -> slice::Iter<'static, Color> { + Self::VARIANTS.iter() + } + } + + impl Enum for Color {} + + impl fmt::Display for Color { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.write_str(self.name()) + } + } + + impl TryFrom for Color { + type Error = Error; + + fn try_from(i: i32) -> Result { + let e = match i { + 0 => Color::Red, +1 => Color::Green, +2 => Color::Blue, + #[allow(unreachable_patterns)] + _ => return Err(Error::Invalid), + }; + Ok(e) + } + } + + impl From for i32 { + #[must_use] + fn from(e: Color) -> Self { + e as Self + } + } + + impl ReadXdr for Color { + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result { + r.with_limited_depth(|r| { + let e = i32::read_xdr(r)?; + let v: Self = e.try_into()?; + Ok(v) + }) + } + } + + impl WriteXdr for Color { + #[cfg(feature = "std")] + fn write_xdr(&self, w: &mut Limited) -> Result<()> { + w.with_limited_depth(|w| { + let i: i32 = (*self).into(); + i.write_xdr(w) + }) + } + } + +/// Color2 is an XDR Enum defines as: +/// +/// ```text +/// enum Color2 { +/// RED2=RED, +/// GREEN2=1, +/// BLUE2=2 +/// }; +/// ``` +/// +// enum +#[derive(Clone, Copy, Debug, Hash, PartialEq, Eq, PartialOrd, Ord)] +#[cfg_attr(feature = "arbitrary", derive(Arbitrary))] +#[cfg_attr(all(feature = "serde", feature = "alloc"), derive(serde::Serialize, serde::Deserialize), serde(rename_all = "snake_case"))] +#[repr(i32)] +pub enum Color2 { + Red2 = 0, + Green2 = 1, + Blue2 = 2, +} + + impl Color2 { + pub const VARIANTS: [Color2; 3] = [ Color2::Red2, +Color2::Green2, +Color2::Blue2, ]; + pub const VARIANTS_STR: [&'static str; 3] = [ "Red2", +"Green2", +"Blue2", ]; + + #[must_use] + pub const fn name(&self) -> &'static str { + match self { + Self::Red2 => "Red2", +Self::Green2 => "Green2", +Self::Blue2 => "Blue2", + } + } + + #[must_use] + pub const fn variants() -> [Color2; 3] { + Self::VARIANTS + } + } + + impl Name for Color2 { + #[must_use] + fn name(&self) -> &'static str { + Self::name(self) + } + } + + impl Variants for Color2 { + fn variants() -> slice::Iter<'static, Color2> { + Self::VARIANTS.iter() + } + } + + impl Enum for Color2 {} + + impl fmt::Display for Color2 { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.write_str(self.name()) + } + } + + impl TryFrom for Color2 { + type Error = Error; + + fn try_from(i: i32) -> Result { + let e = match i { + 0 => Color2::Red2, +1 => Color2::Green2, +2 => Color2::Blue2, + #[allow(unreachable_patterns)] + _ => return Err(Error::Invalid), + }; + Ok(e) + } + } + + impl From for i32 { + #[must_use] + fn from(e: Color2) -> Self { + e as Self + } + } + + impl ReadXdr for Color2 { + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result { + r.with_limited_depth(|r| { + let e = i32::read_xdr(r)?; + let v: Self = e.try_into()?; + Ok(v) + }) + } + } + + impl WriteXdr for Color2 { + #[cfg(feature = "std")] + fn 
write_xdr(&self, w: &mut Limited) -> Result<()> { + w.with_limited_depth(|w| { + let i: i32 = (*self).into(); + i.write_xdr(w) + }) + } + } + + #[derive(Clone, Copy, Debug, Hash, PartialEq, Eq, PartialOrd, Ord)] + #[cfg_attr( + all(feature = "serde", feature = "alloc"), + derive(serde::Serialize, serde::Deserialize), + serde(rename_all = "snake_case") + )] + #[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] + pub enum TypeVariant { + MessageType, +Color, +Color2, + } + + impl TypeVariant { + pub const VARIANTS: [TypeVariant; 3] = [ TypeVariant::MessageType, +TypeVariant::Color, +TypeVariant::Color2, ]; + pub const VARIANTS_STR: [&'static str; 3] = [ "MessageType", +"Color", +"Color2", ]; + + #[must_use] + #[allow(clippy::too_many_lines)] + pub const fn name(&self) -> &'static str { + match self { + Self::MessageType => "MessageType", +Self::Color => "Color", +Self::Color2 => "Color2", + } + } + + #[must_use] + #[allow(clippy::too_many_lines)] + pub const fn variants() -> [TypeVariant; 3] { + Self::VARIANTS + } + + #[cfg(feature = "schemars")] + #[must_use] + #[allow(clippy::too_many_lines)] + pub fn json_schema(&self, gen: schemars::gen::SchemaGenerator) -> schemars::schema::RootSchema { + match self { + Self::MessageType => gen.into_root_schema_for::(), +Self::Color => gen.into_root_schema_for::(), +Self::Color2 => gen.into_root_schema_for::(), + } + } + } + + impl Name for TypeVariant { + #[must_use] + fn name(&self) -> &'static str { + Self::name(self) + } + } + + impl Variants for TypeVariant { + fn variants() -> slice::Iter<'static, TypeVariant> { + Self::VARIANTS.iter() + } + } + + impl core::str::FromStr for TypeVariant { + type Err = Error; + #[allow(clippy::too_many_lines)] + fn from_str(s: &str) -> Result { + match s { + "MessageType" => Ok(Self::MessageType), +"Color" => Ok(Self::Color), +"Color2" => Ok(Self::Color2), + _ => Err(Error::Invalid), + } + } + } + + #[derive(Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord)] + #[cfg_attr( + all(feature = "serde", feature = "alloc"), + derive(serde::Serialize, serde::Deserialize), + serde(rename_all = "snake_case"), + serde(untagged), + )] + #[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] + pub enum Type { + MessageType(Box), +Color(Box), +Color2(Box), + } + + impl Type { + pub const VARIANTS: [TypeVariant; 3] = [ TypeVariant::MessageType, +TypeVariant::Color, +TypeVariant::Color2, ]; + pub const VARIANTS_STR: [&'static str; 3] = [ "MessageType", +"Color", +"Color2", ]; + + #[cfg(feature = "std")] + #[allow(clippy::too_many_lines)] + pub fn read_xdr(v: TypeVariant, r: &mut Limited) -> Result { + match v { + TypeVariant::MessageType => r.with_limited_depth(|r| Ok(Self::MessageType(Box::new(MessageType::read_xdr(r)?)))), +TypeVariant::Color => r.with_limited_depth(|r| Ok(Self::Color(Box::new(Color::read_xdr(r)?)))), +TypeVariant::Color2 => r.with_limited_depth(|r| Ok(Self::Color2(Box::new(Color2::read_xdr(r)?)))), + } + } + + #[cfg(feature = "base64")] + pub fn read_xdr_base64(v: TypeVariant, r: &mut Limited) -> Result { + let mut dec = Limited::new(base64::read::DecoderReader::new(&mut r.inner, base64::STANDARD), r.limits.clone()); + let t = Self::read_xdr(v, &mut dec)?; + Ok(t) + } + + #[cfg(feature = "std")] + pub fn read_xdr_to_end(v: TypeVariant, r: &mut Limited) -> Result { + let s = Self::read_xdr(v, r)?; + // Check that any further reads, such as this read of one byte, read no + // data, indicating EOF. If a byte is read the data is invalid. + if r.read(&mut [0u8; 1])? 
== 0 { + Ok(s) + } else { + Err(Error::Invalid) + } + } + + #[cfg(feature = "base64")] + pub fn read_xdr_base64_to_end(v: TypeVariant, r: &mut Limited) -> Result { + let mut dec = Limited::new(base64::read::DecoderReader::new(&mut r.inner, base64::STANDARD), r.limits.clone()); + let t = Self::read_xdr_to_end(v, &mut dec)?; + Ok(t) + } + + #[cfg(feature = "std")] + #[allow(clippy::too_many_lines)] + pub fn read_xdr_iter(v: TypeVariant, r: &mut Limited) -> Box> + '_> { + match v { + TypeVariant::MessageType => Box::new(ReadXdrIter::<_, MessageType>::new(&mut r.inner, r.limits.clone()).map(|r| r.map(|t| Self::MessageType(Box::new(t))))), +TypeVariant::Color => Box::new(ReadXdrIter::<_, Color>::new(&mut r.inner, r.limits.clone()).map(|r| r.map(|t| Self::Color(Box::new(t))))), +TypeVariant::Color2 => Box::new(ReadXdrIter::<_, Color2>::new(&mut r.inner, r.limits.clone()).map(|r| r.map(|t| Self::Color2(Box::new(t))))), + } + } + + #[cfg(feature = "std")] + #[allow(clippy::too_many_lines)] + pub fn read_xdr_framed_iter(v: TypeVariant, r: &mut Limited) -> Box> + '_> { + match v { + TypeVariant::MessageType => Box::new(ReadXdrIter::<_, Frame>::new(&mut r.inner, r.limits.clone()).map(|r| r.map(|t| Self::MessageType(Box::new(t.0))))), +TypeVariant::Color => Box::new(ReadXdrIter::<_, Frame>::new(&mut r.inner, r.limits.clone()).map(|r| r.map(|t| Self::Color(Box::new(t.0))))), +TypeVariant::Color2 => Box::new(ReadXdrIter::<_, Frame>::new(&mut r.inner, r.limits.clone()).map(|r| r.map(|t| Self::Color2(Box::new(t.0))))), + } + } + + #[cfg(feature = "base64")] + #[allow(clippy::too_many_lines)] + pub fn read_xdr_base64_iter(v: TypeVariant, r: &mut Limited) -> Box> + '_> { + let dec = base64::read::DecoderReader::new(&mut r.inner, base64::STANDARD); + match v { + TypeVariant::MessageType => Box::new(ReadXdrIter::<_, MessageType>::new(dec, r.limits.clone()).map(|r| r.map(|t| Self::MessageType(Box::new(t))))), +TypeVariant::Color => Box::new(ReadXdrIter::<_, Color>::new(dec, r.limits.clone()).map(|r| r.map(|t| Self::Color(Box::new(t))))), +TypeVariant::Color2 => Box::new(ReadXdrIter::<_, Color2>::new(dec, r.limits.clone()).map(|r| r.map(|t| Self::Color2(Box::new(t))))), + } + } + + #[cfg(feature = "std")] + pub fn from_xdr>(v: TypeVariant, bytes: B, limits: Limits) -> Result { + let mut cursor = Limited::new(Cursor::new(bytes.as_ref()), limits); + let t = Self::read_xdr_to_end(v, &mut cursor)?; + Ok(t) + } + + #[cfg(feature = "base64")] + pub fn from_xdr_base64(v: TypeVariant, b64: impl AsRef<[u8]>, limits: Limits) -> Result { + let mut b64_reader = Cursor::new(b64); + let mut dec = Limited::new(base64::read::DecoderReader::new(&mut b64_reader, base64::STANDARD), limits); + let t = Self::read_xdr_to_end(v, &mut dec)?; + Ok(t) + } + + #[cfg(all(feature = "std", feature = "serde_json"))] + #[allow(clippy::too_many_lines)] + pub fn read_json(v: TypeVariant, r: impl Read) -> Result { + match v { + TypeVariant::MessageType => Ok(Self::MessageType(Box::new(serde_json::from_reader(r)?))), +TypeVariant::Color => Ok(Self::Color(Box::new(serde_json::from_reader(r)?))), +TypeVariant::Color2 => Ok(Self::Color2(Box::new(serde_json::from_reader(r)?))), + } + } + + #[cfg(feature = "alloc")] + #[must_use] + #[allow(clippy::too_many_lines)] + pub fn value(&self) -> &dyn core::any::Any { + #[allow(clippy::match_same_arms)] + match self { + Self::MessageType(ref v) => v.as_ref(), +Self::Color(ref v) => v.as_ref(), +Self::Color2(ref v) => v.as_ref(), + } + } + + #[must_use] + #[allow(clippy::too_many_lines)] + pub const fn 
name(&self) -> &'static str { + match self { + Self::MessageType(_) => "MessageType", +Self::Color(_) => "Color", +Self::Color2(_) => "Color2", + } + } + + #[must_use] + #[allow(clippy::too_many_lines)] + pub const fn variants() -> [TypeVariant; 3] { + Self::VARIANTS + } + + #[must_use] + #[allow(clippy::too_many_lines)] + pub const fn variant(&self) -> TypeVariant { + match self { + Self::MessageType(_) => TypeVariant::MessageType, +Self::Color(_) => TypeVariant::Color, +Self::Color2(_) => TypeVariant::Color2, + } + } + } + + impl Name for Type { + #[must_use] + fn name(&self) -> &'static str { + Self::name(self) + } + } + + impl Variants for Type { + fn variants() -> slice::Iter<'static, TypeVariant> { + Self::VARIANTS.iter() + } + } + + impl WriteXdr for Type { + #[cfg(feature = "std")] + #[allow(clippy::too_many_lines)] + fn write_xdr(&self, w: &mut Limited) -> Result<()> { + match self { + Self::MessageType(v) => v.write_xdr(w), +Self::Color(v) => v.write_xdr(w), +Self::Color2(v) => v.write_xdr(w), + } + } + } diff --git a/spec/output/generator_spec_rust_custom_jsonschema_impls/nesting.x/MyXDR.rs b/spec/output/generator_spec_rust_custom_jsonschema_impls/nesting.x/MyXDR.rs new file mode 100644 index 000000000..a4e145120 --- /dev/null +++ b/spec/output/generator_spec_rust_custom_jsonschema_impls/nesting.x/MyXDR.rs @@ -0,0 +1,3387 @@ +// Module is generated from: +// spec/fixtures/generator/nesting.x + +#![allow(clippy::missing_errors_doc, clippy::unreadable_literal)] + +/// `XDR_FILES_SHA256` is a list of pairs of source files and their SHA256 hashes. +pub const XDR_FILES_SHA256: [(&str, &str); 1] = [ + ("spec/fixtures/generator/nesting.x", "5537949272c11f1bd09cf613a3751668b5018d686a1c2aaa3baa91183ca18f6a") +]; + +use core::{array::TryFromSliceError, fmt, fmt::Debug, marker::Sized, ops::Deref, slice}; + +#[cfg(feature = "std")] +use core::marker::PhantomData; + +// When feature alloc is turned off use static lifetime Box and Vec types. +#[cfg(not(feature = "alloc"))] +mod noalloc { + pub mod boxed { + pub type Box = &'static T; + } + pub mod vec { + pub type Vec = &'static [T]; + } +} +#[cfg(not(feature = "alloc"))] +use noalloc::{boxed::Box, vec::Vec}; + +// When feature std is turned off, but feature alloc is turned on import the +// alloc crate and use its Box and Vec types. +#[cfg(all(not(feature = "std"), feature = "alloc"))] +extern crate alloc; +#[cfg(all(not(feature = "std"), feature = "alloc"))] +use alloc::{ + borrow::ToOwned, + boxed::Box, + string::{FromUtf8Error, String}, + vec::Vec, +}; +#[cfg(feature = "std")] +use std::string::FromUtf8Error; + +#[cfg(feature = "arbitrary")] +use arbitrary::Arbitrary; + +// TODO: Add support for read/write xdr fns when std not available. + +#[cfg(feature = "std")] +use std::{ + error, io, + io::{BufRead, BufReader, Cursor, Read, Write}, +}; + +/// Error contains all errors returned by functions in this crate. It can be +/// compared via `PartialEq`, however any contained IO errors will only be +/// compared on their `ErrorKind`. 
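// A usage sketch (not generated output) for the `Type`/`TypeVariant` helpers
// generated above for enum.x: select a type by name at runtime, decode raw XDR
// bytes into the boxed `Type` value, and inspect which variant was produced.
// Assumes the "std" feature and that the generated items are in scope via `super`.
#[cfg(all(test, feature = "std"))]
mod type_by_name_sketch {
    use super::{Limits, Type, TypeVariant};
    use core::str::FromStr;

    #[test]
    fn decode_message_type_by_name() {
        // MessageType::Hello is encoded as the 4-byte big-endian enum value 1.
        let bytes = 1i32.to_be_bytes();
        let variant = TypeVariant::from_str("MessageType").unwrap();
        let t = Type::from_xdr(variant, bytes, Limits::none()).unwrap();
        assert_eq!(t.name(), "MessageType");
        assert_eq!(t.variant(), TypeVariant::MessageType);
    }
}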
+#[derive(Debug)]
+pub enum Error {
+    Invalid,
+    Unsupported,
+    LengthExceedsMax,
+    LengthMismatch,
+    NonZeroPadding,
+    Utf8Error(core::str::Utf8Error),
+    #[cfg(feature = "alloc")]
+    InvalidHex,
+    #[cfg(feature = "std")]
+    Io(io::Error),
+    DepthLimitExceeded,
+    #[cfg(feature = "serde_json")]
+    Json(serde_json::Error),
+    LengthLimitExceeded,
+}
+
+impl PartialEq for Error {
+    fn eq(&self, other: &Self) -> bool {
+        match (self, other) {
+            (Self::Utf8Error(l), Self::Utf8Error(r)) => l == r,
+            // IO errors cannot be compared, but in the absence of any more
+            // meaningful way to compare the errors we compare the kind of error
+            // and ignore the embedded source error or OS error. The main use
+            // case for comparing errors outputted by the XDR library is for
+            // error case testing, and a lack of the ability to compare has a
+            // detrimental effect on failure testing, so this is a tradeoff.
+            #[cfg(feature = "std")]
+            (Self::Io(l), Self::Io(r)) => l.kind() == r.kind(),
+            _ => core::mem::discriminant(self) == core::mem::discriminant(other),
+        }
+    }
+}
+
+#[cfg(feature = "std")]
+impl error::Error for Error {
+    #[must_use]
+    fn source(&self) -> Option<&(dyn error::Error + 'static)> {
+        match self {
+            Self::Io(e) => Some(e),
+            #[cfg(feature = "serde_json")]
+            Self::Json(e) => Some(e),
+            _ => None,
+        }
+    }
+}
+
+impl fmt::Display for Error {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        match self {
+            Error::Invalid => write!(f, "xdr value invalid"),
+            Error::Unsupported => write!(f, "xdr value unsupported"),
+            Error::LengthExceedsMax => write!(f, "xdr value max length exceeded"),
+            Error::LengthMismatch => write!(f, "xdr value length does not match"),
+            Error::NonZeroPadding => write!(f, "xdr padding contains non-zero bytes"),
+            Error::Utf8Error(e) => write!(f, "{e}"),
+            #[cfg(feature = "alloc")]
+            Error::InvalidHex => write!(f, "hex invalid"),
+            #[cfg(feature = "std")]
+            Error::Io(e) => write!(f, "{e}"),
+            Error::DepthLimitExceeded => write!(f, "depth limit exceeded"),
+            #[cfg(feature = "serde_json")]
+            Error::Json(e) => write!(f, "{e}"),
+            Error::LengthLimitExceeded => write!(f, "length limit exceeded"),
+        }
+    }
+}
+
+impl From<TryFromSliceError> for Error {
+    fn from(_: TryFromSliceError) -> Error {
+        Error::LengthMismatch
+    }
+}
+
+impl From<core::str::Utf8Error> for Error {
+    #[must_use]
+    fn from(e: core::str::Utf8Error) -> Self {
+        Error::Utf8Error(e)
+    }
+}
+
+#[cfg(feature = "alloc")]
+impl From<FromUtf8Error> for Error {
+    #[must_use]
+    fn from(e: FromUtf8Error) -> Self {
+        Error::Utf8Error(e.utf8_error())
+    }
+}
+
+#[cfg(feature = "std")]
+impl From<io::Error> for Error {
+    #[must_use]
+    fn from(e: io::Error) -> Self {
+        Error::Io(e)
+    }
+}
+
+#[cfg(feature = "serde_json")]
+impl From<serde_json::Error> for Error {
+    #[must_use]
+    fn from(e: serde_json::Error) -> Self {
+        Error::Json(e)
+    }
+}
+
+impl From<Error> for () {
+    fn from(_: Error) {}
+}
+
+#[allow(dead_code)]
+type Result<T> = core::result::Result<T, Error>;
+
+/// Name defines types that assign a static name to their value, such as the
+/// name given to an identifier in an XDR enum, or the name given to the case in
+/// a union.
+pub trait Name {
+    fn name(&self) -> &'static str;
+}
+
+/// Discriminant defines types that may contain a one-of value determined
+/// according to the discriminant, and exposes the value of the discriminant for
+/// that type, such as in an XDR union.
+pub trait Discriminant<D> {
+    fn discriminant(&self) -> D;
+}
+
+/// Iter defines types that have variants that can be iterated.
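// A sketch (not generated output) of the `Error` equality semantics defined
// above: `Error::Io` values compare equal when their `std::io::ErrorKind`s
// match, even if the underlying messages differ, which keeps error-case
// assertions in tests simple. Assumes the "std" feature is enabled.
#[cfg(all(test, feature = "std"))]
mod error_eq_sketch {
    use super::Error;
    use std::io;

    #[test]
    fn io_errors_compare_by_kind() {
        let a = Error::Io(io::Error::new(io::ErrorKind::UnexpectedEof, "first"));
        let b = Error::Io(io::Error::new(io::ErrorKind::UnexpectedEof, "second"));
        let c = Error::Io(io::Error::new(io::ErrorKind::Other, "third"));
        assert_eq!(a, b); // same kind, different messages
        assert_ne!(a, c); // different kinds
        // Unit-like variants compare by discriminant.
        assert_eq!(Error::LengthLimitExceeded, Error::LengthLimitExceeded);
        assert_ne!(Error::Invalid, Error::Unsupported);
    }
}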
+pub trait Variants { + fn variants() -> slice::Iter<'static, V> + where + V: Sized; +} + +// Enum defines a type that is represented as an XDR enumeration when encoded. +pub trait Enum: Name + Variants + Sized {} + +// Union defines a type that is represented as an XDR union when encoded. +pub trait Union: Name + Discriminant + Variants +where + D: Sized, +{ +} + +/// `Limits` contains the limits that a limited reader or writer will be +/// constrained to. +#[cfg(feature = "std")] +#[derive(Debug, Clone, Hash, PartialEq, Eq, PartialOrd, Ord)] +pub struct Limits { + /// Defines the maximum depth for recursive calls in `Read/WriteXdr` to + /// prevent stack overflow. + /// + /// The depth limit is akin to limiting stack depth. Its purpose is to + /// prevent the program from hitting the maximum stack size allowed by Rust, + /// which would result in an unrecoverable `SIGABRT`. For more information + /// about Rust's stack size limit, refer to the [Rust + /// documentation](https://doc.rust-lang.org/std/thread/#stack-size). + pub depth: u32, + + /// Defines the maximum number of bytes that will be read or written. + pub len: usize, +} + +#[cfg(feature = "std")] +impl Limits { + #[must_use] + pub fn none() -> Self { + Self { + depth: u32::MAX, + len: usize::MAX, + } + } + + #[must_use] + pub fn depth(depth: u32) -> Self { + Limits { + depth, + ..Limits::none() + } + } + + #[must_use] + pub fn len(len: usize) -> Self { + Limits { + len, + ..Limits::none() + } + } +} + +/// `Limited` wraps an object and provides functions for enforcing limits. +/// +/// Intended for use with readers and writers and limiting their reads and +/// writes. +#[cfg(feature = "std")] +pub struct Limited { + pub inner: L, + pub(crate) limits: Limits, +} + +#[cfg(feature = "std")] +impl Limited { + /// Constructs a new `Limited`. + /// + /// - `inner`: The value being limited. + /// - `limits`: The limits to enforce. + pub fn new(inner: L, limits: Limits) -> Self { + Limited { inner, limits } + } + + /// Consume the given length from the internal remaining length limit. + /// + /// ### Errors + /// + /// If the length would consume more length than the remaining length limit + /// allows. + pub(crate) fn consume_len(&mut self, len: usize) -> Result<()> { + if let Some(len) = self.limits.len.checked_sub(len) { + self.limits.len = len; + Ok(()) + } else { + Err(Error::LengthLimitExceeded) + } + } + + /// Consumes a single depth for the duration of the given function. + /// + /// ### Errors + /// + /// If the depth limit is already exhausted. + pub(crate) fn with_limited_depth(&mut self, f: F) -> Result + where + F: FnOnce(&mut Self) -> Result, + { + if let Some(depth) = self.limits.depth.checked_sub(1) { + self.limits.depth = depth; + let res = f(self); + self.limits.depth = self.limits.depth.saturating_add(1); + res + } else { + Err(Error::DepthLimitExceeded) + } + } +} + +#[cfg(feature = "std")] +impl Read for Limited { + /// Forwards the read operation to the wrapped object. + fn read(&mut self, buf: &mut [u8]) -> std::io::Result { + self.inner.read(buf) + } +} + +#[cfg(feature = "std")] +impl BufRead for Limited { + /// Forwards the read operation to the wrapped object. + fn fill_buf(&mut self) -> std::io::Result<&[u8]> { + self.inner.fill_buf() + } + + /// Forwards the read operation to the wrapped object. + fn consume(&mut self, amt: usize) { + self.inner.consume(amt); + } +} + +#[cfg(feature = "std")] +impl Write for Limited { + /// Forwards the write operation to the wrapped object. 
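// A sketch (not generated output) of how `Limits` and `Limited` interact with
// the codec impls later in the module: each primitive read charges its byte
// count against `Limits::len` via `consume_len`, so an undersized limit fails
// with `Error::LengthLimitExceeded`. Assumes the "std" feature is enabled.
#[cfg(all(test, feature = "std"))]
mod limits_sketch {
    use super::{Error, Limited, Limits, ReadXdr};
    use std::io::Cursor;

    #[test]
    fn u32_read_respects_len_limit() {
        let bytes = 7u32.to_be_bytes(); // one XDR-encoded u32 is 4 bytes
        // A 4-byte budget is exactly enough.
        let mut ok = Limited::new(Cursor::new(&bytes[..]), Limits::len(4));
        assert_eq!(u32::read_xdr(&mut ok).unwrap(), 7);
        // A 3-byte budget is rejected before the value is decoded.
        let mut short = Limited::new(Cursor::new(&bytes[..]), Limits::len(3));
        assert_eq!(u32::read_xdr(&mut short), Err(Error::LengthLimitExceeded));
    }
}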
+ fn write(&mut self, buf: &[u8]) -> std::io::Result { + self.inner.write(buf) + } + + /// Forwards the flush operation to the wrapped object. + fn flush(&mut self) -> std::io::Result<()> { + self.inner.flush() + } +} + +#[cfg(feature = "std")] +pub struct ReadXdrIter { + reader: Limited>, + _s: PhantomData, +} + +#[cfg(feature = "std")] +impl ReadXdrIter { + fn new(r: R, limits: Limits) -> Self { + Self { + reader: Limited { + inner: BufReader::new(r), + limits, + }, + _s: PhantomData, + } + } +} + +#[cfg(feature = "std")] +impl Iterator for ReadXdrIter { + type Item = Result; + + // Next reads the internal reader and XDR decodes it into the Self type. If + // the EOF is reached without reading any new bytes `None` is returned. If + // EOF is reached after reading some bytes a truncated entry is assumed an + // an `Error::Io` containing an `UnexpectedEof`. If any other IO error + // occurs it is returned. Iteration of this iterator stops naturally when + // `None` is returned, but not when a `Some(Err(...))` is returned. The + // caller is responsible for checking each Result. + fn next(&mut self) -> Option { + // Try to fill the buffer to see if the EOF has been reached or not. + // This happens to effectively peek to see if the stream has finished + // and there are no more items. It is necessary to do this because the + // xdr types in this crate heavily use the `std::io::Read::read_exact` + // method that doesn't distinguish between an EOF at the beginning of a + // read and an EOF after a partial fill of a read_exact. + match self.reader.fill_buf() { + // If the reader has no more data and is unable to fill any new data + // into its internal buf, then the EOF has been reached. + Ok([]) => return None, + // If an error occurs filling the buffer, treat that as an error and stop. + Err(e) => return Some(Err(Error::Io(e))), + // If there is data in the buf available for reading, continue. + Ok([..]) => (), + }; + // Read the buf into the type. + let r = self.reader.with_limited_depth(|dlr| S::read_xdr(dlr)); + match r { + Ok(s) => Some(Ok(s)), + Err(e) => Some(Err(e)), + } + } +} + +pub trait ReadXdr +where + Self: Sized, +{ + /// Read the XDR and construct the type. + /// + /// Read bytes from the given read implementation, decoding the bytes as + /// XDR, and construct the type implementing this interface from those + /// bytes. + /// + /// Just enough bytes are read from the read implementation to construct the + /// type. Any residual bytes remain in the read implementation. + /// + /// All implementations should continue if the read implementation returns + /// [`ErrorKind::Interrupted`](std::io::ErrorKind::Interrupted). + /// + /// Use [`ReadXdR: Read_xdr_to_end`] when the intent is for all bytes in the + /// read implementation to be consumed by the read. + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result; + + /// Construct the type from the XDR bytes base64 encoded. + /// + /// An error is returned if the bytes are not completely consumed by the + /// deserialization. + #[cfg(feature = "base64")] + fn read_xdr_base64(r: &mut Limited) -> Result { + let mut dec = Limited::new( + base64::read::DecoderReader::new(&mut r.inner, base64::STANDARD), + r.limits.clone(), + ); + let t = Self::read_xdr(&mut dec)?; + Ok(t) + } + + /// Read the XDR and construct the type, and consider it an error if the + /// read does not completely consume the read implementation. 
+ /// + /// Read bytes from the given read implementation, decoding the bytes as + /// XDR, and construct the type implementing this interface from those + /// bytes. + /// + /// Just enough bytes are read from the read implementation to construct the + /// type, and then confirm that no further bytes remain. To confirm no + /// further bytes remain additional bytes are attempted to be read from the + /// read implementation. If it is possible to read any residual bytes from + /// the read implementation an error is returned. The read implementation + /// may not be exhaustively read if there are residual bytes, and it is + /// considered undefined how many residual bytes or how much of the residual + /// buffer are consumed in this case. + /// + /// All implementations should continue if the read implementation returns + /// [`ErrorKind::Interrupted`](std::io::ErrorKind::Interrupted). + #[cfg(feature = "std")] + fn read_xdr_to_end(r: &mut Limited) -> Result { + let s = Self::read_xdr(r)?; + // Check that any further reads, such as this read of one byte, read no + // data, indicating EOF. If a byte is read the data is invalid. + if r.read(&mut [0u8; 1])? == 0 { + Ok(s) + } else { + Err(Error::Invalid) + } + } + + /// Construct the type from the XDR bytes base64 encoded. + /// + /// An error is returned if the bytes are not completely consumed by the + /// deserialization. + #[cfg(feature = "base64")] + fn read_xdr_base64_to_end(r: &mut Limited) -> Result { + let mut dec = Limited::new( + base64::read::DecoderReader::new(&mut r.inner, base64::STANDARD), + r.limits.clone(), + ); + let t = Self::read_xdr_to_end(&mut dec)?; + Ok(t) + } + + /// Read the XDR and construct the type. + /// + /// Read bytes from the given read implementation, decoding the bytes as + /// XDR, and construct the type implementing this interface from those + /// bytes. + /// + /// Just enough bytes are read from the read implementation to construct the + /// type. Any residual bytes remain in the read implementation. + /// + /// All implementations should continue if the read implementation returns + /// [`ErrorKind::Interrupted`](std::io::ErrorKind::Interrupted). + /// + /// Use [`ReadXdR: Read_xdr_into_to_end`] when the intent is for all bytes + /// in the read implementation to be consumed by the read. + #[cfg(feature = "std")] + fn read_xdr_into(&mut self, r: &mut Limited) -> Result<()> { + *self = Self::read_xdr(r)?; + Ok(()) + } + + /// Read the XDR into the existing value, and consider it an error if the + /// read does not completely consume the read implementation. + /// + /// Read bytes from the given read implementation, decoding the bytes as + /// XDR, and construct the type implementing this interface from those + /// bytes. + /// + /// Just enough bytes are read from the read implementation to construct the + /// type, and then confirm that no further bytes remain. To confirm no + /// further bytes remain additional bytes are attempted to be read from the + /// read implementation. If it is possible to read any residual bytes from + /// the read implementation an error is returned. The read implementation + /// may not be exhaustively read if there are residual bytes, and it is + /// considered undefined how many residual bytes or how much of the residual + /// buffer are consumed in this case. + /// + /// All implementations should continue if the read implementation returns + /// [`ErrorKind::Interrupted`](std::io::ErrorKind::Interrupted). 
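// A sketch (not generated output) contrasting `read_xdr` and `read_xdr_to_end`
// as documented above: the former leaves residual bytes in the reader, while
// the latter treats any leftover bytes as `Error::Invalid`. Uses the `u32`
// impl defined later in the module; assumes the "std" feature is enabled.
#[cfg(all(test, feature = "std"))]
mod read_to_end_sketch {
    use super::{Error, Limited, Limits, ReadXdr};
    use std::io::Cursor;

    #[test]
    fn trailing_bytes_are_rejected_only_by_read_to_end() {
        // 4 bytes encoding the u32 value 7, followed by 4 extra bytes.
        let bytes = [0u8, 0, 0, 7, 0, 0, 0, 9];
        let mut r = Limited::new(Cursor::new(&bytes[..]), Limits::none());
        // Plain read_xdr stops after the first value and succeeds.
        assert_eq!(u32::read_xdr(&mut r).unwrap(), 7);
        // read_xdr_to_end sees the leftover bytes and errors.
        let mut r = Limited::new(Cursor::new(&bytes[..]), Limits::none());
        assert_eq!(u32::read_xdr_to_end(&mut r), Err(Error::Invalid));
    }
}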
+ #[cfg(feature = "std")] + fn read_xdr_into_to_end(&mut self, r: &mut Limited) -> Result<()> { + Self::read_xdr_into(self, r)?; + // Check that any further reads, such as this read of one byte, read no + // data, indicating EOF. If a byte is read the data is invalid. + if r.read(&mut [0u8; 1])? == 0 { + Ok(()) + } else { + Err(Error::Invalid) + } + } + + /// Create an iterator that reads the read implementation as a stream of + /// values that are read into the implementing type. + /// + /// Read bytes from the given read implementation, decoding the bytes as + /// XDR, and construct the type implementing this interface from those + /// bytes. + /// + /// Just enough bytes are read from the read implementation to construct the + /// type, and then confirm that no further bytes remain. To confirm no + /// further bytes remain additional bytes are attempted to be read from the + /// read implementation. If it is possible to read any residual bytes from + /// the read implementation an error is returned. The read implementation + /// may not be exhaustively read if there are residual bytes, and it is + /// considered undefined how many residual bytes or how much of the residual + /// buffer are consumed in this case. + /// + /// All implementations should continue if the read implementation returns + /// [`ErrorKind::Interrupted`](std::io::ErrorKind::Interrupted). + #[cfg(feature = "std")] + fn read_xdr_iter(r: &mut Limited) -> ReadXdrIter<&mut R, Self> { + ReadXdrIter::new(&mut r.inner, r.limits.clone()) + } + + /// Create an iterator that reads the read implementation as a stream of + /// values that are read into the implementing type. + #[cfg(feature = "base64")] + fn read_xdr_base64_iter( + r: &mut Limited, + ) -> ReadXdrIter, Self> { + let dec = base64::read::DecoderReader::new(&mut r.inner, base64::STANDARD); + ReadXdrIter::new(dec, r.limits.clone()) + } + + /// Construct the type from the XDR bytes. + /// + /// An error is returned if the bytes are not completely consumed by the + /// deserialization. + #[cfg(feature = "std")] + fn from_xdr(bytes: impl AsRef<[u8]>, limits: Limits) -> Result { + let mut cursor = Limited::new(Cursor::new(bytes.as_ref()), limits); + let t = Self::read_xdr_to_end(&mut cursor)?; + Ok(t) + } + + /// Construct the type from the XDR bytes base64 encoded. + /// + /// An error is returned if the bytes are not completely consumed by the + /// deserialization. + #[cfg(feature = "base64")] + fn from_xdr_base64(b64: impl AsRef<[u8]>, limits: Limits) -> Result { + let mut b64_reader = Cursor::new(b64); + let mut dec = Limited::new( + base64::read::DecoderReader::new(&mut b64_reader, base64::STANDARD), + limits, + ); + let t = Self::read_xdr_to_end(&mut dec)?; + Ok(t) + } +} + +pub trait WriteXdr { + #[cfg(feature = "std")] + fn write_xdr(&self, w: &mut Limited) -> Result<()>; + + #[cfg(feature = "std")] + fn to_xdr(&self, limits: Limits) -> Result> { + let mut cursor = Limited::new(Cursor::new(vec![]), limits); + self.write_xdr(&mut cursor)?; + let bytes = cursor.inner.into_inner(); + Ok(bytes) + } + + #[cfg(feature = "base64")] + fn to_xdr_base64(&self, limits: Limits) -> Result { + let mut enc = Limited::new( + base64::write::EncoderStringWriter::new(base64::STANDARD), + limits, + ); + self.write_xdr(&mut enc)?; + let b64 = enc.inner.into_inner(); + Ok(b64) + } +} + +/// `Pad_len` returns the number of bytes to pad an XDR value of the given +/// length to make the final serialized size a multiple of 4. 
+#[cfg(feature = "std")] +fn pad_len(len: usize) -> usize { + (4 - (len % 4)) % 4 +} + +impl ReadXdr for i32 { + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result { + let mut b = [0u8; 4]; + r.with_limited_depth(|r| { + r.consume_len(b.len())?; + r.read_exact(&mut b)?; + Ok(i32::from_be_bytes(b)) + }) + } +} + +impl WriteXdr for i32 { + #[cfg(feature = "std")] + fn write_xdr(&self, w: &mut Limited) -> Result<()> { + let b: [u8; 4] = self.to_be_bytes(); + w.with_limited_depth(|w| { + w.consume_len(b.len())?; + Ok(w.write_all(&b)?) + }) + } +} + +impl ReadXdr for u32 { + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result { + let mut b = [0u8; 4]; + r.with_limited_depth(|r| { + r.consume_len(b.len())?; + r.read_exact(&mut b)?; + Ok(u32::from_be_bytes(b)) + }) + } +} + +impl WriteXdr for u32 { + #[cfg(feature = "std")] + fn write_xdr(&self, w: &mut Limited) -> Result<()> { + let b: [u8; 4] = self.to_be_bytes(); + w.with_limited_depth(|w| { + w.consume_len(b.len())?; + Ok(w.write_all(&b)?) + }) + } +} + +impl ReadXdr for i64 { + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result { + let mut b = [0u8; 8]; + r.with_limited_depth(|r| { + r.consume_len(b.len())?; + r.read_exact(&mut b)?; + Ok(i64::from_be_bytes(b)) + }) + } +} + +impl WriteXdr for i64 { + #[cfg(feature = "std")] + fn write_xdr(&self, w: &mut Limited) -> Result<()> { + let b: [u8; 8] = self.to_be_bytes(); + w.with_limited_depth(|w| { + w.consume_len(b.len())?; + Ok(w.write_all(&b)?) + }) + } +} + +impl ReadXdr for u64 { + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result { + let mut b = [0u8; 8]; + r.with_limited_depth(|r| { + r.consume_len(b.len())?; + r.read_exact(&mut b)?; + Ok(u64::from_be_bytes(b)) + }) + } +} + +impl WriteXdr for u64 { + #[cfg(feature = "std")] + fn write_xdr(&self, w: &mut Limited) -> Result<()> { + let b: [u8; 8] = self.to_be_bytes(); + w.with_limited_depth(|w| { + w.consume_len(b.len())?; + Ok(w.write_all(&b)?) 
+ }) + } +} + +impl ReadXdr for f32 { + #[cfg(feature = "std")] + fn read_xdr(_r: &mut Limited) -> Result { + todo!() + } +} + +impl WriteXdr for f32 { + #[cfg(feature = "std")] + fn write_xdr(&self, _w: &mut Limited) -> Result<()> { + todo!() + } +} + +impl ReadXdr for f64 { + #[cfg(feature = "std")] + fn read_xdr(_r: &mut Limited) -> Result { + todo!() + } +} + +impl WriteXdr for f64 { + #[cfg(feature = "std")] + fn write_xdr(&self, _w: &mut Limited) -> Result<()> { + todo!() + } +} + +impl ReadXdr for bool { + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result { + r.with_limited_depth(|r| { + let i = u32::read_xdr(r)?; + let b = i == 1; + Ok(b) + }) + } +} + +impl WriteXdr for bool { + #[cfg(feature = "std")] + fn write_xdr(&self, w: &mut Limited) -> Result<()> { + w.with_limited_depth(|w| { + let i = u32::from(*self); // true = 1, false = 0 + i.write_xdr(w) + }) + } +} + +impl ReadXdr for Option { + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result { + r.with_limited_depth(|r| { + let i = u32::read_xdr(r)?; + match i { + 0 => Ok(None), + 1 => { + let t = T::read_xdr(r)?; + Ok(Some(t)) + } + _ => Err(Error::Invalid), + } + }) + } +} + +impl WriteXdr for Option { + #[cfg(feature = "std")] + fn write_xdr(&self, w: &mut Limited) -> Result<()> { + w.with_limited_depth(|w| { + if let Some(t) = self { + 1u32.write_xdr(w)?; + t.write_xdr(w)?; + } else { + 0u32.write_xdr(w)?; + } + Ok(()) + }) + } +} + +impl ReadXdr for Box { + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result { + r.with_limited_depth(|r| Ok(Box::new(T::read_xdr(r)?))) + } +} + +impl WriteXdr for Box { + #[cfg(feature = "std")] + fn write_xdr(&self, w: &mut Limited) -> Result<()> { + w.with_limited_depth(|w| T::write_xdr(self, w)) + } +} + +impl ReadXdr for () { + #[cfg(feature = "std")] + fn read_xdr(_r: &mut Limited) -> Result { + Ok(()) + } +} + +impl WriteXdr for () { + #[cfg(feature = "std")] + fn write_xdr(&self, _w: &mut Limited) -> Result<()> { + Ok(()) + } +} + +impl ReadXdr for [u8; N] { + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result { + r.with_limited_depth(|r| { + r.consume_len(N)?; + let padding = pad_len(N); + r.consume_len(padding)?; + let mut arr = [0u8; N]; + r.read_exact(&mut arr)?; + let pad = &mut [0u8; 3][..padding]; + r.read_exact(pad)?; + if pad.iter().any(|b| *b != 0) { + return Err(Error::NonZeroPadding); + } + Ok(arr) + }) + } +} + +impl WriteXdr for [u8; N] { + #[cfg(feature = "std")] + fn write_xdr(&self, w: &mut Limited) -> Result<()> { + w.with_limited_depth(|w| { + w.consume_len(N)?; + let padding = pad_len(N); + w.consume_len(padding)?; + w.write_all(self)?; + w.write_all(&[0u8; 3][..padding])?; + Ok(()) + }) + } +} + +impl ReadXdr for [T; N] { + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result { + r.with_limited_depth(|r| { + let mut vec = Vec::with_capacity(N); + for _ in 0..N { + let t = T::read_xdr(r)?; + vec.push(t); + } + let arr: [T; N] = vec.try_into().unwrap_or_else(|_: Vec| unreachable!()); + Ok(arr) + }) + } +} + +impl WriteXdr for [T; N] { + #[cfg(feature = "std")] + fn write_xdr(&self, w: &mut Limited) -> Result<()> { + w.with_limited_depth(|w| { + for t in self { + t.write_xdr(w)?; + } + Ok(()) + }) + } +} + +// VecM ------------------------------------------------------------------------ + +#[cfg(feature = "alloc")] +#[derive(Debug, Clone, Hash, PartialEq, Eq, PartialOrd, Ord)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +#[cfg_attr(feature = "arbitrary", 
derive(Arbitrary))] +pub struct VecM(Vec); + +#[cfg(not(feature = "alloc"))] +#[derive(Debug, Clone, Hash, PartialEq, Eq, PartialOrd, Ord)] +#[cfg_attr(feature = "arbitrary", derive(Arbitrary))] +pub struct VecM(Vec) +where + T: 'static; + +impl Deref for VecM { + type Target = Vec; + + fn deref(&self) -> &Self::Target { + &self.0 + } +} + +impl Default for VecM { + fn default() -> Self { + Self(Vec::default()) + } +} + +#[cfg(feature = "schemars")] +impl schemars::JsonSchema for VecM { + fn schema_name() -> String { + format!("VecM<{}, {}>", T::schema_name(), MAX) + } + + fn is_referenceable() -> bool { + false + } + + fn json_schema(gen: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema { + let schema = Vec::::json_schema(gen); + if let schemars::schema::Schema::Object(mut schema) = schema { + if let Some(array) = schema.array.clone() { + schema.array = Some(Box::new(schemars::schema::ArrayValidation { + max_items: Some(MAX), + ..*array + })); + } + schema.into() + } else { + schema + } + } +} + +impl VecM { + pub const MAX_LEN: usize = { MAX as usize }; + + #[must_use] + #[allow(clippy::unused_self)] + pub fn max_len(&self) -> usize { + Self::MAX_LEN + } + + #[must_use] + pub fn as_vec(&self) -> &Vec { + self.as_ref() + } +} + +impl VecM { + #[must_use] + #[cfg(feature = "alloc")] + pub fn to_vec(&self) -> Vec { + self.into() + } + + #[must_use] + pub fn into_vec(self) -> Vec { + self.into() + } +} + +impl VecM { + #[cfg(feature = "alloc")] + pub fn to_string(&self) -> Result { + self.try_into() + } + + #[cfg(feature = "alloc")] + pub fn into_string(self) -> Result { + self.try_into() + } + + #[cfg(feature = "alloc")] + #[must_use] + pub fn to_string_lossy(&self) -> String { + String::from_utf8_lossy(&self.0).into_owned() + } + + #[cfg(feature = "alloc")] + #[must_use] + pub fn into_string_lossy(self) -> String { + String::from_utf8_lossy(&self.0).into_owned() + } +} + +impl VecM { + #[must_use] + pub fn to_option(&self) -> Option { + if self.len() > 0 { + Some(self.0[0].clone()) + } else { + None + } + } +} + +#[cfg(not(feature = "alloc"))] +impl From> for Option { + #[must_use] + fn from(v: VecM) -> Self { + v.to_option() + } +} + +#[cfg(feature = "alloc")] +impl VecM { + #[must_use] + pub fn into_option(mut self) -> Option { + self.0.drain(..).next() + } +} + +#[cfg(feature = "alloc")] +impl From> for Option { + #[must_use] + fn from(v: VecM) -> Self { + v.into_option() + } +} + +impl TryFrom> for VecM { + type Error = Error; + + fn try_from(v: Vec) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(VecM(v)) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +impl From> for Vec { + #[must_use] + fn from(v: VecM) -> Self { + v.0 + } +} + +#[cfg(feature = "alloc")] +impl From<&VecM> for Vec { + #[must_use] + fn from(v: &VecM) -> Self { + v.0.clone() + } +} + +impl AsRef> for VecM { + #[must_use] + fn as_ref(&self) -> &Vec { + &self.0 + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&Vec> for VecM { + type Error = Error; + + fn try_from(v: &Vec) -> Result { + v.as_slice().try_into() + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&[T]> for VecM { + type Error = Error; + + fn try_from(v: &[T]) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(VecM(v.to_vec())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +impl AsRef<[T]> for VecM { + #[cfg(feature = "alloc")] + #[must_use] + fn as_ref(&self) -> &[T] { + self.0.as_ref() + } + 
#[cfg(not(feature = "alloc"))] + #[must_use] + fn as_ref(&self) -> &[T] { + self.0 + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<[T; N]> for VecM { + type Error = Error; + + fn try_from(v: [T; N]) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(VecM(v.to_vec())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +#[cfg(feature = "alloc")] +impl TryFrom> for [T; N] { + type Error = VecM; + + fn try_from(v: VecM) -> core::result::Result { + let s: [T; N] = v.0.try_into().map_err(|v: Vec| VecM::(v))?; + Ok(s) + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&[T; N]> for VecM { + type Error = Error; + + fn try_from(v: &[T; N]) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(VecM(v.to_vec())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +#[cfg(not(feature = "alloc"))] +impl TryFrom<&'static [T; N]> for VecM { + type Error = Error; + + fn try_from(v: &'static [T; N]) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(VecM(v)) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&String> for VecM { + type Error = Error; + + fn try_from(v: &String) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(VecM(v.as_bytes().to_vec())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +#[cfg(feature = "alloc")] +impl TryFrom for VecM { + type Error = Error; + + fn try_from(v: String) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(VecM(v.into())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +#[cfg(feature = "alloc")] +impl TryFrom> for String { + type Error = Error; + + fn try_from(v: VecM) -> Result { + Ok(String::from_utf8(v.0)?) + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&VecM> for String { + type Error = Error; + + fn try_from(v: &VecM) -> Result { + Ok(core::str::from_utf8(v.as_ref())?.to_owned()) + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&str> for VecM { + type Error = Error; + + fn try_from(v: &str) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(VecM(v.into())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +#[cfg(not(feature = "alloc"))] +impl TryFrom<&'static str> for VecM { + type Error = Error; + + fn try_from(v: &'static str) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(VecM(v.as_bytes())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +impl<'a, const MAX: u32> TryFrom<&'a VecM> for &'a str { + type Error = Error; + + fn try_from(v: &'a VecM) -> Result { + Ok(core::str::from_utf8(v.as_ref())?) 
+ } +} + +impl ReadXdr for VecM { + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result { + r.with_limited_depth(|r| { + let len: u32 = u32::read_xdr(r)?; + if len > MAX { + return Err(Error::LengthExceedsMax); + } + + r.consume_len(len as usize)?; + let padding = pad_len(len as usize); + r.consume_len(padding)?; + + let mut vec = vec![0u8; len as usize]; + r.read_exact(&mut vec)?; + + let pad = &mut [0u8; 3][..padding]; + r.read_exact(pad)?; + if pad.iter().any(|b| *b != 0) { + return Err(Error::NonZeroPadding); + } + + Ok(VecM(vec)) + }) + } +} + +impl WriteXdr for VecM { + #[cfg(feature = "std")] + fn write_xdr(&self, w: &mut Limited) -> Result<()> { + w.with_limited_depth(|w| { + let len: u32 = self.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + len.write_xdr(w)?; + + w.consume_len(self.len())?; + let padding = pad_len(self.len()); + w.consume_len(padding)?; + + w.write_all(&self.0)?; + + w.write_all(&[0u8; 3][..padding])?; + + Ok(()) + }) + } +} + +impl ReadXdr for VecM { + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result { + r.with_limited_depth(|r| { + let len = u32::read_xdr(r)?; + if len > MAX { + return Err(Error::LengthExceedsMax); + } + + let mut vec = Vec::new(); + for _ in 0..len { + let t = T::read_xdr(r)?; + vec.push(t); + } + + Ok(VecM(vec)) + }) + } +} + +impl WriteXdr for VecM { + #[cfg(feature = "std")] + fn write_xdr(&self, w: &mut Limited) -> Result<()> { + w.with_limited_depth(|w| { + let len: u32 = self.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + len.write_xdr(w)?; + + for t in &self.0 { + t.write_xdr(w)?; + } + + Ok(()) + }) + } +} + +// BytesM ------------------------------------------------------------------------ + +#[cfg(feature = "alloc")] +#[derive(Clone, Hash, PartialEq, Eq, PartialOrd, Ord)] +#[cfg_attr( + feature = "serde", + derive(serde_with::SerializeDisplay, serde_with::DeserializeFromStr) +)] +#[cfg_attr(feature = "arbitrary", derive(Arbitrary))] +pub struct BytesM(Vec); + +#[cfg(not(feature = "alloc"))] +#[derive(Clone, Hash, PartialEq, Eq, PartialOrd, Ord)] +#[cfg_attr(feature = "arbitrary", derive(Arbitrary))] +pub struct BytesM(Vec); + +impl core::fmt::Display for BytesM { + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + #[cfg(feature = "alloc")] + let v = &self.0; + #[cfg(not(feature = "alloc"))] + let v = self.0; + for b in v { + write!(f, "{b:02x}")?; + } + Ok(()) + } +} + +impl core::fmt::Debug for BytesM { + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + #[cfg(feature = "alloc")] + let v = &self.0; + #[cfg(not(feature = "alloc"))] + let v = self.0; + write!(f, "BytesM(")?; + for b in v { + write!(f, "{b:02x}")?; + } + write!(f, ")")?; + Ok(()) + } +} + +#[cfg(feature = "alloc")] +impl core::str::FromStr for BytesM { + type Err = Error; + fn from_str(s: &str) -> core::result::Result { + hex::decode(s).map_err(|_| Error::InvalidHex)?.try_into() + } +} + +impl Deref for BytesM { + type Target = Vec; + + fn deref(&self) -> &Self::Target { + &self.0 + } +} + +#[cfg(feature = "schemars")] +impl schemars::JsonSchema for BytesM { + fn schema_name() -> String { + format!("BytesM<{MAX}>") + } + + fn is_referenceable() -> bool { + false + } + + fn json_schema(gen: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema { + let schema = String::json_schema(gen); + if let schemars::schema::Schema::Object(mut schema) = schema { + schema.extensions.insert( + "contentEncoding".to_owned(), + serde_json::Value::String("hex".to_string()), + ); + 
schema.extensions.insert( + "contentMediaType".to_owned(), + serde_json::Value::String("application/binary".to_string()), + ); + let string = *schema.string.unwrap_or_default().clone(); + schema.string = Some(Box::new(schemars::schema::StringValidation { + max_length: MAX.checked_mul(2).map(Some).unwrap_or_default(), + min_length: None, + ..string + })); + schema.into() + } else { + schema + } + } +} + +impl Default for BytesM { + fn default() -> Self { + Self(Vec::default()) + } +} + +impl BytesM { + pub const MAX_LEN: usize = { MAX as usize }; + + #[must_use] + #[allow(clippy::unused_self)] + pub fn max_len(&self) -> usize { + Self::MAX_LEN + } + + #[must_use] + pub fn as_vec(&self) -> &Vec { + self.as_ref() + } +} + +impl BytesM { + #[must_use] + #[cfg(feature = "alloc")] + pub fn to_vec(&self) -> Vec { + self.into() + } + + #[must_use] + pub fn into_vec(self) -> Vec { + self.into() + } +} + +impl BytesM { + #[cfg(feature = "alloc")] + pub fn to_string(&self) -> Result { + self.try_into() + } + + #[cfg(feature = "alloc")] + pub fn into_string(self) -> Result { + self.try_into() + } + + #[cfg(feature = "alloc")] + #[must_use] + pub fn to_string_lossy(&self) -> String { + String::from_utf8_lossy(&self.0).into_owned() + } + + #[cfg(feature = "alloc")] + #[must_use] + pub fn into_string_lossy(self) -> String { + String::from_utf8_lossy(&self.0).into_owned() + } +} + +impl TryFrom> for BytesM { + type Error = Error; + + fn try_from(v: Vec) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(BytesM(v)) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +impl From> for Vec { + #[must_use] + fn from(v: BytesM) -> Self { + v.0 + } +} + +#[cfg(feature = "alloc")] +impl From<&BytesM> for Vec { + #[must_use] + fn from(v: &BytesM) -> Self { + v.0.clone() + } +} + +impl AsRef> for BytesM { + #[must_use] + fn as_ref(&self) -> &Vec { + &self.0 + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&Vec> for BytesM { + type Error = Error; + + fn try_from(v: &Vec) -> Result { + v.as_slice().try_into() + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&[u8]> for BytesM { + type Error = Error; + + fn try_from(v: &[u8]) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(BytesM(v.to_vec())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +impl AsRef<[u8]> for BytesM { + #[cfg(feature = "alloc")] + #[must_use] + fn as_ref(&self) -> &[u8] { + self.0.as_ref() + } + #[cfg(not(feature = "alloc"))] + #[must_use] + fn as_ref(&self) -> &[u8] { + self.0 + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<[u8; N]> for BytesM { + type Error = Error; + + fn try_from(v: [u8; N]) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(BytesM(v.to_vec())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +#[cfg(feature = "alloc")] +impl TryFrom> for [u8; N] { + type Error = BytesM; + + fn try_from(v: BytesM) -> core::result::Result { + let s: [u8; N] = v.0.try_into().map_err(BytesM::)?; + Ok(s) + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&[u8; N]> for BytesM { + type Error = Error; + + fn try_from(v: &[u8; N]) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(BytesM(v.to_vec())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +#[cfg(not(feature = "alloc"))] +impl TryFrom<&'static [u8; N]> for BytesM { + type Error = Error; + + fn try_from(v: &'static [u8; N]) -> Result { + 
let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(BytesM(v)) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&String> for BytesM { + type Error = Error; + + fn try_from(v: &String) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(BytesM(v.as_bytes().to_vec())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +#[cfg(feature = "alloc")] +impl TryFrom for BytesM { + type Error = Error; + + fn try_from(v: String) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(BytesM(v.into())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +#[cfg(feature = "alloc")] +impl TryFrom> for String { + type Error = Error; + + fn try_from(v: BytesM) -> Result { + Ok(String::from_utf8(v.0)?) + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&BytesM> for String { + type Error = Error; + + fn try_from(v: &BytesM) -> Result { + Ok(core::str::from_utf8(v.as_ref())?.to_owned()) + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&str> for BytesM { + type Error = Error; + + fn try_from(v: &str) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(BytesM(v.into())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +#[cfg(not(feature = "alloc"))] +impl TryFrom<&'static str> for BytesM { + type Error = Error; + + fn try_from(v: &'static str) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(BytesM(v.as_bytes())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +impl<'a, const MAX: u32> TryFrom<&'a BytesM> for &'a str { + type Error = Error; + + fn try_from(v: &'a BytesM) -> Result { + Ok(core::str::from_utf8(v.as_ref())?) + } +} + +impl ReadXdr for BytesM { + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result { + r.with_limited_depth(|r| { + let len: u32 = u32::read_xdr(r)?; + if len > MAX { + return Err(Error::LengthExceedsMax); + } + + r.consume_len(len as usize)?; + let padding = pad_len(len as usize); + r.consume_len(padding)?; + + let mut vec = vec![0u8; len as usize]; + r.read_exact(&mut vec)?; + + let pad = &mut [0u8; 3][..padding]; + r.read_exact(pad)?; + if pad.iter().any(|b| *b != 0) { + return Err(Error::NonZeroPadding); + } + + Ok(BytesM(vec)) + }) + } +} + +impl WriteXdr for BytesM { + #[cfg(feature = "std")] + fn write_xdr(&self, w: &mut Limited) -> Result<()> { + w.with_limited_depth(|w| { + let len: u32 = self.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + len.write_xdr(w)?; + + w.consume_len(self.len())?; + let padding = pad_len(self.len()); + w.consume_len(padding)?; + + w.write_all(&self.0)?; + + w.write_all(&[0u8; 3][..pad_len(len as usize)])?; + + Ok(()) + }) + } +} + +// StringM ------------------------------------------------------------------------ + +/// A string type that contains arbitrary bytes. +/// +/// Convertible, fallibly, to/from a Rust UTF-8 String using +/// [`TryFrom`]/[`TryInto`]/[`StringM::to_utf8_string`]. +/// +/// Convertible, lossyly, to a Rust UTF-8 String using +/// [`StringM::to_utf8_string_lossy`]. +/// +/// Convertible to/from escaped printable-ASCII using +/// [`Display`]/[`ToString`]/[`FromStr`]. 
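// A sketch (not generated output) of the `BytesM` hex round trip defined
// above: `Display` renders the bytes as lowercase hex and `FromStr` parses hex
// back into the bounded byte type. Assumes features "std" and "alloc".
#[cfg(all(test, feature = "std", feature = "alloc"))]
mod bytesm_hex_sketch {
    use super::{BytesM, Error};

    #[test]
    fn hex_display_and_parse() {
        let v = BytesM::<3>::try_from(vec![0xde_u8, 0xad, 0xbe]).unwrap();
        assert_eq!(format!("{v}"), "deadbe");
        let parsed: BytesM<3> = "deadbe".parse().unwrap();
        assert_eq!(parsed, v);
        // Values longer than the bound are rejected at construction.
        assert_eq!(
            BytesM::<3>::try_from(vec![1u8, 2, 3, 4]),
            Err(Error::LengthExceedsMax)
        );
    }
}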
+ +#[cfg(feature = "alloc")] +#[derive(Clone, Hash, PartialEq, Eq, PartialOrd, Ord)] +#[cfg_attr( + feature = "serde", + derive(serde_with::SerializeDisplay, serde_with::DeserializeFromStr) +)] +#[cfg_attr(feature = "arbitrary", derive(Arbitrary))] +pub struct StringM(Vec); + +#[cfg(not(feature = "alloc"))] +#[derive(Clone, Hash, PartialEq, Eq, PartialOrd, Ord)] +#[cfg_attr(feature = "arbitrary", derive(Arbitrary))] +pub struct StringM(Vec); + +impl core::fmt::Display for StringM { + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + #[cfg(feature = "alloc")] + let v = &self.0; + #[cfg(not(feature = "alloc"))] + let v = self.0; + for b in escape_bytes::Escape::new(v) { + write!(f, "{}", b as char)?; + } + Ok(()) + } +} + +impl core::fmt::Debug for StringM { + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + #[cfg(feature = "alloc")] + let v = &self.0; + #[cfg(not(feature = "alloc"))] + let v = self.0; + write!(f, "StringM(")?; + for b in escape_bytes::Escape::new(v) { + write!(f, "{}", b as char)?; + } + write!(f, ")")?; + Ok(()) + } +} + +#[cfg(feature = "alloc")] +impl core::str::FromStr for StringM { + type Err = Error; + fn from_str(s: &str) -> core::result::Result { + let b = escape_bytes::unescape(s.as_bytes()).map_err(|_| Error::Invalid)?; + Ok(Self(b)) + } +} + +impl Deref for StringM { + type Target = Vec; + + fn deref(&self) -> &Self::Target { + &self.0 + } +} + +impl Default for StringM { + fn default() -> Self { + Self(Vec::default()) + } +} + +#[cfg(feature = "schemars")] +impl schemars::JsonSchema for StringM { + fn schema_name() -> String { + format!("StringM<{MAX}>") + } + + fn json_schema(gen: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema { + let schema = String::json_schema(gen); + if let schemars::schema::Schema::Object(mut schema) = schema { + let string = *schema.string.unwrap_or_default().clone(); + schema.string = Some(Box::new(schemars::schema::StringValidation { + max_length: Some(MAX), + ..string + })); + schema.into() + } else { + schema + } + } +} + +impl StringM { + pub const MAX_LEN: usize = { MAX as usize }; + + #[must_use] + #[allow(clippy::unused_self)] + pub fn max_len(&self) -> usize { + Self::MAX_LEN + } + + #[must_use] + pub fn as_vec(&self) -> &Vec { + self.as_ref() + } +} + +impl StringM { + #[must_use] + #[cfg(feature = "alloc")] + pub fn to_vec(&self) -> Vec { + self.into() + } + + #[must_use] + pub fn into_vec(self) -> Vec { + self.into() + } +} + +impl StringM { + #[cfg(feature = "alloc")] + pub fn to_utf8_string(&self) -> Result { + self.try_into() + } + + #[cfg(feature = "alloc")] + pub fn into_utf8_string(self) -> Result { + self.try_into() + } + + #[cfg(feature = "alloc")] + #[must_use] + pub fn to_utf8_string_lossy(&self) -> String { + String::from_utf8_lossy(&self.0).into_owned() + } + + #[cfg(feature = "alloc")] + #[must_use] + pub fn into_utf8_string_lossy(self) -> String { + String::from_utf8_lossy(&self.0).into_owned() + } +} + +impl TryFrom> for StringM { + type Error = Error; + + fn try_from(v: Vec) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(StringM(v)) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +impl From> for Vec { + #[must_use] + fn from(v: StringM) -> Self { + v.0 + } +} + +#[cfg(feature = "alloc")] +impl From<&StringM> for Vec { + #[must_use] + fn from(v: &StringM) -> Self { + v.0.clone() + } +} + +impl AsRef> for StringM { + #[must_use] + fn as_ref(&self) -> &Vec { + &self.0 + } +} + 
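// A sketch (not generated output) of the `StringM` conversions above: the type
// stores raw bytes, converts fallibly to a UTF-8 `String`, and its `Display`
// form escapes to printable ASCII. Assumes features "std" and "alloc".
#[cfg(all(test, feature = "std", feature = "alloc"))]
mod stringm_sketch {
    use super::{Error, StringM};

    #[test]
    fn utf8_and_bounds() {
        let s = StringM::<16>::try_from(b"hello".to_vec()).unwrap();
        assert_eq!(s.to_utf8_string().unwrap(), "hello");
        assert_eq!(format!("{s}"), "hello"); // printable ASCII passes through unescaped
        // The length bound is enforced at construction.
        assert_eq!(
            StringM::<4>::try_from(b"hello".to_vec()),
            Err(Error::LengthExceedsMax)
        );
    }
}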
+#[cfg(feature = "alloc")] +impl TryFrom<&Vec> for StringM { + type Error = Error; + + fn try_from(v: &Vec) -> Result { + v.as_slice().try_into() + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&[u8]> for StringM { + type Error = Error; + + fn try_from(v: &[u8]) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(StringM(v.to_vec())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +impl AsRef<[u8]> for StringM { + #[cfg(feature = "alloc")] + #[must_use] + fn as_ref(&self) -> &[u8] { + self.0.as_ref() + } + #[cfg(not(feature = "alloc"))] + #[must_use] + fn as_ref(&self) -> &[u8] { + self.0 + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<[u8; N]> for StringM { + type Error = Error; + + fn try_from(v: [u8; N]) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(StringM(v.to_vec())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +#[cfg(feature = "alloc")] +impl TryFrom> for [u8; N] { + type Error = StringM; + + fn try_from(v: StringM) -> core::result::Result { + let s: [u8; N] = v.0.try_into().map_err(StringM::)?; + Ok(s) + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&[u8; N]> for StringM { + type Error = Error; + + fn try_from(v: &[u8; N]) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(StringM(v.to_vec())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +#[cfg(not(feature = "alloc"))] +impl TryFrom<&'static [u8; N]> for StringM { + type Error = Error; + + fn try_from(v: &'static [u8; N]) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(StringM(v)) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&String> for StringM { + type Error = Error; + + fn try_from(v: &String) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(StringM(v.as_bytes().to_vec())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +#[cfg(feature = "alloc")] +impl TryFrom for StringM { + type Error = Error; + + fn try_from(v: String) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(StringM(v.into())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +#[cfg(feature = "alloc")] +impl TryFrom> for String { + type Error = Error; + + fn try_from(v: StringM) -> Result { + Ok(String::from_utf8(v.0)?) + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&StringM> for String { + type Error = Error; + + fn try_from(v: &StringM) -> Result { + Ok(core::str::from_utf8(v.as_ref())?.to_owned()) + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&str> for StringM { + type Error = Error; + + fn try_from(v: &str) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(StringM(v.into())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +#[cfg(not(feature = "alloc"))] +impl TryFrom<&'static str> for StringM { + type Error = Error; + + fn try_from(v: &'static str) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(StringM(v.as_bytes())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +impl<'a, const MAX: u32> TryFrom<&'a StringM> for &'a str { + type Error = Error; + + fn try_from(v: &'a StringM) -> Result { + Ok(core::str::from_utf8(v.as_ref())?) 
+ } +} + +impl ReadXdr for StringM { + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result { + r.with_limited_depth(|r| { + let len: u32 = u32::read_xdr(r)?; + if len > MAX { + return Err(Error::LengthExceedsMax); + } + + r.consume_len(len as usize)?; + let padding = pad_len(len as usize); + r.consume_len(padding)?; + + let mut vec = vec![0u8; len as usize]; + r.read_exact(&mut vec)?; + + let pad = &mut [0u8; 3][..padding]; + r.read_exact(pad)?; + if pad.iter().any(|b| *b != 0) { + return Err(Error::NonZeroPadding); + } + + Ok(StringM(vec)) + }) + } +} + +impl WriteXdr for StringM { + #[cfg(feature = "std")] + fn write_xdr(&self, w: &mut Limited) -> Result<()> { + w.with_limited_depth(|w| { + let len: u32 = self.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + len.write_xdr(w)?; + + w.consume_len(self.len())?; + let padding = pad_len(self.len()); + w.consume_len(padding)?; + + w.write_all(&self.0)?; + + w.write_all(&[0u8; 3][..padding])?; + + Ok(()) + }) + } +} + +// Frame ------------------------------------------------------------------------ + +#[derive(Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord)] +#[cfg_attr( + all(feature = "serde", feature = "alloc"), + derive(serde::Serialize, serde::Deserialize), + serde(rename_all = "snake_case") +)] +pub struct Frame(pub T) +where + T: ReadXdr; + +#[cfg(feature = "schemars")] +impl schemars::JsonSchema for Frame { + fn schema_name() -> String { + format!("Frame<{}>", T::schema_name()) + } + + fn json_schema(gen: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema { + T::json_schema(gen) + } +} + +impl ReadXdr for Frame +where + T: ReadXdr, +{ + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result { + // Read the frame header value that contains 1 flag-bit and a 33-bit length. + // - The 1 flag bit is 0 when there are more frames for the same record. + // - The 31-bit length is the length of the bytes within the frame that + // follow the frame header. + let header = u32::read_xdr(r)?; + // TODO: Use the length and cap the length we'll read from `r`. + let last_record = header >> 31 == 1; + if last_record { + // Read the record in the frame. + Ok(Self(T::read_xdr(r)?)) + } else { + // TODO: Support reading those additional frames for the same + // record. 
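+            // (Illustrative) For example, a header of 0x0000_0010 has the
+            // flag bit clear (more frames follow, 16 bytes in this frame) and
+            // lands here; 0x8000_0010 marks the last frame and is decoded by
+            // the branch above.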
+ Err(Error::Unsupported) + } + } +} + +#[cfg(all(test, feature = "std"))] +mod tests { + use std::io::Cursor; + + use super::*; + + #[test] + pub fn vec_u8_read_without_padding() { + let buf = Cursor::new(vec![0, 0, 0, 4, 2, 2, 2, 2]); + let v = VecM::::read_xdr(&mut Limited::new(buf, Limits::none())).unwrap(); + assert_eq!(v.to_vec(), vec![2, 2, 2, 2]); + } + + #[test] + pub fn vec_u8_read_with_padding() { + let buf = Cursor::new(vec![0, 0, 0, 1, 2, 0, 0, 0]); + let v = VecM::::read_xdr(&mut Limited::new(buf, Limits::none())).unwrap(); + assert_eq!(v.to_vec(), vec![2]); + } + + #[test] + pub fn vec_u8_read_with_insufficient_padding() { + let buf = Cursor::new(vec![0, 0, 0, 1, 2, 0, 0]); + let res = VecM::::read_xdr(&mut Limited::new(buf, Limits::none())); + match res { + Err(Error::Io(_)) => (), + _ => panic!("expected IO error got {res:?}"), + } + } + + #[test] + pub fn vec_u8_read_with_non_zero_padding() { + let buf = Cursor::new(vec![0, 0, 0, 1, 2, 3, 0, 0]); + let res = VecM::::read_xdr(&mut Limited::new(buf, Limits::none())); + match res { + Err(Error::NonZeroPadding) => (), + _ => panic!("expected NonZeroPadding got {res:?}"), + } + } + + #[test] + pub fn vec_u8_write_without_padding() { + let mut buf = vec![]; + let v: VecM = vec![2, 2, 2, 2].try_into().unwrap(); + + v.write_xdr(&mut Limited::new(Cursor::new(&mut buf), Limits::none())) + .unwrap(); + assert_eq!(buf, vec![0, 0, 0, 4, 2, 2, 2, 2]); + } + + #[test] + pub fn vec_u8_write_with_padding() { + let mut buf = vec![]; + let v: VecM = vec![2].try_into().unwrap(); + v.write_xdr(&mut Limited::new(Cursor::new(&mut buf), Limits::none())) + .unwrap(); + assert_eq!(buf, vec![0, 0, 0, 1, 2, 0, 0, 0]); + } + + #[test] + pub fn arr_u8_read_without_padding() { + let buf = Cursor::new(vec![2, 2, 2, 2]); + let v = <[u8; 4]>::read_xdr(&mut Limited::new(buf, Limits::none())).unwrap(); + assert_eq!(v, [2, 2, 2, 2]); + } + + #[test] + pub fn arr_u8_read_with_padding() { + let buf = Cursor::new(vec![2, 0, 0, 0]); + let v = <[u8; 1]>::read_xdr(&mut Limited::new(buf, Limits::none())).unwrap(); + assert_eq!(v, [2]); + } + + #[test] + pub fn arr_u8_read_with_insufficient_padding() { + let buf = Cursor::new(vec![2, 0, 0]); + let res = <[u8; 1]>::read_xdr(&mut Limited::new(buf, Limits::none())); + match res { + Err(Error::Io(_)) => (), + _ => panic!("expected IO error got {res:?}"), + } + } + + #[test] + pub fn arr_u8_read_with_non_zero_padding() { + let buf = Cursor::new(vec![2, 3, 0, 0]); + let res = <[u8; 1]>::read_xdr(&mut Limited::new(buf, Limits::none())); + match res { + Err(Error::NonZeroPadding) => (), + _ => panic!("expected NonZeroPadding got {res:?}"), + } + } + + #[test] + pub fn arr_u8_write_without_padding() { + let mut buf = vec![]; + [2u8, 2, 2, 2] + .write_xdr(&mut Limited::new(Cursor::new(&mut buf), Limits::none())) + .unwrap(); + assert_eq!(buf, vec![2, 2, 2, 2]); + } + + #[test] + pub fn arr_u8_write_with_padding() { + let mut buf = vec![]; + [2u8] + .write_xdr(&mut Limited::new(Cursor::new(&mut buf), Limits::none())) + .unwrap(); + assert_eq!(buf, vec![2, 0, 0, 0]); + } +} + +#[cfg(all(test, feature = "std"))] +mod test { + use super::*; + + #[test] + fn into_option_none() { + let v: VecM = vec![].try_into().unwrap(); + assert_eq!(v.into_option(), None); + } + + #[test] + fn into_option_some() { + let v: VecM<_, 1> = vec![1].try_into().unwrap(); + assert_eq!(v.into_option(), Some(1)); + } + + #[test] + fn to_option_none() { + let v: VecM = vec![].try_into().unwrap(); + assert_eq!(v.to_option(), None); + } + + #[test] + fn 
to_option_some() { + let v: VecM<_, 1> = vec![1].try_into().unwrap(); + assert_eq!(v.to_option(), Some(1)); + } + + #[test] + fn depth_limited_read_write_under_the_limit_success() { + let a: Option>> = Some(Some(Some(5))); + let mut buf = Limited::new(Vec::new(), Limits::depth(4)); + a.write_xdr(&mut buf).unwrap(); + + let mut dlr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::depth(4)); + let a_back: Option>> = ReadXdr::read_xdr(&mut dlr).unwrap(); + assert_eq!(a, a_back); + } + + #[test] + fn write_over_depth_limit_fail() { + let a: Option>> = Some(Some(Some(5))); + let mut buf = Limited::new(Vec::new(), Limits::depth(3)); + let res = a.write_xdr(&mut buf); + match res { + Err(Error::DepthLimitExceeded) => (), + _ => panic!("expected DepthLimitExceeded got {res:?}"), + } + } + + #[test] + fn read_over_depth_limit_fail() { + let read_limits = Limits::depth(3); + let write_limits = Limits::depth(5); + let a: Option>> = Some(Some(Some(5))); + let mut buf = Limited::new(Vec::new(), write_limits); + a.write_xdr(&mut buf).unwrap(); + + let mut dlr = Limited::new(Cursor::new(buf.inner.as_slice()), read_limits); + let res: Result>>> = ReadXdr::read_xdr(&mut dlr); + match res { + Err(Error::DepthLimitExceeded) => (), + _ => panic!("expected DepthLimitExceeded got {res:?}"), + } + } + + #[test] + fn length_limited_read_write_i32() { + // Exact limit, success + let v = 123i32; + let mut buf = Limited::new(Vec::new(), Limits::len(4)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(4)); + let v_back: i32 = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 0); + assert_eq!(v, v_back); + + // Over limit, success + let v = 123i32; + let mut buf = Limited::new(Vec::new(), Limits::len(5)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 1); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(5)); + let v_back: i32 = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 1); + assert_eq!(v, v_back); + + // Write under limit, failure + let v = 123i32; + let mut buf = Limited::new(Vec::new(), Limits::len(3)); + assert_eq!(v.write_xdr(&mut buf), Err(Error::LengthLimitExceeded)); + + // Read under limit, failure + let v = 123i32; + let mut buf = Limited::new(Vec::new(), Limits::len(4)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(3)); + assert_eq!( + ::read_xdr(&mut lr), + Err(Error::LengthLimitExceeded) + ); + } + + #[test] + fn length_limited_read_write_u32() { + // Exact limit, success + let v = 123u32; + let mut buf = Limited::new(Vec::new(), Limits::len(4)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(4)); + let v_back: u32 = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 0); + assert_eq!(v, v_back); + + // Over limit, success + let v = 123u32; + let mut buf = Limited::new(Vec::new(), Limits::len(5)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 1); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(5)); + let v_back: u32 = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 1); + assert_eq!(v, v_back); + + // Write under limit, failure + let v = 123u32; + let mut buf = Limited::new(Vec::new(), Limits::len(3)); + assert_eq!(v.write_xdr(&mut buf), 
Err(Error::LengthLimitExceeded)); + + // Read under limit, failure + let v = 123u32; + let mut buf = Limited::new(Vec::new(), Limits::len(4)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(3)); + assert_eq!( + ::read_xdr(&mut lr), + Err(Error::LengthLimitExceeded) + ); + } + + #[test] + fn length_limited_read_write_i64() { + // Exact limit, success + let v = 123i64; + let mut buf = Limited::new(Vec::new(), Limits::len(8)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(8)); + let v_back: i64 = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 0); + assert_eq!(v, v_back); + + // Over limit, success + let v = 123i64; + let mut buf = Limited::new(Vec::new(), Limits::len(9)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 1); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(9)); + let v_back: i64 = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 1); + assert_eq!(v, v_back); + + // Write under limit, failure + let v = 123i64; + let mut buf = Limited::new(Vec::new(), Limits::len(7)); + assert_eq!(v.write_xdr(&mut buf), Err(Error::LengthLimitExceeded)); + + // Read under limit, failure + let v = 123i64; + let mut buf = Limited::new(Vec::new(), Limits::len(8)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(7)); + assert_eq!( + ::read_xdr(&mut lr), + Err(Error::LengthLimitExceeded) + ); + } + + #[test] + fn length_limited_read_write_u64() { + // Exact limit, success + let v = 123u64; + let mut buf = Limited::new(Vec::new(), Limits::len(8)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(8)); + let v_back: u64 = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 0); + assert_eq!(v, v_back); + + // Over limit, success + let v = 123u64; + let mut buf = Limited::new(Vec::new(), Limits::len(9)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 1); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(9)); + let v_back: u64 = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 1); + assert_eq!(v, v_back); + + // Write under limit, failure + let v = 123u64; + let mut buf = Limited::new(Vec::new(), Limits::len(7)); + assert_eq!(v.write_xdr(&mut buf), Err(Error::LengthLimitExceeded)); + + // Read under limit, failure + let v = 123u64; + let mut buf = Limited::new(Vec::new(), Limits::len(8)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(7)); + assert_eq!( + ::read_xdr(&mut lr), + Err(Error::LengthLimitExceeded) + ); + } + + #[test] + fn length_limited_read_write_bool() { + // Exact limit, success + let v = true; + let mut buf = Limited::new(Vec::new(), Limits::len(4)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(4)); + let v_back: bool = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 0); + assert_eq!(v, v_back); + + // Over limit, success + let v = true; + let mut buf = Limited::new(Vec::new(), Limits::len(5)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 1); + let mut 
lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(5)); + let v_back: bool = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 1); + assert_eq!(v, v_back); + + // Write under limit, failure + let v = true; + let mut buf = Limited::new(Vec::new(), Limits::len(3)); + assert_eq!(v.write_xdr(&mut buf), Err(Error::LengthLimitExceeded)); + + // Read under limit, failure + let v = true; + let mut buf = Limited::new(Vec::new(), Limits::len(4)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(3)); + assert_eq!( + ::read_xdr(&mut lr), + Err(Error::LengthLimitExceeded) + ); + } + + #[test] + fn length_limited_read_write_option() { + // Exact limit, success + let v = Some(true); + let mut buf = Limited::new(Vec::new(), Limits::len(8)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(8)); + let v_back: Option = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 0); + assert_eq!(v, v_back); + + // Over limit, success + let v = Some(true); + let mut buf = Limited::new(Vec::new(), Limits::len(9)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 1); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(9)); + let v_back: Option = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 1); + assert_eq!(v, v_back); + + // Write under limit, failure + let v = Some(true); + let mut buf = Limited::new(Vec::new(), Limits::len(7)); + assert_eq!(v.write_xdr(&mut buf), Err(Error::LengthLimitExceeded)); + + // Read under limit, failure + let v = Some(true); + let mut buf = Limited::new(Vec::new(), Limits::len(8)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(7)); + assert_eq!( + as ReadXdr>::read_xdr(&mut lr), + Err(Error::LengthLimitExceeded) + ); + } + + #[test] + fn length_limited_read_write_array_u8() { + // Exact limit, success + let v = [1u8, 2, 3]; + let mut buf = Limited::new(Vec::new(), Limits::len(4)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(4)); + let v_back: [u8; 3] = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 0); + assert_eq!(v, v_back); + + // Over limit, success + let v = [1u8, 2, 3]; + let mut buf = Limited::new(Vec::new(), Limits::len(5)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 1); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(5)); + let v_back: [u8; 3] = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 1); + assert_eq!(v, v_back); + + // Write under limit, failure + let v = [1u8, 2, 3]; + let mut buf = Limited::new(Vec::new(), Limits::len(3)); + assert_eq!(v.write_xdr(&mut buf), Err(Error::LengthLimitExceeded)); + + // Read under limit, failure + let v = [1u8, 2, 3]; + let mut buf = Limited::new(Vec::new(), Limits::len(4)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(3)); + assert_eq!( + <[u8; 3] as ReadXdr>::read_xdr(&mut lr), + Err(Error::LengthLimitExceeded) + ); + } + + #[test] + fn length_limited_read_write_array_type() { + // Exact limit, success + let v = [true, false, true]; + let mut buf = Limited::new(Vec::new(), Limits::len(12)); + 
v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(12)); + let v_back: [bool; 3] = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 0); + assert_eq!(v, v_back); + + // Over limit, success + let v = [true, false, true]; + let mut buf = Limited::new(Vec::new(), Limits::len(13)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 1); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(13)); + let v_back: [bool; 3] = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 1); + assert_eq!(v, v_back); + + // Write under limit, failure + let v = [true, false, true]; + let mut buf = Limited::new(Vec::new(), Limits::len(11)); + assert_eq!(v.write_xdr(&mut buf), Err(Error::LengthLimitExceeded)); + + // Read under limit, failure + let v = [true, false, true]; + let mut buf = Limited::new(Vec::new(), Limits::len(12)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(11)); + assert_eq!( + <[bool; 3] as ReadXdr>::read_xdr(&mut lr), + Err(Error::LengthLimitExceeded) + ); + } + + #[test] + fn length_limited_read_write_vec() { + // Exact limit, success + let v = VecM::::try_from([1i32, 2, 3]).unwrap(); + let mut buf = Limited::new(Vec::new(), Limits::len(16)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(16)); + let v_back: VecM = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 0); + assert_eq!(v, v_back); + + // Over limit, success + let v = VecM::::try_from([1i32, 2, 3]).unwrap(); + let mut buf = Limited::new(Vec::new(), Limits::len(17)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 1); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(17)); + let v_back: VecM = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 1); + assert_eq!(v, v_back); + + // Write under limit, failure + let v = VecM::::try_from([1i32, 2, 3]).unwrap(); + let mut buf = Limited::new(Vec::new(), Limits::len(15)); + assert_eq!(v.write_xdr(&mut buf), Err(Error::LengthLimitExceeded)); + + // Read under limit, failure + let v = VecM::::try_from([1i32, 2, 3]).unwrap(); + let mut buf = Limited::new(Vec::new(), Limits::len(16)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(15)); + assert_eq!( + as ReadXdr>::read_xdr(&mut lr), + Err(Error::LengthLimitExceeded) + ); + } + + #[test] + fn length_limited_read_write_bytes() { + // Exact limit, success + let v = BytesM::<3>::try_from([1u8, 2, 3]).unwrap(); + let mut buf = Limited::new(Vec::new(), Limits::len(8)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(8)); + let v_back: BytesM<3> = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 0); + assert_eq!(v, v_back); + + // Over limit, success + let v = BytesM::<3>::try_from([1u8, 2, 3]).unwrap(); + let mut buf = Limited::new(Vec::new(), Limits::len(9)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 1); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(9)); + let v_back: BytesM<3> = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 1); + assert_eq!(v, v_back); + + // Write under 
limit, failure + let v = BytesM::<3>::try_from([1u8, 2, 3]).unwrap(); + let mut buf = Limited::new(Vec::new(), Limits::len(7)); + assert_eq!(v.write_xdr(&mut buf), Err(Error::LengthLimitExceeded)); + + // Read under limit, failure + let v = BytesM::<3>::try_from([1u8, 2, 3]).unwrap(); + let mut buf = Limited::new(Vec::new(), Limits::len(8)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(7)); + assert_eq!( + as ReadXdr>::read_xdr(&mut lr), + Err(Error::LengthLimitExceeded) + ); + } + + #[test] + fn length_limited_read_write_string() { + // Exact limit, success + let v = StringM::<3>::try_from("123").unwrap(); + let mut buf = Limited::new(Vec::new(), Limits::len(8)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(8)); + let v_back: StringM<3> = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 0); + assert_eq!(v, v_back); + + // Over limit, success + let v = StringM::<3>::try_from("123").unwrap(); + let mut buf = Limited::new(Vec::new(), Limits::len(9)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 1); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(9)); + let v_back: StringM<3> = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 1); + assert_eq!(v, v_back); + + // Write under limit, failure + let v = StringM::<3>::try_from("123").unwrap(); + let mut buf = Limited::new(Vec::new(), Limits::len(7)); + assert_eq!(v.write_xdr(&mut buf), Err(Error::LengthLimitExceeded)); + + // Read under limit, failure + let v = StringM::<3>::try_from("123").unwrap(); + let mut buf = Limited::new(Vec::new(), Limits::len(8)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(7)); + assert_eq!( + as ReadXdr>::read_xdr(&mut lr), + Err(Error::LengthLimitExceeded) + ); + } +} + +#[cfg(all(test, not(feature = "alloc")))] +mod test { + use super::VecM; + + #[test] + fn to_option_none() { + let v: VecM = (&[]).try_into().unwrap(); + assert_eq!(v.to_option(), None); + } + + #[test] + fn to_option_some() { + let v: VecM<_, 1> = (&[1]).try_into().unwrap(); + assert_eq!(v.to_option(), Some(1)); + } +} + +/// UnionKey is an XDR Enum defines as: +/// +/// ```text +/// enum UnionKey { +/// ONE = 1, +/// TWO = 2, +/// OFFER = 3 +/// }; +/// ``` +/// +// enum +#[derive(Clone, Copy, Debug, Hash, PartialEq, Eq, PartialOrd, Ord)] +#[cfg_attr(feature = "arbitrary", derive(Arbitrary))] +#[cfg_attr(all(feature = "serde", feature = "alloc"), derive(serde::Serialize, serde::Deserialize), serde(rename_all = "snake_case"))] +#[repr(i32)] +pub enum UnionKey { + One = 1, + Two = 2, + Offer = 3, +} + + impl UnionKey { + pub const VARIANTS: [UnionKey; 3] = [ UnionKey::One, +UnionKey::Two, +UnionKey::Offer, ]; + pub const VARIANTS_STR: [&'static str; 3] = [ "One", +"Two", +"Offer", ]; + + #[must_use] + pub const fn name(&self) -> &'static str { + match self { + Self::One => "One", +Self::Two => "Two", +Self::Offer => "Offer", + } + } + + #[must_use] + pub const fn variants() -> [UnionKey; 3] { + Self::VARIANTS + } + } + + impl Name for UnionKey { + #[must_use] + fn name(&self) -> &'static str { + Self::name(self) + } + } + + impl Variants for UnionKey { + fn variants() -> slice::Iter<'static, UnionKey> { + Self::VARIANTS.iter() + } + } + + impl Enum for UnionKey {} + + impl fmt::Display for 
UnionKey { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.write_str(self.name()) + } + } + + impl TryFrom for UnionKey { + type Error = Error; + + fn try_from(i: i32) -> Result { + let e = match i { + 1 => UnionKey::One, +2 => UnionKey::Two, +3 => UnionKey::Offer, + #[allow(unreachable_patterns)] + _ => return Err(Error::Invalid), + }; + Ok(e) + } + } + + impl From for i32 { + #[must_use] + fn from(e: UnionKey) -> Self { + e as Self + } + } + + impl ReadXdr for UnionKey { + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result { + r.with_limited_depth(|r| { + let e = i32::read_xdr(r)?; + let v: Self = e.try_into()?; + Ok(v) + }) + } + } + + impl WriteXdr for UnionKey { + #[cfg(feature = "std")] + fn write_xdr(&self, w: &mut Limited) -> Result<()> { + w.with_limited_depth(|w| { + let i: i32 = (*self).into(); + i.write_xdr(w) + }) + } + } + +/// Foo is an XDR Typedef defines as: +/// +/// ```text +/// typedef int Foo; +/// ``` +/// +pub type Foo = i32; + +/// MyUnionOne is an XDR NestedStruct defines as: +/// +/// ```text +/// struct { +/// int someInt; +/// } +/// ``` +/// +#[derive(Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord)] +#[cfg_attr(feature = "arbitrary", derive(Arbitrary))] +#[cfg_attr(all(feature = "serde", feature = "alloc"), derive(serde::Serialize, serde::Deserialize), serde(rename_all = "snake_case"))] +#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] +pub struct MyUnionOne { + pub some_int: i32, +} + +impl ReadXdr for MyUnionOne { + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result { + r.with_limited_depth(|r| { + Ok(Self{ + some_int: i32::read_xdr(r)?, + }) + }) + } +} + +impl WriteXdr for MyUnionOne { + #[cfg(feature = "std")] + fn write_xdr(&self, w: &mut Limited) -> Result<()> { + w.with_limited_depth(|w| { + self.some_int.write_xdr(w)?; + Ok(()) + }) + } +} + +/// MyUnionTwo is an XDR NestedStruct defines as: +/// +/// ```text +/// struct { +/// int someInt; +/// Foo foo; +/// } +/// ``` +/// +#[derive(Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord)] +#[cfg_attr(feature = "arbitrary", derive(Arbitrary))] +#[cfg_attr(all(feature = "serde", feature = "alloc"), derive(serde::Serialize, serde::Deserialize), serde(rename_all = "snake_case"))] +#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] +pub struct MyUnionTwo { + pub some_int: i32, + pub foo: i32, +} + + impl ReadXdr for MyUnionTwo { + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result { + r.with_limited_depth(|r| { + Ok(Self{ + some_int: i32::read_xdr(r)?, +foo: i32::read_xdr(r)?, + }) + }) + } + } + + impl WriteXdr for MyUnionTwo { + #[cfg(feature = "std")] + fn write_xdr(&self, w: &mut Limited) -> Result<()> { + w.with_limited_depth(|w| { + self.some_int.write_xdr(w)?; +self.foo.write_xdr(w)?; + Ok(()) + }) + } + } + +/// MyUnion is an XDR Union defines as: +/// +/// ```text +/// union MyUnion switch (UnionKey type) +/// { +/// case ONE: +/// struct { +/// int someInt; +/// } one; +/// +/// case TWO: +/// struct { +/// int someInt; +/// Foo foo; +/// } two; +/// +/// case OFFER: +/// void; +/// }; +/// ``` +/// +// union with discriminant UnionKey +#[derive(Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord)] +#[cfg_attr(feature = "arbitrary", derive(Arbitrary))] +#[cfg_attr(all(feature = "serde", feature = "alloc"), derive(serde::Serialize, serde::Deserialize), serde(rename_all = "snake_case"))] +#[allow(clippy::large_enum_variant)] +pub enum MyUnion { + One(MyUnionOne), + Two(MyUnionTwo), + Offer, +} + + impl MyUnion { 
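+    // On the wire a union is its 4-byte discriminant followed by the selected
+    // arm's body (see the ReadXdr/WriteXdr impls below). For example,
+    // `MyUnion::Offer` encodes as just `00 00 00 03`, and
+    // `MyUnion::One(MyUnionOne { some_int: 7 })` as `00 00 00 01 00 00 00 07`.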
+ pub const VARIANTS: [UnionKey; 3] = [ + UnionKey::One, +UnionKey::Two, +UnionKey::Offer, + ]; + pub const VARIANTS_STR: [&'static str; 3] = [ + "One", +"Two", +"Offer", + ]; + + #[must_use] + pub const fn name(&self) -> &'static str { + match self { + Self::One(_) => "One", +Self::Two(_) => "Two", +Self::Offer => "Offer", + } + } + + #[must_use] + pub const fn discriminant(&self) -> UnionKey { + #[allow(clippy::match_same_arms)] + match self { + Self::One(_) => UnionKey::One, +Self::Two(_) => UnionKey::Two, +Self::Offer => UnionKey::Offer, + } + } + + #[must_use] + pub const fn variants() -> [UnionKey; 3] { + Self::VARIANTS + } + } + + impl Name for MyUnion { + #[must_use] + fn name(&self) -> &'static str { + Self::name(self) + } + } + + impl Discriminant for MyUnion { + #[must_use] + fn discriminant(&self) -> UnionKey { + Self::discriminant(self) + } + } + + impl Variants for MyUnion { + fn variants() -> slice::Iter<'static, UnionKey> { + Self::VARIANTS.iter() + } + } + + impl Union for MyUnion {} + + impl ReadXdr for MyUnion { + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result { + r.with_limited_depth(|r| { + let dv: UnionKey = ::read_xdr(r)?; + #[allow(clippy::match_same_arms, clippy::match_wildcard_for_single_variants)] + let v = match dv { + UnionKey::One => Self::One(MyUnionOne::read_xdr(r)?), +UnionKey::Two => Self::Two(MyUnionTwo::read_xdr(r)?), +UnionKey::Offer => Self::Offer, + #[allow(unreachable_patterns)] + _ => return Err(Error::Invalid), + }; + Ok(v) + }) + } + } + + impl WriteXdr for MyUnion { + #[cfg(feature = "std")] + fn write_xdr(&self, w: &mut Limited) -> Result<()> { + w.with_limited_depth(|w| { + self.discriminant().write_xdr(w)?; + #[allow(clippy::match_same_arms)] + match self { + Self::One(v) => v.write_xdr(w)?, +Self::Two(v) => v.write_xdr(w)?, +Self::Offer => ().write_xdr(w)?, + }; + Ok(()) + }) + } + } + + #[derive(Clone, Copy, Debug, Hash, PartialEq, Eq, PartialOrd, Ord)] + #[cfg_attr( + all(feature = "serde", feature = "alloc"), + derive(serde::Serialize, serde::Deserialize), + serde(rename_all = "snake_case") + )] + #[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] + pub enum TypeVariant { + UnionKey, +Foo, +MyUnion, +MyUnionOne, +MyUnionTwo, + } + + impl TypeVariant { + pub const VARIANTS: [TypeVariant; 5] = [ TypeVariant::UnionKey, +TypeVariant::Foo, +TypeVariant::MyUnion, +TypeVariant::MyUnionOne, +TypeVariant::MyUnionTwo, ]; + pub const VARIANTS_STR: [&'static str; 5] = [ "UnionKey", +"Foo", +"MyUnion", +"MyUnionOne", +"MyUnionTwo", ]; + + #[must_use] + #[allow(clippy::too_many_lines)] + pub const fn name(&self) -> &'static str { + match self { + Self::UnionKey => "UnionKey", +Self::Foo => "Foo", +Self::MyUnion => "MyUnion", +Self::MyUnionOne => "MyUnionOne", +Self::MyUnionTwo => "MyUnionTwo", + } + } + + #[must_use] + #[allow(clippy::too_many_lines)] + pub const fn variants() -> [TypeVariant; 5] { + Self::VARIANTS + } + + #[cfg(feature = "schemars")] + #[must_use] + #[allow(clippy::too_many_lines)] + pub fn json_schema(&self, gen: schemars::gen::SchemaGenerator) -> schemars::schema::RootSchema { + match self { + Self::UnionKey => gen.into_root_schema_for::(), +Self::Foo => gen.into_root_schema_for::(), +Self::MyUnion => gen.into_root_schema_for::(), +Self::MyUnionOne => gen.into_root_schema_for::(), +Self::MyUnionTwo => gen.into_root_schema_for::(), + } + } + } + + impl Name for TypeVariant { + #[must_use] + fn name(&self) -> &'static str { + Self::name(self) + } + } + + impl Variants for TypeVariant { + fn variants() -> 
slice::Iter<'static, TypeVariant> { + Self::VARIANTS.iter() + } + } + + impl core::str::FromStr for TypeVariant { + type Err = Error; + #[allow(clippy::too_many_lines)] + fn from_str(s: &str) -> Result { + match s { + "UnionKey" => Ok(Self::UnionKey), +"Foo" => Ok(Self::Foo), +"MyUnion" => Ok(Self::MyUnion), +"MyUnionOne" => Ok(Self::MyUnionOne), +"MyUnionTwo" => Ok(Self::MyUnionTwo), + _ => Err(Error::Invalid), + } + } + } + + #[derive(Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord)] + #[cfg_attr( + all(feature = "serde", feature = "alloc"), + derive(serde::Serialize, serde::Deserialize), + serde(rename_all = "snake_case"), + serde(untagged), + )] + #[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] + pub enum Type { + UnionKey(Box), +Foo(Box), +MyUnion(Box), +MyUnionOne(Box), +MyUnionTwo(Box), + } + + impl Type { + pub const VARIANTS: [TypeVariant; 5] = [ TypeVariant::UnionKey, +TypeVariant::Foo, +TypeVariant::MyUnion, +TypeVariant::MyUnionOne, +TypeVariant::MyUnionTwo, ]; + pub const VARIANTS_STR: [&'static str; 5] = [ "UnionKey", +"Foo", +"MyUnion", +"MyUnionOne", +"MyUnionTwo", ]; + + #[cfg(feature = "std")] + #[allow(clippy::too_many_lines)] + pub fn read_xdr(v: TypeVariant, r: &mut Limited) -> Result { + match v { + TypeVariant::UnionKey => r.with_limited_depth(|r| Ok(Self::UnionKey(Box::new(UnionKey::read_xdr(r)?)))), +TypeVariant::Foo => r.with_limited_depth(|r| Ok(Self::Foo(Box::new(Foo::read_xdr(r)?)))), +TypeVariant::MyUnion => r.with_limited_depth(|r| Ok(Self::MyUnion(Box::new(MyUnion::read_xdr(r)?)))), +TypeVariant::MyUnionOne => r.with_limited_depth(|r| Ok(Self::MyUnionOne(Box::new(MyUnionOne::read_xdr(r)?)))), +TypeVariant::MyUnionTwo => r.with_limited_depth(|r| Ok(Self::MyUnionTwo(Box::new(MyUnionTwo::read_xdr(r)?)))), + } + } + + #[cfg(feature = "base64")] + pub fn read_xdr_base64(v: TypeVariant, r: &mut Limited) -> Result { + let mut dec = Limited::new(base64::read::DecoderReader::new(&mut r.inner, base64::STANDARD), r.limits.clone()); + let t = Self::read_xdr(v, &mut dec)?; + Ok(t) + } + + #[cfg(feature = "std")] + pub fn read_xdr_to_end(v: TypeVariant, r: &mut Limited) -> Result { + let s = Self::read_xdr(v, r)?; + // Check that any further reads, such as this read of one byte, read no + // data, indicating EOF. If a byte is read the data is invalid. + if r.read(&mut [0u8; 1])? 
== 0 { + Ok(s) + } else { + Err(Error::Invalid) + } + } + + #[cfg(feature = "base64")] + pub fn read_xdr_base64_to_end(v: TypeVariant, r: &mut Limited) -> Result { + let mut dec = Limited::new(base64::read::DecoderReader::new(&mut r.inner, base64::STANDARD), r.limits.clone()); + let t = Self::read_xdr_to_end(v, &mut dec)?; + Ok(t) + } + + #[cfg(feature = "std")] + #[allow(clippy::too_many_lines)] + pub fn read_xdr_iter(v: TypeVariant, r: &mut Limited) -> Box> + '_> { + match v { + TypeVariant::UnionKey => Box::new(ReadXdrIter::<_, UnionKey>::new(&mut r.inner, r.limits.clone()).map(|r| r.map(|t| Self::UnionKey(Box::new(t))))), +TypeVariant::Foo => Box::new(ReadXdrIter::<_, Foo>::new(&mut r.inner, r.limits.clone()).map(|r| r.map(|t| Self::Foo(Box::new(t))))), +TypeVariant::MyUnion => Box::new(ReadXdrIter::<_, MyUnion>::new(&mut r.inner, r.limits.clone()).map(|r| r.map(|t| Self::MyUnion(Box::new(t))))), +TypeVariant::MyUnionOne => Box::new(ReadXdrIter::<_, MyUnionOne>::new(&mut r.inner, r.limits.clone()).map(|r| r.map(|t| Self::MyUnionOne(Box::new(t))))), +TypeVariant::MyUnionTwo => Box::new(ReadXdrIter::<_, MyUnionTwo>::new(&mut r.inner, r.limits.clone()).map(|r| r.map(|t| Self::MyUnionTwo(Box::new(t))))), + } + } + + #[cfg(feature = "std")] + #[allow(clippy::too_many_lines)] + pub fn read_xdr_framed_iter(v: TypeVariant, r: &mut Limited) -> Box> + '_> { + match v { + TypeVariant::UnionKey => Box::new(ReadXdrIter::<_, Frame>::new(&mut r.inner, r.limits.clone()).map(|r| r.map(|t| Self::UnionKey(Box::new(t.0))))), +TypeVariant::Foo => Box::new(ReadXdrIter::<_, Frame>::new(&mut r.inner, r.limits.clone()).map(|r| r.map(|t| Self::Foo(Box::new(t.0))))), +TypeVariant::MyUnion => Box::new(ReadXdrIter::<_, Frame>::new(&mut r.inner, r.limits.clone()).map(|r| r.map(|t| Self::MyUnion(Box::new(t.0))))), +TypeVariant::MyUnionOne => Box::new(ReadXdrIter::<_, Frame>::new(&mut r.inner, r.limits.clone()).map(|r| r.map(|t| Self::MyUnionOne(Box::new(t.0))))), +TypeVariant::MyUnionTwo => Box::new(ReadXdrIter::<_, Frame>::new(&mut r.inner, r.limits.clone()).map(|r| r.map(|t| Self::MyUnionTwo(Box::new(t.0))))), + } + } + + #[cfg(feature = "base64")] + #[allow(clippy::too_many_lines)] + pub fn read_xdr_base64_iter(v: TypeVariant, r: &mut Limited) -> Box> + '_> { + let dec = base64::read::DecoderReader::new(&mut r.inner, base64::STANDARD); + match v { + TypeVariant::UnionKey => Box::new(ReadXdrIter::<_, UnionKey>::new(dec, r.limits.clone()).map(|r| r.map(|t| Self::UnionKey(Box::new(t))))), +TypeVariant::Foo => Box::new(ReadXdrIter::<_, Foo>::new(dec, r.limits.clone()).map(|r| r.map(|t| Self::Foo(Box::new(t))))), +TypeVariant::MyUnion => Box::new(ReadXdrIter::<_, MyUnion>::new(dec, r.limits.clone()).map(|r| r.map(|t| Self::MyUnion(Box::new(t))))), +TypeVariant::MyUnionOne => Box::new(ReadXdrIter::<_, MyUnionOne>::new(dec, r.limits.clone()).map(|r| r.map(|t| Self::MyUnionOne(Box::new(t))))), +TypeVariant::MyUnionTwo => Box::new(ReadXdrIter::<_, MyUnionTwo>::new(dec, r.limits.clone()).map(|r| r.map(|t| Self::MyUnionTwo(Box::new(t))))), + } + } + + #[cfg(feature = "std")] + pub fn from_xdr>(v: TypeVariant, bytes: B, limits: Limits) -> Result { + let mut cursor = Limited::new(Cursor::new(bytes.as_ref()), limits); + let t = Self::read_xdr_to_end(v, &mut cursor)?; + Ok(t) + } + + #[cfg(feature = "base64")] + pub fn from_xdr_base64(v: TypeVariant, b64: impl AsRef<[u8]>, limits: Limits) -> Result { + let mut b64_reader = Cursor::new(b64); + let mut dec = Limited::new(base64::read::DecoderReader::new(&mut b64_reader, 
base64::STANDARD), limits); + let t = Self::read_xdr_to_end(v, &mut dec)?; + Ok(t) + } + + #[cfg(all(feature = "std", feature = "serde_json"))] + #[allow(clippy::too_many_lines)] + pub fn read_json(v: TypeVariant, r: impl Read) -> Result { + match v { + TypeVariant::UnionKey => Ok(Self::UnionKey(Box::new(serde_json::from_reader(r)?))), +TypeVariant::Foo => Ok(Self::Foo(Box::new(serde_json::from_reader(r)?))), +TypeVariant::MyUnion => Ok(Self::MyUnion(Box::new(serde_json::from_reader(r)?))), +TypeVariant::MyUnionOne => Ok(Self::MyUnionOne(Box::new(serde_json::from_reader(r)?))), +TypeVariant::MyUnionTwo => Ok(Self::MyUnionTwo(Box::new(serde_json::from_reader(r)?))), + } + } + + #[cfg(feature = "alloc")] + #[must_use] + #[allow(clippy::too_many_lines)] + pub fn value(&self) -> &dyn core::any::Any { + #[allow(clippy::match_same_arms)] + match self { + Self::UnionKey(ref v) => v.as_ref(), +Self::Foo(ref v) => v.as_ref(), +Self::MyUnion(ref v) => v.as_ref(), +Self::MyUnionOne(ref v) => v.as_ref(), +Self::MyUnionTwo(ref v) => v.as_ref(), + } + } + + #[must_use] + #[allow(clippy::too_many_lines)] + pub const fn name(&self) -> &'static str { + match self { + Self::UnionKey(_) => "UnionKey", +Self::Foo(_) => "Foo", +Self::MyUnion(_) => "MyUnion", +Self::MyUnionOne(_) => "MyUnionOne", +Self::MyUnionTwo(_) => "MyUnionTwo", + } + } + + #[must_use] + #[allow(clippy::too_many_lines)] + pub const fn variants() -> [TypeVariant; 5] { + Self::VARIANTS + } + + #[must_use] + #[allow(clippy::too_many_lines)] + pub const fn variant(&self) -> TypeVariant { + match self { + Self::UnionKey(_) => TypeVariant::UnionKey, +Self::Foo(_) => TypeVariant::Foo, +Self::MyUnion(_) => TypeVariant::MyUnion, +Self::MyUnionOne(_) => TypeVariant::MyUnionOne, +Self::MyUnionTwo(_) => TypeVariant::MyUnionTwo, + } + } + } + + impl Name for Type { + #[must_use] + fn name(&self) -> &'static str { + Self::name(self) + } + } + + impl Variants for Type { + fn variants() -> slice::Iter<'static, TypeVariant> { + Self::VARIANTS.iter() + } + } + + impl WriteXdr for Type { + #[cfg(feature = "std")] + #[allow(clippy::too_many_lines)] + fn write_xdr(&self, w: &mut Limited) -> Result<()> { + match self { + Self::UnionKey(v) => v.write_xdr(w), +Self::Foo(v) => v.write_xdr(w), +Self::MyUnion(v) => v.write_xdr(w), +Self::MyUnionOne(v) => v.write_xdr(w), +Self::MyUnionTwo(v) => v.write_xdr(w), + } + } + } diff --git a/spec/output/generator_spec_rust_custom_jsonschema_impls/optional.x/MyXDR.rs b/spec/output/generator_spec_rust_custom_jsonschema_impls/optional.x/MyXDR.rs new file mode 100644 index 000000000..9e34e30c8 --- /dev/null +++ b/spec/output/generator_spec_rust_custom_jsonschema_impls/optional.x/MyXDR.rs @@ -0,0 +1,3067 @@ +// Module is generated from: +// spec/fixtures/generator/optional.x + +#![allow(clippy::missing_errors_doc, clippy::unreadable_literal)] + +/// `XDR_FILES_SHA256` is a list of pairs of source files and their SHA256 hashes. +pub const XDR_FILES_SHA256: [(&str, &str); 1] = [ + ("spec/fixtures/generator/optional.x", "3241e832fcf00bca4315ecb6c259621dafb0e302a63a993f5504b0b5cebb6bd7") +]; + +use core::{array::TryFromSliceError, fmt, fmt::Debug, marker::Sized, ops::Deref, slice}; + +#[cfg(feature = "std")] +use core::marker::PhantomData; + +// When feature alloc is turned off use static lifetime Box and Vec types. 
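+// Without an allocator there is no owned heap type, so the aliases below fall
+// back to `&'static T` and `&'static [T]`; decoded values can then only
+// reference data with a 'static lifetime.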
+#[cfg(not(feature = "alloc"))] +mod noalloc { + pub mod boxed { + pub type Box = &'static T; + } + pub mod vec { + pub type Vec = &'static [T]; + } +} +#[cfg(not(feature = "alloc"))] +use noalloc::{boxed::Box, vec::Vec}; + +// When feature std is turned off, but feature alloc is turned on import the +// alloc crate and use its Box and Vec types. +#[cfg(all(not(feature = "std"), feature = "alloc"))] +extern crate alloc; +#[cfg(all(not(feature = "std"), feature = "alloc"))] +use alloc::{ + borrow::ToOwned, + boxed::Box, + string::{FromUtf8Error, String}, + vec::Vec, +}; +#[cfg(feature = "std")] +use std::string::FromUtf8Error; + +#[cfg(feature = "arbitrary")] +use arbitrary::Arbitrary; + +// TODO: Add support for read/write xdr fns when std not available. + +#[cfg(feature = "std")] +use std::{ + error, io, + io::{BufRead, BufReader, Cursor, Read, Write}, +}; + +/// Error contains all errors returned by functions in this crate. It can be +/// compared via `PartialEq`, however any contained IO errors will only be +/// compared on their `ErrorKind`. +#[derive(Debug)] +pub enum Error { + Invalid, + Unsupported, + LengthExceedsMax, + LengthMismatch, + NonZeroPadding, + Utf8Error(core::str::Utf8Error), + #[cfg(feature = "alloc")] + InvalidHex, + #[cfg(feature = "std")] + Io(io::Error), + DepthLimitExceeded, + #[cfg(feature = "serde_json")] + Json(serde_json::Error), + LengthLimitExceeded, +} + +impl PartialEq for Error { + fn eq(&self, other: &Self) -> bool { + match (self, other) { + (Self::Utf8Error(l), Self::Utf8Error(r)) => l == r, + // IO errors cannot be compared, but in the absence of any more + // meaningful way to compare the errors we compare the kind of error + // and ignore the embedded source error or OS error. The main use + // case for comparing errors outputted by the XDR library is for + // error case testing, and a lack of the ability to compare has a + // detrimental affect on failure testing, so this is a tradeoff. 
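+            // For example, two distinct `io::Error` values that both carry
+            // `ErrorKind::UnexpectedEof` compare equal here, regardless of
+            // their payloads.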
+ #[cfg(feature = "std")] + (Self::Io(l), Self::Io(r)) => l.kind() == r.kind(), + _ => core::mem::discriminant(self) == core::mem::discriminant(other), + } + } +} + +#[cfg(feature = "std")] +impl error::Error for Error { + #[must_use] + fn source(&self) -> Option<&(dyn error::Error + 'static)> { + match self { + Self::Io(e) => Some(e), + #[cfg(feature = "serde_json")] + Self::Json(e) => Some(e), + _ => None, + } + } +} + +impl fmt::Display for Error { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match self { + Error::Invalid => write!(f, "xdr value invalid"), + Error::Unsupported => write!(f, "xdr value unsupported"), + Error::LengthExceedsMax => write!(f, "xdr value max length exceeded"), + Error::LengthMismatch => write!(f, "xdr value length does not match"), + Error::NonZeroPadding => write!(f, "xdr padding contains non-zero bytes"), + Error::Utf8Error(e) => write!(f, "{e}"), + #[cfg(feature = "alloc")] + Error::InvalidHex => write!(f, "hex invalid"), + #[cfg(feature = "std")] + Error::Io(e) => write!(f, "{e}"), + Error::DepthLimitExceeded => write!(f, "depth limit exceeded"), + #[cfg(feature = "serde_json")] + Error::Json(e) => write!(f, "{e}"), + Error::LengthLimitExceeded => write!(f, "length limit exceeded"), + } + } +} + +impl From for Error { + fn from(_: TryFromSliceError) -> Error { + Error::LengthMismatch + } +} + +impl From for Error { + #[must_use] + fn from(e: core::str::Utf8Error) -> Self { + Error::Utf8Error(e) + } +} + +#[cfg(feature = "alloc")] +impl From for Error { + #[must_use] + fn from(e: FromUtf8Error) -> Self { + Error::Utf8Error(e.utf8_error()) + } +} + +#[cfg(feature = "std")] +impl From for Error { + #[must_use] + fn from(e: io::Error) -> Self { + Error::Io(e) + } +} + +#[cfg(feature = "serde_json")] +impl From for Error { + #[must_use] + fn from(e: serde_json::Error) -> Self { + Error::Json(e) + } +} + +impl From for () { + fn from(_: Error) {} +} + +#[allow(dead_code)] +type Result = core::result::Result; + +/// Name defines types that assign a static name to their value, such as the +/// name given to an identifier in an XDR enum, or the name given to the case in +/// a union. +pub trait Name { + fn name(&self) -> &'static str; +} + +/// Discriminant defines types that may contain a one-of value determined +/// according to the discriminant, and exposes the value of the discriminant for +/// that type, such as in an XDR union. +pub trait Discriminant { + fn discriminant(&self) -> D; +} + +/// Iter defines types that have variants that can be iterated. +pub trait Variants { + fn variants() -> slice::Iter<'static, V> + where + V: Sized; +} + +// Enum defines a type that is represented as an XDR enumeration when encoded. +pub trait Enum: Name + Variants + Sized {} + +// Union defines a type that is represented as an XDR union when encoded. +pub trait Union: Name + Discriminant + Variants +where + D: Sized, +{ +} + +/// `Limits` contains the limits that a limited reader or writer will be +/// constrained to. +#[cfg(feature = "std")] +#[derive(Debug, Clone, Hash, PartialEq, Eq, PartialOrd, Ord)] +pub struct Limits { + /// Defines the maximum depth for recursive calls in `Read/WriteXdr` to + /// prevent stack overflow. + /// + /// The depth limit is akin to limiting stack depth. Its purpose is to + /// prevent the program from hitting the maximum stack size allowed by Rust, + /// which would result in an unrecoverable `SIGABRT`. 
For more information + /// about Rust's stack size limit, refer to the [Rust + /// documentation](https://doc.rust-lang.org/std/thread/#stack-size). + pub depth: u32, + + /// Defines the maximum number of bytes that will be read or written. + pub len: usize, +} + +#[cfg(feature = "std")] +impl Limits { + #[must_use] + pub fn none() -> Self { + Self { + depth: u32::MAX, + len: usize::MAX, + } + } + + #[must_use] + pub fn depth(depth: u32) -> Self { + Limits { + depth, + ..Limits::none() + } + } + + #[must_use] + pub fn len(len: usize) -> Self { + Limits { + len, + ..Limits::none() + } + } +} + +/// `Limited` wraps an object and provides functions for enforcing limits. +/// +/// Intended for use with readers and writers and limiting their reads and +/// writes. +#[cfg(feature = "std")] +pub struct Limited { + pub inner: L, + pub(crate) limits: Limits, +} + +#[cfg(feature = "std")] +impl Limited { + /// Constructs a new `Limited`. + /// + /// - `inner`: The value being limited. + /// - `limits`: The limits to enforce. + pub fn new(inner: L, limits: Limits) -> Self { + Limited { inner, limits } + } + + /// Consume the given length from the internal remaining length limit. + /// + /// ### Errors + /// + /// If the length would consume more length than the remaining length limit + /// allows. + pub(crate) fn consume_len(&mut self, len: usize) -> Result<()> { + if let Some(len) = self.limits.len.checked_sub(len) { + self.limits.len = len; + Ok(()) + } else { + Err(Error::LengthLimitExceeded) + } + } + + /// Consumes a single depth for the duration of the given function. + /// + /// ### Errors + /// + /// If the depth limit is already exhausted. + pub(crate) fn with_limited_depth(&mut self, f: F) -> Result + where + F: FnOnce(&mut Self) -> Result, + { + if let Some(depth) = self.limits.depth.checked_sub(1) { + self.limits.depth = depth; + let res = f(self); + self.limits.depth = self.limits.depth.saturating_add(1); + res + } else { + Err(Error::DepthLimitExceeded) + } + } +} + +#[cfg(feature = "std")] +impl Read for Limited { + /// Forwards the read operation to the wrapped object. + fn read(&mut self, buf: &mut [u8]) -> std::io::Result { + self.inner.read(buf) + } +} + +#[cfg(feature = "std")] +impl BufRead for Limited { + /// Forwards the read operation to the wrapped object. + fn fill_buf(&mut self) -> std::io::Result<&[u8]> { + self.inner.fill_buf() + } + + /// Forwards the read operation to the wrapped object. + fn consume(&mut self, amt: usize) { + self.inner.consume(amt); + } +} + +#[cfg(feature = "std")] +impl Write for Limited { + /// Forwards the write operation to the wrapped object. + fn write(&mut self, buf: &[u8]) -> std::io::Result { + self.inner.write(buf) + } + + /// Forwards the flush operation to the wrapped object. + fn flush(&mut self) -> std::io::Result<()> { + self.inner.flush() + } +} + +#[cfg(feature = "std")] +pub struct ReadXdrIter { + reader: Limited>, + _s: PhantomData, +} + +#[cfg(feature = "std")] +impl ReadXdrIter { + fn new(r: R, limits: Limits) -> Self { + Self { + reader: Limited { + inner: BufReader::new(r), + limits, + }, + _s: PhantomData, + } + } +} + +#[cfg(feature = "std")] +impl Iterator for ReadXdrIter { + type Item = Result; + + // Next reads the internal reader and XDR decodes it into the Self type. If + // the EOF is reached without reading any new bytes `None` is returned. If + // EOF is reached after reading some bytes a truncated entry is assumed an + // an `Error::Io` containing an `UnexpectedEof`. If any other IO error + // occurs it is returned. 
Iteration of this iterator stops naturally when + // `None` is returned, but not when a `Some(Err(...))` is returned. The + // caller is responsible for checking each Result. + fn next(&mut self) -> Option { + // Try to fill the buffer to see if the EOF has been reached or not. + // This happens to effectively peek to see if the stream has finished + // and there are no more items. It is necessary to do this because the + // xdr types in this crate heavily use the `std::io::Read::read_exact` + // method that doesn't distinguish between an EOF at the beginning of a + // read and an EOF after a partial fill of a read_exact. + match self.reader.fill_buf() { + // If the reader has no more data and is unable to fill any new data + // into its internal buf, then the EOF has been reached. + Ok([]) => return None, + // If an error occurs filling the buffer, treat that as an error and stop. + Err(e) => return Some(Err(Error::Io(e))), + // If there is data in the buf available for reading, continue. + Ok([..]) => (), + }; + // Read the buf into the type. + let r = self.reader.with_limited_depth(|dlr| S::read_xdr(dlr)); + match r { + Ok(s) => Some(Ok(s)), + Err(e) => Some(Err(e)), + } + } +} + +pub trait ReadXdr +where + Self: Sized, +{ + /// Read the XDR and construct the type. + /// + /// Read bytes from the given read implementation, decoding the bytes as + /// XDR, and construct the type implementing this interface from those + /// bytes. + /// + /// Just enough bytes are read from the read implementation to construct the + /// type. Any residual bytes remain in the read implementation. + /// + /// All implementations should continue if the read implementation returns + /// [`ErrorKind::Interrupted`](std::io::ErrorKind::Interrupted). + /// + /// Use [`ReadXdR: Read_xdr_to_end`] when the intent is for all bytes in the + /// read implementation to be consumed by the read. + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result; + + /// Construct the type from the XDR bytes base64 encoded. + /// + /// An error is returned if the bytes are not completely consumed by the + /// deserialization. + #[cfg(feature = "base64")] + fn read_xdr_base64(r: &mut Limited) -> Result { + let mut dec = Limited::new( + base64::read::DecoderReader::new(&mut r.inner, base64::STANDARD), + r.limits.clone(), + ); + let t = Self::read_xdr(&mut dec)?; + Ok(t) + } + + /// Read the XDR and construct the type, and consider it an error if the + /// read does not completely consume the read implementation. + /// + /// Read bytes from the given read implementation, decoding the bytes as + /// XDR, and construct the type implementing this interface from those + /// bytes. + /// + /// Just enough bytes are read from the read implementation to construct the + /// type, and then confirm that no further bytes remain. To confirm no + /// further bytes remain additional bytes are attempted to be read from the + /// read implementation. If it is possible to read any residual bytes from + /// the read implementation an error is returned. The read implementation + /// may not be exhaustively read if there are residual bytes, and it is + /// considered undefined how many residual bytes or how much of the residual + /// buffer are consumed in this case. + /// + /// All implementations should continue if the read implementation returns + /// [`ErrorKind::Interrupted`](std::io::ErrorKind::Interrupted). 
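+    ///
+    /// A minimal usage sketch (illustrative; assumes the `std` feature and a
+    /// concrete `T: ReadXdr`, with `xdr_bytes` standing in for the input):
+    ///
+    /// ```text
+    /// let mut r = Limited::new(Cursor::new(xdr_bytes), Limits::none());
+    /// let t = T::read_xdr_to_end(&mut r)?;
+    /// ```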
+ #[cfg(feature = "std")] + fn read_xdr_to_end(r: &mut Limited) -> Result { + let s = Self::read_xdr(r)?; + // Check that any further reads, such as this read of one byte, read no + // data, indicating EOF. If a byte is read the data is invalid. + if r.read(&mut [0u8; 1])? == 0 { + Ok(s) + } else { + Err(Error::Invalid) + } + } + + /// Construct the type from the XDR bytes base64 encoded. + /// + /// An error is returned if the bytes are not completely consumed by the + /// deserialization. + #[cfg(feature = "base64")] + fn read_xdr_base64_to_end(r: &mut Limited) -> Result { + let mut dec = Limited::new( + base64::read::DecoderReader::new(&mut r.inner, base64::STANDARD), + r.limits.clone(), + ); + let t = Self::read_xdr_to_end(&mut dec)?; + Ok(t) + } + + /// Read the XDR and construct the type. + /// + /// Read bytes from the given read implementation, decoding the bytes as + /// XDR, and construct the type implementing this interface from those + /// bytes. + /// + /// Just enough bytes are read from the read implementation to construct the + /// type. Any residual bytes remain in the read implementation. + /// + /// All implementations should continue if the read implementation returns + /// [`ErrorKind::Interrupted`](std::io::ErrorKind::Interrupted). + /// + /// Use [`ReadXdR: Read_xdr_into_to_end`] when the intent is for all bytes + /// in the read implementation to be consumed by the read. + #[cfg(feature = "std")] + fn read_xdr_into(&mut self, r: &mut Limited) -> Result<()> { + *self = Self::read_xdr(r)?; + Ok(()) + } + + /// Read the XDR into the existing value, and consider it an error if the + /// read does not completely consume the read implementation. + /// + /// Read bytes from the given read implementation, decoding the bytes as + /// XDR, and construct the type implementing this interface from those + /// bytes. + /// + /// Just enough bytes are read from the read implementation to construct the + /// type, and then confirm that no further bytes remain. To confirm no + /// further bytes remain additional bytes are attempted to be read from the + /// read implementation. If it is possible to read any residual bytes from + /// the read implementation an error is returned. The read implementation + /// may not be exhaustively read if there are residual bytes, and it is + /// considered undefined how many residual bytes or how much of the residual + /// buffer are consumed in this case. + /// + /// All implementations should continue if the read implementation returns + /// [`ErrorKind::Interrupted`](std::io::ErrorKind::Interrupted). + #[cfg(feature = "std")] + fn read_xdr_into_to_end(&mut self, r: &mut Limited) -> Result<()> { + Self::read_xdr_into(self, r)?; + // Check that any further reads, such as this read of one byte, read no + // data, indicating EOF. If a byte is read the data is invalid. + if r.read(&mut [0u8; 1])? == 0 { + Ok(()) + } else { + Err(Error::Invalid) + } + } + + /// Create an iterator that reads the read implementation as a stream of + /// values that are read into the implementing type. + /// + /// Read bytes from the given read implementation, decoding the bytes as + /// XDR, and construct the type implementing this interface from those + /// bytes. + /// + /// Just enough bytes are read from the read implementation to construct the + /// type, and then confirm that no further bytes remain. To confirm no + /// further bytes remain additional bytes are attempted to be read from the + /// read implementation. 
If it is possible to read any residual bytes from + /// the read implementation an error is returned. The read implementation + /// may not be exhaustively read if there are residual bytes, and it is + /// considered undefined how many residual bytes or how much of the residual + /// buffer are consumed in this case. + /// + /// All implementations should continue if the read implementation returns + /// [`ErrorKind::Interrupted`](std::io::ErrorKind::Interrupted). + #[cfg(feature = "std")] + fn read_xdr_iter(r: &mut Limited) -> ReadXdrIter<&mut R, Self> { + ReadXdrIter::new(&mut r.inner, r.limits.clone()) + } + + /// Create an iterator that reads the read implementation as a stream of + /// values that are read into the implementing type. + #[cfg(feature = "base64")] + fn read_xdr_base64_iter( + r: &mut Limited, + ) -> ReadXdrIter, Self> { + let dec = base64::read::DecoderReader::new(&mut r.inner, base64::STANDARD); + ReadXdrIter::new(dec, r.limits.clone()) + } + + /// Construct the type from the XDR bytes. + /// + /// An error is returned if the bytes are not completely consumed by the + /// deserialization. + #[cfg(feature = "std")] + fn from_xdr(bytes: impl AsRef<[u8]>, limits: Limits) -> Result { + let mut cursor = Limited::new(Cursor::new(bytes.as_ref()), limits); + let t = Self::read_xdr_to_end(&mut cursor)?; + Ok(t) + } + + /// Construct the type from the XDR bytes base64 encoded. + /// + /// An error is returned if the bytes are not completely consumed by the + /// deserialization. + #[cfg(feature = "base64")] + fn from_xdr_base64(b64: impl AsRef<[u8]>, limits: Limits) -> Result { + let mut b64_reader = Cursor::new(b64); + let mut dec = Limited::new( + base64::read::DecoderReader::new(&mut b64_reader, base64::STANDARD), + limits, + ); + let t = Self::read_xdr_to_end(&mut dec)?; + Ok(t) + } +} + +pub trait WriteXdr { + #[cfg(feature = "std")] + fn write_xdr(&self, w: &mut Limited) -> Result<()>; + + #[cfg(feature = "std")] + fn to_xdr(&self, limits: Limits) -> Result> { + let mut cursor = Limited::new(Cursor::new(vec![]), limits); + self.write_xdr(&mut cursor)?; + let bytes = cursor.inner.into_inner(); + Ok(bytes) + } + + #[cfg(feature = "base64")] + fn to_xdr_base64(&self, limits: Limits) -> Result { + let mut enc = Limited::new( + base64::write::EncoderStringWriter::new(base64::STANDARD), + limits, + ); + self.write_xdr(&mut enc)?; + let b64 = enc.inner.into_inner(); + Ok(b64) + } +} + +/// `Pad_len` returns the number of bytes to pad an XDR value of the given +/// length to make the final serialized size a multiple of 4. +#[cfg(feature = "std")] +fn pad_len(len: usize) -> usize { + (4 - (len % 4)) % 4 +} + +impl ReadXdr for i32 { + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result { + let mut b = [0u8; 4]; + r.with_limited_depth(|r| { + r.consume_len(b.len())?; + r.read_exact(&mut b)?; + Ok(i32::from_be_bytes(b)) + }) + } +} + +impl WriteXdr for i32 { + #[cfg(feature = "std")] + fn write_xdr(&self, w: &mut Limited) -> Result<()> { + let b: [u8; 4] = self.to_be_bytes(); + w.with_limited_depth(|w| { + w.consume_len(b.len())?; + Ok(w.write_all(&b)?) 
+ }) + } +} + +impl ReadXdr for u32 { + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result { + let mut b = [0u8; 4]; + r.with_limited_depth(|r| { + r.consume_len(b.len())?; + r.read_exact(&mut b)?; + Ok(u32::from_be_bytes(b)) + }) + } +} + +impl WriteXdr for u32 { + #[cfg(feature = "std")] + fn write_xdr(&self, w: &mut Limited) -> Result<()> { + let b: [u8; 4] = self.to_be_bytes(); + w.with_limited_depth(|w| { + w.consume_len(b.len())?; + Ok(w.write_all(&b)?) + }) + } +} + +impl ReadXdr for i64 { + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result { + let mut b = [0u8; 8]; + r.with_limited_depth(|r| { + r.consume_len(b.len())?; + r.read_exact(&mut b)?; + Ok(i64::from_be_bytes(b)) + }) + } +} + +impl WriteXdr for i64 { + #[cfg(feature = "std")] + fn write_xdr(&self, w: &mut Limited) -> Result<()> { + let b: [u8; 8] = self.to_be_bytes(); + w.with_limited_depth(|w| { + w.consume_len(b.len())?; + Ok(w.write_all(&b)?) + }) + } +} + +impl ReadXdr for u64 { + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result { + let mut b = [0u8; 8]; + r.with_limited_depth(|r| { + r.consume_len(b.len())?; + r.read_exact(&mut b)?; + Ok(u64::from_be_bytes(b)) + }) + } +} + +impl WriteXdr for u64 { + #[cfg(feature = "std")] + fn write_xdr(&self, w: &mut Limited) -> Result<()> { + let b: [u8; 8] = self.to_be_bytes(); + w.with_limited_depth(|w| { + w.consume_len(b.len())?; + Ok(w.write_all(&b)?) + }) + } +} + +impl ReadXdr for f32 { + #[cfg(feature = "std")] + fn read_xdr(_r: &mut Limited) -> Result { + todo!() + } +} + +impl WriteXdr for f32 { + #[cfg(feature = "std")] + fn write_xdr(&self, _w: &mut Limited) -> Result<()> { + todo!() + } +} + +impl ReadXdr for f64 { + #[cfg(feature = "std")] + fn read_xdr(_r: &mut Limited) -> Result { + todo!() + } +} + +impl WriteXdr for f64 { + #[cfg(feature = "std")] + fn write_xdr(&self, _w: &mut Limited) -> Result<()> { + todo!() + } +} + +impl ReadXdr for bool { + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result { + r.with_limited_depth(|r| { + let i = u32::read_xdr(r)?; + let b = i == 1; + Ok(b) + }) + } +} + +impl WriteXdr for bool { + #[cfg(feature = "std")] + fn write_xdr(&self, w: &mut Limited) -> Result<()> { + w.with_limited_depth(|w| { + let i = u32::from(*self); // true = 1, false = 0 + i.write_xdr(w) + }) + } +} + +impl ReadXdr for Option { + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result { + r.with_limited_depth(|r| { + let i = u32::read_xdr(r)?; + match i { + 0 => Ok(None), + 1 => { + let t = T::read_xdr(r)?; + Ok(Some(t)) + } + _ => Err(Error::Invalid), + } + }) + } +} + +impl WriteXdr for Option { + #[cfg(feature = "std")] + fn write_xdr(&self, w: &mut Limited) -> Result<()> { + w.with_limited_depth(|w| { + if let Some(t) = self { + 1u32.write_xdr(w)?; + t.write_xdr(w)?; + } else { + 0u32.write_xdr(w)?; + } + Ok(()) + }) + } +} + +impl ReadXdr for Box { + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result { + r.with_limited_depth(|r| Ok(Box::new(T::read_xdr(r)?))) + } +} + +impl WriteXdr for Box { + #[cfg(feature = "std")] + fn write_xdr(&self, w: &mut Limited) -> Result<()> { + w.with_limited_depth(|w| T::write_xdr(self, w)) + } +} + +impl ReadXdr for () { + #[cfg(feature = "std")] + fn read_xdr(_r: &mut Limited) -> Result { + Ok(()) + } +} + +impl WriteXdr for () { + #[cfg(feature = "std")] + fn write_xdr(&self, _w: &mut Limited) -> Result<()> { + Ok(()) + } +} + +impl ReadXdr for [u8; N] { + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result { + 
r.with_limited_depth(|r| { + r.consume_len(N)?; + let padding = pad_len(N); + r.consume_len(padding)?; + let mut arr = [0u8; N]; + r.read_exact(&mut arr)?; + let pad = &mut [0u8; 3][..padding]; + r.read_exact(pad)?; + if pad.iter().any(|b| *b != 0) { + return Err(Error::NonZeroPadding); + } + Ok(arr) + }) + } +} + +impl WriteXdr for [u8; N] { + #[cfg(feature = "std")] + fn write_xdr(&self, w: &mut Limited) -> Result<()> { + w.with_limited_depth(|w| { + w.consume_len(N)?; + let padding = pad_len(N); + w.consume_len(padding)?; + w.write_all(self)?; + w.write_all(&[0u8; 3][..padding])?; + Ok(()) + }) + } +} + +impl ReadXdr for [T; N] { + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result { + r.with_limited_depth(|r| { + let mut vec = Vec::with_capacity(N); + for _ in 0..N { + let t = T::read_xdr(r)?; + vec.push(t); + } + let arr: [T; N] = vec.try_into().unwrap_or_else(|_: Vec| unreachable!()); + Ok(arr) + }) + } +} + +impl WriteXdr for [T; N] { + #[cfg(feature = "std")] + fn write_xdr(&self, w: &mut Limited) -> Result<()> { + w.with_limited_depth(|w| { + for t in self { + t.write_xdr(w)?; + } + Ok(()) + }) + } +} + +// VecM ------------------------------------------------------------------------ + +#[cfg(feature = "alloc")] +#[derive(Debug, Clone, Hash, PartialEq, Eq, PartialOrd, Ord)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +#[cfg_attr(feature = "arbitrary", derive(Arbitrary))] +pub struct VecM(Vec); + +#[cfg(not(feature = "alloc"))] +#[derive(Debug, Clone, Hash, PartialEq, Eq, PartialOrd, Ord)] +#[cfg_attr(feature = "arbitrary", derive(Arbitrary))] +pub struct VecM(Vec) +where + T: 'static; + +impl Deref for VecM { + type Target = Vec; + + fn deref(&self) -> &Self::Target { + &self.0 + } +} + +impl Default for VecM { + fn default() -> Self { + Self(Vec::default()) + } +} + +#[cfg(feature = "schemars")] +impl schemars::JsonSchema for VecM { + fn schema_name() -> String { + format!("VecM<{}, {}>", T::schema_name(), MAX) + } + + fn is_referenceable() -> bool { + false + } + + fn json_schema(gen: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema { + let schema = Vec::::json_schema(gen); + if let schemars::schema::Schema::Object(mut schema) = schema { + if let Some(array) = schema.array.clone() { + schema.array = Some(Box::new(schemars::schema::ArrayValidation { + max_items: Some(MAX), + ..*array + })); + } + schema.into() + } else { + schema + } + } +} + +impl VecM { + pub const MAX_LEN: usize = { MAX as usize }; + + #[must_use] + #[allow(clippy::unused_self)] + pub fn max_len(&self) -> usize { + Self::MAX_LEN + } + + #[must_use] + pub fn as_vec(&self) -> &Vec { + self.as_ref() + } +} + +impl VecM { + #[must_use] + #[cfg(feature = "alloc")] + pub fn to_vec(&self) -> Vec { + self.into() + } + + #[must_use] + pub fn into_vec(self) -> Vec { + self.into() + } +} + +impl VecM { + #[cfg(feature = "alloc")] + pub fn to_string(&self) -> Result { + self.try_into() + } + + #[cfg(feature = "alloc")] + pub fn into_string(self) -> Result { + self.try_into() + } + + #[cfg(feature = "alloc")] + #[must_use] + pub fn to_string_lossy(&self) -> String { + String::from_utf8_lossy(&self.0).into_owned() + } + + #[cfg(feature = "alloc")] + #[must_use] + pub fn into_string_lossy(self) -> String { + String::from_utf8_lossy(&self.0).into_owned() + } +} + +impl VecM { + #[must_use] + pub fn to_option(&self) -> Option { + if self.len() > 0 { + Some(self.0[0].clone()) + } else { + None + } + } +} + +#[cfg(not(feature = "alloc"))] +impl From> for Option { + 
#[must_use] + fn from(v: VecM) -> Self { + v.to_option() + } +} + +#[cfg(feature = "alloc")] +impl VecM { + #[must_use] + pub fn into_option(mut self) -> Option { + self.0.drain(..).next() + } +} + +#[cfg(feature = "alloc")] +impl From> for Option { + #[must_use] + fn from(v: VecM) -> Self { + v.into_option() + } +} + +impl TryFrom> for VecM { + type Error = Error; + + fn try_from(v: Vec) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(VecM(v)) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +impl From> for Vec { + #[must_use] + fn from(v: VecM) -> Self { + v.0 + } +} + +#[cfg(feature = "alloc")] +impl From<&VecM> for Vec { + #[must_use] + fn from(v: &VecM) -> Self { + v.0.clone() + } +} + +impl AsRef> for VecM { + #[must_use] + fn as_ref(&self) -> &Vec { + &self.0 + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&Vec> for VecM { + type Error = Error; + + fn try_from(v: &Vec) -> Result { + v.as_slice().try_into() + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&[T]> for VecM { + type Error = Error; + + fn try_from(v: &[T]) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(VecM(v.to_vec())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +impl AsRef<[T]> for VecM { + #[cfg(feature = "alloc")] + #[must_use] + fn as_ref(&self) -> &[T] { + self.0.as_ref() + } + #[cfg(not(feature = "alloc"))] + #[must_use] + fn as_ref(&self) -> &[T] { + self.0 + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<[T; N]> for VecM { + type Error = Error; + + fn try_from(v: [T; N]) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(VecM(v.to_vec())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +#[cfg(feature = "alloc")] +impl TryFrom> for [T; N] { + type Error = VecM; + + fn try_from(v: VecM) -> core::result::Result { + let s: [T; N] = v.0.try_into().map_err(|v: Vec| VecM::(v))?; + Ok(s) + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&[T; N]> for VecM { + type Error = Error; + + fn try_from(v: &[T; N]) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(VecM(v.to_vec())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +#[cfg(not(feature = "alloc"))] +impl TryFrom<&'static [T; N]> for VecM { + type Error = Error; + + fn try_from(v: &'static [T; N]) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(VecM(v)) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&String> for VecM { + type Error = Error; + + fn try_from(v: &String) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(VecM(v.as_bytes().to_vec())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +#[cfg(feature = "alloc")] +impl TryFrom for VecM { + type Error = Error; + + fn try_from(v: String) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(VecM(v.into())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +#[cfg(feature = "alloc")] +impl TryFrom> for String { + type Error = Error; + + fn try_from(v: VecM) -> Result { + Ok(String::from_utf8(v.0)?) 
+ } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&VecM> for String { + type Error = Error; + + fn try_from(v: &VecM) -> Result { + Ok(core::str::from_utf8(v.as_ref())?.to_owned()) + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&str> for VecM { + type Error = Error; + + fn try_from(v: &str) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(VecM(v.into())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +#[cfg(not(feature = "alloc"))] +impl TryFrom<&'static str> for VecM { + type Error = Error; + + fn try_from(v: &'static str) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(VecM(v.as_bytes())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +impl<'a, const MAX: u32> TryFrom<&'a VecM> for &'a str { + type Error = Error; + + fn try_from(v: &'a VecM) -> Result { + Ok(core::str::from_utf8(v.as_ref())?) + } +} + +impl ReadXdr for VecM { + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result { + r.with_limited_depth(|r| { + let len: u32 = u32::read_xdr(r)?; + if len > MAX { + return Err(Error::LengthExceedsMax); + } + + r.consume_len(len as usize)?; + let padding = pad_len(len as usize); + r.consume_len(padding)?; + + let mut vec = vec![0u8; len as usize]; + r.read_exact(&mut vec)?; + + let pad = &mut [0u8; 3][..padding]; + r.read_exact(pad)?; + if pad.iter().any(|b| *b != 0) { + return Err(Error::NonZeroPadding); + } + + Ok(VecM(vec)) + }) + } +} + +impl WriteXdr for VecM { + #[cfg(feature = "std")] + fn write_xdr(&self, w: &mut Limited) -> Result<()> { + w.with_limited_depth(|w| { + let len: u32 = self.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + len.write_xdr(w)?; + + w.consume_len(self.len())?; + let padding = pad_len(self.len()); + w.consume_len(padding)?; + + w.write_all(&self.0)?; + + w.write_all(&[0u8; 3][..padding])?; + + Ok(()) + }) + } +} + +impl ReadXdr for VecM { + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result { + r.with_limited_depth(|r| { + let len = u32::read_xdr(r)?; + if len > MAX { + return Err(Error::LengthExceedsMax); + } + + let mut vec = Vec::new(); + for _ in 0..len { + let t = T::read_xdr(r)?; + vec.push(t); + } + + Ok(VecM(vec)) + }) + } +} + +impl WriteXdr for VecM { + #[cfg(feature = "std")] + fn write_xdr(&self, w: &mut Limited) -> Result<()> { + w.with_limited_depth(|w| { + let len: u32 = self.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + len.write_xdr(w)?; + + for t in &self.0 { + t.write_xdr(w)?; + } + + Ok(()) + }) + } +} + +// BytesM ------------------------------------------------------------------------ + +#[cfg(feature = "alloc")] +#[derive(Clone, Hash, PartialEq, Eq, PartialOrd, Ord)] +#[cfg_attr( + feature = "serde", + derive(serde_with::SerializeDisplay, serde_with::DeserializeFromStr) +)] +#[cfg_attr(feature = "arbitrary", derive(Arbitrary))] +pub struct BytesM(Vec); + +#[cfg(not(feature = "alloc"))] +#[derive(Clone, Hash, PartialEq, Eq, PartialOrd, Ord)] +#[cfg_attr(feature = "arbitrary", derive(Arbitrary))] +pub struct BytesM(Vec); + +impl core::fmt::Display for BytesM { + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + #[cfg(feature = "alloc")] + let v = &self.0; + #[cfg(not(feature = "alloc"))] + let v = self.0; + for b in v { + write!(f, "{b:02x}")?; + } + Ok(()) + } +} + +impl core::fmt::Debug for BytesM { + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + #[cfg(feature = "alloc")] + let v = &self.0; + 
#[cfg(not(feature = "alloc"))] + let v = self.0; + write!(f, "BytesM(")?; + for b in v { + write!(f, "{b:02x}")?; + } + write!(f, ")")?; + Ok(()) + } +} + +#[cfg(feature = "alloc")] +impl core::str::FromStr for BytesM { + type Err = Error; + fn from_str(s: &str) -> core::result::Result { + hex::decode(s).map_err(|_| Error::InvalidHex)?.try_into() + } +} + +impl Deref for BytesM { + type Target = Vec; + + fn deref(&self) -> &Self::Target { + &self.0 + } +} + +#[cfg(feature = "schemars")] +impl schemars::JsonSchema for BytesM { + fn schema_name() -> String { + format!("BytesM<{MAX}>") + } + + fn is_referenceable() -> bool { + false + } + + fn json_schema(gen: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema { + let schema = String::json_schema(gen); + if let schemars::schema::Schema::Object(mut schema) = schema { + schema.extensions.insert( + "contentEncoding".to_owned(), + serde_json::Value::String("hex".to_string()), + ); + schema.extensions.insert( + "contentMediaType".to_owned(), + serde_json::Value::String("application/binary".to_string()), + ); + let string = *schema.string.unwrap_or_default().clone(); + schema.string = Some(Box::new(schemars::schema::StringValidation { + max_length: MAX.checked_mul(2).map(Some).unwrap_or_default(), + min_length: None, + ..string + })); + schema.into() + } else { + schema + } + } +} + +impl Default for BytesM { + fn default() -> Self { + Self(Vec::default()) + } +} + +impl BytesM { + pub const MAX_LEN: usize = { MAX as usize }; + + #[must_use] + #[allow(clippy::unused_self)] + pub fn max_len(&self) -> usize { + Self::MAX_LEN + } + + #[must_use] + pub fn as_vec(&self) -> &Vec { + self.as_ref() + } +} + +impl BytesM { + #[must_use] + #[cfg(feature = "alloc")] + pub fn to_vec(&self) -> Vec { + self.into() + } + + #[must_use] + pub fn into_vec(self) -> Vec { + self.into() + } +} + +impl BytesM { + #[cfg(feature = "alloc")] + pub fn to_string(&self) -> Result { + self.try_into() + } + + #[cfg(feature = "alloc")] + pub fn into_string(self) -> Result { + self.try_into() + } + + #[cfg(feature = "alloc")] + #[must_use] + pub fn to_string_lossy(&self) -> String { + String::from_utf8_lossy(&self.0).into_owned() + } + + #[cfg(feature = "alloc")] + #[must_use] + pub fn into_string_lossy(self) -> String { + String::from_utf8_lossy(&self.0).into_owned() + } +} + +impl TryFrom> for BytesM { + type Error = Error; + + fn try_from(v: Vec) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(BytesM(v)) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +impl From> for Vec { + #[must_use] + fn from(v: BytesM) -> Self { + v.0 + } +} + +#[cfg(feature = "alloc")] +impl From<&BytesM> for Vec { + #[must_use] + fn from(v: &BytesM) -> Self { + v.0.clone() + } +} + +impl AsRef> for BytesM { + #[must_use] + fn as_ref(&self) -> &Vec { + &self.0 + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&Vec> for BytesM { + type Error = Error; + + fn try_from(v: &Vec) -> Result { + v.as_slice().try_into() + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&[u8]> for BytesM { + type Error = Error; + + fn try_from(v: &[u8]) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(BytesM(v.to_vec())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +impl AsRef<[u8]> for BytesM { + #[cfg(feature = "alloc")] + #[must_use] + fn as_ref(&self) -> &[u8] { + self.0.as_ref() + } + #[cfg(not(feature = "alloc"))] + #[must_use] + fn as_ref(&self) -> &[u8] { + self.0 + } +} 
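The conversions above give `BytesM` a hex `Display`/`FromStr` round-trip and length-checked constructors. A minimal usage sketch, assuming the generated module is in scope as `xdr` (the module name is illustrative):

```rust
use xdr::{BytesM, Error};

fn main() {
    // Conversions check the MAX bound at runtime.
    let b: BytesM<4> = vec![0xde, 0xad, 0xbe, 0xef].try_into().unwrap();

    // Display and FromStr round-trip through lowercase hex.
    assert_eq!(format!("{b}"), "deadbeef");
    let parsed: BytesM<4> = "deadbeef".parse().unwrap();
    assert_eq!(parsed, b);

    // Anything longer than MAX is rejected.
    let too_long: Result<BytesM<2>, Error> = vec![1u8, 2, 3].try_into();
    assert!(matches!(too_long, Err(Error::LengthExceedsMax)));
}
```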
+ +#[cfg(feature = "alloc")] +impl TryFrom<[u8; N]> for BytesM { + type Error = Error; + + fn try_from(v: [u8; N]) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(BytesM(v.to_vec())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +#[cfg(feature = "alloc")] +impl TryFrom> for [u8; N] { + type Error = BytesM; + + fn try_from(v: BytesM) -> core::result::Result { + let s: [u8; N] = v.0.try_into().map_err(BytesM::)?; + Ok(s) + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&[u8; N]> for BytesM { + type Error = Error; + + fn try_from(v: &[u8; N]) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(BytesM(v.to_vec())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +#[cfg(not(feature = "alloc"))] +impl TryFrom<&'static [u8; N]> for BytesM { + type Error = Error; + + fn try_from(v: &'static [u8; N]) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(BytesM(v)) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&String> for BytesM { + type Error = Error; + + fn try_from(v: &String) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(BytesM(v.as_bytes().to_vec())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +#[cfg(feature = "alloc")] +impl TryFrom for BytesM { + type Error = Error; + + fn try_from(v: String) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(BytesM(v.into())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +#[cfg(feature = "alloc")] +impl TryFrom> for String { + type Error = Error; + + fn try_from(v: BytesM) -> Result { + Ok(String::from_utf8(v.0)?) + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&BytesM> for String { + type Error = Error; + + fn try_from(v: &BytesM) -> Result { + Ok(core::str::from_utf8(v.as_ref())?.to_owned()) + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&str> for BytesM { + type Error = Error; + + fn try_from(v: &str) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(BytesM(v.into())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +#[cfg(not(feature = "alloc"))] +impl TryFrom<&'static str> for BytesM { + type Error = Error; + + fn try_from(v: &'static str) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(BytesM(v.as_bytes())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +impl<'a, const MAX: u32> TryFrom<&'a BytesM> for &'a str { + type Error = Error; + + fn try_from(v: &'a BytesM) -> Result { + Ok(core::str::from_utf8(v.as_ref())?) 
+ } +} + +impl ReadXdr for BytesM { + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result { + r.with_limited_depth(|r| { + let len: u32 = u32::read_xdr(r)?; + if len > MAX { + return Err(Error::LengthExceedsMax); + } + + r.consume_len(len as usize)?; + let padding = pad_len(len as usize); + r.consume_len(padding)?; + + let mut vec = vec![0u8; len as usize]; + r.read_exact(&mut vec)?; + + let pad = &mut [0u8; 3][..padding]; + r.read_exact(pad)?; + if pad.iter().any(|b| *b != 0) { + return Err(Error::NonZeroPadding); + } + + Ok(BytesM(vec)) + }) + } +} + +impl WriteXdr for BytesM { + #[cfg(feature = "std")] + fn write_xdr(&self, w: &mut Limited) -> Result<()> { + w.with_limited_depth(|w| { + let len: u32 = self.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + len.write_xdr(w)?; + + w.consume_len(self.len())?; + let padding = pad_len(self.len()); + w.consume_len(padding)?; + + w.write_all(&self.0)?; + + w.write_all(&[0u8; 3][..pad_len(len as usize)])?; + + Ok(()) + }) + } +} + +// StringM ------------------------------------------------------------------------ + +/// A string type that contains arbitrary bytes. +/// +/// Convertible, fallibly, to/from a Rust UTF-8 String using +/// [`TryFrom`]/[`TryInto`]/[`StringM::to_utf8_string`]. +/// +/// Convertible, lossyly, to a Rust UTF-8 String using +/// [`StringM::to_utf8_string_lossy`]. +/// +/// Convertible to/from escaped printable-ASCII using +/// [`Display`]/[`ToString`]/[`FromStr`]. + +#[cfg(feature = "alloc")] +#[derive(Clone, Hash, PartialEq, Eq, PartialOrd, Ord)] +#[cfg_attr( + feature = "serde", + derive(serde_with::SerializeDisplay, serde_with::DeserializeFromStr) +)] +#[cfg_attr(feature = "arbitrary", derive(Arbitrary))] +pub struct StringM(Vec); + +#[cfg(not(feature = "alloc"))] +#[derive(Clone, Hash, PartialEq, Eq, PartialOrd, Ord)] +#[cfg_attr(feature = "arbitrary", derive(Arbitrary))] +pub struct StringM(Vec); + +impl core::fmt::Display for StringM { + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + #[cfg(feature = "alloc")] + let v = &self.0; + #[cfg(not(feature = "alloc"))] + let v = self.0; + for b in escape_bytes::Escape::new(v) { + write!(f, "{}", b as char)?; + } + Ok(()) + } +} + +impl core::fmt::Debug for StringM { + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + #[cfg(feature = "alloc")] + let v = &self.0; + #[cfg(not(feature = "alloc"))] + let v = self.0; + write!(f, "StringM(")?; + for b in escape_bytes::Escape::new(v) { + write!(f, "{}", b as char)?; + } + write!(f, ")")?; + Ok(()) + } +} + +#[cfg(feature = "alloc")] +impl core::str::FromStr for StringM { + type Err = Error; + fn from_str(s: &str) -> core::result::Result { + let b = escape_bytes::unescape(s.as_bytes()).map_err(|_| Error::Invalid)?; + Ok(Self(b)) + } +} + +impl Deref for StringM { + type Target = Vec; + + fn deref(&self) -> &Self::Target { + &self.0 + } +} + +impl Default for StringM { + fn default() -> Self { + Self(Vec::default()) + } +} + +#[cfg(feature = "schemars")] +impl schemars::JsonSchema for StringM { + fn schema_name() -> String { + format!("StringM<{MAX}>") + } + + fn json_schema(gen: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema { + let schema = String::json_schema(gen); + if let schemars::schema::Schema::Object(mut schema) = schema { + let string = *schema.string.unwrap_or_default().clone(); + schema.string = Some(Box::new(schemars::schema::StringValidation { + max_length: Some(MAX), + ..string + })); + schema.into() + } else { + schema + } + } +} + 
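Since `VecM`, `BytesM`, and `StringM` all carry their bound into the generated schema (as `maxItems` or `maxLength`), a quick way to see the effect of the impls above is to generate a root schema for one of them. A hedged sketch, assuming the generated module is available as `xdr`, the `schemars` feature is enabled, and the `schemars` and `serde_json` crates are dependencies:

```rust
use xdr::StringM;

fn main() {
    // The custom impl above should surface the bound as `maxLength: 32`
    // on an ordinary string schema.
    let gen = schemars::gen::SchemaGenerator::default();
    let root = gen.into_root_schema_for::<StringM<32>>();
    println!("{}", serde_json::to_string_pretty(&root).unwrap());
}
```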
+impl StringM { + pub const MAX_LEN: usize = { MAX as usize }; + + #[must_use] + #[allow(clippy::unused_self)] + pub fn max_len(&self) -> usize { + Self::MAX_LEN + } + + #[must_use] + pub fn as_vec(&self) -> &Vec { + self.as_ref() + } +} + +impl StringM { + #[must_use] + #[cfg(feature = "alloc")] + pub fn to_vec(&self) -> Vec { + self.into() + } + + #[must_use] + pub fn into_vec(self) -> Vec { + self.into() + } +} + +impl StringM { + #[cfg(feature = "alloc")] + pub fn to_utf8_string(&self) -> Result { + self.try_into() + } + + #[cfg(feature = "alloc")] + pub fn into_utf8_string(self) -> Result { + self.try_into() + } + + #[cfg(feature = "alloc")] + #[must_use] + pub fn to_utf8_string_lossy(&self) -> String { + String::from_utf8_lossy(&self.0).into_owned() + } + + #[cfg(feature = "alloc")] + #[must_use] + pub fn into_utf8_string_lossy(self) -> String { + String::from_utf8_lossy(&self.0).into_owned() + } +} + +impl TryFrom> for StringM { + type Error = Error; + + fn try_from(v: Vec) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(StringM(v)) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +impl From> for Vec { + #[must_use] + fn from(v: StringM) -> Self { + v.0 + } +} + +#[cfg(feature = "alloc")] +impl From<&StringM> for Vec { + #[must_use] + fn from(v: &StringM) -> Self { + v.0.clone() + } +} + +impl AsRef> for StringM { + #[must_use] + fn as_ref(&self) -> &Vec { + &self.0 + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&Vec> for StringM { + type Error = Error; + + fn try_from(v: &Vec) -> Result { + v.as_slice().try_into() + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&[u8]> for StringM { + type Error = Error; + + fn try_from(v: &[u8]) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(StringM(v.to_vec())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +impl AsRef<[u8]> for StringM { + #[cfg(feature = "alloc")] + #[must_use] + fn as_ref(&self) -> &[u8] { + self.0.as_ref() + } + #[cfg(not(feature = "alloc"))] + #[must_use] + fn as_ref(&self) -> &[u8] { + self.0 + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<[u8; N]> for StringM { + type Error = Error; + + fn try_from(v: [u8; N]) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(StringM(v.to_vec())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +#[cfg(feature = "alloc")] +impl TryFrom> for [u8; N] { + type Error = StringM; + + fn try_from(v: StringM) -> core::result::Result { + let s: [u8; N] = v.0.try_into().map_err(StringM::)?; + Ok(s) + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&[u8; N]> for StringM { + type Error = Error; + + fn try_from(v: &[u8; N]) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(StringM(v.to_vec())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +#[cfg(not(feature = "alloc"))] +impl TryFrom<&'static [u8; N]> for StringM { + type Error = Error; + + fn try_from(v: &'static [u8; N]) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(StringM(v)) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&String> for StringM { + type Error = Error; + + fn try_from(v: &String) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(StringM(v.as_bytes().to_vec())) + } else { + 
Err(Error::LengthExceedsMax) + } + } +} + +#[cfg(feature = "alloc")] +impl TryFrom for StringM { + type Error = Error; + + fn try_from(v: String) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(StringM(v.into())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +#[cfg(feature = "alloc")] +impl TryFrom> for String { + type Error = Error; + + fn try_from(v: StringM) -> Result { + Ok(String::from_utf8(v.0)?) + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&StringM> for String { + type Error = Error; + + fn try_from(v: &StringM) -> Result { + Ok(core::str::from_utf8(v.as_ref())?.to_owned()) + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&str> for StringM { + type Error = Error; + + fn try_from(v: &str) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(StringM(v.into())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +#[cfg(not(feature = "alloc"))] +impl TryFrom<&'static str> for StringM { + type Error = Error; + + fn try_from(v: &'static str) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(StringM(v.as_bytes())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +impl<'a, const MAX: u32> TryFrom<&'a StringM> for &'a str { + type Error = Error; + + fn try_from(v: &'a StringM) -> Result { + Ok(core::str::from_utf8(v.as_ref())?) + } +} + +impl ReadXdr for StringM { + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result { + r.with_limited_depth(|r| { + let len: u32 = u32::read_xdr(r)?; + if len > MAX { + return Err(Error::LengthExceedsMax); + } + + r.consume_len(len as usize)?; + let padding = pad_len(len as usize); + r.consume_len(padding)?; + + let mut vec = vec![0u8; len as usize]; + r.read_exact(&mut vec)?; + + let pad = &mut [0u8; 3][..padding]; + r.read_exact(pad)?; + if pad.iter().any(|b| *b != 0) { + return Err(Error::NonZeroPadding); + } + + Ok(StringM(vec)) + }) + } +} + +impl WriteXdr for StringM { + #[cfg(feature = "std")] + fn write_xdr(&self, w: &mut Limited) -> Result<()> { + w.with_limited_depth(|w| { + let len: u32 = self.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + len.write_xdr(w)?; + + w.consume_len(self.len())?; + let padding = pad_len(self.len()); + w.consume_len(padding)?; + + w.write_all(&self.0)?; + + w.write_all(&[0u8; 3][..padding])?; + + Ok(()) + }) + } +} + +// Frame ------------------------------------------------------------------------ + +#[derive(Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord)] +#[cfg_attr( + all(feature = "serde", feature = "alloc"), + derive(serde::Serialize, serde::Deserialize), + serde(rename_all = "snake_case") +)] +pub struct Frame(pub T) +where + T: ReadXdr; + +#[cfg(feature = "schemars")] +impl schemars::JsonSchema for Frame { + fn schema_name() -> String { + format!("Frame<{}>", T::schema_name()) + } + + fn json_schema(gen: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema { + T::json_schema(gen) + } +} + +impl ReadXdr for Frame +where + T: ReadXdr, +{ + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result { + // Read the frame header value that contains 1 flag-bit and a 33-bit length. + // - The 1 flag bit is 0 when there are more frames for the same record. + // - The 31-bit length is the length of the bytes within the frame that + // follow the frame header. + let header = u32::read_xdr(r)?; + // TODO: Use the length and cap the length we'll read from `r`. 
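+        // The frame length occupies the low 31 bits of the header
+        // (`header & 0x7fff_ffff`); per the TODO above it is not yet used to
+        // cap the read that follows.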
+ let last_record = header >> 31 == 1; + if last_record { + // Read the record in the frame. + Ok(Self(T::read_xdr(r)?)) + } else { + // TODO: Support reading those additional frames for the same + // record. + Err(Error::Unsupported) + } + } +} + +#[cfg(all(test, feature = "std"))] +mod tests { + use std::io::Cursor; + + use super::*; + + #[test] + pub fn vec_u8_read_without_padding() { + let buf = Cursor::new(vec![0, 0, 0, 4, 2, 2, 2, 2]); + let v = VecM::::read_xdr(&mut Limited::new(buf, Limits::none())).unwrap(); + assert_eq!(v.to_vec(), vec![2, 2, 2, 2]); + } + + #[test] + pub fn vec_u8_read_with_padding() { + let buf = Cursor::new(vec![0, 0, 0, 1, 2, 0, 0, 0]); + let v = VecM::::read_xdr(&mut Limited::new(buf, Limits::none())).unwrap(); + assert_eq!(v.to_vec(), vec![2]); + } + + #[test] + pub fn vec_u8_read_with_insufficient_padding() { + let buf = Cursor::new(vec![0, 0, 0, 1, 2, 0, 0]); + let res = VecM::::read_xdr(&mut Limited::new(buf, Limits::none())); + match res { + Err(Error::Io(_)) => (), + _ => panic!("expected IO error got {res:?}"), + } + } + + #[test] + pub fn vec_u8_read_with_non_zero_padding() { + let buf = Cursor::new(vec![0, 0, 0, 1, 2, 3, 0, 0]); + let res = VecM::::read_xdr(&mut Limited::new(buf, Limits::none())); + match res { + Err(Error::NonZeroPadding) => (), + _ => panic!("expected NonZeroPadding got {res:?}"), + } + } + + #[test] + pub fn vec_u8_write_without_padding() { + let mut buf = vec![]; + let v: VecM = vec![2, 2, 2, 2].try_into().unwrap(); + + v.write_xdr(&mut Limited::new(Cursor::new(&mut buf), Limits::none())) + .unwrap(); + assert_eq!(buf, vec![0, 0, 0, 4, 2, 2, 2, 2]); + } + + #[test] + pub fn vec_u8_write_with_padding() { + let mut buf = vec![]; + let v: VecM = vec![2].try_into().unwrap(); + v.write_xdr(&mut Limited::new(Cursor::new(&mut buf), Limits::none())) + .unwrap(); + assert_eq!(buf, vec![0, 0, 0, 1, 2, 0, 0, 0]); + } + + #[test] + pub fn arr_u8_read_without_padding() { + let buf = Cursor::new(vec![2, 2, 2, 2]); + let v = <[u8; 4]>::read_xdr(&mut Limited::new(buf, Limits::none())).unwrap(); + assert_eq!(v, [2, 2, 2, 2]); + } + + #[test] + pub fn arr_u8_read_with_padding() { + let buf = Cursor::new(vec![2, 0, 0, 0]); + let v = <[u8; 1]>::read_xdr(&mut Limited::new(buf, Limits::none())).unwrap(); + assert_eq!(v, [2]); + } + + #[test] + pub fn arr_u8_read_with_insufficient_padding() { + let buf = Cursor::new(vec![2, 0, 0]); + let res = <[u8; 1]>::read_xdr(&mut Limited::new(buf, Limits::none())); + match res { + Err(Error::Io(_)) => (), + _ => panic!("expected IO error got {res:?}"), + } + } + + #[test] + pub fn arr_u8_read_with_non_zero_padding() { + let buf = Cursor::new(vec![2, 3, 0, 0]); + let res = <[u8; 1]>::read_xdr(&mut Limited::new(buf, Limits::none())); + match res { + Err(Error::NonZeroPadding) => (), + _ => panic!("expected NonZeroPadding got {res:?}"), + } + } + + #[test] + pub fn arr_u8_write_without_padding() { + let mut buf = vec![]; + [2u8, 2, 2, 2] + .write_xdr(&mut Limited::new(Cursor::new(&mut buf), Limits::none())) + .unwrap(); + assert_eq!(buf, vec![2, 2, 2, 2]); + } + + #[test] + pub fn arr_u8_write_with_padding() { + let mut buf = vec![]; + [2u8] + .write_xdr(&mut Limited::new(Cursor::new(&mut buf), Limits::none())) + .unwrap(); + assert_eq!(buf, vec![2, 0, 0, 0]); + } +} + +#[cfg(all(test, feature = "std"))] +mod test { + use super::*; + + #[test] + fn into_option_none() { + let v: VecM = vec![].try_into().unwrap(); + assert_eq!(v.into_option(), None); + } + + #[test] + fn into_option_some() { + let v: VecM<_, 1> = 
vec![1].try_into().unwrap(); + assert_eq!(v.into_option(), Some(1)); + } + + #[test] + fn to_option_none() { + let v: VecM = vec![].try_into().unwrap(); + assert_eq!(v.to_option(), None); + } + + #[test] + fn to_option_some() { + let v: VecM<_, 1> = vec![1].try_into().unwrap(); + assert_eq!(v.to_option(), Some(1)); + } + + #[test] + fn depth_limited_read_write_under_the_limit_success() { + let a: Option>> = Some(Some(Some(5))); + let mut buf = Limited::new(Vec::new(), Limits::depth(4)); + a.write_xdr(&mut buf).unwrap(); + + let mut dlr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::depth(4)); + let a_back: Option>> = ReadXdr::read_xdr(&mut dlr).unwrap(); + assert_eq!(a, a_back); + } + + #[test] + fn write_over_depth_limit_fail() { + let a: Option>> = Some(Some(Some(5))); + let mut buf = Limited::new(Vec::new(), Limits::depth(3)); + let res = a.write_xdr(&mut buf); + match res { + Err(Error::DepthLimitExceeded) => (), + _ => panic!("expected DepthLimitExceeded got {res:?}"), + } + } + + #[test] + fn read_over_depth_limit_fail() { + let read_limits = Limits::depth(3); + let write_limits = Limits::depth(5); + let a: Option>> = Some(Some(Some(5))); + let mut buf = Limited::new(Vec::new(), write_limits); + a.write_xdr(&mut buf).unwrap(); + + let mut dlr = Limited::new(Cursor::new(buf.inner.as_slice()), read_limits); + let res: Result>>> = ReadXdr::read_xdr(&mut dlr); + match res { + Err(Error::DepthLimitExceeded) => (), + _ => panic!("expected DepthLimitExceeded got {res:?}"), + } + } + + #[test] + fn length_limited_read_write_i32() { + // Exact limit, success + let v = 123i32; + let mut buf = Limited::new(Vec::new(), Limits::len(4)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(4)); + let v_back: i32 = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 0); + assert_eq!(v, v_back); + + // Over limit, success + let v = 123i32; + let mut buf = Limited::new(Vec::new(), Limits::len(5)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 1); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(5)); + let v_back: i32 = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 1); + assert_eq!(v, v_back); + + // Write under limit, failure + let v = 123i32; + let mut buf = Limited::new(Vec::new(), Limits::len(3)); + assert_eq!(v.write_xdr(&mut buf), Err(Error::LengthLimitExceeded)); + + // Read under limit, failure + let v = 123i32; + let mut buf = Limited::new(Vec::new(), Limits::len(4)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(3)); + assert_eq!( + ::read_xdr(&mut lr), + Err(Error::LengthLimitExceeded) + ); + } + + #[test] + fn length_limited_read_write_u32() { + // Exact limit, success + let v = 123u32; + let mut buf = Limited::new(Vec::new(), Limits::len(4)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(4)); + let v_back: u32 = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 0); + assert_eq!(v, v_back); + + // Over limit, success + let v = 123u32; + let mut buf = Limited::new(Vec::new(), Limits::len(5)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 1); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(5)); + let v_back: u32 = ReadXdr::read_xdr(&mut lr).unwrap(); + 
assert_eq!(buf.limits.len, 1); + assert_eq!(v, v_back); + + // Write under limit, failure + let v = 123u32; + let mut buf = Limited::new(Vec::new(), Limits::len(3)); + assert_eq!(v.write_xdr(&mut buf), Err(Error::LengthLimitExceeded)); + + // Read under limit, failure + let v = 123u32; + let mut buf = Limited::new(Vec::new(), Limits::len(4)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(3)); + assert_eq!( + ::read_xdr(&mut lr), + Err(Error::LengthLimitExceeded) + ); + } + + #[test] + fn length_limited_read_write_i64() { + // Exact limit, success + let v = 123i64; + let mut buf = Limited::new(Vec::new(), Limits::len(8)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(8)); + let v_back: i64 = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 0); + assert_eq!(v, v_back); + + // Over limit, success + let v = 123i64; + let mut buf = Limited::new(Vec::new(), Limits::len(9)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 1); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(9)); + let v_back: i64 = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 1); + assert_eq!(v, v_back); + + // Write under limit, failure + let v = 123i64; + let mut buf = Limited::new(Vec::new(), Limits::len(7)); + assert_eq!(v.write_xdr(&mut buf), Err(Error::LengthLimitExceeded)); + + // Read under limit, failure + let v = 123i64; + let mut buf = Limited::new(Vec::new(), Limits::len(8)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(7)); + assert_eq!( + ::read_xdr(&mut lr), + Err(Error::LengthLimitExceeded) + ); + } + + #[test] + fn length_limited_read_write_u64() { + // Exact limit, success + let v = 123u64; + let mut buf = Limited::new(Vec::new(), Limits::len(8)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(8)); + let v_back: u64 = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 0); + assert_eq!(v, v_back); + + // Over limit, success + let v = 123u64; + let mut buf = Limited::new(Vec::new(), Limits::len(9)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 1); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(9)); + let v_back: u64 = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 1); + assert_eq!(v, v_back); + + // Write under limit, failure + let v = 123u64; + let mut buf = Limited::new(Vec::new(), Limits::len(7)); + assert_eq!(v.write_xdr(&mut buf), Err(Error::LengthLimitExceeded)); + + // Read under limit, failure + let v = 123u64; + let mut buf = Limited::new(Vec::new(), Limits::len(8)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(7)); + assert_eq!( + ::read_xdr(&mut lr), + Err(Error::LengthLimitExceeded) + ); + } + + #[test] + fn length_limited_read_write_bool() { + // Exact limit, success + let v = true; + let mut buf = Limited::new(Vec::new(), Limits::len(4)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(4)); + let v_back: bool = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 0); 
+ assert_eq!(v, v_back); + + // Over limit, success + let v = true; + let mut buf = Limited::new(Vec::new(), Limits::len(5)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 1); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(5)); + let v_back: bool = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 1); + assert_eq!(v, v_back); + + // Write under limit, failure + let v = true; + let mut buf = Limited::new(Vec::new(), Limits::len(3)); + assert_eq!(v.write_xdr(&mut buf), Err(Error::LengthLimitExceeded)); + + // Read under limit, failure + let v = true; + let mut buf = Limited::new(Vec::new(), Limits::len(4)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(3)); + assert_eq!( + ::read_xdr(&mut lr), + Err(Error::LengthLimitExceeded) + ); + } + + #[test] + fn length_limited_read_write_option() { + // Exact limit, success + let v = Some(true); + let mut buf = Limited::new(Vec::new(), Limits::len(8)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(8)); + let v_back: Option = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 0); + assert_eq!(v, v_back); + + // Over limit, success + let v = Some(true); + let mut buf = Limited::new(Vec::new(), Limits::len(9)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 1); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(9)); + let v_back: Option = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 1); + assert_eq!(v, v_back); + + // Write under limit, failure + let v = Some(true); + let mut buf = Limited::new(Vec::new(), Limits::len(7)); + assert_eq!(v.write_xdr(&mut buf), Err(Error::LengthLimitExceeded)); + + // Read under limit, failure + let v = Some(true); + let mut buf = Limited::new(Vec::new(), Limits::len(8)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(7)); + assert_eq!( + as ReadXdr>::read_xdr(&mut lr), + Err(Error::LengthLimitExceeded) + ); + } + + #[test] + fn length_limited_read_write_array_u8() { + // Exact limit, success + let v = [1u8, 2, 3]; + let mut buf = Limited::new(Vec::new(), Limits::len(4)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(4)); + let v_back: [u8; 3] = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 0); + assert_eq!(v, v_back); + + // Over limit, success + let v = [1u8, 2, 3]; + let mut buf = Limited::new(Vec::new(), Limits::len(5)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 1); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(5)); + let v_back: [u8; 3] = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 1); + assert_eq!(v, v_back); + + // Write under limit, failure + let v = [1u8, 2, 3]; + let mut buf = Limited::new(Vec::new(), Limits::len(3)); + assert_eq!(v.write_xdr(&mut buf), Err(Error::LengthLimitExceeded)); + + // Read under limit, failure + let v = [1u8, 2, 3]; + let mut buf = Limited::new(Vec::new(), Limits::len(4)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(3)); + assert_eq!( + <[u8; 3] as ReadXdr>::read_xdr(&mut lr), + 
Err(Error::LengthLimitExceeded) + ); + } + + #[test] + fn length_limited_read_write_array_type() { + // Exact limit, success + let v = [true, false, true]; + let mut buf = Limited::new(Vec::new(), Limits::len(12)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(12)); + let v_back: [bool; 3] = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 0); + assert_eq!(v, v_back); + + // Over limit, success + let v = [true, false, true]; + let mut buf = Limited::new(Vec::new(), Limits::len(13)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 1); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(13)); + let v_back: [bool; 3] = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 1); + assert_eq!(v, v_back); + + // Write under limit, failure + let v = [true, false, true]; + let mut buf = Limited::new(Vec::new(), Limits::len(11)); + assert_eq!(v.write_xdr(&mut buf), Err(Error::LengthLimitExceeded)); + + // Read under limit, failure + let v = [true, false, true]; + let mut buf = Limited::new(Vec::new(), Limits::len(12)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(11)); + assert_eq!( + <[bool; 3] as ReadXdr>::read_xdr(&mut lr), + Err(Error::LengthLimitExceeded) + ); + } + + #[test] + fn length_limited_read_write_vec() { + // Exact limit, success + let v = VecM::::try_from([1i32, 2, 3]).unwrap(); + let mut buf = Limited::new(Vec::new(), Limits::len(16)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(16)); + let v_back: VecM = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 0); + assert_eq!(v, v_back); + + // Over limit, success + let v = VecM::::try_from([1i32, 2, 3]).unwrap(); + let mut buf = Limited::new(Vec::new(), Limits::len(17)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 1); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(17)); + let v_back: VecM = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 1); + assert_eq!(v, v_back); + + // Write under limit, failure + let v = VecM::::try_from([1i32, 2, 3]).unwrap(); + let mut buf = Limited::new(Vec::new(), Limits::len(15)); + assert_eq!(v.write_xdr(&mut buf), Err(Error::LengthLimitExceeded)); + + // Read under limit, failure + let v = VecM::::try_from([1i32, 2, 3]).unwrap(); + let mut buf = Limited::new(Vec::new(), Limits::len(16)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(15)); + assert_eq!( + as ReadXdr>::read_xdr(&mut lr), + Err(Error::LengthLimitExceeded) + ); + } + + #[test] + fn length_limited_read_write_bytes() { + // Exact limit, success + let v = BytesM::<3>::try_from([1u8, 2, 3]).unwrap(); + let mut buf = Limited::new(Vec::new(), Limits::len(8)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(8)); + let v_back: BytesM<3> = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 0); + assert_eq!(v, v_back); + + // Over limit, success + let v = BytesM::<3>::try_from([1u8, 2, 3]).unwrap(); + let mut buf = Limited::new(Vec::new(), Limits::len(9)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 1); + 
let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(9)); + let v_back: BytesM<3> = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 1); + assert_eq!(v, v_back); + + // Write under limit, failure + let v = BytesM::<3>::try_from([1u8, 2, 3]).unwrap(); + let mut buf = Limited::new(Vec::new(), Limits::len(7)); + assert_eq!(v.write_xdr(&mut buf), Err(Error::LengthLimitExceeded)); + + // Read under limit, failure + let v = BytesM::<3>::try_from([1u8, 2, 3]).unwrap(); + let mut buf = Limited::new(Vec::new(), Limits::len(8)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(7)); + assert_eq!( + as ReadXdr>::read_xdr(&mut lr), + Err(Error::LengthLimitExceeded) + ); + } + + #[test] + fn length_limited_read_write_string() { + // Exact limit, success + let v = StringM::<3>::try_from("123").unwrap(); + let mut buf = Limited::new(Vec::new(), Limits::len(8)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(8)); + let v_back: StringM<3> = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 0); + assert_eq!(v, v_back); + + // Over limit, success + let v = StringM::<3>::try_from("123").unwrap(); + let mut buf = Limited::new(Vec::new(), Limits::len(9)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 1); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(9)); + let v_back: StringM<3> = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 1); + assert_eq!(v, v_back); + + // Write under limit, failure + let v = StringM::<3>::try_from("123").unwrap(); + let mut buf = Limited::new(Vec::new(), Limits::len(7)); + assert_eq!(v.write_xdr(&mut buf), Err(Error::LengthLimitExceeded)); + + // Read under limit, failure + let v = StringM::<3>::try_from("123").unwrap(); + let mut buf = Limited::new(Vec::new(), Limits::len(8)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(7)); + assert_eq!( + as ReadXdr>::read_xdr(&mut lr), + Err(Error::LengthLimitExceeded) + ); + } +} + +#[cfg(all(test, not(feature = "alloc")))] +mod test { + use super::VecM; + + #[test] + fn to_option_none() { + let v: VecM = (&[]).try_into().unwrap(); + assert_eq!(v.to_option(), None); + } + + #[test] + fn to_option_some() { + let v: VecM<_, 1> = (&[1]).try_into().unwrap(); + assert_eq!(v.to_option(), Some(1)); + } +} + +/// Arr is an XDR Typedef defines as: +/// +/// ```text +/// typedef int Arr[2]; +/// ``` +/// +pub type Arr = [i32; 2]; + +/// HasOptions is an XDR Struct defines as: +/// +/// ```text +/// struct HasOptions +/// { +/// int* firstOption; +/// int *secondOption; +/// Arr *thirdOption; +/// }; +/// ``` +/// +#[derive(Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord)] +#[cfg_attr(feature = "arbitrary", derive(Arbitrary))] +#[cfg_attr(all(feature = "serde", feature = "alloc"), derive(serde::Serialize, serde::Deserialize), serde(rename_all = "snake_case"))] +pub struct HasOptions { + pub first_option: Option, + pub second_option: Option, + pub third_option: Option, +} + + impl ReadXdr for HasOptions { + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result { + r.with_limited_depth(|r| { + Ok(Self{ + first_option: Option::::read_xdr(r)?, +second_option: Option::::read_xdr(r)?, +third_option: Option::::read_xdr(r)?, + }) + }) + } + } + + impl 
WriteXdr for HasOptions { + #[cfg(feature = "std")] + fn write_xdr(&self, w: &mut Limited) -> Result<()> { + w.with_limited_depth(|w| { + self.first_option.write_xdr(w)?; +self.second_option.write_xdr(w)?; +self.third_option.write_xdr(w)?; + Ok(()) + }) + } + } + + #[derive(Clone, Copy, Debug, Hash, PartialEq, Eq, PartialOrd, Ord)] + #[cfg_attr( + all(feature = "serde", feature = "alloc"), + derive(serde::Serialize, serde::Deserialize), + serde(rename_all = "snake_case") + )] + #[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] + pub enum TypeVariant { + Arr, +HasOptions, + } + + impl TypeVariant { + pub const VARIANTS: [TypeVariant; 2] = [ TypeVariant::Arr, +TypeVariant::HasOptions, ]; + pub const VARIANTS_STR: [&'static str; 2] = [ "Arr", +"HasOptions", ]; + + #[must_use] + #[allow(clippy::too_many_lines)] + pub const fn name(&self) -> &'static str { + match self { + Self::Arr => "Arr", +Self::HasOptions => "HasOptions", + } + } + + #[must_use] + #[allow(clippy::too_many_lines)] + pub const fn variants() -> [TypeVariant; 2] { + Self::VARIANTS + } + + #[cfg(feature = "schemars")] + #[must_use] + #[allow(clippy::too_many_lines)] + pub fn json_schema(&self, gen: schemars::gen::SchemaGenerator) -> schemars::schema::RootSchema { + match self { + Self::Arr => gen.into_root_schema_for::(), +Self::HasOptions => gen.into_root_schema_for::(), + } + } + } + + impl Name for TypeVariant { + #[must_use] + fn name(&self) -> &'static str { + Self::name(self) + } + } + + impl Variants for TypeVariant { + fn variants() -> slice::Iter<'static, TypeVariant> { + Self::VARIANTS.iter() + } + } + + impl core::str::FromStr for TypeVariant { + type Err = Error; + #[allow(clippy::too_many_lines)] + fn from_str(s: &str) -> Result { + match s { + "Arr" => Ok(Self::Arr), +"HasOptions" => Ok(Self::HasOptions), + _ => Err(Error::Invalid), + } + } + } + + #[derive(Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord)] + #[cfg_attr( + all(feature = "serde", feature = "alloc"), + derive(serde::Serialize, serde::Deserialize), + serde(rename_all = "snake_case"), + serde(untagged), + )] + #[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] + pub enum Type { + Arr(Box), +HasOptions(Box), + } + + impl Type { + pub const VARIANTS: [TypeVariant; 2] = [ TypeVariant::Arr, +TypeVariant::HasOptions, ]; + pub const VARIANTS_STR: [&'static str; 2] = [ "Arr", +"HasOptions", ]; + + #[cfg(feature = "std")] + #[allow(clippy::too_many_lines)] + pub fn read_xdr(v: TypeVariant, r: &mut Limited) -> Result { + match v { + TypeVariant::Arr => r.with_limited_depth(|r| Ok(Self::Arr(Box::new(Arr::read_xdr(r)?)))), +TypeVariant::HasOptions => r.with_limited_depth(|r| Ok(Self::HasOptions(Box::new(HasOptions::read_xdr(r)?)))), + } + } + + #[cfg(feature = "base64")] + pub fn read_xdr_base64(v: TypeVariant, r: &mut Limited) -> Result { + let mut dec = Limited::new(base64::read::DecoderReader::new(&mut r.inner, base64::STANDARD), r.limits.clone()); + let t = Self::read_xdr(v, &mut dec)?; + Ok(t) + } + + #[cfg(feature = "std")] + pub fn read_xdr_to_end(v: TypeVariant, r: &mut Limited) -> Result { + let s = Self::read_xdr(v, r)?; + // Check that any further reads, such as this read of one byte, read no + // data, indicating EOF. If a byte is read the data is invalid. + if r.read(&mut [0u8; 1])? 
== 0 { + Ok(s) + } else { + Err(Error::Invalid) + } + } + + #[cfg(feature = "base64")] + pub fn read_xdr_base64_to_end(v: TypeVariant, r: &mut Limited) -> Result { + let mut dec = Limited::new(base64::read::DecoderReader::new(&mut r.inner, base64::STANDARD), r.limits.clone()); + let t = Self::read_xdr_to_end(v, &mut dec)?; + Ok(t) + } + + #[cfg(feature = "std")] + #[allow(clippy::too_many_lines)] + pub fn read_xdr_iter(v: TypeVariant, r: &mut Limited) -> Box> + '_> { + match v { + TypeVariant::Arr => Box::new(ReadXdrIter::<_, Arr>::new(&mut r.inner, r.limits.clone()).map(|r| r.map(|t| Self::Arr(Box::new(t))))), +TypeVariant::HasOptions => Box::new(ReadXdrIter::<_, HasOptions>::new(&mut r.inner, r.limits.clone()).map(|r| r.map(|t| Self::HasOptions(Box::new(t))))), + } + } + + #[cfg(feature = "std")] + #[allow(clippy::too_many_lines)] + pub fn read_xdr_framed_iter(v: TypeVariant, r: &mut Limited) -> Box> + '_> { + match v { + TypeVariant::Arr => Box::new(ReadXdrIter::<_, Frame>::new(&mut r.inner, r.limits.clone()).map(|r| r.map(|t| Self::Arr(Box::new(t.0))))), +TypeVariant::HasOptions => Box::new(ReadXdrIter::<_, Frame>::new(&mut r.inner, r.limits.clone()).map(|r| r.map(|t| Self::HasOptions(Box::new(t.0))))), + } + } + + #[cfg(feature = "base64")] + #[allow(clippy::too_many_lines)] + pub fn read_xdr_base64_iter(v: TypeVariant, r: &mut Limited) -> Box> + '_> { + let dec = base64::read::DecoderReader::new(&mut r.inner, base64::STANDARD); + match v { + TypeVariant::Arr => Box::new(ReadXdrIter::<_, Arr>::new(dec, r.limits.clone()).map(|r| r.map(|t| Self::Arr(Box::new(t))))), +TypeVariant::HasOptions => Box::new(ReadXdrIter::<_, HasOptions>::new(dec, r.limits.clone()).map(|r| r.map(|t| Self::HasOptions(Box::new(t))))), + } + } + + #[cfg(feature = "std")] + pub fn from_xdr>(v: TypeVariant, bytes: B, limits: Limits) -> Result { + let mut cursor = Limited::new(Cursor::new(bytes.as_ref()), limits); + let t = Self::read_xdr_to_end(v, &mut cursor)?; + Ok(t) + } + + #[cfg(feature = "base64")] + pub fn from_xdr_base64(v: TypeVariant, b64: impl AsRef<[u8]>, limits: Limits) -> Result { + let mut b64_reader = Cursor::new(b64); + let mut dec = Limited::new(base64::read::DecoderReader::new(&mut b64_reader, base64::STANDARD), limits); + let t = Self::read_xdr_to_end(v, &mut dec)?; + Ok(t) + } + + #[cfg(all(feature = "std", feature = "serde_json"))] + #[allow(clippy::too_many_lines)] + pub fn read_json(v: TypeVariant, r: impl Read) -> Result { + match v { + TypeVariant::Arr => Ok(Self::Arr(Box::new(serde_json::from_reader(r)?))), +TypeVariant::HasOptions => Ok(Self::HasOptions(Box::new(serde_json::from_reader(r)?))), + } + } + + #[cfg(feature = "alloc")] + #[must_use] + #[allow(clippy::too_many_lines)] + pub fn value(&self) -> &dyn core::any::Any { + #[allow(clippy::match_same_arms)] + match self { + Self::Arr(ref v) => v.as_ref(), +Self::HasOptions(ref v) => v.as_ref(), + } + } + + #[must_use] + #[allow(clippy::too_many_lines)] + pub const fn name(&self) -> &'static str { + match self { + Self::Arr(_) => "Arr", +Self::HasOptions(_) => "HasOptions", + } + } + + #[must_use] + #[allow(clippy::too_many_lines)] + pub const fn variants() -> [TypeVariant; 2] { + Self::VARIANTS + } + + #[must_use] + #[allow(clippy::too_many_lines)] + pub const fn variant(&self) -> TypeVariant { + match self { + Self::Arr(_) => TypeVariant::Arr, +Self::HasOptions(_) => TypeVariant::HasOptions, + } + } + } + + impl Name for Type { + #[must_use] + fn name(&self) -> &'static str { + Self::name(self) + } + } + + impl Variants for Type { + 
fn variants() -> slice::Iter<'static, TypeVariant> { + Self::VARIANTS.iter() + } + } + + impl WriteXdr for Type { + #[cfg(feature = "std")] + #[allow(clippy::too_many_lines)] + fn write_xdr(&self, w: &mut Limited) -> Result<()> { + match self { + Self::Arr(v) => v.write_xdr(w), +Self::HasOptions(v) => v.write_xdr(w), + } + } + } diff --git a/spec/output/generator_spec_rust_custom_jsonschema_impls/struct.x/MyXDR.rs b/spec/output/generator_spec_rust_custom_jsonschema_impls/struct.x/MyXDR.rs new file mode 100644 index 000000000..b7db61fc8 --- /dev/null +++ b/spec/output/generator_spec_rust_custom_jsonschema_impls/struct.x/MyXDR.rs @@ -0,0 +1,3075 @@ +// Module is generated from: +// spec/fixtures/generator/struct.x + +#![allow(clippy::missing_errors_doc, clippy::unreadable_literal)] + +/// `XDR_FILES_SHA256` is a list of pairs of source files and their SHA256 hashes. +pub const XDR_FILES_SHA256: [(&str, &str); 1] = [ + ("spec/fixtures/generator/struct.x", "c6911a83390e3b499c078fd0c579132eacce88a4a0538d3b8b5e57747a58db4a") +]; + +use core::{array::TryFromSliceError, fmt, fmt::Debug, marker::Sized, ops::Deref, slice}; + +#[cfg(feature = "std")] +use core::marker::PhantomData; + +// When feature alloc is turned off use static lifetime Box and Vec types. +#[cfg(not(feature = "alloc"))] +mod noalloc { + pub mod boxed { + pub type Box = &'static T; + } + pub mod vec { + pub type Vec = &'static [T]; + } +} +#[cfg(not(feature = "alloc"))] +use noalloc::{boxed::Box, vec::Vec}; + +// When feature std is turned off, but feature alloc is turned on import the +// alloc crate and use its Box and Vec types. +#[cfg(all(not(feature = "std"), feature = "alloc"))] +extern crate alloc; +#[cfg(all(not(feature = "std"), feature = "alloc"))] +use alloc::{ + borrow::ToOwned, + boxed::Box, + string::{FromUtf8Error, String}, + vec::Vec, +}; +#[cfg(feature = "std")] +use std::string::FromUtf8Error; + +#[cfg(feature = "arbitrary")] +use arbitrary::Arbitrary; + +// TODO: Add support for read/write xdr fns when std not available. + +#[cfg(feature = "std")] +use std::{ + error, io, + io::{BufRead, BufReader, Cursor, Read, Write}, +}; + +/// Error contains all errors returned by functions in this crate. It can be +/// compared via `PartialEq`, however any contained IO errors will only be +/// compared on their `ErrorKind`. +#[derive(Debug)] +pub enum Error { + Invalid, + Unsupported, + LengthExceedsMax, + LengthMismatch, + NonZeroPadding, + Utf8Error(core::str::Utf8Error), + #[cfg(feature = "alloc")] + InvalidHex, + #[cfg(feature = "std")] + Io(io::Error), + DepthLimitExceeded, + #[cfg(feature = "serde_json")] + Json(serde_json::Error), + LengthLimitExceeded, +} + +impl PartialEq for Error { + fn eq(&self, other: &Self) -> bool { + match (self, other) { + (Self::Utf8Error(l), Self::Utf8Error(r)) => l == r, + // IO errors cannot be compared, but in the absence of any more + // meaningful way to compare the errors we compare the kind of error + // and ignore the embedded source error or OS error. The main use + // case for comparing errors outputted by the XDR library is for + // error case testing, and a lack of the ability to compare has a + // detrimental affect on failure testing, so this is a tradeoff. 
+ #[cfg(feature = "std")] + (Self::Io(l), Self::Io(r)) => l.kind() == r.kind(), + _ => core::mem::discriminant(self) == core::mem::discriminant(other), + } + } +} + +#[cfg(feature = "std")] +impl error::Error for Error { + #[must_use] + fn source(&self) -> Option<&(dyn error::Error + 'static)> { + match self { + Self::Io(e) => Some(e), + #[cfg(feature = "serde_json")] + Self::Json(e) => Some(e), + _ => None, + } + } +} + +impl fmt::Display for Error { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match self { + Error::Invalid => write!(f, "xdr value invalid"), + Error::Unsupported => write!(f, "xdr value unsupported"), + Error::LengthExceedsMax => write!(f, "xdr value max length exceeded"), + Error::LengthMismatch => write!(f, "xdr value length does not match"), + Error::NonZeroPadding => write!(f, "xdr padding contains non-zero bytes"), + Error::Utf8Error(e) => write!(f, "{e}"), + #[cfg(feature = "alloc")] + Error::InvalidHex => write!(f, "hex invalid"), + #[cfg(feature = "std")] + Error::Io(e) => write!(f, "{e}"), + Error::DepthLimitExceeded => write!(f, "depth limit exceeded"), + #[cfg(feature = "serde_json")] + Error::Json(e) => write!(f, "{e}"), + Error::LengthLimitExceeded => write!(f, "length limit exceeded"), + } + } +} + +impl From for Error { + fn from(_: TryFromSliceError) -> Error { + Error::LengthMismatch + } +} + +impl From for Error { + #[must_use] + fn from(e: core::str::Utf8Error) -> Self { + Error::Utf8Error(e) + } +} + +#[cfg(feature = "alloc")] +impl From for Error { + #[must_use] + fn from(e: FromUtf8Error) -> Self { + Error::Utf8Error(e.utf8_error()) + } +} + +#[cfg(feature = "std")] +impl From for Error { + #[must_use] + fn from(e: io::Error) -> Self { + Error::Io(e) + } +} + +#[cfg(feature = "serde_json")] +impl From for Error { + #[must_use] + fn from(e: serde_json::Error) -> Self { + Error::Json(e) + } +} + +impl From for () { + fn from(_: Error) {} +} + +#[allow(dead_code)] +type Result = core::result::Result; + +/// Name defines types that assign a static name to their value, such as the +/// name given to an identifier in an XDR enum, or the name given to the case in +/// a union. +pub trait Name { + fn name(&self) -> &'static str; +} + +/// Discriminant defines types that may contain a one-of value determined +/// according to the discriminant, and exposes the value of the discriminant for +/// that type, such as in an XDR union. +pub trait Discriminant { + fn discriminant(&self) -> D; +} + +/// Iter defines types that have variants that can be iterated. +pub trait Variants { + fn variants() -> slice::Iter<'static, V> + where + V: Sized; +} + +// Enum defines a type that is represented as an XDR enumeration when encoded. +pub trait Enum: Name + Variants + Sized {} + +// Union defines a type that is represented as an XDR union when encoded. +pub trait Union: Name + Discriminant + Variants +where + D: Sized, +{ +} + +/// `Limits` contains the limits that a limited reader or writer will be +/// constrained to. +#[cfg(feature = "std")] +#[derive(Debug, Clone, Hash, PartialEq, Eq, PartialOrd, Ord)] +pub struct Limits { + /// Defines the maximum depth for recursive calls in `Read/WriteXdr` to + /// prevent stack overflow. + /// + /// The depth limit is akin to limiting stack depth. Its purpose is to + /// prevent the program from hitting the maximum stack size allowed by Rust, + /// which would result in an unrecoverable `SIGABRT`. 
For more information + /// about Rust's stack size limit, refer to the [Rust + /// documentation](https://doc.rust-lang.org/std/thread/#stack-size). + pub depth: u32, + + /// Defines the maximum number of bytes that will be read or written. + pub len: usize, +} + +#[cfg(feature = "std")] +impl Limits { + #[must_use] + pub fn none() -> Self { + Self { + depth: u32::MAX, + len: usize::MAX, + } + } + + #[must_use] + pub fn depth(depth: u32) -> Self { + Limits { + depth, + ..Limits::none() + } + } + + #[must_use] + pub fn len(len: usize) -> Self { + Limits { + len, + ..Limits::none() + } + } +} + +/// `Limited` wraps an object and provides functions for enforcing limits. +/// +/// Intended for use with readers and writers and limiting their reads and +/// writes. +#[cfg(feature = "std")] +pub struct Limited { + pub inner: L, + pub(crate) limits: Limits, +} + +#[cfg(feature = "std")] +impl Limited { + /// Constructs a new `Limited`. + /// + /// - `inner`: The value being limited. + /// - `limits`: The limits to enforce. + pub fn new(inner: L, limits: Limits) -> Self { + Limited { inner, limits } + } + + /// Consume the given length from the internal remaining length limit. + /// + /// ### Errors + /// + /// If the length would consume more length than the remaining length limit + /// allows. + pub(crate) fn consume_len(&mut self, len: usize) -> Result<()> { + if let Some(len) = self.limits.len.checked_sub(len) { + self.limits.len = len; + Ok(()) + } else { + Err(Error::LengthLimitExceeded) + } + } + + /// Consumes a single depth for the duration of the given function. + /// + /// ### Errors + /// + /// If the depth limit is already exhausted. + pub(crate) fn with_limited_depth(&mut self, f: F) -> Result + where + F: FnOnce(&mut Self) -> Result, + { + if let Some(depth) = self.limits.depth.checked_sub(1) { + self.limits.depth = depth; + let res = f(self); + self.limits.depth = self.limits.depth.saturating_add(1); + res + } else { + Err(Error::DepthLimitExceeded) + } + } +} + +#[cfg(feature = "std")] +impl Read for Limited { + /// Forwards the read operation to the wrapped object. + fn read(&mut self, buf: &mut [u8]) -> std::io::Result { + self.inner.read(buf) + } +} + +#[cfg(feature = "std")] +impl BufRead for Limited { + /// Forwards the read operation to the wrapped object. + fn fill_buf(&mut self) -> std::io::Result<&[u8]> { + self.inner.fill_buf() + } + + /// Forwards the read operation to the wrapped object. + fn consume(&mut self, amt: usize) { + self.inner.consume(amt); + } +} + +#[cfg(feature = "std")] +impl Write for Limited { + /// Forwards the write operation to the wrapped object. + fn write(&mut self, buf: &[u8]) -> std::io::Result { + self.inner.write(buf) + } + + /// Forwards the flush operation to the wrapped object. + fn flush(&mut self) -> std::io::Result<()> { + self.inner.flush() + } +} + +#[cfg(feature = "std")] +pub struct ReadXdrIter { + reader: Limited>, + _s: PhantomData, +} + +#[cfg(feature = "std")] +impl ReadXdrIter { + fn new(r: R, limits: Limits) -> Self { + Self { + reader: Limited { + inner: BufReader::new(r), + limits, + }, + _s: PhantomData, + } + } +} + +#[cfg(feature = "std")] +impl Iterator for ReadXdrIter { + type Item = Result; + + // Next reads the internal reader and XDR decodes it into the Self type. If + // the EOF is reached without reading any new bytes `None` is returned. If + // EOF is reached after reading some bytes a truncated entry is assumed an + // an `Error::Io` containing an `UnexpectedEof`. If any other IO error + // occurs it is returned. 
Iteration of this iterator stops naturally when + // `None` is returned, but not when a `Some(Err(...))` is returned. The + // caller is responsible for checking each Result. + fn next(&mut self) -> Option { + // Try to fill the buffer to see if the EOF has been reached or not. + // This happens to effectively peek to see if the stream has finished + // and there are no more items. It is necessary to do this because the + // xdr types in this crate heavily use the `std::io::Read::read_exact` + // method that doesn't distinguish between an EOF at the beginning of a + // read and an EOF after a partial fill of a read_exact. + match self.reader.fill_buf() { + // If the reader has no more data and is unable to fill any new data + // into its internal buf, then the EOF has been reached. + Ok([]) => return None, + // If an error occurs filling the buffer, treat that as an error and stop. + Err(e) => return Some(Err(Error::Io(e))), + // If there is data in the buf available for reading, continue. + Ok([..]) => (), + }; + // Read the buf into the type. + let r = self.reader.with_limited_depth(|dlr| S::read_xdr(dlr)); + match r { + Ok(s) => Some(Ok(s)), + Err(e) => Some(Err(e)), + } + } +} + +pub trait ReadXdr +where + Self: Sized, +{ + /// Read the XDR and construct the type. + /// + /// Read bytes from the given read implementation, decoding the bytes as + /// XDR, and construct the type implementing this interface from those + /// bytes. + /// + /// Just enough bytes are read from the read implementation to construct the + /// type. Any residual bytes remain in the read implementation. + /// + /// All implementations should continue if the read implementation returns + /// [`ErrorKind::Interrupted`](std::io::ErrorKind::Interrupted). + /// + /// Use [`ReadXdR: Read_xdr_to_end`] when the intent is for all bytes in the + /// read implementation to be consumed by the read. + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result; + + /// Construct the type from the XDR bytes base64 encoded. + /// + /// An error is returned if the bytes are not completely consumed by the + /// deserialization. + #[cfg(feature = "base64")] + fn read_xdr_base64(r: &mut Limited) -> Result { + let mut dec = Limited::new( + base64::read::DecoderReader::new(&mut r.inner, base64::STANDARD), + r.limits.clone(), + ); + let t = Self::read_xdr(&mut dec)?; + Ok(t) + } + + /// Read the XDR and construct the type, and consider it an error if the + /// read does not completely consume the read implementation. + /// + /// Read bytes from the given read implementation, decoding the bytes as + /// XDR, and construct the type implementing this interface from those + /// bytes. + /// + /// Just enough bytes are read from the read implementation to construct the + /// type, and then confirm that no further bytes remain. To confirm no + /// further bytes remain additional bytes are attempted to be read from the + /// read implementation. If it is possible to read any residual bytes from + /// the read implementation an error is returned. The read implementation + /// may not be exhaustively read if there are residual bytes, and it is + /// considered undefined how many residual bytes or how much of the residual + /// buffer are consumed in this case. + /// + /// All implementations should continue if the read implementation returns + /// [`ErrorKind::Interrupted`](std::io::ErrorKind::Interrupted). 
+ #[cfg(feature = "std")] + fn read_xdr_to_end(r: &mut Limited) -> Result { + let s = Self::read_xdr(r)?; + // Check that any further reads, such as this read of one byte, read no + // data, indicating EOF. If a byte is read the data is invalid. + if r.read(&mut [0u8; 1])? == 0 { + Ok(s) + } else { + Err(Error::Invalid) + } + } + + /// Construct the type from the XDR bytes base64 encoded. + /// + /// An error is returned if the bytes are not completely consumed by the + /// deserialization. + #[cfg(feature = "base64")] + fn read_xdr_base64_to_end(r: &mut Limited) -> Result { + let mut dec = Limited::new( + base64::read::DecoderReader::new(&mut r.inner, base64::STANDARD), + r.limits.clone(), + ); + let t = Self::read_xdr_to_end(&mut dec)?; + Ok(t) + } + + /// Read the XDR and construct the type. + /// + /// Read bytes from the given read implementation, decoding the bytes as + /// XDR, and construct the type implementing this interface from those + /// bytes. + /// + /// Just enough bytes are read from the read implementation to construct the + /// type. Any residual bytes remain in the read implementation. + /// + /// All implementations should continue if the read implementation returns + /// [`ErrorKind::Interrupted`](std::io::ErrorKind::Interrupted). + /// + /// Use [`ReadXdR: Read_xdr_into_to_end`] when the intent is for all bytes + /// in the read implementation to be consumed by the read. + #[cfg(feature = "std")] + fn read_xdr_into(&mut self, r: &mut Limited) -> Result<()> { + *self = Self::read_xdr(r)?; + Ok(()) + } + + /// Read the XDR into the existing value, and consider it an error if the + /// read does not completely consume the read implementation. + /// + /// Read bytes from the given read implementation, decoding the bytes as + /// XDR, and construct the type implementing this interface from those + /// bytes. + /// + /// Just enough bytes are read from the read implementation to construct the + /// type, and then confirm that no further bytes remain. To confirm no + /// further bytes remain additional bytes are attempted to be read from the + /// read implementation. If it is possible to read any residual bytes from + /// the read implementation an error is returned. The read implementation + /// may not be exhaustively read if there are residual bytes, and it is + /// considered undefined how many residual bytes or how much of the residual + /// buffer are consumed in this case. + /// + /// All implementations should continue if the read implementation returns + /// [`ErrorKind::Interrupted`](std::io::ErrorKind::Interrupted). + #[cfg(feature = "std")] + fn read_xdr_into_to_end(&mut self, r: &mut Limited) -> Result<()> { + Self::read_xdr_into(self, r)?; + // Check that any further reads, such as this read of one byte, read no + // data, indicating EOF. If a byte is read the data is invalid. + if r.read(&mut [0u8; 1])? == 0 { + Ok(()) + } else { + Err(Error::Invalid) + } + } + + /// Create an iterator that reads the read implementation as a stream of + /// values that are read into the implementing type. + /// + /// Read bytes from the given read implementation, decoding the bytes as + /// XDR, and construct the type implementing this interface from those + /// bytes. + /// + /// Just enough bytes are read from the read implementation to construct the + /// type, and then confirm that no further bytes remain. To confirm no + /// further bytes remain additional bytes are attempted to be read from the + /// read implementation. 
If it is possible to read any residual bytes from + /// the read implementation an error is returned. The read implementation + /// may not be exhaustively read if there are residual bytes, and it is + /// considered undefined how many residual bytes or how much of the residual + /// buffer are consumed in this case. + /// + /// All implementations should continue if the read implementation returns + /// [`ErrorKind::Interrupted`](std::io::ErrorKind::Interrupted). + #[cfg(feature = "std")] + fn read_xdr_iter(r: &mut Limited) -> ReadXdrIter<&mut R, Self> { + ReadXdrIter::new(&mut r.inner, r.limits.clone()) + } + + /// Create an iterator that reads the read implementation as a stream of + /// values that are read into the implementing type. + #[cfg(feature = "base64")] + fn read_xdr_base64_iter( + r: &mut Limited, + ) -> ReadXdrIter, Self> { + let dec = base64::read::DecoderReader::new(&mut r.inner, base64::STANDARD); + ReadXdrIter::new(dec, r.limits.clone()) + } + + /// Construct the type from the XDR bytes. + /// + /// An error is returned if the bytes are not completely consumed by the + /// deserialization. + #[cfg(feature = "std")] + fn from_xdr(bytes: impl AsRef<[u8]>, limits: Limits) -> Result { + let mut cursor = Limited::new(Cursor::new(bytes.as_ref()), limits); + let t = Self::read_xdr_to_end(&mut cursor)?; + Ok(t) + } + + /// Construct the type from the XDR bytes base64 encoded. + /// + /// An error is returned if the bytes are not completely consumed by the + /// deserialization. + #[cfg(feature = "base64")] + fn from_xdr_base64(b64: impl AsRef<[u8]>, limits: Limits) -> Result { + let mut b64_reader = Cursor::new(b64); + let mut dec = Limited::new( + base64::read::DecoderReader::new(&mut b64_reader, base64::STANDARD), + limits, + ); + let t = Self::read_xdr_to_end(&mut dec)?; + Ok(t) + } +} + +pub trait WriteXdr { + #[cfg(feature = "std")] + fn write_xdr(&self, w: &mut Limited) -> Result<()>; + + #[cfg(feature = "std")] + fn to_xdr(&self, limits: Limits) -> Result> { + let mut cursor = Limited::new(Cursor::new(vec![]), limits); + self.write_xdr(&mut cursor)?; + let bytes = cursor.inner.into_inner(); + Ok(bytes) + } + + #[cfg(feature = "base64")] + fn to_xdr_base64(&self, limits: Limits) -> Result { + let mut enc = Limited::new( + base64::write::EncoderStringWriter::new(base64::STANDARD), + limits, + ); + self.write_xdr(&mut enc)?; + let b64 = enc.inner.into_inner(); + Ok(b64) + } +} + +/// `Pad_len` returns the number of bytes to pad an XDR value of the given +/// length to make the final serialized size a multiple of 4. +#[cfg(feature = "std")] +fn pad_len(len: usize) -> usize { + (4 - (len % 4)) % 4 +} + +impl ReadXdr for i32 { + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result { + let mut b = [0u8; 4]; + r.with_limited_depth(|r| { + r.consume_len(b.len())?; + r.read_exact(&mut b)?; + Ok(i32::from_be_bytes(b)) + }) + } +} + +impl WriteXdr for i32 { + #[cfg(feature = "std")] + fn write_xdr(&self, w: &mut Limited) -> Result<()> { + let b: [u8; 4] = self.to_be_bytes(); + w.with_limited_depth(|w| { + w.consume_len(b.len())?; + Ok(w.write_all(&b)?) 
+ }) + } +} + +impl ReadXdr for u32 { + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result { + let mut b = [0u8; 4]; + r.with_limited_depth(|r| { + r.consume_len(b.len())?; + r.read_exact(&mut b)?; + Ok(u32::from_be_bytes(b)) + }) + } +} + +impl WriteXdr for u32 { + #[cfg(feature = "std")] + fn write_xdr(&self, w: &mut Limited) -> Result<()> { + let b: [u8; 4] = self.to_be_bytes(); + w.with_limited_depth(|w| { + w.consume_len(b.len())?; + Ok(w.write_all(&b)?) + }) + } +} + +impl ReadXdr for i64 { + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result { + let mut b = [0u8; 8]; + r.with_limited_depth(|r| { + r.consume_len(b.len())?; + r.read_exact(&mut b)?; + Ok(i64::from_be_bytes(b)) + }) + } +} + +impl WriteXdr for i64 { + #[cfg(feature = "std")] + fn write_xdr(&self, w: &mut Limited) -> Result<()> { + let b: [u8; 8] = self.to_be_bytes(); + w.with_limited_depth(|w| { + w.consume_len(b.len())?; + Ok(w.write_all(&b)?) + }) + } +} + +impl ReadXdr for u64 { + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result { + let mut b = [0u8; 8]; + r.with_limited_depth(|r| { + r.consume_len(b.len())?; + r.read_exact(&mut b)?; + Ok(u64::from_be_bytes(b)) + }) + } +} + +impl WriteXdr for u64 { + #[cfg(feature = "std")] + fn write_xdr(&self, w: &mut Limited) -> Result<()> { + let b: [u8; 8] = self.to_be_bytes(); + w.with_limited_depth(|w| { + w.consume_len(b.len())?; + Ok(w.write_all(&b)?) + }) + } +} + +impl ReadXdr for f32 { + #[cfg(feature = "std")] + fn read_xdr(_r: &mut Limited) -> Result { + todo!() + } +} + +impl WriteXdr for f32 { + #[cfg(feature = "std")] + fn write_xdr(&self, _w: &mut Limited) -> Result<()> { + todo!() + } +} + +impl ReadXdr for f64 { + #[cfg(feature = "std")] + fn read_xdr(_r: &mut Limited) -> Result { + todo!() + } +} + +impl WriteXdr for f64 { + #[cfg(feature = "std")] + fn write_xdr(&self, _w: &mut Limited) -> Result<()> { + todo!() + } +} + +impl ReadXdr for bool { + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result { + r.with_limited_depth(|r| { + let i = u32::read_xdr(r)?; + let b = i == 1; + Ok(b) + }) + } +} + +impl WriteXdr for bool { + #[cfg(feature = "std")] + fn write_xdr(&self, w: &mut Limited) -> Result<()> { + w.with_limited_depth(|w| { + let i = u32::from(*self); // true = 1, false = 0 + i.write_xdr(w) + }) + } +} + +impl ReadXdr for Option { + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result { + r.with_limited_depth(|r| { + let i = u32::read_xdr(r)?; + match i { + 0 => Ok(None), + 1 => { + let t = T::read_xdr(r)?; + Ok(Some(t)) + } + _ => Err(Error::Invalid), + } + }) + } +} + +impl WriteXdr for Option { + #[cfg(feature = "std")] + fn write_xdr(&self, w: &mut Limited) -> Result<()> { + w.with_limited_depth(|w| { + if let Some(t) = self { + 1u32.write_xdr(w)?; + t.write_xdr(w)?; + } else { + 0u32.write_xdr(w)?; + } + Ok(()) + }) + } +} + +impl ReadXdr for Box { + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result { + r.with_limited_depth(|r| Ok(Box::new(T::read_xdr(r)?))) + } +} + +impl WriteXdr for Box { + #[cfg(feature = "std")] + fn write_xdr(&self, w: &mut Limited) -> Result<()> { + w.with_limited_depth(|w| T::write_xdr(self, w)) + } +} + +impl ReadXdr for () { + #[cfg(feature = "std")] + fn read_xdr(_r: &mut Limited) -> Result { + Ok(()) + } +} + +impl WriteXdr for () { + #[cfg(feature = "std")] + fn write_xdr(&self, _w: &mut Limited) -> Result<()> { + Ok(()) + } +} + +impl ReadXdr for [u8; N] { + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result { + 
r.with_limited_depth(|r| { + r.consume_len(N)?; + let padding = pad_len(N); + r.consume_len(padding)?; + let mut arr = [0u8; N]; + r.read_exact(&mut arr)?; + let pad = &mut [0u8; 3][..padding]; + r.read_exact(pad)?; + if pad.iter().any(|b| *b != 0) { + return Err(Error::NonZeroPadding); + } + Ok(arr) + }) + } +} + +impl WriteXdr for [u8; N] { + #[cfg(feature = "std")] + fn write_xdr(&self, w: &mut Limited) -> Result<()> { + w.with_limited_depth(|w| { + w.consume_len(N)?; + let padding = pad_len(N); + w.consume_len(padding)?; + w.write_all(self)?; + w.write_all(&[0u8; 3][..padding])?; + Ok(()) + }) + } +} + +impl ReadXdr for [T; N] { + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result { + r.with_limited_depth(|r| { + let mut vec = Vec::with_capacity(N); + for _ in 0..N { + let t = T::read_xdr(r)?; + vec.push(t); + } + let arr: [T; N] = vec.try_into().unwrap_or_else(|_: Vec| unreachable!()); + Ok(arr) + }) + } +} + +impl WriteXdr for [T; N] { + #[cfg(feature = "std")] + fn write_xdr(&self, w: &mut Limited) -> Result<()> { + w.with_limited_depth(|w| { + for t in self { + t.write_xdr(w)?; + } + Ok(()) + }) + } +} + +// VecM ------------------------------------------------------------------------ + +#[cfg(feature = "alloc")] +#[derive(Debug, Clone, Hash, PartialEq, Eq, PartialOrd, Ord)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +#[cfg_attr(feature = "arbitrary", derive(Arbitrary))] +pub struct VecM(Vec); + +#[cfg(not(feature = "alloc"))] +#[derive(Debug, Clone, Hash, PartialEq, Eq, PartialOrd, Ord)] +#[cfg_attr(feature = "arbitrary", derive(Arbitrary))] +pub struct VecM(Vec) +where + T: 'static; + +impl Deref for VecM { + type Target = Vec; + + fn deref(&self) -> &Self::Target { + &self.0 + } +} + +impl Default for VecM { + fn default() -> Self { + Self(Vec::default()) + } +} + +#[cfg(feature = "schemars")] +impl schemars::JsonSchema for VecM { + fn schema_name() -> String { + format!("VecM<{}, {}>", T::schema_name(), MAX) + } + + fn is_referenceable() -> bool { + false + } + + fn json_schema(gen: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema { + let schema = Vec::::json_schema(gen); + if let schemars::schema::Schema::Object(mut schema) = schema { + if let Some(array) = schema.array.clone() { + schema.array = Some(Box::new(schemars::schema::ArrayValidation { + max_items: Some(MAX), + ..*array + })); + } + schema.into() + } else { + schema + } + } +} + +impl VecM { + pub const MAX_LEN: usize = { MAX as usize }; + + #[must_use] + #[allow(clippy::unused_self)] + pub fn max_len(&self) -> usize { + Self::MAX_LEN + } + + #[must_use] + pub fn as_vec(&self) -> &Vec { + self.as_ref() + } +} + +impl VecM { + #[must_use] + #[cfg(feature = "alloc")] + pub fn to_vec(&self) -> Vec { + self.into() + } + + #[must_use] + pub fn into_vec(self) -> Vec { + self.into() + } +} + +impl VecM { + #[cfg(feature = "alloc")] + pub fn to_string(&self) -> Result { + self.try_into() + } + + #[cfg(feature = "alloc")] + pub fn into_string(self) -> Result { + self.try_into() + } + + #[cfg(feature = "alloc")] + #[must_use] + pub fn to_string_lossy(&self) -> String { + String::from_utf8_lossy(&self.0).into_owned() + } + + #[cfg(feature = "alloc")] + #[must_use] + pub fn into_string_lossy(self) -> String { + String::from_utf8_lossy(&self.0).into_owned() + } +} + +impl VecM { + #[must_use] + pub fn to_option(&self) -> Option { + if self.len() > 0 { + Some(self.0[0].clone()) + } else { + None + } + } +} + +#[cfg(not(feature = "alloc"))] +impl From> for Option { + 
#[must_use] + fn from(v: VecM) -> Self { + v.to_option() + } +} + +#[cfg(feature = "alloc")] +impl VecM { + #[must_use] + pub fn into_option(mut self) -> Option { + self.0.drain(..).next() + } +} + +#[cfg(feature = "alloc")] +impl From> for Option { + #[must_use] + fn from(v: VecM) -> Self { + v.into_option() + } +} + +impl TryFrom> for VecM { + type Error = Error; + + fn try_from(v: Vec) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(VecM(v)) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +impl From> for Vec { + #[must_use] + fn from(v: VecM) -> Self { + v.0 + } +} + +#[cfg(feature = "alloc")] +impl From<&VecM> for Vec { + #[must_use] + fn from(v: &VecM) -> Self { + v.0.clone() + } +} + +impl AsRef> for VecM { + #[must_use] + fn as_ref(&self) -> &Vec { + &self.0 + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&Vec> for VecM { + type Error = Error; + + fn try_from(v: &Vec) -> Result { + v.as_slice().try_into() + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&[T]> for VecM { + type Error = Error; + + fn try_from(v: &[T]) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(VecM(v.to_vec())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +impl AsRef<[T]> for VecM { + #[cfg(feature = "alloc")] + #[must_use] + fn as_ref(&self) -> &[T] { + self.0.as_ref() + } + #[cfg(not(feature = "alloc"))] + #[must_use] + fn as_ref(&self) -> &[T] { + self.0 + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<[T; N]> for VecM { + type Error = Error; + + fn try_from(v: [T; N]) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(VecM(v.to_vec())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +#[cfg(feature = "alloc")] +impl TryFrom> for [T; N] { + type Error = VecM; + + fn try_from(v: VecM) -> core::result::Result { + let s: [T; N] = v.0.try_into().map_err(|v: Vec| VecM::(v))?; + Ok(s) + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&[T; N]> for VecM { + type Error = Error; + + fn try_from(v: &[T; N]) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(VecM(v.to_vec())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +#[cfg(not(feature = "alloc"))] +impl TryFrom<&'static [T; N]> for VecM { + type Error = Error; + + fn try_from(v: &'static [T; N]) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(VecM(v)) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&String> for VecM { + type Error = Error; + + fn try_from(v: &String) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(VecM(v.as_bytes().to_vec())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +#[cfg(feature = "alloc")] +impl TryFrom for VecM { + type Error = Error; + + fn try_from(v: String) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(VecM(v.into())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +#[cfg(feature = "alloc")] +impl TryFrom> for String { + type Error = Error; + + fn try_from(v: VecM) -> Result { + Ok(String::from_utf8(v.0)?) 
+ } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&VecM> for String { + type Error = Error; + + fn try_from(v: &VecM) -> Result { + Ok(core::str::from_utf8(v.as_ref())?.to_owned()) + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&str> for VecM { + type Error = Error; + + fn try_from(v: &str) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(VecM(v.into())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +#[cfg(not(feature = "alloc"))] +impl TryFrom<&'static str> for VecM { + type Error = Error; + + fn try_from(v: &'static str) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(VecM(v.as_bytes())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +impl<'a, const MAX: u32> TryFrom<&'a VecM> for &'a str { + type Error = Error; + + fn try_from(v: &'a VecM) -> Result { + Ok(core::str::from_utf8(v.as_ref())?) + } +} + +impl ReadXdr for VecM { + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result { + r.with_limited_depth(|r| { + let len: u32 = u32::read_xdr(r)?; + if len > MAX { + return Err(Error::LengthExceedsMax); + } + + r.consume_len(len as usize)?; + let padding = pad_len(len as usize); + r.consume_len(padding)?; + + let mut vec = vec![0u8; len as usize]; + r.read_exact(&mut vec)?; + + let pad = &mut [0u8; 3][..padding]; + r.read_exact(pad)?; + if pad.iter().any(|b| *b != 0) { + return Err(Error::NonZeroPadding); + } + + Ok(VecM(vec)) + }) + } +} + +impl WriteXdr for VecM { + #[cfg(feature = "std")] + fn write_xdr(&self, w: &mut Limited) -> Result<()> { + w.with_limited_depth(|w| { + let len: u32 = self.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + len.write_xdr(w)?; + + w.consume_len(self.len())?; + let padding = pad_len(self.len()); + w.consume_len(padding)?; + + w.write_all(&self.0)?; + + w.write_all(&[0u8; 3][..padding])?; + + Ok(()) + }) + } +} + +impl ReadXdr for VecM { + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result { + r.with_limited_depth(|r| { + let len = u32::read_xdr(r)?; + if len > MAX { + return Err(Error::LengthExceedsMax); + } + + let mut vec = Vec::new(); + for _ in 0..len { + let t = T::read_xdr(r)?; + vec.push(t); + } + + Ok(VecM(vec)) + }) + } +} + +impl WriteXdr for VecM { + #[cfg(feature = "std")] + fn write_xdr(&self, w: &mut Limited) -> Result<()> { + w.with_limited_depth(|w| { + let len: u32 = self.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + len.write_xdr(w)?; + + for t in &self.0 { + t.write_xdr(w)?; + } + + Ok(()) + }) + } +} + +// BytesM ------------------------------------------------------------------------ + +#[cfg(feature = "alloc")] +#[derive(Clone, Hash, PartialEq, Eq, PartialOrd, Ord)] +#[cfg_attr( + feature = "serde", + derive(serde_with::SerializeDisplay, serde_with::DeserializeFromStr) +)] +#[cfg_attr(feature = "arbitrary", derive(Arbitrary))] +pub struct BytesM(Vec); + +#[cfg(not(feature = "alloc"))] +#[derive(Clone, Hash, PartialEq, Eq, PartialOrd, Ord)] +#[cfg_attr(feature = "arbitrary", derive(Arbitrary))] +pub struct BytesM(Vec); + +impl core::fmt::Display for BytesM { + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + #[cfg(feature = "alloc")] + let v = &self.0; + #[cfg(not(feature = "alloc"))] + let v = self.0; + for b in v { + write!(f, "{b:02x}")?; + } + Ok(()) + } +} + +impl core::fmt::Debug for BytesM { + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + #[cfg(feature = "alloc")] + let v = &self.0; + 
#[cfg(not(feature = "alloc"))] + let v = self.0; + write!(f, "BytesM(")?; + for b in v { + write!(f, "{b:02x}")?; + } + write!(f, ")")?; + Ok(()) + } +} + +#[cfg(feature = "alloc")] +impl core::str::FromStr for BytesM { + type Err = Error; + fn from_str(s: &str) -> core::result::Result { + hex::decode(s).map_err(|_| Error::InvalidHex)?.try_into() + } +} + +impl Deref for BytesM { + type Target = Vec; + + fn deref(&self) -> &Self::Target { + &self.0 + } +} + +#[cfg(feature = "schemars")] +impl schemars::JsonSchema for BytesM { + fn schema_name() -> String { + format!("BytesM<{MAX}>") + } + + fn is_referenceable() -> bool { + false + } + + fn json_schema(gen: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema { + let schema = String::json_schema(gen); + if let schemars::schema::Schema::Object(mut schema) = schema { + schema.extensions.insert( + "contentEncoding".to_owned(), + serde_json::Value::String("hex".to_string()), + ); + schema.extensions.insert( + "contentMediaType".to_owned(), + serde_json::Value::String("application/binary".to_string()), + ); + let string = *schema.string.unwrap_or_default().clone(); + schema.string = Some(Box::new(schemars::schema::StringValidation { + max_length: MAX.checked_mul(2).map(Some).unwrap_or_default(), + min_length: None, + ..string + })); + schema.into() + } else { + schema + } + } +} + +impl Default for BytesM { + fn default() -> Self { + Self(Vec::default()) + } +} + +impl BytesM { + pub const MAX_LEN: usize = { MAX as usize }; + + #[must_use] + #[allow(clippy::unused_self)] + pub fn max_len(&self) -> usize { + Self::MAX_LEN + } + + #[must_use] + pub fn as_vec(&self) -> &Vec { + self.as_ref() + } +} + +impl BytesM { + #[must_use] + #[cfg(feature = "alloc")] + pub fn to_vec(&self) -> Vec { + self.into() + } + + #[must_use] + pub fn into_vec(self) -> Vec { + self.into() + } +} + +impl BytesM { + #[cfg(feature = "alloc")] + pub fn to_string(&self) -> Result { + self.try_into() + } + + #[cfg(feature = "alloc")] + pub fn into_string(self) -> Result { + self.try_into() + } + + #[cfg(feature = "alloc")] + #[must_use] + pub fn to_string_lossy(&self) -> String { + String::from_utf8_lossy(&self.0).into_owned() + } + + #[cfg(feature = "alloc")] + #[must_use] + pub fn into_string_lossy(self) -> String { + String::from_utf8_lossy(&self.0).into_owned() + } +} + +impl TryFrom> for BytesM { + type Error = Error; + + fn try_from(v: Vec) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(BytesM(v)) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +impl From> for Vec { + #[must_use] + fn from(v: BytesM) -> Self { + v.0 + } +} + +#[cfg(feature = "alloc")] +impl From<&BytesM> for Vec { + #[must_use] + fn from(v: &BytesM) -> Self { + v.0.clone() + } +} + +impl AsRef> for BytesM { + #[must_use] + fn as_ref(&self) -> &Vec { + &self.0 + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&Vec> for BytesM { + type Error = Error; + + fn try_from(v: &Vec) -> Result { + v.as_slice().try_into() + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&[u8]> for BytesM { + type Error = Error; + + fn try_from(v: &[u8]) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(BytesM(v.to_vec())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +impl AsRef<[u8]> for BytesM { + #[cfg(feature = "alloc")] + #[must_use] + fn as_ref(&self) -> &[u8] { + self.0.as_ref() + } + #[cfg(not(feature = "alloc"))] + #[must_use] + fn as_ref(&self) -> &[u8] { + self.0 + } +} 
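The conversions above give `BytesM` two interchangeable forms: a lowercase-hex string via `Display`/`FromStr`, and the padded XDR encoding via `WriteXdr`/`ReadXdr`. A minimal round-trip sketch, assuming this generated module is compiled as a crate named `my_xdr` (name illustrative) with the `std` and `alloc` features enabled:

```rust
use my_xdr::{BytesM, Error, Limits, ReadXdr, WriteXdr};

fn bytes_round_trip() -> Result<(), Error> {
    // FromStr decodes hex; Display prints the same lowercase hex back.
    let b: BytesM<3> = "010203".parse()?;
    assert_eq!(b.to_string(), "010203");

    // XDR form: 4-byte big-endian length, payload, zero padding to a 4-byte boundary.
    let xdr = b.to_xdr(Limits::none())?;
    assert_eq!(xdr, vec![0, 0, 0, 3, 1, 2, 3, 0]);

    // Decoding enforces the declared bound (MAX = 3) and the supplied limits.
    let back = BytesM::<3>::from_xdr(xdr, Limits::none())?;
    assert_eq!(b, back);
    Ok(())
}
```

The same `Limits` value also caps recursion depth and total bytes consumed during encode and decode, which is what the depth- and length-limit tests later in this file exercise.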
+ +#[cfg(feature = "alloc")] +impl TryFrom<[u8; N]> for BytesM { + type Error = Error; + + fn try_from(v: [u8; N]) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(BytesM(v.to_vec())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +#[cfg(feature = "alloc")] +impl TryFrom> for [u8; N] { + type Error = BytesM; + + fn try_from(v: BytesM) -> core::result::Result { + let s: [u8; N] = v.0.try_into().map_err(BytesM::)?; + Ok(s) + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&[u8; N]> for BytesM { + type Error = Error; + + fn try_from(v: &[u8; N]) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(BytesM(v.to_vec())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +#[cfg(not(feature = "alloc"))] +impl TryFrom<&'static [u8; N]> for BytesM { + type Error = Error; + + fn try_from(v: &'static [u8; N]) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(BytesM(v)) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&String> for BytesM { + type Error = Error; + + fn try_from(v: &String) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(BytesM(v.as_bytes().to_vec())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +#[cfg(feature = "alloc")] +impl TryFrom for BytesM { + type Error = Error; + + fn try_from(v: String) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(BytesM(v.into())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +#[cfg(feature = "alloc")] +impl TryFrom> for String { + type Error = Error; + + fn try_from(v: BytesM) -> Result { + Ok(String::from_utf8(v.0)?) + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&BytesM> for String { + type Error = Error; + + fn try_from(v: &BytesM) -> Result { + Ok(core::str::from_utf8(v.as_ref())?.to_owned()) + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&str> for BytesM { + type Error = Error; + + fn try_from(v: &str) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(BytesM(v.into())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +#[cfg(not(feature = "alloc"))] +impl TryFrom<&'static str> for BytesM { + type Error = Error; + + fn try_from(v: &'static str) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(BytesM(v.as_bytes())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +impl<'a, const MAX: u32> TryFrom<&'a BytesM> for &'a str { + type Error = Error; + + fn try_from(v: &'a BytesM) -> Result { + Ok(core::str::from_utf8(v.as_ref())?) 
+ } +} + +impl ReadXdr for BytesM { + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result { + r.with_limited_depth(|r| { + let len: u32 = u32::read_xdr(r)?; + if len > MAX { + return Err(Error::LengthExceedsMax); + } + + r.consume_len(len as usize)?; + let padding = pad_len(len as usize); + r.consume_len(padding)?; + + let mut vec = vec![0u8; len as usize]; + r.read_exact(&mut vec)?; + + let pad = &mut [0u8; 3][..padding]; + r.read_exact(pad)?; + if pad.iter().any(|b| *b != 0) { + return Err(Error::NonZeroPadding); + } + + Ok(BytesM(vec)) + }) + } +} + +impl WriteXdr for BytesM { + #[cfg(feature = "std")] + fn write_xdr(&self, w: &mut Limited) -> Result<()> { + w.with_limited_depth(|w| { + let len: u32 = self.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + len.write_xdr(w)?; + + w.consume_len(self.len())?; + let padding = pad_len(self.len()); + w.consume_len(padding)?; + + w.write_all(&self.0)?; + + w.write_all(&[0u8; 3][..pad_len(len as usize)])?; + + Ok(()) + }) + } +} + +// StringM ------------------------------------------------------------------------ + +/// A string type that contains arbitrary bytes. +/// +/// Convertible, fallibly, to/from a Rust UTF-8 String using +/// [`TryFrom`]/[`TryInto`]/[`StringM::to_utf8_string`]. +/// +/// Convertible, lossyly, to a Rust UTF-8 String using +/// [`StringM::to_utf8_string_lossy`]. +/// +/// Convertible to/from escaped printable-ASCII using +/// [`Display`]/[`ToString`]/[`FromStr`]. + +#[cfg(feature = "alloc")] +#[derive(Clone, Hash, PartialEq, Eq, PartialOrd, Ord)] +#[cfg_attr( + feature = "serde", + derive(serde_with::SerializeDisplay, serde_with::DeserializeFromStr) +)] +#[cfg_attr(feature = "arbitrary", derive(Arbitrary))] +pub struct StringM(Vec); + +#[cfg(not(feature = "alloc"))] +#[derive(Clone, Hash, PartialEq, Eq, PartialOrd, Ord)] +#[cfg_attr(feature = "arbitrary", derive(Arbitrary))] +pub struct StringM(Vec); + +impl core::fmt::Display for StringM { + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + #[cfg(feature = "alloc")] + let v = &self.0; + #[cfg(not(feature = "alloc"))] + let v = self.0; + for b in escape_bytes::Escape::new(v) { + write!(f, "{}", b as char)?; + } + Ok(()) + } +} + +impl core::fmt::Debug for StringM { + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + #[cfg(feature = "alloc")] + let v = &self.0; + #[cfg(not(feature = "alloc"))] + let v = self.0; + write!(f, "StringM(")?; + for b in escape_bytes::Escape::new(v) { + write!(f, "{}", b as char)?; + } + write!(f, ")")?; + Ok(()) + } +} + +#[cfg(feature = "alloc")] +impl core::str::FromStr for StringM { + type Err = Error; + fn from_str(s: &str) -> core::result::Result { + let b = escape_bytes::unescape(s.as_bytes()).map_err(|_| Error::Invalid)?; + Ok(Self(b)) + } +} + +impl Deref for StringM { + type Target = Vec; + + fn deref(&self) -> &Self::Target { + &self.0 + } +} + +impl Default for StringM { + fn default() -> Self { + Self(Vec::default()) + } +} + +#[cfg(feature = "schemars")] +impl schemars::JsonSchema for StringM { + fn schema_name() -> String { + format!("StringM<{MAX}>") + } + + fn json_schema(gen: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema { + let schema = String::json_schema(gen); + if let schemars::schema::Schema::Object(mut schema) = schema { + let string = *schema.string.unwrap_or_default().clone(); + schema.string = Some(Box::new(schemars::schema::StringValidation { + max_length: Some(MAX), + ..string + })); + schema.into() + } else { + schema + } + } +} + 
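Together with the `#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]` attributes emitted on the generated types, these manual impls are what the new `schemars` feature exposes: the XDR length bounds surface as JSON Schema `maxLength` constraints. A minimal sketch, assuming the generated module is available as `my_xdr` (name illustrative) and is built with the `schemars` and `serde_json` features enabled:

```rust
use my_xdr::StringM;

fn string_schema() -> Result<(), serde_json::Error> {
    // Generate a root schema for a bounded string type using its JsonSchema impl.
    let gen = schemars::gen::SchemaGenerator::default();
    let schema = gen.into_root_schema_for::<StringM<64>>();

    // The StringM<MAX> impl above maps MAX onto the schema's maxLength.
    let v = serde_json::to_value(&schema)?;
    assert_eq!(v["maxLength"], 64);
    Ok(())
}
```

The `TypeVariant::json_schema` helper generated earlier in this diff drives the same machinery, dispatching to `SchemaGenerator::into_root_schema_for` for whichever variant is selected.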
+impl StringM { + pub const MAX_LEN: usize = { MAX as usize }; + + #[must_use] + #[allow(clippy::unused_self)] + pub fn max_len(&self) -> usize { + Self::MAX_LEN + } + + #[must_use] + pub fn as_vec(&self) -> &Vec { + self.as_ref() + } +} + +impl StringM { + #[must_use] + #[cfg(feature = "alloc")] + pub fn to_vec(&self) -> Vec { + self.into() + } + + #[must_use] + pub fn into_vec(self) -> Vec { + self.into() + } +} + +impl StringM { + #[cfg(feature = "alloc")] + pub fn to_utf8_string(&self) -> Result { + self.try_into() + } + + #[cfg(feature = "alloc")] + pub fn into_utf8_string(self) -> Result { + self.try_into() + } + + #[cfg(feature = "alloc")] + #[must_use] + pub fn to_utf8_string_lossy(&self) -> String { + String::from_utf8_lossy(&self.0).into_owned() + } + + #[cfg(feature = "alloc")] + #[must_use] + pub fn into_utf8_string_lossy(self) -> String { + String::from_utf8_lossy(&self.0).into_owned() + } +} + +impl TryFrom> for StringM { + type Error = Error; + + fn try_from(v: Vec) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(StringM(v)) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +impl From> for Vec { + #[must_use] + fn from(v: StringM) -> Self { + v.0 + } +} + +#[cfg(feature = "alloc")] +impl From<&StringM> for Vec { + #[must_use] + fn from(v: &StringM) -> Self { + v.0.clone() + } +} + +impl AsRef> for StringM { + #[must_use] + fn as_ref(&self) -> &Vec { + &self.0 + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&Vec> for StringM { + type Error = Error; + + fn try_from(v: &Vec) -> Result { + v.as_slice().try_into() + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&[u8]> for StringM { + type Error = Error; + + fn try_from(v: &[u8]) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(StringM(v.to_vec())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +impl AsRef<[u8]> for StringM { + #[cfg(feature = "alloc")] + #[must_use] + fn as_ref(&self) -> &[u8] { + self.0.as_ref() + } + #[cfg(not(feature = "alloc"))] + #[must_use] + fn as_ref(&self) -> &[u8] { + self.0 + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<[u8; N]> for StringM { + type Error = Error; + + fn try_from(v: [u8; N]) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(StringM(v.to_vec())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +#[cfg(feature = "alloc")] +impl TryFrom> for [u8; N] { + type Error = StringM; + + fn try_from(v: StringM) -> core::result::Result { + let s: [u8; N] = v.0.try_into().map_err(StringM::)?; + Ok(s) + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&[u8; N]> for StringM { + type Error = Error; + + fn try_from(v: &[u8; N]) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(StringM(v.to_vec())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +#[cfg(not(feature = "alloc"))] +impl TryFrom<&'static [u8; N]> for StringM { + type Error = Error; + + fn try_from(v: &'static [u8; N]) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(StringM(v)) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&String> for StringM { + type Error = Error; + + fn try_from(v: &String) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(StringM(v.as_bytes().to_vec())) + } else { + 
Err(Error::LengthExceedsMax) + } + } +} + +#[cfg(feature = "alloc")] +impl TryFrom for StringM { + type Error = Error; + + fn try_from(v: String) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(StringM(v.into())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +#[cfg(feature = "alloc")] +impl TryFrom> for String { + type Error = Error; + + fn try_from(v: StringM) -> Result { + Ok(String::from_utf8(v.0)?) + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&StringM> for String { + type Error = Error; + + fn try_from(v: &StringM) -> Result { + Ok(core::str::from_utf8(v.as_ref())?.to_owned()) + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&str> for StringM { + type Error = Error; + + fn try_from(v: &str) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(StringM(v.into())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +#[cfg(not(feature = "alloc"))] +impl TryFrom<&'static str> for StringM { + type Error = Error; + + fn try_from(v: &'static str) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(StringM(v.as_bytes())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +impl<'a, const MAX: u32> TryFrom<&'a StringM> for &'a str { + type Error = Error; + + fn try_from(v: &'a StringM) -> Result { + Ok(core::str::from_utf8(v.as_ref())?) + } +} + +impl ReadXdr for StringM { + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result { + r.with_limited_depth(|r| { + let len: u32 = u32::read_xdr(r)?; + if len > MAX { + return Err(Error::LengthExceedsMax); + } + + r.consume_len(len as usize)?; + let padding = pad_len(len as usize); + r.consume_len(padding)?; + + let mut vec = vec![0u8; len as usize]; + r.read_exact(&mut vec)?; + + let pad = &mut [0u8; 3][..padding]; + r.read_exact(pad)?; + if pad.iter().any(|b| *b != 0) { + return Err(Error::NonZeroPadding); + } + + Ok(StringM(vec)) + }) + } +} + +impl WriteXdr for StringM { + #[cfg(feature = "std")] + fn write_xdr(&self, w: &mut Limited) -> Result<()> { + w.with_limited_depth(|w| { + let len: u32 = self.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + len.write_xdr(w)?; + + w.consume_len(self.len())?; + let padding = pad_len(self.len()); + w.consume_len(padding)?; + + w.write_all(&self.0)?; + + w.write_all(&[0u8; 3][..padding])?; + + Ok(()) + }) + } +} + +// Frame ------------------------------------------------------------------------ + +#[derive(Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord)] +#[cfg_attr( + all(feature = "serde", feature = "alloc"), + derive(serde::Serialize, serde::Deserialize), + serde(rename_all = "snake_case") +)] +pub struct Frame(pub T) +where + T: ReadXdr; + +#[cfg(feature = "schemars")] +impl schemars::JsonSchema for Frame { + fn schema_name() -> String { + format!("Frame<{}>", T::schema_name()) + } + + fn json_schema(gen: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema { + T::json_schema(gen) + } +} + +impl ReadXdr for Frame +where + T: ReadXdr, +{ + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result { + // Read the frame header value that contains 1 flag-bit and a 33-bit length. + // - The 1 flag bit is 0 when there are more frames for the same record. + // - The 31-bit length is the length of the bytes within the frame that + // follow the frame header. + let header = u32::read_xdr(r)?; + // TODO: Use the length and cap the length we'll read from `r`. 
+ let last_record = header >> 31 == 1; + if last_record { + // Read the record in the frame. + Ok(Self(T::read_xdr(r)?)) + } else { + // TODO: Support reading those additional frames for the same + // record. + Err(Error::Unsupported) + } + } +} + +#[cfg(all(test, feature = "std"))] +mod tests { + use std::io::Cursor; + + use super::*; + + #[test] + pub fn vec_u8_read_without_padding() { + let buf = Cursor::new(vec![0, 0, 0, 4, 2, 2, 2, 2]); + let v = VecM::::read_xdr(&mut Limited::new(buf, Limits::none())).unwrap(); + assert_eq!(v.to_vec(), vec![2, 2, 2, 2]); + } + + #[test] + pub fn vec_u8_read_with_padding() { + let buf = Cursor::new(vec![0, 0, 0, 1, 2, 0, 0, 0]); + let v = VecM::::read_xdr(&mut Limited::new(buf, Limits::none())).unwrap(); + assert_eq!(v.to_vec(), vec![2]); + } + + #[test] + pub fn vec_u8_read_with_insufficient_padding() { + let buf = Cursor::new(vec![0, 0, 0, 1, 2, 0, 0]); + let res = VecM::::read_xdr(&mut Limited::new(buf, Limits::none())); + match res { + Err(Error::Io(_)) => (), + _ => panic!("expected IO error got {res:?}"), + } + } + + #[test] + pub fn vec_u8_read_with_non_zero_padding() { + let buf = Cursor::new(vec![0, 0, 0, 1, 2, 3, 0, 0]); + let res = VecM::::read_xdr(&mut Limited::new(buf, Limits::none())); + match res { + Err(Error::NonZeroPadding) => (), + _ => panic!("expected NonZeroPadding got {res:?}"), + } + } + + #[test] + pub fn vec_u8_write_without_padding() { + let mut buf = vec![]; + let v: VecM = vec![2, 2, 2, 2].try_into().unwrap(); + + v.write_xdr(&mut Limited::new(Cursor::new(&mut buf), Limits::none())) + .unwrap(); + assert_eq!(buf, vec![0, 0, 0, 4, 2, 2, 2, 2]); + } + + #[test] + pub fn vec_u8_write_with_padding() { + let mut buf = vec![]; + let v: VecM = vec![2].try_into().unwrap(); + v.write_xdr(&mut Limited::new(Cursor::new(&mut buf), Limits::none())) + .unwrap(); + assert_eq!(buf, vec![0, 0, 0, 1, 2, 0, 0, 0]); + } + + #[test] + pub fn arr_u8_read_without_padding() { + let buf = Cursor::new(vec![2, 2, 2, 2]); + let v = <[u8; 4]>::read_xdr(&mut Limited::new(buf, Limits::none())).unwrap(); + assert_eq!(v, [2, 2, 2, 2]); + } + + #[test] + pub fn arr_u8_read_with_padding() { + let buf = Cursor::new(vec![2, 0, 0, 0]); + let v = <[u8; 1]>::read_xdr(&mut Limited::new(buf, Limits::none())).unwrap(); + assert_eq!(v, [2]); + } + + #[test] + pub fn arr_u8_read_with_insufficient_padding() { + let buf = Cursor::new(vec![2, 0, 0]); + let res = <[u8; 1]>::read_xdr(&mut Limited::new(buf, Limits::none())); + match res { + Err(Error::Io(_)) => (), + _ => panic!("expected IO error got {res:?}"), + } + } + + #[test] + pub fn arr_u8_read_with_non_zero_padding() { + let buf = Cursor::new(vec![2, 3, 0, 0]); + let res = <[u8; 1]>::read_xdr(&mut Limited::new(buf, Limits::none())); + match res { + Err(Error::NonZeroPadding) => (), + _ => panic!("expected NonZeroPadding got {res:?}"), + } + } + + #[test] + pub fn arr_u8_write_without_padding() { + let mut buf = vec![]; + [2u8, 2, 2, 2] + .write_xdr(&mut Limited::new(Cursor::new(&mut buf), Limits::none())) + .unwrap(); + assert_eq!(buf, vec![2, 2, 2, 2]); + } + + #[test] + pub fn arr_u8_write_with_padding() { + let mut buf = vec![]; + [2u8] + .write_xdr(&mut Limited::new(Cursor::new(&mut buf), Limits::none())) + .unwrap(); + assert_eq!(buf, vec![2, 0, 0, 0]); + } +} + +#[cfg(all(test, feature = "std"))] +mod test { + use super::*; + + #[test] + fn into_option_none() { + let v: VecM = vec![].try_into().unwrap(); + assert_eq!(v.into_option(), None); + } + + #[test] + fn into_option_some() { + let v: VecM<_, 1> = 
vec![1].try_into().unwrap(); + assert_eq!(v.into_option(), Some(1)); + } + + #[test] + fn to_option_none() { + let v: VecM = vec![].try_into().unwrap(); + assert_eq!(v.to_option(), None); + } + + #[test] + fn to_option_some() { + let v: VecM<_, 1> = vec![1].try_into().unwrap(); + assert_eq!(v.to_option(), Some(1)); + } + + #[test] + fn depth_limited_read_write_under_the_limit_success() { + let a: Option>> = Some(Some(Some(5))); + let mut buf = Limited::new(Vec::new(), Limits::depth(4)); + a.write_xdr(&mut buf).unwrap(); + + let mut dlr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::depth(4)); + let a_back: Option>> = ReadXdr::read_xdr(&mut dlr).unwrap(); + assert_eq!(a, a_back); + } + + #[test] + fn write_over_depth_limit_fail() { + let a: Option>> = Some(Some(Some(5))); + let mut buf = Limited::new(Vec::new(), Limits::depth(3)); + let res = a.write_xdr(&mut buf); + match res { + Err(Error::DepthLimitExceeded) => (), + _ => panic!("expected DepthLimitExceeded got {res:?}"), + } + } + + #[test] + fn read_over_depth_limit_fail() { + let read_limits = Limits::depth(3); + let write_limits = Limits::depth(5); + let a: Option>> = Some(Some(Some(5))); + let mut buf = Limited::new(Vec::new(), write_limits); + a.write_xdr(&mut buf).unwrap(); + + let mut dlr = Limited::new(Cursor::new(buf.inner.as_slice()), read_limits); + let res: Result>>> = ReadXdr::read_xdr(&mut dlr); + match res { + Err(Error::DepthLimitExceeded) => (), + _ => panic!("expected DepthLimitExceeded got {res:?}"), + } + } + + #[test] + fn length_limited_read_write_i32() { + // Exact limit, success + let v = 123i32; + let mut buf = Limited::new(Vec::new(), Limits::len(4)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(4)); + let v_back: i32 = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 0); + assert_eq!(v, v_back); + + // Over limit, success + let v = 123i32; + let mut buf = Limited::new(Vec::new(), Limits::len(5)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 1); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(5)); + let v_back: i32 = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 1); + assert_eq!(v, v_back); + + // Write under limit, failure + let v = 123i32; + let mut buf = Limited::new(Vec::new(), Limits::len(3)); + assert_eq!(v.write_xdr(&mut buf), Err(Error::LengthLimitExceeded)); + + // Read under limit, failure + let v = 123i32; + let mut buf = Limited::new(Vec::new(), Limits::len(4)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(3)); + assert_eq!( + ::read_xdr(&mut lr), + Err(Error::LengthLimitExceeded) + ); + } + + #[test] + fn length_limited_read_write_u32() { + // Exact limit, success + let v = 123u32; + let mut buf = Limited::new(Vec::new(), Limits::len(4)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(4)); + let v_back: u32 = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 0); + assert_eq!(v, v_back); + + // Over limit, success + let v = 123u32; + let mut buf = Limited::new(Vec::new(), Limits::len(5)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 1); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(5)); + let v_back: u32 = ReadXdr::read_xdr(&mut lr).unwrap(); + 
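        // The length limit counts down as bytes are written or read: a 4-byte
        // value against a 5-byte limit leaves one byte of budget on each side.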
assert_eq!(buf.limits.len, 1); + assert_eq!(v, v_back); + + // Write under limit, failure + let v = 123u32; + let mut buf = Limited::new(Vec::new(), Limits::len(3)); + assert_eq!(v.write_xdr(&mut buf), Err(Error::LengthLimitExceeded)); + + // Read under limit, failure + let v = 123u32; + let mut buf = Limited::new(Vec::new(), Limits::len(4)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(3)); + assert_eq!( + ::read_xdr(&mut lr), + Err(Error::LengthLimitExceeded) + ); + } + + #[test] + fn length_limited_read_write_i64() { + // Exact limit, success + let v = 123i64; + let mut buf = Limited::new(Vec::new(), Limits::len(8)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(8)); + let v_back: i64 = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 0); + assert_eq!(v, v_back); + + // Over limit, success + let v = 123i64; + let mut buf = Limited::new(Vec::new(), Limits::len(9)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 1); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(9)); + let v_back: i64 = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 1); + assert_eq!(v, v_back); + + // Write under limit, failure + let v = 123i64; + let mut buf = Limited::new(Vec::new(), Limits::len(7)); + assert_eq!(v.write_xdr(&mut buf), Err(Error::LengthLimitExceeded)); + + // Read under limit, failure + let v = 123i64; + let mut buf = Limited::new(Vec::new(), Limits::len(8)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(7)); + assert_eq!( + ::read_xdr(&mut lr), + Err(Error::LengthLimitExceeded) + ); + } + + #[test] + fn length_limited_read_write_u64() { + // Exact limit, success + let v = 123u64; + let mut buf = Limited::new(Vec::new(), Limits::len(8)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(8)); + let v_back: u64 = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 0); + assert_eq!(v, v_back); + + // Over limit, success + let v = 123u64; + let mut buf = Limited::new(Vec::new(), Limits::len(9)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 1); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(9)); + let v_back: u64 = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 1); + assert_eq!(v, v_back); + + // Write under limit, failure + let v = 123u64; + let mut buf = Limited::new(Vec::new(), Limits::len(7)); + assert_eq!(v.write_xdr(&mut buf), Err(Error::LengthLimitExceeded)); + + // Read under limit, failure + let v = 123u64; + let mut buf = Limited::new(Vec::new(), Limits::len(8)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(7)); + assert_eq!( + ::read_xdr(&mut lr), + Err(Error::LengthLimitExceeded) + ); + } + + #[test] + fn length_limited_read_write_bool() { + // Exact limit, success + let v = true; + let mut buf = Limited::new(Vec::new(), Limits::len(4)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(4)); + let v_back: bool = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 0); 
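        // A bool is encoded as a 4-byte XDR integer (0 or 1), so a 4-byte
        // length limit is exactly consumed.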
+ assert_eq!(v, v_back); + + // Over limit, success + let v = true; + let mut buf = Limited::new(Vec::new(), Limits::len(5)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 1); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(5)); + let v_back: bool = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 1); + assert_eq!(v, v_back); + + // Write under limit, failure + let v = true; + let mut buf = Limited::new(Vec::new(), Limits::len(3)); + assert_eq!(v.write_xdr(&mut buf), Err(Error::LengthLimitExceeded)); + + // Read under limit, failure + let v = true; + let mut buf = Limited::new(Vec::new(), Limits::len(4)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(3)); + assert_eq!( + ::read_xdr(&mut lr), + Err(Error::LengthLimitExceeded) + ); + } + + #[test] + fn length_limited_read_write_option() { + // Exact limit, success + let v = Some(true); + let mut buf = Limited::new(Vec::new(), Limits::len(8)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(8)); + let v_back: Option = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 0); + assert_eq!(v, v_back); + + // Over limit, success + let v = Some(true); + let mut buf = Limited::new(Vec::new(), Limits::len(9)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 1); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(9)); + let v_back: Option = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 1); + assert_eq!(v, v_back); + + // Write under limit, failure + let v = Some(true); + let mut buf = Limited::new(Vec::new(), Limits::len(7)); + assert_eq!(v.write_xdr(&mut buf), Err(Error::LengthLimitExceeded)); + + // Read under limit, failure + let v = Some(true); + let mut buf = Limited::new(Vec::new(), Limits::len(8)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(7)); + assert_eq!( + as ReadXdr>::read_xdr(&mut lr), + Err(Error::LengthLimitExceeded) + ); + } + + #[test] + fn length_limited_read_write_array_u8() { + // Exact limit, success + let v = [1u8, 2, 3]; + let mut buf = Limited::new(Vec::new(), Limits::len(4)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(4)); + let v_back: [u8; 3] = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 0); + assert_eq!(v, v_back); + + // Over limit, success + let v = [1u8, 2, 3]; + let mut buf = Limited::new(Vec::new(), Limits::len(5)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 1); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(5)); + let v_back: [u8; 3] = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 1); + assert_eq!(v, v_back); + + // Write under limit, failure + let v = [1u8, 2, 3]; + let mut buf = Limited::new(Vec::new(), Limits::len(3)); + assert_eq!(v.write_xdr(&mut buf), Err(Error::LengthLimitExceeded)); + + // Read under limit, failure + let v = [1u8, 2, 3]; + let mut buf = Limited::new(Vec::new(), Limits::len(4)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(3)); + assert_eq!( + <[u8; 3] as ReadXdr>::read_xdr(&mut lr), + 
Err(Error::LengthLimitExceeded) + ); + } + + #[test] + fn length_limited_read_write_array_type() { + // Exact limit, success + let v = [true, false, true]; + let mut buf = Limited::new(Vec::new(), Limits::len(12)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(12)); + let v_back: [bool; 3] = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 0); + assert_eq!(v, v_back); + + // Over limit, success + let v = [true, false, true]; + let mut buf = Limited::new(Vec::new(), Limits::len(13)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 1); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(13)); + let v_back: [bool; 3] = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 1); + assert_eq!(v, v_back); + + // Write under limit, failure + let v = [true, false, true]; + let mut buf = Limited::new(Vec::new(), Limits::len(11)); + assert_eq!(v.write_xdr(&mut buf), Err(Error::LengthLimitExceeded)); + + // Read under limit, failure + let v = [true, false, true]; + let mut buf = Limited::new(Vec::new(), Limits::len(12)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(11)); + assert_eq!( + <[bool; 3] as ReadXdr>::read_xdr(&mut lr), + Err(Error::LengthLimitExceeded) + ); + } + + #[test] + fn length_limited_read_write_vec() { + // Exact limit, success + let v = VecM::::try_from([1i32, 2, 3]).unwrap(); + let mut buf = Limited::new(Vec::new(), Limits::len(16)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(16)); + let v_back: VecM = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 0); + assert_eq!(v, v_back); + + // Over limit, success + let v = VecM::::try_from([1i32, 2, 3]).unwrap(); + let mut buf = Limited::new(Vec::new(), Limits::len(17)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 1); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(17)); + let v_back: VecM = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 1); + assert_eq!(v, v_back); + + // Write under limit, failure + let v = VecM::::try_from([1i32, 2, 3]).unwrap(); + let mut buf = Limited::new(Vec::new(), Limits::len(15)); + assert_eq!(v.write_xdr(&mut buf), Err(Error::LengthLimitExceeded)); + + // Read under limit, failure + let v = VecM::::try_from([1i32, 2, 3]).unwrap(); + let mut buf = Limited::new(Vec::new(), Limits::len(16)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(15)); + assert_eq!( + as ReadXdr>::read_xdr(&mut lr), + Err(Error::LengthLimitExceeded) + ); + } + + #[test] + fn length_limited_read_write_bytes() { + // Exact limit, success + let v = BytesM::<3>::try_from([1u8, 2, 3]).unwrap(); + let mut buf = Limited::new(Vec::new(), Limits::len(8)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(8)); + let v_back: BytesM<3> = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 0); + assert_eq!(v, v_back); + + // Over limit, success + let v = BytesM::<3>::try_from([1u8, 2, 3]).unwrap(); + let mut buf = Limited::new(Vec::new(), Limits::len(9)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 1); + 
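        // The 8 bytes consumed are the 4-byte length prefix, the 3 payload
        // bytes, and 1 byte of zero padding to reach a 4-byte boundary, so a
        // 9-byte limit leaves 1.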
let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(9)); + let v_back: BytesM<3> = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 1); + assert_eq!(v, v_back); + + // Write under limit, failure + let v = BytesM::<3>::try_from([1u8, 2, 3]).unwrap(); + let mut buf = Limited::new(Vec::new(), Limits::len(7)); + assert_eq!(v.write_xdr(&mut buf), Err(Error::LengthLimitExceeded)); + + // Read under limit, failure + let v = BytesM::<3>::try_from([1u8, 2, 3]).unwrap(); + let mut buf = Limited::new(Vec::new(), Limits::len(8)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(7)); + assert_eq!( + as ReadXdr>::read_xdr(&mut lr), + Err(Error::LengthLimitExceeded) + ); + } + + #[test] + fn length_limited_read_write_string() { + // Exact limit, success + let v = StringM::<3>::try_from("123").unwrap(); + let mut buf = Limited::new(Vec::new(), Limits::len(8)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(8)); + let v_back: StringM<3> = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 0); + assert_eq!(v, v_back); + + // Over limit, success + let v = StringM::<3>::try_from("123").unwrap(); + let mut buf = Limited::new(Vec::new(), Limits::len(9)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 1); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(9)); + let v_back: StringM<3> = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 1); + assert_eq!(v, v_back); + + // Write under limit, failure + let v = StringM::<3>::try_from("123").unwrap(); + let mut buf = Limited::new(Vec::new(), Limits::len(7)); + assert_eq!(v.write_xdr(&mut buf), Err(Error::LengthLimitExceeded)); + + // Read under limit, failure + let v = StringM::<3>::try_from("123").unwrap(); + let mut buf = Limited::new(Vec::new(), Limits::len(8)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(7)); + assert_eq!( + as ReadXdr>::read_xdr(&mut lr), + Err(Error::LengthLimitExceeded) + ); + } +} + +#[cfg(all(test, not(feature = "alloc")))] +mod test { + use super::VecM; + + #[test] + fn to_option_none() { + let v: VecM = (&[]).try_into().unwrap(); + assert_eq!(v.to_option(), None); + } + + #[test] + fn to_option_some() { + let v: VecM<_, 1> = (&[1]).try_into().unwrap(); + assert_eq!(v.to_option(), Some(1)); + } +} + +/// Int64 is an XDR Typedef defines as: +/// +/// ```text +/// typedef hyper int64; +/// ``` +/// +pub type Int64 = i64; + +/// MyStruct is an XDR Struct defines as: +/// +/// ```text +/// struct MyStruct +/// { +/// int someInt; +/// int64 aBigInt; +/// opaque someOpaque[10]; +/// string someString<>; +/// string maxString<100>; +/// }; +/// ``` +/// +#[derive(Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord)] +#[cfg_attr(feature = "arbitrary", derive(Arbitrary))] +#[cfg_attr(all(feature = "serde", feature = "alloc"), derive(serde::Serialize, serde::Deserialize), serde(rename_all = "snake_case"))] +pub struct MyStruct { + pub some_int: i32, + pub a_big_int: i64, + pub some_opaque: [u8; 10], + pub some_string: StringM, + pub max_string: StringM::<100>, +} + + impl ReadXdr for MyStruct { + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result { + r.with_limited_depth(|r| { + Ok(Self{ + some_int: i32::read_xdr(r)?, +a_big_int: i64::read_xdr(r)?, 
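// Struct members are decoded back to back in XDR declaration order; there
// are no per-field tags or framing on the wire.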
+some_opaque: <[u8; 10]>::read_xdr(r)?, +some_string: StringM::read_xdr(r)?, +max_string: StringM::<100>::read_xdr(r)?, + }) + }) + } + } + + impl WriteXdr for MyStruct { + #[cfg(feature = "std")] + fn write_xdr(&self, w: &mut Limited) -> Result<()> { + w.with_limited_depth(|w| { + self.some_int.write_xdr(w)?; +self.a_big_int.write_xdr(w)?; +self.some_opaque.write_xdr(w)?; +self.some_string.write_xdr(w)?; +self.max_string.write_xdr(w)?; + Ok(()) + }) + } + } + + #[derive(Clone, Copy, Debug, Hash, PartialEq, Eq, PartialOrd, Ord)] + #[cfg_attr( + all(feature = "serde", feature = "alloc"), + derive(serde::Serialize, serde::Deserialize), + serde(rename_all = "snake_case") + )] + #[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] + pub enum TypeVariant { + Int64, +MyStruct, + } + + impl TypeVariant { + pub const VARIANTS: [TypeVariant; 2] = [ TypeVariant::Int64, +TypeVariant::MyStruct, ]; + pub const VARIANTS_STR: [&'static str; 2] = [ "Int64", +"MyStruct", ]; + + #[must_use] + #[allow(clippy::too_many_lines)] + pub const fn name(&self) -> &'static str { + match self { + Self::Int64 => "Int64", +Self::MyStruct => "MyStruct", + } + } + + #[must_use] + #[allow(clippy::too_many_lines)] + pub const fn variants() -> [TypeVariant; 2] { + Self::VARIANTS + } + + #[cfg(feature = "schemars")] + #[must_use] + #[allow(clippy::too_many_lines)] + pub fn json_schema(&self, gen: schemars::gen::SchemaGenerator) -> schemars::schema::RootSchema { + match self { + Self::Int64 => gen.into_root_schema_for::(), +Self::MyStruct => gen.into_root_schema_for::(), + } + } + } + + impl Name for TypeVariant { + #[must_use] + fn name(&self) -> &'static str { + Self::name(self) + } + } + + impl Variants for TypeVariant { + fn variants() -> slice::Iter<'static, TypeVariant> { + Self::VARIANTS.iter() + } + } + + impl core::str::FromStr for TypeVariant { + type Err = Error; + #[allow(clippy::too_many_lines)] + fn from_str(s: &str) -> Result { + match s { + "Int64" => Ok(Self::Int64), +"MyStruct" => Ok(Self::MyStruct), + _ => Err(Error::Invalid), + } + } + } + + #[derive(Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord)] + #[cfg_attr( + all(feature = "serde", feature = "alloc"), + derive(serde::Serialize, serde::Deserialize), + serde(rename_all = "snake_case"), + serde(untagged), + )] + #[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] + pub enum Type { + Int64(Box), +MyStruct(Box), + } + + impl Type { + pub const VARIANTS: [TypeVariant; 2] = [ TypeVariant::Int64, +TypeVariant::MyStruct, ]; + pub const VARIANTS_STR: [&'static str; 2] = [ "Int64", +"MyStruct", ]; + + #[cfg(feature = "std")] + #[allow(clippy::too_many_lines)] + pub fn read_xdr(v: TypeVariant, r: &mut Limited) -> Result { + match v { + TypeVariant::Int64 => r.with_limited_depth(|r| Ok(Self::Int64(Box::new(Int64::read_xdr(r)?)))), +TypeVariant::MyStruct => r.with_limited_depth(|r| Ok(Self::MyStruct(Box::new(MyStruct::read_xdr(r)?)))), + } + } + + #[cfg(feature = "base64")] + pub fn read_xdr_base64(v: TypeVariant, r: &mut Limited) -> Result { + let mut dec = Limited::new(base64::read::DecoderReader::new(&mut r.inner, base64::STANDARD), r.limits.clone()); + let t = Self::read_xdr(v, &mut dec)?; + Ok(t) + } + + #[cfg(feature = "std")] + pub fn read_xdr_to_end(v: TypeVariant, r: &mut Limited) -> Result { + let s = Self::read_xdr(v, r)?; + // Check that any further reads, such as this read of one byte, read no + // data, indicating EOF. If a byte is read the data is invalid. + if r.read(&mut [0u8; 1])? 
== 0 { + Ok(s) + } else { + Err(Error::Invalid) + } + } + + #[cfg(feature = "base64")] + pub fn read_xdr_base64_to_end(v: TypeVariant, r: &mut Limited) -> Result { + let mut dec = Limited::new(base64::read::DecoderReader::new(&mut r.inner, base64::STANDARD), r.limits.clone()); + let t = Self::read_xdr_to_end(v, &mut dec)?; + Ok(t) + } + + #[cfg(feature = "std")] + #[allow(clippy::too_many_lines)] + pub fn read_xdr_iter(v: TypeVariant, r: &mut Limited) -> Box> + '_> { + match v { + TypeVariant::Int64 => Box::new(ReadXdrIter::<_, Int64>::new(&mut r.inner, r.limits.clone()).map(|r| r.map(|t| Self::Int64(Box::new(t))))), +TypeVariant::MyStruct => Box::new(ReadXdrIter::<_, MyStruct>::new(&mut r.inner, r.limits.clone()).map(|r| r.map(|t| Self::MyStruct(Box::new(t))))), + } + } + + #[cfg(feature = "std")] + #[allow(clippy::too_many_lines)] + pub fn read_xdr_framed_iter(v: TypeVariant, r: &mut Limited) -> Box> + '_> { + match v { + TypeVariant::Int64 => Box::new(ReadXdrIter::<_, Frame>::new(&mut r.inner, r.limits.clone()).map(|r| r.map(|t| Self::Int64(Box::new(t.0))))), +TypeVariant::MyStruct => Box::new(ReadXdrIter::<_, Frame>::new(&mut r.inner, r.limits.clone()).map(|r| r.map(|t| Self::MyStruct(Box::new(t.0))))), + } + } + + #[cfg(feature = "base64")] + #[allow(clippy::too_many_lines)] + pub fn read_xdr_base64_iter(v: TypeVariant, r: &mut Limited) -> Box> + '_> { + let dec = base64::read::DecoderReader::new(&mut r.inner, base64::STANDARD); + match v { + TypeVariant::Int64 => Box::new(ReadXdrIter::<_, Int64>::new(dec, r.limits.clone()).map(|r| r.map(|t| Self::Int64(Box::new(t))))), +TypeVariant::MyStruct => Box::new(ReadXdrIter::<_, MyStruct>::new(dec, r.limits.clone()).map(|r| r.map(|t| Self::MyStruct(Box::new(t))))), + } + } + + #[cfg(feature = "std")] + pub fn from_xdr>(v: TypeVariant, bytes: B, limits: Limits) -> Result { + let mut cursor = Limited::new(Cursor::new(bytes.as_ref()), limits); + let t = Self::read_xdr_to_end(v, &mut cursor)?; + Ok(t) + } + + #[cfg(feature = "base64")] + pub fn from_xdr_base64(v: TypeVariant, b64: impl AsRef<[u8]>, limits: Limits) -> Result { + let mut b64_reader = Cursor::new(b64); + let mut dec = Limited::new(base64::read::DecoderReader::new(&mut b64_reader, base64::STANDARD), limits); + let t = Self::read_xdr_to_end(v, &mut dec)?; + Ok(t) + } + + #[cfg(all(feature = "std", feature = "serde_json"))] + #[allow(clippy::too_many_lines)] + pub fn read_json(v: TypeVariant, r: impl Read) -> Result { + match v { + TypeVariant::Int64 => Ok(Self::Int64(Box::new(serde_json::from_reader(r)?))), +TypeVariant::MyStruct => Ok(Self::MyStruct(Box::new(serde_json::from_reader(r)?))), + } + } + + #[cfg(feature = "alloc")] + #[must_use] + #[allow(clippy::too_many_lines)] + pub fn value(&self) -> &dyn core::any::Any { + #[allow(clippy::match_same_arms)] + match self { + Self::Int64(ref v) => v.as_ref(), +Self::MyStruct(ref v) => v.as_ref(), + } + } + + #[must_use] + #[allow(clippy::too_many_lines)] + pub const fn name(&self) -> &'static str { + match self { + Self::Int64(_) => "Int64", +Self::MyStruct(_) => "MyStruct", + } + } + + #[must_use] + #[allow(clippy::too_many_lines)] + pub const fn variants() -> [TypeVariant; 2] { + Self::VARIANTS + } + + #[must_use] + #[allow(clippy::too_many_lines)] + pub const fn variant(&self) -> TypeVariant { + match self { + Self::Int64(_) => TypeVariant::Int64, +Self::MyStruct(_) => TypeVariant::MyStruct, + } + } + } + + impl Name for Type { + #[must_use] + fn name(&self) -> &'static str { + Self::name(self) + } + } + + impl Variants for Type { + 
fn variants() -> slice::Iter<'static, TypeVariant> { + Self::VARIANTS.iter() + } + } + + impl WriteXdr for Type { + #[cfg(feature = "std")] + #[allow(clippy::too_many_lines)] + fn write_xdr(&self, w: &mut Limited) -> Result<()> { + match self { + Self::Int64(v) => v.write_xdr(w), +Self::MyStruct(v) => v.write_xdr(w), + } + } + } diff --git a/spec/output/generator_spec_rust_custom_jsonschema_impls/test.x/MyXDR.rs b/spec/output/generator_spec_rust_custom_jsonschema_impls/test.x/MyXDR.rs new file mode 100644 index 000000000..fdc5594db --- /dev/null +++ b/spec/output/generator_spec_rust_custom_jsonschema_impls/test.x/MyXDR.rs @@ -0,0 +1,5088 @@ +// Module is generated from: +// spec/fixtures/generator/test.x + +#![allow(clippy::missing_errors_doc, clippy::unreadable_literal)] + +/// `XDR_FILES_SHA256` is a list of pairs of source files and their SHA256 hashes. +pub const XDR_FILES_SHA256: [(&str, &str); 1] = [ + ("spec/fixtures/generator/test.x", "d29a98a6a3b9bf533a3e6712d928e0bed655e0f462ac4dae810c65d52ca9af41") +]; + +use core::{array::TryFromSliceError, fmt, fmt::Debug, marker::Sized, ops::Deref, slice}; + +#[cfg(feature = "std")] +use core::marker::PhantomData; + +// When feature alloc is turned off use static lifetime Box and Vec types. +#[cfg(not(feature = "alloc"))] +mod noalloc { + pub mod boxed { + pub type Box = &'static T; + } + pub mod vec { + pub type Vec = &'static [T]; + } +} +#[cfg(not(feature = "alloc"))] +use noalloc::{boxed::Box, vec::Vec}; + +// When feature std is turned off, but feature alloc is turned on import the +// alloc crate and use its Box and Vec types. +#[cfg(all(not(feature = "std"), feature = "alloc"))] +extern crate alloc; +#[cfg(all(not(feature = "std"), feature = "alloc"))] +use alloc::{ + borrow::ToOwned, + boxed::Box, + string::{FromUtf8Error, String}, + vec::Vec, +}; +#[cfg(feature = "std")] +use std::string::FromUtf8Error; + +#[cfg(feature = "arbitrary")] +use arbitrary::Arbitrary; + +// TODO: Add support for read/write xdr fns when std not available. + +#[cfg(feature = "std")] +use std::{ + error, io, + io::{BufRead, BufReader, Cursor, Read, Write}, +}; + +/// Error contains all errors returned by functions in this crate. It can be +/// compared via `PartialEq`, however any contained IO errors will only be +/// compared on their `ErrorKind`. +#[derive(Debug)] +pub enum Error { + Invalid, + Unsupported, + LengthExceedsMax, + LengthMismatch, + NonZeroPadding, + Utf8Error(core::str::Utf8Error), + #[cfg(feature = "alloc")] + InvalidHex, + #[cfg(feature = "std")] + Io(io::Error), + DepthLimitExceeded, + #[cfg(feature = "serde_json")] + Json(serde_json::Error), + LengthLimitExceeded, +} + +impl PartialEq for Error { + fn eq(&self, other: &Self) -> bool { + match (self, other) { + (Self::Utf8Error(l), Self::Utf8Error(r)) => l == r, + // IO errors cannot be compared, but in the absence of any more + // meaningful way to compare the errors we compare the kind of error + // and ignore the embedded source error or OS error. The main use + // case for comparing errors outputted by the XDR library is for + // error case testing, and a lack of the ability to compare has a + // detrimental affect on failure testing, so this is a tradeoff. 
+ #[cfg(feature = "std")] + (Self::Io(l), Self::Io(r)) => l.kind() == r.kind(), + _ => core::mem::discriminant(self) == core::mem::discriminant(other), + } + } +} + +#[cfg(feature = "std")] +impl error::Error for Error { + #[must_use] + fn source(&self) -> Option<&(dyn error::Error + 'static)> { + match self { + Self::Io(e) => Some(e), + #[cfg(feature = "serde_json")] + Self::Json(e) => Some(e), + _ => None, + } + } +} + +impl fmt::Display for Error { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match self { + Error::Invalid => write!(f, "xdr value invalid"), + Error::Unsupported => write!(f, "xdr value unsupported"), + Error::LengthExceedsMax => write!(f, "xdr value max length exceeded"), + Error::LengthMismatch => write!(f, "xdr value length does not match"), + Error::NonZeroPadding => write!(f, "xdr padding contains non-zero bytes"), + Error::Utf8Error(e) => write!(f, "{e}"), + #[cfg(feature = "alloc")] + Error::InvalidHex => write!(f, "hex invalid"), + #[cfg(feature = "std")] + Error::Io(e) => write!(f, "{e}"), + Error::DepthLimitExceeded => write!(f, "depth limit exceeded"), + #[cfg(feature = "serde_json")] + Error::Json(e) => write!(f, "{e}"), + Error::LengthLimitExceeded => write!(f, "length limit exceeded"), + } + } +} + +impl From for Error { + fn from(_: TryFromSliceError) -> Error { + Error::LengthMismatch + } +} + +impl From for Error { + #[must_use] + fn from(e: core::str::Utf8Error) -> Self { + Error::Utf8Error(e) + } +} + +#[cfg(feature = "alloc")] +impl From for Error { + #[must_use] + fn from(e: FromUtf8Error) -> Self { + Error::Utf8Error(e.utf8_error()) + } +} + +#[cfg(feature = "std")] +impl From for Error { + #[must_use] + fn from(e: io::Error) -> Self { + Error::Io(e) + } +} + +#[cfg(feature = "serde_json")] +impl From for Error { + #[must_use] + fn from(e: serde_json::Error) -> Self { + Error::Json(e) + } +} + +impl From for () { + fn from(_: Error) {} +} + +#[allow(dead_code)] +type Result = core::result::Result; + +/// Name defines types that assign a static name to their value, such as the +/// name given to an identifier in an XDR enum, or the name given to the case in +/// a union. +pub trait Name { + fn name(&self) -> &'static str; +} + +/// Discriminant defines types that may contain a one-of value determined +/// according to the discriminant, and exposes the value of the discriminant for +/// that type, such as in an XDR union. +pub trait Discriminant { + fn discriminant(&self) -> D; +} + +/// Iter defines types that have variants that can be iterated. +pub trait Variants { + fn variants() -> slice::Iter<'static, V> + where + V: Sized; +} + +// Enum defines a type that is represented as an XDR enumeration when encoded. +pub trait Enum: Name + Variants + Sized {} + +// Union defines a type that is represented as an XDR union when encoded. +pub trait Union: Name + Discriminant + Variants +where + D: Sized, +{ +} + +/// `Limits` contains the limits that a limited reader or writer will be +/// constrained to. +#[cfg(feature = "std")] +#[derive(Debug, Clone, Hash, PartialEq, Eq, PartialOrd, Ord)] +pub struct Limits { + /// Defines the maximum depth for recursive calls in `Read/WriteXdr` to + /// prevent stack overflow. + /// + /// The depth limit is akin to limiting stack depth. Its purpose is to + /// prevent the program from hitting the maximum stack size allowed by Rust, + /// which would result in an unrecoverable `SIGABRT`. 
For more information + /// about Rust's stack size limit, refer to the [Rust + /// documentation](https://doc.rust-lang.org/std/thread/#stack-size). + pub depth: u32, + + /// Defines the maximum number of bytes that will be read or written. + pub len: usize, +} + +#[cfg(feature = "std")] +impl Limits { + #[must_use] + pub fn none() -> Self { + Self { + depth: u32::MAX, + len: usize::MAX, + } + } + + #[must_use] + pub fn depth(depth: u32) -> Self { + Limits { + depth, + ..Limits::none() + } + } + + #[must_use] + pub fn len(len: usize) -> Self { + Limits { + len, + ..Limits::none() + } + } +} + +/// `Limited` wraps an object and provides functions for enforcing limits. +/// +/// Intended for use with readers and writers and limiting their reads and +/// writes. +#[cfg(feature = "std")] +pub struct Limited { + pub inner: L, + pub(crate) limits: Limits, +} + +#[cfg(feature = "std")] +impl Limited { + /// Constructs a new `Limited`. + /// + /// - `inner`: The value being limited. + /// - `limits`: The limits to enforce. + pub fn new(inner: L, limits: Limits) -> Self { + Limited { inner, limits } + } + + /// Consume the given length from the internal remaining length limit. + /// + /// ### Errors + /// + /// If the length would consume more length than the remaining length limit + /// allows. + pub(crate) fn consume_len(&mut self, len: usize) -> Result<()> { + if let Some(len) = self.limits.len.checked_sub(len) { + self.limits.len = len; + Ok(()) + } else { + Err(Error::LengthLimitExceeded) + } + } + + /// Consumes a single depth for the duration of the given function. + /// + /// ### Errors + /// + /// If the depth limit is already exhausted. + pub(crate) fn with_limited_depth(&mut self, f: F) -> Result + where + F: FnOnce(&mut Self) -> Result, + { + if let Some(depth) = self.limits.depth.checked_sub(1) { + self.limits.depth = depth; + let res = f(self); + self.limits.depth = self.limits.depth.saturating_add(1); + res + } else { + Err(Error::DepthLimitExceeded) + } + } +} + +#[cfg(feature = "std")] +impl Read for Limited { + /// Forwards the read operation to the wrapped object. + fn read(&mut self, buf: &mut [u8]) -> std::io::Result { + self.inner.read(buf) + } +} + +#[cfg(feature = "std")] +impl BufRead for Limited { + /// Forwards the read operation to the wrapped object. + fn fill_buf(&mut self) -> std::io::Result<&[u8]> { + self.inner.fill_buf() + } + + /// Forwards the read operation to the wrapped object. + fn consume(&mut self, amt: usize) { + self.inner.consume(amt); + } +} + +#[cfg(feature = "std")] +impl Write for Limited { + /// Forwards the write operation to the wrapped object. + fn write(&mut self, buf: &[u8]) -> std::io::Result { + self.inner.write(buf) + } + + /// Forwards the flush operation to the wrapped object. + fn flush(&mut self) -> std::io::Result<()> { + self.inner.flush() + } +} + +#[cfg(feature = "std")] +pub struct ReadXdrIter { + reader: Limited>, + _s: PhantomData, +} + +#[cfg(feature = "std")] +impl ReadXdrIter { + fn new(r: R, limits: Limits) -> Self { + Self { + reader: Limited { + inner: BufReader::new(r), + limits, + }, + _s: PhantomData, + } + } +} + +#[cfg(feature = "std")] +impl Iterator for ReadXdrIter { + type Item = Result; + + // Next reads the internal reader and XDR decodes it into the Self type. If + // the EOF is reached without reading any new bytes `None` is returned. If + // EOF is reached after reading some bytes a truncated entry is assumed an + // an `Error::Io` containing an `UnexpectedEof`. If any other IO error + // occurs it is returned. 
Iteration of this iterator stops naturally when + // `None` is returned, but not when a `Some(Err(...))` is returned. The + // caller is responsible for checking each Result. + fn next(&mut self) -> Option { + // Try to fill the buffer to see if the EOF has been reached or not. + // This happens to effectively peek to see if the stream has finished + // and there are no more items. It is necessary to do this because the + // xdr types in this crate heavily use the `std::io::Read::read_exact` + // method that doesn't distinguish between an EOF at the beginning of a + // read and an EOF after a partial fill of a read_exact. + match self.reader.fill_buf() { + // If the reader has no more data and is unable to fill any new data + // into its internal buf, then the EOF has been reached. + Ok([]) => return None, + // If an error occurs filling the buffer, treat that as an error and stop. + Err(e) => return Some(Err(Error::Io(e))), + // If there is data in the buf available for reading, continue. + Ok([..]) => (), + }; + // Read the buf into the type. + let r = self.reader.with_limited_depth(|dlr| S::read_xdr(dlr)); + match r { + Ok(s) => Some(Ok(s)), + Err(e) => Some(Err(e)), + } + } +} + +pub trait ReadXdr +where + Self: Sized, +{ + /// Read the XDR and construct the type. + /// + /// Read bytes from the given read implementation, decoding the bytes as + /// XDR, and construct the type implementing this interface from those + /// bytes. + /// + /// Just enough bytes are read from the read implementation to construct the + /// type. Any residual bytes remain in the read implementation. + /// + /// All implementations should continue if the read implementation returns + /// [`ErrorKind::Interrupted`](std::io::ErrorKind::Interrupted). + /// + /// Use [`ReadXdR: Read_xdr_to_end`] when the intent is for all bytes in the + /// read implementation to be consumed by the read. + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result; + + /// Construct the type from the XDR bytes base64 encoded. + /// + /// An error is returned if the bytes are not completely consumed by the + /// deserialization. + #[cfg(feature = "base64")] + fn read_xdr_base64(r: &mut Limited) -> Result { + let mut dec = Limited::new( + base64::read::DecoderReader::new(&mut r.inner, base64::STANDARD), + r.limits.clone(), + ); + let t = Self::read_xdr(&mut dec)?; + Ok(t) + } + + /// Read the XDR and construct the type, and consider it an error if the + /// read does not completely consume the read implementation. + /// + /// Read bytes from the given read implementation, decoding the bytes as + /// XDR, and construct the type implementing this interface from those + /// bytes. + /// + /// Just enough bytes are read from the read implementation to construct the + /// type, and then confirm that no further bytes remain. To confirm no + /// further bytes remain additional bytes are attempted to be read from the + /// read implementation. If it is possible to read any residual bytes from + /// the read implementation an error is returned. The read implementation + /// may not be exhaustively read if there are residual bytes, and it is + /// considered undefined how many residual bytes or how much of the residual + /// buffer are consumed in this case. + /// + /// All implementations should continue if the read implementation returns + /// [`ErrorKind::Interrupted`](std::io::ErrorKind::Interrupted). 
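    ///
    /// A sketch of typical usage, assuming some generated type
    /// `MyType: ReadXdr`, an in-memory `bytes` buffer, and the `std` feature
    /// (the names here are illustrative only):
    ///
    /// ```ignore
    /// let mut r = Limited::new(Cursor::new(bytes), Limits::none());
    /// let t = MyType::read_xdr_to_end(&mut r)?;
    /// ```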
+ #[cfg(feature = "std")] + fn read_xdr_to_end(r: &mut Limited) -> Result { + let s = Self::read_xdr(r)?; + // Check that any further reads, such as this read of one byte, read no + // data, indicating EOF. If a byte is read the data is invalid. + if r.read(&mut [0u8; 1])? == 0 { + Ok(s) + } else { + Err(Error::Invalid) + } + } + + /// Construct the type from the XDR bytes base64 encoded. + /// + /// An error is returned if the bytes are not completely consumed by the + /// deserialization. + #[cfg(feature = "base64")] + fn read_xdr_base64_to_end(r: &mut Limited) -> Result { + let mut dec = Limited::new( + base64::read::DecoderReader::new(&mut r.inner, base64::STANDARD), + r.limits.clone(), + ); + let t = Self::read_xdr_to_end(&mut dec)?; + Ok(t) + } + + /// Read the XDR and construct the type. + /// + /// Read bytes from the given read implementation, decoding the bytes as + /// XDR, and construct the type implementing this interface from those + /// bytes. + /// + /// Just enough bytes are read from the read implementation to construct the + /// type. Any residual bytes remain in the read implementation. + /// + /// All implementations should continue if the read implementation returns + /// [`ErrorKind::Interrupted`](std::io::ErrorKind::Interrupted). + /// + /// Use [`ReadXdR: Read_xdr_into_to_end`] when the intent is for all bytes + /// in the read implementation to be consumed by the read. + #[cfg(feature = "std")] + fn read_xdr_into(&mut self, r: &mut Limited) -> Result<()> { + *self = Self::read_xdr(r)?; + Ok(()) + } + + /// Read the XDR into the existing value, and consider it an error if the + /// read does not completely consume the read implementation. + /// + /// Read bytes from the given read implementation, decoding the bytes as + /// XDR, and construct the type implementing this interface from those + /// bytes. + /// + /// Just enough bytes are read from the read implementation to construct the + /// type, and then confirm that no further bytes remain. To confirm no + /// further bytes remain additional bytes are attempted to be read from the + /// read implementation. If it is possible to read any residual bytes from + /// the read implementation an error is returned. The read implementation + /// may not be exhaustively read if there are residual bytes, and it is + /// considered undefined how many residual bytes or how much of the residual + /// buffer are consumed in this case. + /// + /// All implementations should continue if the read implementation returns + /// [`ErrorKind::Interrupted`](std::io::ErrorKind::Interrupted). + #[cfg(feature = "std")] + fn read_xdr_into_to_end(&mut self, r: &mut Limited) -> Result<()> { + Self::read_xdr_into(self, r)?; + // Check that any further reads, such as this read of one byte, read no + // data, indicating EOF. If a byte is read the data is invalid. + if r.read(&mut [0u8; 1])? == 0 { + Ok(()) + } else { + Err(Error::Invalid) + } + } + + /// Create an iterator that reads the read implementation as a stream of + /// values that are read into the implementing type. + /// + /// Read bytes from the given read implementation, decoding the bytes as + /// XDR, and construct the type implementing this interface from those + /// bytes. + /// + /// Just enough bytes are read from the read implementation to construct the + /// type, and then confirm that no further bytes remain. To confirm no + /// further bytes remain additional bytes are attempted to be read from the + /// read implementation. 
If it is possible to read any residual bytes from + /// the read implementation an error is returned. The read implementation + /// may not be exhaustively read if there are residual bytes, and it is + /// considered undefined how many residual bytes or how much of the residual + /// buffer are consumed in this case. + /// + /// All implementations should continue if the read implementation returns + /// [`ErrorKind::Interrupted`](std::io::ErrorKind::Interrupted). + #[cfg(feature = "std")] + fn read_xdr_iter(r: &mut Limited) -> ReadXdrIter<&mut R, Self> { + ReadXdrIter::new(&mut r.inner, r.limits.clone()) + } + + /// Create an iterator that reads the read implementation as a stream of + /// values that are read into the implementing type. + #[cfg(feature = "base64")] + fn read_xdr_base64_iter( + r: &mut Limited, + ) -> ReadXdrIter, Self> { + let dec = base64::read::DecoderReader::new(&mut r.inner, base64::STANDARD); + ReadXdrIter::new(dec, r.limits.clone()) + } + + /// Construct the type from the XDR bytes. + /// + /// An error is returned if the bytes are not completely consumed by the + /// deserialization. + #[cfg(feature = "std")] + fn from_xdr(bytes: impl AsRef<[u8]>, limits: Limits) -> Result { + let mut cursor = Limited::new(Cursor::new(bytes.as_ref()), limits); + let t = Self::read_xdr_to_end(&mut cursor)?; + Ok(t) + } + + /// Construct the type from the XDR bytes base64 encoded. + /// + /// An error is returned if the bytes are not completely consumed by the + /// deserialization. + #[cfg(feature = "base64")] + fn from_xdr_base64(b64: impl AsRef<[u8]>, limits: Limits) -> Result { + let mut b64_reader = Cursor::new(b64); + let mut dec = Limited::new( + base64::read::DecoderReader::new(&mut b64_reader, base64::STANDARD), + limits, + ); + let t = Self::read_xdr_to_end(&mut dec)?; + Ok(t) + } +} + +pub trait WriteXdr { + #[cfg(feature = "std")] + fn write_xdr(&self, w: &mut Limited) -> Result<()>; + + #[cfg(feature = "std")] + fn to_xdr(&self, limits: Limits) -> Result> { + let mut cursor = Limited::new(Cursor::new(vec![]), limits); + self.write_xdr(&mut cursor)?; + let bytes = cursor.inner.into_inner(); + Ok(bytes) + } + + #[cfg(feature = "base64")] + fn to_xdr_base64(&self, limits: Limits) -> Result { + let mut enc = Limited::new( + base64::write::EncoderStringWriter::new(base64::STANDARD), + limits, + ); + self.write_xdr(&mut enc)?; + let b64 = enc.inner.into_inner(); + Ok(b64) + } +} + +/// `Pad_len` returns the number of bytes to pad an XDR value of the given +/// length to make the final serialized size a multiple of 4. +#[cfg(feature = "std")] +fn pad_len(len: usize) -> usize { + (4 - (len % 4)) % 4 +} + +impl ReadXdr for i32 { + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result { + let mut b = [0u8; 4]; + r.with_limited_depth(|r| { + r.consume_len(b.len())?; + r.read_exact(&mut b)?; + Ok(i32::from_be_bytes(b)) + }) + } +} + +impl WriteXdr for i32 { + #[cfg(feature = "std")] + fn write_xdr(&self, w: &mut Limited) -> Result<()> { + let b: [u8; 4] = self.to_be_bytes(); + w.with_limited_depth(|w| { + w.consume_len(b.len())?; + Ok(w.write_all(&b)?) 
+ }) + } +} + +impl ReadXdr for u32 { + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result { + let mut b = [0u8; 4]; + r.with_limited_depth(|r| { + r.consume_len(b.len())?; + r.read_exact(&mut b)?; + Ok(u32::from_be_bytes(b)) + }) + } +} + +impl WriteXdr for u32 { + #[cfg(feature = "std")] + fn write_xdr(&self, w: &mut Limited) -> Result<()> { + let b: [u8; 4] = self.to_be_bytes(); + w.with_limited_depth(|w| { + w.consume_len(b.len())?; + Ok(w.write_all(&b)?) + }) + } +} + +impl ReadXdr for i64 { + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result { + let mut b = [0u8; 8]; + r.with_limited_depth(|r| { + r.consume_len(b.len())?; + r.read_exact(&mut b)?; + Ok(i64::from_be_bytes(b)) + }) + } +} + +impl WriteXdr for i64 { + #[cfg(feature = "std")] + fn write_xdr(&self, w: &mut Limited) -> Result<()> { + let b: [u8; 8] = self.to_be_bytes(); + w.with_limited_depth(|w| { + w.consume_len(b.len())?; + Ok(w.write_all(&b)?) + }) + } +} + +impl ReadXdr for u64 { + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result { + let mut b = [0u8; 8]; + r.with_limited_depth(|r| { + r.consume_len(b.len())?; + r.read_exact(&mut b)?; + Ok(u64::from_be_bytes(b)) + }) + } +} + +impl WriteXdr for u64 { + #[cfg(feature = "std")] + fn write_xdr(&self, w: &mut Limited) -> Result<()> { + let b: [u8; 8] = self.to_be_bytes(); + w.with_limited_depth(|w| { + w.consume_len(b.len())?; + Ok(w.write_all(&b)?) + }) + } +} + +impl ReadXdr for f32 { + #[cfg(feature = "std")] + fn read_xdr(_r: &mut Limited) -> Result { + todo!() + } +} + +impl WriteXdr for f32 { + #[cfg(feature = "std")] + fn write_xdr(&self, _w: &mut Limited) -> Result<()> { + todo!() + } +} + +impl ReadXdr for f64 { + #[cfg(feature = "std")] + fn read_xdr(_r: &mut Limited) -> Result { + todo!() + } +} + +impl WriteXdr for f64 { + #[cfg(feature = "std")] + fn write_xdr(&self, _w: &mut Limited) -> Result<()> { + todo!() + } +} + +impl ReadXdr for bool { + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result { + r.with_limited_depth(|r| { + let i = u32::read_xdr(r)?; + let b = i == 1; + Ok(b) + }) + } +} + +impl WriteXdr for bool { + #[cfg(feature = "std")] + fn write_xdr(&self, w: &mut Limited) -> Result<()> { + w.with_limited_depth(|w| { + let i = u32::from(*self); // true = 1, false = 0 + i.write_xdr(w) + }) + } +} + +impl ReadXdr for Option { + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result { + r.with_limited_depth(|r| { + let i = u32::read_xdr(r)?; + match i { + 0 => Ok(None), + 1 => { + let t = T::read_xdr(r)?; + Ok(Some(t)) + } + _ => Err(Error::Invalid), + } + }) + } +} + +impl WriteXdr for Option { + #[cfg(feature = "std")] + fn write_xdr(&self, w: &mut Limited) -> Result<()> { + w.with_limited_depth(|w| { + if let Some(t) = self { + 1u32.write_xdr(w)?; + t.write_xdr(w)?; + } else { + 0u32.write_xdr(w)?; + } + Ok(()) + }) + } +} + +impl ReadXdr for Box { + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result { + r.with_limited_depth(|r| Ok(Box::new(T::read_xdr(r)?))) + } +} + +impl WriteXdr for Box { + #[cfg(feature = "std")] + fn write_xdr(&self, w: &mut Limited) -> Result<()> { + w.with_limited_depth(|w| T::write_xdr(self, w)) + } +} + +impl ReadXdr for () { + #[cfg(feature = "std")] + fn read_xdr(_r: &mut Limited) -> Result { + Ok(()) + } +} + +impl WriteXdr for () { + #[cfg(feature = "std")] + fn write_xdr(&self, _w: &mut Limited) -> Result<()> { + Ok(()) + } +} + +impl ReadXdr for [u8; N] { + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result { + 
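        // Fixed-length opaque data carries no length prefix on the wire:
        // exactly N bytes are read, followed by zero padding up to the next
        // 4-byte boundary, and the padding is checked to be zero.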
r.with_limited_depth(|r| { + r.consume_len(N)?; + let padding = pad_len(N); + r.consume_len(padding)?; + let mut arr = [0u8; N]; + r.read_exact(&mut arr)?; + let pad = &mut [0u8; 3][..padding]; + r.read_exact(pad)?; + if pad.iter().any(|b| *b != 0) { + return Err(Error::NonZeroPadding); + } + Ok(arr) + }) + } +} + +impl WriteXdr for [u8; N] { + #[cfg(feature = "std")] + fn write_xdr(&self, w: &mut Limited) -> Result<()> { + w.with_limited_depth(|w| { + w.consume_len(N)?; + let padding = pad_len(N); + w.consume_len(padding)?; + w.write_all(self)?; + w.write_all(&[0u8; 3][..padding])?; + Ok(()) + }) + } +} + +impl ReadXdr for [T; N] { + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result { + r.with_limited_depth(|r| { + let mut vec = Vec::with_capacity(N); + for _ in 0..N { + let t = T::read_xdr(r)?; + vec.push(t); + } + let arr: [T; N] = vec.try_into().unwrap_or_else(|_: Vec| unreachable!()); + Ok(arr) + }) + } +} + +impl WriteXdr for [T; N] { + #[cfg(feature = "std")] + fn write_xdr(&self, w: &mut Limited) -> Result<()> { + w.with_limited_depth(|w| { + for t in self { + t.write_xdr(w)?; + } + Ok(()) + }) + } +} + +// VecM ------------------------------------------------------------------------ + +#[cfg(feature = "alloc")] +#[derive(Debug, Clone, Hash, PartialEq, Eq, PartialOrd, Ord)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +#[cfg_attr(feature = "arbitrary", derive(Arbitrary))] +pub struct VecM(Vec); + +#[cfg(not(feature = "alloc"))] +#[derive(Debug, Clone, Hash, PartialEq, Eq, PartialOrd, Ord)] +#[cfg_attr(feature = "arbitrary", derive(Arbitrary))] +pub struct VecM(Vec) +where + T: 'static; + +impl Deref for VecM { + type Target = Vec; + + fn deref(&self) -> &Self::Target { + &self.0 + } +} + +impl Default for VecM { + fn default() -> Self { + Self(Vec::default()) + } +} + +#[cfg(feature = "schemars")] +impl schemars::JsonSchema for VecM { + fn schema_name() -> String { + format!("VecM<{}, {}>", T::schema_name(), MAX) + } + + fn is_referenceable() -> bool { + false + } + + fn json_schema(gen: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema { + let schema = Vec::::json_schema(gen); + if let schemars::schema::Schema::Object(mut schema) = schema { + if let Some(array) = schema.array.clone() { + schema.array = Some(Box::new(schemars::schema::ArrayValidation { + max_items: Some(MAX), + ..*array + })); + } + schema.into() + } else { + schema + } + } +} + +impl VecM { + pub const MAX_LEN: usize = { MAX as usize }; + + #[must_use] + #[allow(clippy::unused_self)] + pub fn max_len(&self) -> usize { + Self::MAX_LEN + } + + #[must_use] + pub fn as_vec(&self) -> &Vec { + self.as_ref() + } +} + +impl VecM { + #[must_use] + #[cfg(feature = "alloc")] + pub fn to_vec(&self) -> Vec { + self.into() + } + + #[must_use] + pub fn into_vec(self) -> Vec { + self.into() + } +} + +impl VecM { + #[cfg(feature = "alloc")] + pub fn to_string(&self) -> Result { + self.try_into() + } + + #[cfg(feature = "alloc")] + pub fn into_string(self) -> Result { + self.try_into() + } + + #[cfg(feature = "alloc")] + #[must_use] + pub fn to_string_lossy(&self) -> String { + String::from_utf8_lossy(&self.0).into_owned() + } + + #[cfg(feature = "alloc")] + #[must_use] + pub fn into_string_lossy(self) -> String { + String::from_utf8_lossy(&self.0).into_owned() + } +} + +impl VecM { + #[must_use] + pub fn to_option(&self) -> Option { + if self.len() > 0 { + Some(self.0[0].clone()) + } else { + None + } + } +} + +#[cfg(not(feature = "alloc"))] +impl From> for Option { + 
#[must_use] + fn from(v: VecM) -> Self { + v.to_option() + } +} + +#[cfg(feature = "alloc")] +impl VecM { + #[must_use] + pub fn into_option(mut self) -> Option { + self.0.drain(..).next() + } +} + +#[cfg(feature = "alloc")] +impl From> for Option { + #[must_use] + fn from(v: VecM) -> Self { + v.into_option() + } +} + +impl TryFrom> for VecM { + type Error = Error; + + fn try_from(v: Vec) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(VecM(v)) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +impl From> for Vec { + #[must_use] + fn from(v: VecM) -> Self { + v.0 + } +} + +#[cfg(feature = "alloc")] +impl From<&VecM> for Vec { + #[must_use] + fn from(v: &VecM) -> Self { + v.0.clone() + } +} + +impl AsRef> for VecM { + #[must_use] + fn as_ref(&self) -> &Vec { + &self.0 + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&Vec> for VecM { + type Error = Error; + + fn try_from(v: &Vec) -> Result { + v.as_slice().try_into() + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&[T]> for VecM { + type Error = Error; + + fn try_from(v: &[T]) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(VecM(v.to_vec())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +impl AsRef<[T]> for VecM { + #[cfg(feature = "alloc")] + #[must_use] + fn as_ref(&self) -> &[T] { + self.0.as_ref() + } + #[cfg(not(feature = "alloc"))] + #[must_use] + fn as_ref(&self) -> &[T] { + self.0 + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<[T; N]> for VecM { + type Error = Error; + + fn try_from(v: [T; N]) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(VecM(v.to_vec())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +#[cfg(feature = "alloc")] +impl TryFrom> for [T; N] { + type Error = VecM; + + fn try_from(v: VecM) -> core::result::Result { + let s: [T; N] = v.0.try_into().map_err(|v: Vec| VecM::(v))?; + Ok(s) + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&[T; N]> for VecM { + type Error = Error; + + fn try_from(v: &[T; N]) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(VecM(v.to_vec())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +#[cfg(not(feature = "alloc"))] +impl TryFrom<&'static [T; N]> for VecM { + type Error = Error; + + fn try_from(v: &'static [T; N]) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(VecM(v)) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&String> for VecM { + type Error = Error; + + fn try_from(v: &String) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(VecM(v.as_bytes().to_vec())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +#[cfg(feature = "alloc")] +impl TryFrom for VecM { + type Error = Error; + + fn try_from(v: String) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(VecM(v.into())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +#[cfg(feature = "alloc")] +impl TryFrom> for String { + type Error = Error; + + fn try_from(v: VecM) -> Result { + Ok(String::from_utf8(v.0)?) 
+ } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&VecM> for String { + type Error = Error; + + fn try_from(v: &VecM) -> Result { + Ok(core::str::from_utf8(v.as_ref())?.to_owned()) + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&str> for VecM { + type Error = Error; + + fn try_from(v: &str) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(VecM(v.into())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +#[cfg(not(feature = "alloc"))] +impl TryFrom<&'static str> for VecM { + type Error = Error; + + fn try_from(v: &'static str) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(VecM(v.as_bytes())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +impl<'a, const MAX: u32> TryFrom<&'a VecM> for &'a str { + type Error = Error; + + fn try_from(v: &'a VecM) -> Result { + Ok(core::str::from_utf8(v.as_ref())?) + } +} + +impl ReadXdr for VecM { + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result { + r.with_limited_depth(|r| { + let len: u32 = u32::read_xdr(r)?; + if len > MAX { + return Err(Error::LengthExceedsMax); + } + + r.consume_len(len as usize)?; + let padding = pad_len(len as usize); + r.consume_len(padding)?; + + let mut vec = vec![0u8; len as usize]; + r.read_exact(&mut vec)?; + + let pad = &mut [0u8; 3][..padding]; + r.read_exact(pad)?; + if pad.iter().any(|b| *b != 0) { + return Err(Error::NonZeroPadding); + } + + Ok(VecM(vec)) + }) + } +} + +impl WriteXdr for VecM { + #[cfg(feature = "std")] + fn write_xdr(&self, w: &mut Limited) -> Result<()> { + w.with_limited_depth(|w| { + let len: u32 = self.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + len.write_xdr(w)?; + + w.consume_len(self.len())?; + let padding = pad_len(self.len()); + w.consume_len(padding)?; + + w.write_all(&self.0)?; + + w.write_all(&[0u8; 3][..padding])?; + + Ok(()) + }) + } +} + +impl ReadXdr for VecM { + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result { + r.with_limited_depth(|r| { + let len = u32::read_xdr(r)?; + if len > MAX { + return Err(Error::LengthExceedsMax); + } + + let mut vec = Vec::new(); + for _ in 0..len { + let t = T::read_xdr(r)?; + vec.push(t); + } + + Ok(VecM(vec)) + }) + } +} + +impl WriteXdr for VecM { + #[cfg(feature = "std")] + fn write_xdr(&self, w: &mut Limited) -> Result<()> { + w.with_limited_depth(|w| { + let len: u32 = self.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + len.write_xdr(w)?; + + for t in &self.0 { + t.write_xdr(w)?; + } + + Ok(()) + }) + } +} + +// BytesM ------------------------------------------------------------------------ + +#[cfg(feature = "alloc")] +#[derive(Clone, Hash, PartialEq, Eq, PartialOrd, Ord)] +#[cfg_attr( + feature = "serde", + derive(serde_with::SerializeDisplay, serde_with::DeserializeFromStr) +)] +#[cfg_attr(feature = "arbitrary", derive(Arbitrary))] +pub struct BytesM(Vec); + +#[cfg(not(feature = "alloc"))] +#[derive(Clone, Hash, PartialEq, Eq, PartialOrd, Ord)] +#[cfg_attr(feature = "arbitrary", derive(Arbitrary))] +pub struct BytesM(Vec); + +impl core::fmt::Display for BytesM { + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + #[cfg(feature = "alloc")] + let v = &self.0; + #[cfg(not(feature = "alloc"))] + let v = self.0; + for b in v { + write!(f, "{b:02x}")?; + } + Ok(()) + } +} + +impl core::fmt::Debug for BytesM { + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + #[cfg(feature = "alloc")] + let v = &self.0; + 
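        // Without the `alloc` feature the inner `Vec` alias is a
        // `&'static [u8]` (see the `noalloc` module near the top of the
        // file), so the next binding takes it by value instead of borrowing.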
#[cfg(not(feature = "alloc"))] + let v = self.0; + write!(f, "BytesM(")?; + for b in v { + write!(f, "{b:02x}")?; + } + write!(f, ")")?; + Ok(()) + } +} + +#[cfg(feature = "alloc")] +impl core::str::FromStr for BytesM { + type Err = Error; + fn from_str(s: &str) -> core::result::Result { + hex::decode(s).map_err(|_| Error::InvalidHex)?.try_into() + } +} + +impl Deref for BytesM { + type Target = Vec; + + fn deref(&self) -> &Self::Target { + &self.0 + } +} + +#[cfg(feature = "schemars")] +impl schemars::JsonSchema for BytesM { + fn schema_name() -> String { + format!("BytesM<{MAX}>") + } + + fn is_referenceable() -> bool { + false + } + + fn json_schema(gen: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema { + let schema = String::json_schema(gen); + if let schemars::schema::Schema::Object(mut schema) = schema { + schema.extensions.insert( + "contentEncoding".to_owned(), + serde_json::Value::String("hex".to_string()), + ); + schema.extensions.insert( + "contentMediaType".to_owned(), + serde_json::Value::String("application/binary".to_string()), + ); + let string = *schema.string.unwrap_or_default().clone(); + schema.string = Some(Box::new(schemars::schema::StringValidation { + max_length: MAX.checked_mul(2).map(Some).unwrap_or_default(), + min_length: None, + ..string + })); + schema.into() + } else { + schema + } + } +} + +impl Default for BytesM { + fn default() -> Self { + Self(Vec::default()) + } +} + +impl BytesM { + pub const MAX_LEN: usize = { MAX as usize }; + + #[must_use] + #[allow(clippy::unused_self)] + pub fn max_len(&self) -> usize { + Self::MAX_LEN + } + + #[must_use] + pub fn as_vec(&self) -> &Vec { + self.as_ref() + } +} + +impl BytesM { + #[must_use] + #[cfg(feature = "alloc")] + pub fn to_vec(&self) -> Vec { + self.into() + } + + #[must_use] + pub fn into_vec(self) -> Vec { + self.into() + } +} + +impl BytesM { + #[cfg(feature = "alloc")] + pub fn to_string(&self) -> Result { + self.try_into() + } + + #[cfg(feature = "alloc")] + pub fn into_string(self) -> Result { + self.try_into() + } + + #[cfg(feature = "alloc")] + #[must_use] + pub fn to_string_lossy(&self) -> String { + String::from_utf8_lossy(&self.0).into_owned() + } + + #[cfg(feature = "alloc")] + #[must_use] + pub fn into_string_lossy(self) -> String { + String::from_utf8_lossy(&self.0).into_owned() + } +} + +impl TryFrom> for BytesM { + type Error = Error; + + fn try_from(v: Vec) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(BytesM(v)) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +impl From> for Vec { + #[must_use] + fn from(v: BytesM) -> Self { + v.0 + } +} + +#[cfg(feature = "alloc")] +impl From<&BytesM> for Vec { + #[must_use] + fn from(v: &BytesM) -> Self { + v.0.clone() + } +} + +impl AsRef> for BytesM { + #[must_use] + fn as_ref(&self) -> &Vec { + &self.0 + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&Vec> for BytesM { + type Error = Error; + + fn try_from(v: &Vec) -> Result { + v.as_slice().try_into() + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&[u8]> for BytesM { + type Error = Error; + + fn try_from(v: &[u8]) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(BytesM(v.to_vec())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +impl AsRef<[u8]> for BytesM { + #[cfg(feature = "alloc")] + #[must_use] + fn as_ref(&self) -> &[u8] { + self.0.as_ref() + } + #[cfg(not(feature = "alloc"))] + #[must_use] + fn as_ref(&self) -> &[u8] { + self.0 + } +} 
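+
+// Illustrative sketch, not part of the generated API: with the `schemars`
+// feature enabled, the JsonSchema impl above renders a BytesM<MAX> as a hex
+// string, so the schema caps the string at 2 * MAX characters and tags it as
+// hex-encoded binary content.
+#[cfg(feature = "schemars")]
+#[allow(dead_code)]
+fn example_bytesm_schema_max_len() {
+    let mut gen = schemars::gen::SchemaGenerator::default();
+    // BytesM is not referenceable, so this returns the inline string schema.
+    let schema = gen.subschema_for::<BytesM<64>>();
+    if let schemars::schema::Schema::Object(obj) = schema {
+        // 64 bytes encode to 128 hex characters.
+        assert_eq!(obj.string.and_then(|s| s.max_length), Some(128));
+    }
+}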
+ +#[cfg(feature = "alloc")] +impl TryFrom<[u8; N]> for BytesM { + type Error = Error; + + fn try_from(v: [u8; N]) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(BytesM(v.to_vec())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +#[cfg(feature = "alloc")] +impl TryFrom> for [u8; N] { + type Error = BytesM; + + fn try_from(v: BytesM) -> core::result::Result { + let s: [u8; N] = v.0.try_into().map_err(BytesM::)?; + Ok(s) + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&[u8; N]> for BytesM { + type Error = Error; + + fn try_from(v: &[u8; N]) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(BytesM(v.to_vec())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +#[cfg(not(feature = "alloc"))] +impl TryFrom<&'static [u8; N]> for BytesM { + type Error = Error; + + fn try_from(v: &'static [u8; N]) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(BytesM(v)) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&String> for BytesM { + type Error = Error; + + fn try_from(v: &String) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(BytesM(v.as_bytes().to_vec())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +#[cfg(feature = "alloc")] +impl TryFrom for BytesM { + type Error = Error; + + fn try_from(v: String) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(BytesM(v.into())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +#[cfg(feature = "alloc")] +impl TryFrom> for String { + type Error = Error; + + fn try_from(v: BytesM) -> Result { + Ok(String::from_utf8(v.0)?) + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&BytesM> for String { + type Error = Error; + + fn try_from(v: &BytesM) -> Result { + Ok(core::str::from_utf8(v.as_ref())?.to_owned()) + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&str> for BytesM { + type Error = Error; + + fn try_from(v: &str) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(BytesM(v.into())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +#[cfg(not(feature = "alloc"))] +impl TryFrom<&'static str> for BytesM { + type Error = Error; + + fn try_from(v: &'static str) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(BytesM(v.as_bytes())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +impl<'a, const MAX: u32> TryFrom<&'a BytesM> for &'a str { + type Error = Error; + + fn try_from(v: &'a BytesM) -> Result { + Ok(core::str::from_utf8(v.as_ref())?) 
+ } +} + +impl ReadXdr for BytesM { + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result { + r.with_limited_depth(|r| { + let len: u32 = u32::read_xdr(r)?; + if len > MAX { + return Err(Error::LengthExceedsMax); + } + + r.consume_len(len as usize)?; + let padding = pad_len(len as usize); + r.consume_len(padding)?; + + let mut vec = vec![0u8; len as usize]; + r.read_exact(&mut vec)?; + + let pad = &mut [0u8; 3][..padding]; + r.read_exact(pad)?; + if pad.iter().any(|b| *b != 0) { + return Err(Error::NonZeroPadding); + } + + Ok(BytesM(vec)) + }) + } +} + +impl WriteXdr for BytesM { + #[cfg(feature = "std")] + fn write_xdr(&self, w: &mut Limited) -> Result<()> { + w.with_limited_depth(|w| { + let len: u32 = self.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + len.write_xdr(w)?; + + w.consume_len(self.len())?; + let padding = pad_len(self.len()); + w.consume_len(padding)?; + + w.write_all(&self.0)?; + + w.write_all(&[0u8; 3][..pad_len(len as usize)])?; + + Ok(()) + }) + } +} + +// StringM ------------------------------------------------------------------------ + +/// A string type that contains arbitrary bytes. +/// +/// Convertible, fallibly, to/from a Rust UTF-8 String using +/// [`TryFrom`]/[`TryInto`]/[`StringM::to_utf8_string`]. +/// +/// Convertible, lossyly, to a Rust UTF-8 String using +/// [`StringM::to_utf8_string_lossy`]. +/// +/// Convertible to/from escaped printable-ASCII using +/// [`Display`]/[`ToString`]/[`FromStr`]. + +#[cfg(feature = "alloc")] +#[derive(Clone, Hash, PartialEq, Eq, PartialOrd, Ord)] +#[cfg_attr( + feature = "serde", + derive(serde_with::SerializeDisplay, serde_with::DeserializeFromStr) +)] +#[cfg_attr(feature = "arbitrary", derive(Arbitrary))] +pub struct StringM(Vec); + +#[cfg(not(feature = "alloc"))] +#[derive(Clone, Hash, PartialEq, Eq, PartialOrd, Ord)] +#[cfg_attr(feature = "arbitrary", derive(Arbitrary))] +pub struct StringM(Vec); + +impl core::fmt::Display for StringM { + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + #[cfg(feature = "alloc")] + let v = &self.0; + #[cfg(not(feature = "alloc"))] + let v = self.0; + for b in escape_bytes::Escape::new(v) { + write!(f, "{}", b as char)?; + } + Ok(()) + } +} + +impl core::fmt::Debug for StringM { + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + #[cfg(feature = "alloc")] + let v = &self.0; + #[cfg(not(feature = "alloc"))] + let v = self.0; + write!(f, "StringM(")?; + for b in escape_bytes::Escape::new(v) { + write!(f, "{}", b as char)?; + } + write!(f, ")")?; + Ok(()) + } +} + +#[cfg(feature = "alloc")] +impl core::str::FromStr for StringM { + type Err = Error; + fn from_str(s: &str) -> core::result::Result { + let b = escape_bytes::unescape(s.as_bytes()).map_err(|_| Error::Invalid)?; + Ok(Self(b)) + } +} + +impl Deref for StringM { + type Target = Vec; + + fn deref(&self) -> &Self::Target { + &self.0 + } +} + +impl Default for StringM { + fn default() -> Self { + Self(Vec::default()) + } +} + +#[cfg(feature = "schemars")] +impl schemars::JsonSchema for StringM { + fn schema_name() -> String { + format!("StringM<{MAX}>") + } + + fn json_schema(gen: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema { + let schema = String::json_schema(gen); + if let schemars::schema::Schema::Object(mut schema) = schema { + let string = *schema.string.unwrap_or_default().clone(); + schema.string = Some(Box::new(schemars::schema::StringValidation { + max_length: Some(MAX), + ..string + })); + schema.into() + } else { + schema + } + } +} + 
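+// Illustrative sketch, not part of the generated API: StringM<MAX> bounds the
+// byte length at construction and converts to UTF-8 fallibly, which is what
+// the maxLength = MAX constraint in the schema impl above reflects.
+#[cfg(feature = "alloc")]
+#[allow(dead_code)]
+fn example_stringm_conversions() {
+    let s: StringM<32> = "hello".try_into().unwrap();
+    assert_eq!(s.len(), 5);
+    // Fallible conversion back to a Rust String (bytes must be valid UTF-8).
+    let back: String = (&s).try_into().unwrap();
+    assert_eq!(back, "hello");
+    // Over-long input is rejected rather than truncated.
+    assert!(StringM::<4>::try_from("too long").is_err());
+}
+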
+impl StringM { + pub const MAX_LEN: usize = { MAX as usize }; + + #[must_use] + #[allow(clippy::unused_self)] + pub fn max_len(&self) -> usize { + Self::MAX_LEN + } + + #[must_use] + pub fn as_vec(&self) -> &Vec { + self.as_ref() + } +} + +impl StringM { + #[must_use] + #[cfg(feature = "alloc")] + pub fn to_vec(&self) -> Vec { + self.into() + } + + #[must_use] + pub fn into_vec(self) -> Vec { + self.into() + } +} + +impl StringM { + #[cfg(feature = "alloc")] + pub fn to_utf8_string(&self) -> Result { + self.try_into() + } + + #[cfg(feature = "alloc")] + pub fn into_utf8_string(self) -> Result { + self.try_into() + } + + #[cfg(feature = "alloc")] + #[must_use] + pub fn to_utf8_string_lossy(&self) -> String { + String::from_utf8_lossy(&self.0).into_owned() + } + + #[cfg(feature = "alloc")] + #[must_use] + pub fn into_utf8_string_lossy(self) -> String { + String::from_utf8_lossy(&self.0).into_owned() + } +} + +impl TryFrom> for StringM { + type Error = Error; + + fn try_from(v: Vec) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(StringM(v)) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +impl From> for Vec { + #[must_use] + fn from(v: StringM) -> Self { + v.0 + } +} + +#[cfg(feature = "alloc")] +impl From<&StringM> for Vec { + #[must_use] + fn from(v: &StringM) -> Self { + v.0.clone() + } +} + +impl AsRef> for StringM { + #[must_use] + fn as_ref(&self) -> &Vec { + &self.0 + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&Vec> for StringM { + type Error = Error; + + fn try_from(v: &Vec) -> Result { + v.as_slice().try_into() + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&[u8]> for StringM { + type Error = Error; + + fn try_from(v: &[u8]) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(StringM(v.to_vec())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +impl AsRef<[u8]> for StringM { + #[cfg(feature = "alloc")] + #[must_use] + fn as_ref(&self) -> &[u8] { + self.0.as_ref() + } + #[cfg(not(feature = "alloc"))] + #[must_use] + fn as_ref(&self) -> &[u8] { + self.0 + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<[u8; N]> for StringM { + type Error = Error; + + fn try_from(v: [u8; N]) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(StringM(v.to_vec())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +#[cfg(feature = "alloc")] +impl TryFrom> for [u8; N] { + type Error = StringM; + + fn try_from(v: StringM) -> core::result::Result { + let s: [u8; N] = v.0.try_into().map_err(StringM::)?; + Ok(s) + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&[u8; N]> for StringM { + type Error = Error; + + fn try_from(v: &[u8; N]) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(StringM(v.to_vec())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +#[cfg(not(feature = "alloc"))] +impl TryFrom<&'static [u8; N]> for StringM { + type Error = Error; + + fn try_from(v: &'static [u8; N]) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(StringM(v)) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&String> for StringM { + type Error = Error; + + fn try_from(v: &String) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(StringM(v.as_bytes().to_vec())) + } else { + 
Err(Error::LengthExceedsMax) + } + } +} + +#[cfg(feature = "alloc")] +impl TryFrom for StringM { + type Error = Error; + + fn try_from(v: String) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(StringM(v.into())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +#[cfg(feature = "alloc")] +impl TryFrom> for String { + type Error = Error; + + fn try_from(v: StringM) -> Result { + Ok(String::from_utf8(v.0)?) + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&StringM> for String { + type Error = Error; + + fn try_from(v: &StringM) -> Result { + Ok(core::str::from_utf8(v.as_ref())?.to_owned()) + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&str> for StringM { + type Error = Error; + + fn try_from(v: &str) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(StringM(v.into())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +#[cfg(not(feature = "alloc"))] +impl TryFrom<&'static str> for StringM { + type Error = Error; + + fn try_from(v: &'static str) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(StringM(v.as_bytes())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +impl<'a, const MAX: u32> TryFrom<&'a StringM> for &'a str { + type Error = Error; + + fn try_from(v: &'a StringM) -> Result { + Ok(core::str::from_utf8(v.as_ref())?) + } +} + +impl ReadXdr for StringM { + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result { + r.with_limited_depth(|r| { + let len: u32 = u32::read_xdr(r)?; + if len > MAX { + return Err(Error::LengthExceedsMax); + } + + r.consume_len(len as usize)?; + let padding = pad_len(len as usize); + r.consume_len(padding)?; + + let mut vec = vec![0u8; len as usize]; + r.read_exact(&mut vec)?; + + let pad = &mut [0u8; 3][..padding]; + r.read_exact(pad)?; + if pad.iter().any(|b| *b != 0) { + return Err(Error::NonZeroPadding); + } + + Ok(StringM(vec)) + }) + } +} + +impl WriteXdr for StringM { + #[cfg(feature = "std")] + fn write_xdr(&self, w: &mut Limited) -> Result<()> { + w.with_limited_depth(|w| { + let len: u32 = self.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + len.write_xdr(w)?; + + w.consume_len(self.len())?; + let padding = pad_len(self.len()); + w.consume_len(padding)?; + + w.write_all(&self.0)?; + + w.write_all(&[0u8; 3][..padding])?; + + Ok(()) + }) + } +} + +// Frame ------------------------------------------------------------------------ + +#[derive(Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord)] +#[cfg_attr( + all(feature = "serde", feature = "alloc"), + derive(serde::Serialize, serde::Deserialize), + serde(rename_all = "snake_case") +)] +pub struct Frame(pub T) +where + T: ReadXdr; + +#[cfg(feature = "schemars")] +impl schemars::JsonSchema for Frame { + fn schema_name() -> String { + format!("Frame<{}>", T::schema_name()) + } + + fn json_schema(gen: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema { + T::json_schema(gen) + } +} + +impl ReadXdr for Frame +where + T: ReadXdr, +{ + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result { + // Read the frame header value that contains 1 flag-bit and a 33-bit length. + // - The 1 flag bit is 0 when there are more frames for the same record. + // - The 31-bit length is the length of the bytes within the frame that + // follow the frame header. + let header = u32::read_xdr(r)?; + // TODO: Use the length and cap the length we'll read from `r`. 
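+        // For example (illustrative): a header of 0x8000_0004 has the
+        // last-record flag set and says 4 bytes of XDR data follow it,
+        // while 0x0000_0004 describes a 4-byte frame with more frames of
+        // the same record still to come.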
+ let last_record = header >> 31 == 1; + if last_record { + // Read the record in the frame. + Ok(Self(T::read_xdr(r)?)) + } else { + // TODO: Support reading those additional frames for the same + // record. + Err(Error::Unsupported) + } + } +} + +#[cfg(all(test, feature = "std"))] +mod tests { + use std::io::Cursor; + + use super::*; + + #[test] + pub fn vec_u8_read_without_padding() { + let buf = Cursor::new(vec![0, 0, 0, 4, 2, 2, 2, 2]); + let v = VecM::::read_xdr(&mut Limited::new(buf, Limits::none())).unwrap(); + assert_eq!(v.to_vec(), vec![2, 2, 2, 2]); + } + + #[test] + pub fn vec_u8_read_with_padding() { + let buf = Cursor::new(vec![0, 0, 0, 1, 2, 0, 0, 0]); + let v = VecM::::read_xdr(&mut Limited::new(buf, Limits::none())).unwrap(); + assert_eq!(v.to_vec(), vec![2]); + } + + #[test] + pub fn vec_u8_read_with_insufficient_padding() { + let buf = Cursor::new(vec![0, 0, 0, 1, 2, 0, 0]); + let res = VecM::::read_xdr(&mut Limited::new(buf, Limits::none())); + match res { + Err(Error::Io(_)) => (), + _ => panic!("expected IO error got {res:?}"), + } + } + + #[test] + pub fn vec_u8_read_with_non_zero_padding() { + let buf = Cursor::new(vec![0, 0, 0, 1, 2, 3, 0, 0]); + let res = VecM::::read_xdr(&mut Limited::new(buf, Limits::none())); + match res { + Err(Error::NonZeroPadding) => (), + _ => panic!("expected NonZeroPadding got {res:?}"), + } + } + + #[test] + pub fn vec_u8_write_without_padding() { + let mut buf = vec![]; + let v: VecM = vec![2, 2, 2, 2].try_into().unwrap(); + + v.write_xdr(&mut Limited::new(Cursor::new(&mut buf), Limits::none())) + .unwrap(); + assert_eq!(buf, vec![0, 0, 0, 4, 2, 2, 2, 2]); + } + + #[test] + pub fn vec_u8_write_with_padding() { + let mut buf = vec![]; + let v: VecM = vec![2].try_into().unwrap(); + v.write_xdr(&mut Limited::new(Cursor::new(&mut buf), Limits::none())) + .unwrap(); + assert_eq!(buf, vec![0, 0, 0, 1, 2, 0, 0, 0]); + } + + #[test] + pub fn arr_u8_read_without_padding() { + let buf = Cursor::new(vec![2, 2, 2, 2]); + let v = <[u8; 4]>::read_xdr(&mut Limited::new(buf, Limits::none())).unwrap(); + assert_eq!(v, [2, 2, 2, 2]); + } + + #[test] + pub fn arr_u8_read_with_padding() { + let buf = Cursor::new(vec![2, 0, 0, 0]); + let v = <[u8; 1]>::read_xdr(&mut Limited::new(buf, Limits::none())).unwrap(); + assert_eq!(v, [2]); + } + + #[test] + pub fn arr_u8_read_with_insufficient_padding() { + let buf = Cursor::new(vec![2, 0, 0]); + let res = <[u8; 1]>::read_xdr(&mut Limited::new(buf, Limits::none())); + match res { + Err(Error::Io(_)) => (), + _ => panic!("expected IO error got {res:?}"), + } + } + + #[test] + pub fn arr_u8_read_with_non_zero_padding() { + let buf = Cursor::new(vec![2, 3, 0, 0]); + let res = <[u8; 1]>::read_xdr(&mut Limited::new(buf, Limits::none())); + match res { + Err(Error::NonZeroPadding) => (), + _ => panic!("expected NonZeroPadding got {res:?}"), + } + } + + #[test] + pub fn arr_u8_write_without_padding() { + let mut buf = vec![]; + [2u8, 2, 2, 2] + .write_xdr(&mut Limited::new(Cursor::new(&mut buf), Limits::none())) + .unwrap(); + assert_eq!(buf, vec![2, 2, 2, 2]); + } + + #[test] + pub fn arr_u8_write_with_padding() { + let mut buf = vec![]; + [2u8] + .write_xdr(&mut Limited::new(Cursor::new(&mut buf), Limits::none())) + .unwrap(); + assert_eq!(buf, vec![2, 0, 0, 0]); + } +} + +#[cfg(all(test, feature = "std"))] +mod test { + use super::*; + + #[test] + fn into_option_none() { + let v: VecM = vec![].try_into().unwrap(); + assert_eq!(v.into_option(), None); + } + + #[test] + fn into_option_some() { + let v: VecM<_, 1> = 
vec![1].try_into().unwrap(); + assert_eq!(v.into_option(), Some(1)); + } + + #[test] + fn to_option_none() { + let v: VecM = vec![].try_into().unwrap(); + assert_eq!(v.to_option(), None); + } + + #[test] + fn to_option_some() { + let v: VecM<_, 1> = vec![1].try_into().unwrap(); + assert_eq!(v.to_option(), Some(1)); + } + + #[test] + fn depth_limited_read_write_under_the_limit_success() { + let a: Option>> = Some(Some(Some(5))); + let mut buf = Limited::new(Vec::new(), Limits::depth(4)); + a.write_xdr(&mut buf).unwrap(); + + let mut dlr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::depth(4)); + let a_back: Option>> = ReadXdr::read_xdr(&mut dlr).unwrap(); + assert_eq!(a, a_back); + } + + #[test] + fn write_over_depth_limit_fail() { + let a: Option>> = Some(Some(Some(5))); + let mut buf = Limited::new(Vec::new(), Limits::depth(3)); + let res = a.write_xdr(&mut buf); + match res { + Err(Error::DepthLimitExceeded) => (), + _ => panic!("expected DepthLimitExceeded got {res:?}"), + } + } + + #[test] + fn read_over_depth_limit_fail() { + let read_limits = Limits::depth(3); + let write_limits = Limits::depth(5); + let a: Option>> = Some(Some(Some(5))); + let mut buf = Limited::new(Vec::new(), write_limits); + a.write_xdr(&mut buf).unwrap(); + + let mut dlr = Limited::new(Cursor::new(buf.inner.as_slice()), read_limits); + let res: Result>>> = ReadXdr::read_xdr(&mut dlr); + match res { + Err(Error::DepthLimitExceeded) => (), + _ => panic!("expected DepthLimitExceeded got {res:?}"), + } + } + + #[test] + fn length_limited_read_write_i32() { + // Exact limit, success + let v = 123i32; + let mut buf = Limited::new(Vec::new(), Limits::len(4)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(4)); + let v_back: i32 = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 0); + assert_eq!(v, v_back); + + // Over limit, success + let v = 123i32; + let mut buf = Limited::new(Vec::new(), Limits::len(5)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 1); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(5)); + let v_back: i32 = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 1); + assert_eq!(v, v_back); + + // Write under limit, failure + let v = 123i32; + let mut buf = Limited::new(Vec::new(), Limits::len(3)); + assert_eq!(v.write_xdr(&mut buf), Err(Error::LengthLimitExceeded)); + + // Read under limit, failure + let v = 123i32; + let mut buf = Limited::new(Vec::new(), Limits::len(4)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(3)); + assert_eq!( + ::read_xdr(&mut lr), + Err(Error::LengthLimitExceeded) + ); + } + + #[test] + fn length_limited_read_write_u32() { + // Exact limit, success + let v = 123u32; + let mut buf = Limited::new(Vec::new(), Limits::len(4)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(4)); + let v_back: u32 = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 0); + assert_eq!(v, v_back); + + // Over limit, success + let v = 123u32; + let mut buf = Limited::new(Vec::new(), Limits::len(5)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 1); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(5)); + let v_back: u32 = ReadXdr::read_xdr(&mut lr).unwrap(); + 
assert_eq!(buf.limits.len, 1); + assert_eq!(v, v_back); + + // Write under limit, failure + let v = 123u32; + let mut buf = Limited::new(Vec::new(), Limits::len(3)); + assert_eq!(v.write_xdr(&mut buf), Err(Error::LengthLimitExceeded)); + + // Read under limit, failure + let v = 123u32; + let mut buf = Limited::new(Vec::new(), Limits::len(4)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(3)); + assert_eq!( + ::read_xdr(&mut lr), + Err(Error::LengthLimitExceeded) + ); + } + + #[test] + fn length_limited_read_write_i64() { + // Exact limit, success + let v = 123i64; + let mut buf = Limited::new(Vec::new(), Limits::len(8)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(8)); + let v_back: i64 = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 0); + assert_eq!(v, v_back); + + // Over limit, success + let v = 123i64; + let mut buf = Limited::new(Vec::new(), Limits::len(9)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 1); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(9)); + let v_back: i64 = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 1); + assert_eq!(v, v_back); + + // Write under limit, failure + let v = 123i64; + let mut buf = Limited::new(Vec::new(), Limits::len(7)); + assert_eq!(v.write_xdr(&mut buf), Err(Error::LengthLimitExceeded)); + + // Read under limit, failure + let v = 123i64; + let mut buf = Limited::new(Vec::new(), Limits::len(8)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(7)); + assert_eq!( + ::read_xdr(&mut lr), + Err(Error::LengthLimitExceeded) + ); + } + + #[test] + fn length_limited_read_write_u64() { + // Exact limit, success + let v = 123u64; + let mut buf = Limited::new(Vec::new(), Limits::len(8)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(8)); + let v_back: u64 = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 0); + assert_eq!(v, v_back); + + // Over limit, success + let v = 123u64; + let mut buf = Limited::new(Vec::new(), Limits::len(9)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 1); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(9)); + let v_back: u64 = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 1); + assert_eq!(v, v_back); + + // Write under limit, failure + let v = 123u64; + let mut buf = Limited::new(Vec::new(), Limits::len(7)); + assert_eq!(v.write_xdr(&mut buf), Err(Error::LengthLimitExceeded)); + + // Read under limit, failure + let v = 123u64; + let mut buf = Limited::new(Vec::new(), Limits::len(8)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(7)); + assert_eq!( + ::read_xdr(&mut lr), + Err(Error::LengthLimitExceeded) + ); + } + + #[test] + fn length_limited_read_write_bool() { + // Exact limit, success + let v = true; + let mut buf = Limited::new(Vec::new(), Limits::len(4)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(4)); + let v_back: bool = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 0); 
+ assert_eq!(v, v_back); + + // Over limit, success + let v = true; + let mut buf = Limited::new(Vec::new(), Limits::len(5)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 1); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(5)); + let v_back: bool = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 1); + assert_eq!(v, v_back); + + // Write under limit, failure + let v = true; + let mut buf = Limited::new(Vec::new(), Limits::len(3)); + assert_eq!(v.write_xdr(&mut buf), Err(Error::LengthLimitExceeded)); + + // Read under limit, failure + let v = true; + let mut buf = Limited::new(Vec::new(), Limits::len(4)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(3)); + assert_eq!( + ::read_xdr(&mut lr), + Err(Error::LengthLimitExceeded) + ); + } + + #[test] + fn length_limited_read_write_option() { + // Exact limit, success + let v = Some(true); + let mut buf = Limited::new(Vec::new(), Limits::len(8)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(8)); + let v_back: Option = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 0); + assert_eq!(v, v_back); + + // Over limit, success + let v = Some(true); + let mut buf = Limited::new(Vec::new(), Limits::len(9)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 1); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(9)); + let v_back: Option = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 1); + assert_eq!(v, v_back); + + // Write under limit, failure + let v = Some(true); + let mut buf = Limited::new(Vec::new(), Limits::len(7)); + assert_eq!(v.write_xdr(&mut buf), Err(Error::LengthLimitExceeded)); + + // Read under limit, failure + let v = Some(true); + let mut buf = Limited::new(Vec::new(), Limits::len(8)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(7)); + assert_eq!( + as ReadXdr>::read_xdr(&mut lr), + Err(Error::LengthLimitExceeded) + ); + } + + #[test] + fn length_limited_read_write_array_u8() { + // Exact limit, success + let v = [1u8, 2, 3]; + let mut buf = Limited::new(Vec::new(), Limits::len(4)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(4)); + let v_back: [u8; 3] = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 0); + assert_eq!(v, v_back); + + // Over limit, success + let v = [1u8, 2, 3]; + let mut buf = Limited::new(Vec::new(), Limits::len(5)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 1); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(5)); + let v_back: [u8; 3] = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 1); + assert_eq!(v, v_back); + + // Write under limit, failure + let v = [1u8, 2, 3]; + let mut buf = Limited::new(Vec::new(), Limits::len(3)); + assert_eq!(v.write_xdr(&mut buf), Err(Error::LengthLimitExceeded)); + + // Read under limit, failure + let v = [1u8, 2, 3]; + let mut buf = Limited::new(Vec::new(), Limits::len(4)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(3)); + assert_eq!( + <[u8; 3] as ReadXdr>::read_xdr(&mut lr), + 
Err(Error::LengthLimitExceeded) + ); + } + + #[test] + fn length_limited_read_write_array_type() { + // Exact limit, success + let v = [true, false, true]; + let mut buf = Limited::new(Vec::new(), Limits::len(12)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(12)); + let v_back: [bool; 3] = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 0); + assert_eq!(v, v_back); + + // Over limit, success + let v = [true, false, true]; + let mut buf = Limited::new(Vec::new(), Limits::len(13)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 1); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(13)); + let v_back: [bool; 3] = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 1); + assert_eq!(v, v_back); + + // Write under limit, failure + let v = [true, false, true]; + let mut buf = Limited::new(Vec::new(), Limits::len(11)); + assert_eq!(v.write_xdr(&mut buf), Err(Error::LengthLimitExceeded)); + + // Read under limit, failure + let v = [true, false, true]; + let mut buf = Limited::new(Vec::new(), Limits::len(12)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(11)); + assert_eq!( + <[bool; 3] as ReadXdr>::read_xdr(&mut lr), + Err(Error::LengthLimitExceeded) + ); + } + + #[test] + fn length_limited_read_write_vec() { + // Exact limit, success + let v = VecM::::try_from([1i32, 2, 3]).unwrap(); + let mut buf = Limited::new(Vec::new(), Limits::len(16)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(16)); + let v_back: VecM = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 0); + assert_eq!(v, v_back); + + // Over limit, success + let v = VecM::::try_from([1i32, 2, 3]).unwrap(); + let mut buf = Limited::new(Vec::new(), Limits::len(17)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 1); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(17)); + let v_back: VecM = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 1); + assert_eq!(v, v_back); + + // Write under limit, failure + let v = VecM::::try_from([1i32, 2, 3]).unwrap(); + let mut buf = Limited::new(Vec::new(), Limits::len(15)); + assert_eq!(v.write_xdr(&mut buf), Err(Error::LengthLimitExceeded)); + + // Read under limit, failure + let v = VecM::::try_from([1i32, 2, 3]).unwrap(); + let mut buf = Limited::new(Vec::new(), Limits::len(16)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(15)); + assert_eq!( + as ReadXdr>::read_xdr(&mut lr), + Err(Error::LengthLimitExceeded) + ); + } + + #[test] + fn length_limited_read_write_bytes() { + // Exact limit, success + let v = BytesM::<3>::try_from([1u8, 2, 3]).unwrap(); + let mut buf = Limited::new(Vec::new(), Limits::len(8)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(8)); + let v_back: BytesM<3> = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 0); + assert_eq!(v, v_back); + + // Over limit, success + let v = BytesM::<3>::try_from([1u8, 2, 3]).unwrap(); + let mut buf = Limited::new(Vec::new(), Limits::len(9)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 1); + 
let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(9)); + let v_back: BytesM<3> = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 1); + assert_eq!(v, v_back); + + // Write under limit, failure + let v = BytesM::<3>::try_from([1u8, 2, 3]).unwrap(); + let mut buf = Limited::new(Vec::new(), Limits::len(7)); + assert_eq!(v.write_xdr(&mut buf), Err(Error::LengthLimitExceeded)); + + // Read under limit, failure + let v = BytesM::<3>::try_from([1u8, 2, 3]).unwrap(); + let mut buf = Limited::new(Vec::new(), Limits::len(8)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(7)); + assert_eq!( + as ReadXdr>::read_xdr(&mut lr), + Err(Error::LengthLimitExceeded) + ); + } + + #[test] + fn length_limited_read_write_string() { + // Exact limit, success + let v = StringM::<3>::try_from("123").unwrap(); + let mut buf = Limited::new(Vec::new(), Limits::len(8)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(8)); + let v_back: StringM<3> = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 0); + assert_eq!(v, v_back); + + // Over limit, success + let v = StringM::<3>::try_from("123").unwrap(); + let mut buf = Limited::new(Vec::new(), Limits::len(9)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 1); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(9)); + let v_back: StringM<3> = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 1); + assert_eq!(v, v_back); + + // Write under limit, failure + let v = StringM::<3>::try_from("123").unwrap(); + let mut buf = Limited::new(Vec::new(), Limits::len(7)); + assert_eq!(v.write_xdr(&mut buf), Err(Error::LengthLimitExceeded)); + + // Read under limit, failure + let v = StringM::<3>::try_from("123").unwrap(); + let mut buf = Limited::new(Vec::new(), Limits::len(8)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(7)); + assert_eq!( + as ReadXdr>::read_xdr(&mut lr), + Err(Error::LengthLimitExceeded) + ); + } +} + +#[cfg(all(test, not(feature = "alloc")))] +mod test { + use super::VecM; + + #[test] + fn to_option_none() { + let v: VecM = (&[]).try_into().unwrap(); + assert_eq!(v.to_option(), None); + } + + #[test] + fn to_option_some() { + let v: VecM<_, 1> = (&[1]).try_into().unwrap(); + assert_eq!(v.to_option(), Some(1)); + } +} + +/// Uint512 is an XDR Typedef defines as: +/// +/// ```text +/// typedef opaque uint512[64]; +/// ``` +/// +#[derive(Clone, Hash, PartialEq, Eq, PartialOrd, Ord)] +#[cfg_attr(feature = "arbitrary", derive(Arbitrary))] +#[cfg_attr(all(feature = "serde", feature = "alloc"), derive(serde_with::SerializeDisplay, serde_with::DeserializeFromStr))] +pub struct Uint512(pub [u8; 64]); + +impl core::fmt::Debug for Uint512 { + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + let v = &self.0; + write!(f, "Uint512(")?; + for b in v { + write!(f, "{b:02x}")?; + } + write!(f, ")")?; + Ok(()) + } +} +impl core::fmt::Display for Uint512 { + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + let v = &self.0; + for b in v { + write!(f, "{b:02x}")?; + } + Ok(()) + } +} + +#[cfg(feature = "alloc")] +impl core::str::FromStr for Uint512 { + type Err = Error; + fn from_str(s: &str) -> core::result::Result { + hex::decode(s).map_err(|_| 
Error::InvalidHex)?.try_into() + } +} +#[cfg(feature = "schemars")] +impl schemars::JsonSchema for Uint512 { + fn schema_name() -> String { + "Uint512".to_string() + } + + fn is_referenceable() -> bool { + false + } + + fn json_schema(gen: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema { + let schema = String::json_schema(gen); + if let schemars::schema::Schema::Object(mut schema) = schema { + schema.extensions.insert( + "contentEncoding".to_owned(), + serde_json::Value::String("hex".to_string()), + ); + schema.extensions.insert( + "contentMediaType".to_owned(), + serde_json::Value::String("application/binary".to_string()), + ); + let string = *schema.string.unwrap_or_default().clone(); + schema.string = Some(Box::new(schemars::schema::StringValidation { + max_length: 64_u32.checked_mul(2).map(Some).unwrap_or_default(), + min_length: 64_u32.checked_mul(2).map(Some).unwrap_or_default(), + ..string + })); + schema.into() + } else { + schema + } + } +} +impl From for [u8; 64] { + #[must_use] + fn from(x: Uint512) -> Self { + x.0 + } +} + +impl From<[u8; 64]> for Uint512 { + #[must_use] + fn from(x: [u8; 64]) -> Self { + Uint512(x) + } +} + +impl AsRef<[u8; 64]> for Uint512 { + #[must_use] + fn as_ref(&self) -> &[u8; 64] { + &self.0 + } +} + +impl ReadXdr for Uint512 { + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result { + r.with_limited_depth(|r| { + let i = <[u8; 64]>::read_xdr(r)?; + let v = Uint512(i); + Ok(v) + }) + } +} + +impl WriteXdr for Uint512 { + #[cfg(feature = "std")] + fn write_xdr(&self, w: &mut Limited) -> Result<()> { + w.with_limited_depth(|w|{ self.0.write_xdr(w) }) + } +} + +impl Uint512 { + #[must_use] + pub fn as_slice(&self) -> &[u8] { + &self.0 + } +} + +#[cfg(feature = "alloc")] +impl TryFrom> for Uint512 { + type Error = Error; + fn try_from(x: Vec) -> Result { + x.as_slice().try_into() + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&Vec> for Uint512 { + type Error = Error; + fn try_from(x: &Vec) -> Result { + x.as_slice().try_into() + } +} + +impl TryFrom<&[u8]> for Uint512 { + type Error = Error; + fn try_from(x: &[u8]) -> Result { + Ok(Uint512(x.try_into()?)) + } +} + +impl AsRef<[u8]> for Uint512 { + #[must_use] + fn as_ref(&self) -> &[u8] { + &self.0 + } +} + +/// Uint513 is an XDR Typedef defines as: +/// +/// ```text +/// typedef opaque uint513<64>; +/// ``` +/// +#[derive(Clone, Hash, PartialEq, Eq, PartialOrd, Ord)] +#[cfg_attr(feature = "arbitrary", derive(Arbitrary))] +#[derive(Default)] +#[cfg_attr(all(feature = "serde", feature = "alloc"), derive(serde::Serialize, serde::Deserialize), serde(rename_all = "snake_case"))] +#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] +#[derive(Debug)] +pub struct Uint513(pub BytesM::<64>); + +impl From for BytesM::<64> { + #[must_use] + fn from(x: Uint513) -> Self { + x.0 + } +} + +impl From> for Uint513 { + #[must_use] + fn from(x: BytesM::<64>) -> Self { + Uint513(x) + } +} + +impl AsRef> for Uint513 { + #[must_use] + fn as_ref(&self) -> &BytesM::<64> { + &self.0 + } +} + +impl ReadXdr for Uint513 { + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result { + r.with_limited_depth(|r| { + let i = BytesM::<64>::read_xdr(r)?; + let v = Uint513(i); + Ok(v) + }) + } +} + +impl WriteXdr for Uint513 { + #[cfg(feature = "std")] + fn write_xdr(&self, w: &mut Limited) -> Result<()> { + w.with_limited_depth(|w|{ self.0.write_xdr(w) }) + } +} + +impl Deref for Uint513 { + type Target = BytesM::<64>; + fn deref(&self) -> &Self::Target { + &self.0 + } +} + +impl From for 
Vec { + #[must_use] + fn from(x: Uint513) -> Self { + x.0.0 + } +} + +impl TryFrom> for Uint513 { + type Error = Error; + fn try_from(x: Vec) -> Result { + Ok(Uint513(x.try_into()?)) + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&Vec> for Uint513 { + type Error = Error; + fn try_from(x: &Vec) -> Result { + Ok(Uint513(x.try_into()?)) + } +} + +impl AsRef> for Uint513 { + #[must_use] + fn as_ref(&self) -> &Vec { + &self.0.0 + } +} + +impl AsRef<[u8]> for Uint513 { + #[cfg(feature = "alloc")] + #[must_use] + fn as_ref(&self) -> &[u8] { + &self.0.0 + } + #[cfg(not(feature = "alloc"))] + #[must_use] + fn as_ref(&self) -> &[u8] { + self.0.0 + } +} + +/// Uint514 is an XDR Typedef defines as: +/// +/// ```text +/// typedef opaque uint514<>; +/// ``` +/// +#[derive(Clone, Hash, PartialEq, Eq, PartialOrd, Ord)] +#[cfg_attr(feature = "arbitrary", derive(Arbitrary))] +#[derive(Default)] +#[cfg_attr(all(feature = "serde", feature = "alloc"), derive(serde::Serialize, serde::Deserialize), serde(rename_all = "snake_case"))] +#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] +#[derive(Debug)] +pub struct Uint514(pub BytesM); + +impl From for BytesM { + #[must_use] + fn from(x: Uint514) -> Self { + x.0 + } +} + +impl From for Uint514 { + #[must_use] + fn from(x: BytesM) -> Self { + Uint514(x) + } +} + +impl AsRef for Uint514 { + #[must_use] + fn as_ref(&self) -> &BytesM { + &self.0 + } +} + +impl ReadXdr for Uint514 { + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result { + r.with_limited_depth(|r| { + let i = BytesM::read_xdr(r)?; + let v = Uint514(i); + Ok(v) + }) + } +} + +impl WriteXdr for Uint514 { + #[cfg(feature = "std")] + fn write_xdr(&self, w: &mut Limited) -> Result<()> { + w.with_limited_depth(|w|{ self.0.write_xdr(w) }) + } +} + +impl Deref for Uint514 { + type Target = BytesM; + fn deref(&self) -> &Self::Target { + &self.0 + } +} + +impl From for Vec { + #[must_use] + fn from(x: Uint514) -> Self { + x.0.0 + } +} + +impl TryFrom> for Uint514 { + type Error = Error; + fn try_from(x: Vec) -> Result { + Ok(Uint514(x.try_into()?)) + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&Vec> for Uint514 { + type Error = Error; + fn try_from(x: &Vec) -> Result { + Ok(Uint514(x.try_into()?)) + } +} + +impl AsRef> for Uint514 { + #[must_use] + fn as_ref(&self) -> &Vec { + &self.0.0 + } +} + +impl AsRef<[u8]> for Uint514 { + #[cfg(feature = "alloc")] + #[must_use] + fn as_ref(&self) -> &[u8] { + &self.0.0 + } + #[cfg(not(feature = "alloc"))] + #[must_use] + fn as_ref(&self) -> &[u8] { + self.0.0 + } +} + +/// Str is an XDR Typedef defines as: +/// +/// ```text +/// typedef string str<64>; +/// ``` +/// +#[derive(Clone, Hash, PartialEq, Eq, PartialOrd, Ord)] +#[cfg_attr(feature = "arbitrary", derive(Arbitrary))] +#[derive(Default)] +#[cfg_attr(all(feature = "serde", feature = "alloc"), derive(serde::Serialize, serde::Deserialize), serde(rename_all = "snake_case"))] +#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] +#[derive(Debug)] +pub struct Str(pub StringM::<64>); + +impl From for StringM::<64> { + #[must_use] + fn from(x: Str) -> Self { + x.0 + } +} + +impl From> for Str { + #[must_use] + fn from(x: StringM::<64>) -> Self { + Str(x) + } +} + +impl AsRef> for Str { + #[must_use] + fn as_ref(&self) -> &StringM::<64> { + &self.0 + } +} + +impl ReadXdr for Str { + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result { + r.with_limited_depth(|r| { + let i = StringM::<64>::read_xdr(r)?; + let v = Str(i); + Ok(v) + }) + } +} + +impl WriteXdr for Str { + 
#[cfg(feature = "std")] + fn write_xdr(&self, w: &mut Limited) -> Result<()> { + w.with_limited_depth(|w|{ self.0.write_xdr(w) }) + } +} + +impl Deref for Str { + type Target = StringM::<64>; + fn deref(&self) -> &Self::Target { + &self.0 + } +} + +impl From for Vec { + #[must_use] + fn from(x: Str) -> Self { + x.0.0 + } +} + +impl TryFrom> for Str { + type Error = Error; + fn try_from(x: Vec) -> Result { + Ok(Str(x.try_into()?)) + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&Vec> for Str { + type Error = Error; + fn try_from(x: &Vec) -> Result { + Ok(Str(x.try_into()?)) + } +} + +impl AsRef> for Str { + #[must_use] + fn as_ref(&self) -> &Vec { + &self.0.0 + } +} + +impl AsRef<[u8]> for Str { + #[cfg(feature = "alloc")] + #[must_use] + fn as_ref(&self) -> &[u8] { + &self.0.0 + } + #[cfg(not(feature = "alloc"))] + #[must_use] + fn as_ref(&self) -> &[u8] { + self.0.0 + } +} + +/// Str2 is an XDR Typedef defines as: +/// +/// ```text +/// typedef string str2<>; +/// ``` +/// +#[derive(Clone, Hash, PartialEq, Eq, PartialOrd, Ord)] +#[cfg_attr(feature = "arbitrary", derive(Arbitrary))] +#[derive(Default)] +#[cfg_attr(all(feature = "serde", feature = "alloc"), derive(serde::Serialize, serde::Deserialize), serde(rename_all = "snake_case"))] +#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] +#[derive(Debug)] +pub struct Str2(pub StringM); + +impl From for StringM { + #[must_use] + fn from(x: Str2) -> Self { + x.0 + } +} + +impl From for Str2 { + #[must_use] + fn from(x: StringM) -> Self { + Str2(x) + } +} + +impl AsRef for Str2 { + #[must_use] + fn as_ref(&self) -> &StringM { + &self.0 + } +} + +impl ReadXdr for Str2 { + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result { + r.with_limited_depth(|r| { + let i = StringM::read_xdr(r)?; + let v = Str2(i); + Ok(v) + }) + } +} + +impl WriteXdr for Str2 { + #[cfg(feature = "std")] + fn write_xdr(&self, w: &mut Limited) -> Result<()> { + w.with_limited_depth(|w|{ self.0.write_xdr(w) }) + } +} + +impl Deref for Str2 { + type Target = StringM; + fn deref(&self) -> &Self::Target { + &self.0 + } +} + +impl From for Vec { + #[must_use] + fn from(x: Str2) -> Self { + x.0.0 + } +} + +impl TryFrom> for Str2 { + type Error = Error; + fn try_from(x: Vec) -> Result { + Ok(Str2(x.try_into()?)) + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&Vec> for Str2 { + type Error = Error; + fn try_from(x: &Vec) -> Result { + Ok(Str2(x.try_into()?)) + } +} + +impl AsRef> for Str2 { + #[must_use] + fn as_ref(&self) -> &Vec { + &self.0.0 + } +} + +impl AsRef<[u8]> for Str2 { + #[cfg(feature = "alloc")] + #[must_use] + fn as_ref(&self) -> &[u8] { + &self.0.0 + } + #[cfg(not(feature = "alloc"))] + #[must_use] + fn as_ref(&self) -> &[u8] { + self.0.0 + } +} + +/// Hash is an XDR Typedef defines as: +/// +/// ```text +/// typedef opaque Hash[32]; +/// ``` +/// +#[derive(Clone, Hash, PartialEq, Eq, PartialOrd, Ord)] +#[cfg_attr(feature = "arbitrary", derive(Arbitrary))] +#[cfg_attr(all(feature = "serde", feature = "alloc"), derive(serde_with::SerializeDisplay, serde_with::DeserializeFromStr))] +pub struct Hash(pub [u8; 32]); + +impl core::fmt::Debug for Hash { + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + let v = &self.0; + write!(f, "Hash(")?; + for b in v { + write!(f, "{b:02x}")?; + } + write!(f, ")")?; + Ok(()) + } +} +impl core::fmt::Display for Hash { + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + let v = &self.0; + for b in v { + write!(f, "{b:02x}")?; + } + Ok(()) + } +} + 
+#[cfg(feature = "alloc")] +impl core::str::FromStr for Hash { + type Err = Error; + fn from_str(s: &str) -> core::result::Result { + hex::decode(s).map_err(|_| Error::InvalidHex)?.try_into() + } +} +#[cfg(feature = "schemars")] +impl schemars::JsonSchema for Hash { + fn schema_name() -> String { + "Hash".to_string() + } + + fn is_referenceable() -> bool { + false + } + + fn json_schema(gen: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema { + let schema = String::json_schema(gen); + if let schemars::schema::Schema::Object(mut schema) = schema { + schema.extensions.insert( + "contentEncoding".to_owned(), + serde_json::Value::String("hex".to_string()), + ); + schema.extensions.insert( + "contentMediaType".to_owned(), + serde_json::Value::String("application/binary".to_string()), + ); + let string = *schema.string.unwrap_or_default().clone(); + schema.string = Some(Box::new(schemars::schema::StringValidation { + max_length: 32_u32.checked_mul(2).map(Some).unwrap_or_default(), + min_length: 32_u32.checked_mul(2).map(Some).unwrap_or_default(), + ..string + })); + schema.into() + } else { + schema + } + } +} +impl From for [u8; 32] { + #[must_use] + fn from(x: Hash) -> Self { + x.0 + } +} + +impl From<[u8; 32]> for Hash { + #[must_use] + fn from(x: [u8; 32]) -> Self { + Hash(x) + } +} + +impl AsRef<[u8; 32]> for Hash { + #[must_use] + fn as_ref(&self) -> &[u8; 32] { + &self.0 + } +} + +impl ReadXdr for Hash { + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result { + r.with_limited_depth(|r| { + let i = <[u8; 32]>::read_xdr(r)?; + let v = Hash(i); + Ok(v) + }) + } +} + +impl WriteXdr for Hash { + #[cfg(feature = "std")] + fn write_xdr(&self, w: &mut Limited) -> Result<()> { + w.with_limited_depth(|w|{ self.0.write_xdr(w) }) + } +} + +impl Hash { + #[must_use] + pub fn as_slice(&self) -> &[u8] { + &self.0 + } +} + +#[cfg(feature = "alloc")] +impl TryFrom> for Hash { + type Error = Error; + fn try_from(x: Vec) -> Result { + x.as_slice().try_into() + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&Vec> for Hash { + type Error = Error; + fn try_from(x: &Vec) -> Result { + x.as_slice().try_into() + } +} + +impl TryFrom<&[u8]> for Hash { + type Error = Error; + fn try_from(x: &[u8]) -> Result { + Ok(Hash(x.try_into()?)) + } +} + +impl AsRef<[u8]> for Hash { + #[must_use] + fn as_ref(&self) -> &[u8] { + &self.0 + } +} + +/// Hashes1 is an XDR Typedef defines as: +/// +/// ```text +/// typedef Hash Hashes1[12]; +/// ``` +/// +#[derive(Clone, Hash, PartialEq, Eq, PartialOrd, Ord)] +#[cfg_attr(feature = "arbitrary", derive(Arbitrary))] +#[cfg_attr(all(feature = "serde", feature = "alloc"), derive(serde::Serialize, serde::Deserialize), serde(rename_all = "snake_case"))] +#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] +#[derive(Debug)] +pub struct Hashes1(pub [Hash; 12]); + +impl From for [Hash; 12] { + #[must_use] + fn from(x: Hashes1) -> Self { + x.0 + } +} + +impl From<[Hash; 12]> for Hashes1 { + #[must_use] + fn from(x: [Hash; 12]) -> Self { + Hashes1(x) + } +} + +impl AsRef<[Hash; 12]> for Hashes1 { + #[must_use] + fn as_ref(&self) -> &[Hash; 12] { + &self.0 + } +} + +impl ReadXdr for Hashes1 { + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result { + r.with_limited_depth(|r| { + let i = <[Hash; 12]>::read_xdr(r)?; + let v = Hashes1(i); + Ok(v) + }) + } +} + +impl WriteXdr for Hashes1 { + #[cfg(feature = "std")] + fn write_xdr(&self, w: &mut Limited) -> Result<()> { + w.with_limited_depth(|w|{ self.0.write_xdr(w) }) + } +} + +impl Hashes1 { + 
#[must_use] + pub fn as_slice(&self) -> &[Hash] { + &self.0 + } +} + +#[cfg(feature = "alloc")] +impl TryFrom> for Hashes1 { + type Error = Error; + fn try_from(x: Vec) -> Result { + x.as_slice().try_into() + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&Vec> for Hashes1 { + type Error = Error; + fn try_from(x: &Vec) -> Result { + x.as_slice().try_into() + } +} + +impl TryFrom<&[Hash]> for Hashes1 { + type Error = Error; + fn try_from(x: &[Hash]) -> Result { + Ok(Hashes1(x.try_into()?)) + } +} + +impl AsRef<[Hash]> for Hashes1 { + #[must_use] + fn as_ref(&self) -> &[Hash] { + &self.0 + } +} + +/// Hashes2 is an XDR Typedef defines as: +/// +/// ```text +/// typedef Hash Hashes2<12>; +/// ``` +/// +#[derive(Clone, Hash, PartialEq, Eq, PartialOrd, Ord)] +#[cfg_attr(feature = "arbitrary", derive(Arbitrary))] +#[derive(Default)] +#[cfg_attr(all(feature = "serde", feature = "alloc"), derive(serde::Serialize, serde::Deserialize), serde(rename_all = "snake_case"))] +#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] +#[derive(Debug)] +pub struct Hashes2(pub VecM::); + +impl From for VecM:: { + #[must_use] + fn from(x: Hashes2) -> Self { + x.0 + } +} + +impl From> for Hashes2 { + #[must_use] + fn from(x: VecM::) -> Self { + Hashes2(x) + } +} + +impl AsRef> for Hashes2 { + #[must_use] + fn as_ref(&self) -> &VecM:: { + &self.0 + } +} + +impl ReadXdr for Hashes2 { + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result { + r.with_limited_depth(|r| { + let i = VecM::::read_xdr(r)?; + let v = Hashes2(i); + Ok(v) + }) + } +} + +impl WriteXdr for Hashes2 { + #[cfg(feature = "std")] + fn write_xdr(&self, w: &mut Limited) -> Result<()> { + w.with_limited_depth(|w|{ self.0.write_xdr(w) }) + } +} + +impl Deref for Hashes2 { + type Target = VecM::; + fn deref(&self) -> &Self::Target { + &self.0 + } +} + +impl From for Vec { + #[must_use] + fn from(x: Hashes2) -> Self { + x.0.0 + } +} + +impl TryFrom> for Hashes2 { + type Error = Error; + fn try_from(x: Vec) -> Result { + Ok(Hashes2(x.try_into()?)) + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&Vec> for Hashes2 { + type Error = Error; + fn try_from(x: &Vec) -> Result { + Ok(Hashes2(x.try_into()?)) + } +} + +impl AsRef> for Hashes2 { + #[must_use] + fn as_ref(&self) -> &Vec { + &self.0.0 + } +} + +impl AsRef<[Hash]> for Hashes2 { + #[cfg(feature = "alloc")] + #[must_use] + fn as_ref(&self) -> &[Hash] { + &self.0.0 + } + #[cfg(not(feature = "alloc"))] + #[must_use] + fn as_ref(&self) -> &[Hash] { + self.0.0 + } +} + +/// Hashes3 is an XDR Typedef defines as: +/// +/// ```text +/// typedef Hash Hashes3<>; +/// ``` +/// +#[derive(Clone, Hash, PartialEq, Eq, PartialOrd, Ord)] +#[cfg_attr(feature = "arbitrary", derive(Arbitrary))] +#[derive(Default)] +#[cfg_attr(all(feature = "serde", feature = "alloc"), derive(serde::Serialize, serde::Deserialize), serde(rename_all = "snake_case"))] +#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] +#[derive(Debug)] +pub struct Hashes3(pub VecM::); + +impl From for VecM:: { + #[must_use] + fn from(x: Hashes3) -> Self { + x.0 + } +} + +impl From> for Hashes3 { + #[must_use] + fn from(x: VecM::) -> Self { + Hashes3(x) + } +} + +impl AsRef> for Hashes3 { + #[must_use] + fn as_ref(&self) -> &VecM:: { + &self.0 + } +} + +impl ReadXdr for Hashes3 { + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result { + r.with_limited_depth(|r| { + let i = VecM::::read_xdr(r)?; + let v = Hashes3(i); + Ok(v) + }) + } +} + +impl WriteXdr for Hashes3 { + #[cfg(feature = "std")] + fn 
write_xdr(&self, w: &mut Limited) -> Result<()> { + w.with_limited_depth(|w|{ self.0.write_xdr(w) }) + } +} + +impl Deref for Hashes3 { + type Target = VecM::; + fn deref(&self) -> &Self::Target { + &self.0 + } +} + +impl From for Vec { + #[must_use] + fn from(x: Hashes3) -> Self { + x.0.0 + } +} + +impl TryFrom> for Hashes3 { + type Error = Error; + fn try_from(x: Vec) -> Result { + Ok(Hashes3(x.try_into()?)) + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&Vec> for Hashes3 { + type Error = Error; + fn try_from(x: &Vec) -> Result { + Ok(Hashes3(x.try_into()?)) + } +} + +impl AsRef> for Hashes3 { + #[must_use] + fn as_ref(&self) -> &Vec { + &self.0.0 + } +} + +impl AsRef<[Hash]> for Hashes3 { + #[cfg(feature = "alloc")] + #[must_use] + fn as_ref(&self) -> &[Hash] { + &self.0.0 + } + #[cfg(not(feature = "alloc"))] + #[must_use] + fn as_ref(&self) -> &[Hash] { + self.0.0 + } +} + +/// OptHash1 is an XDR Typedef defines as: +/// +/// ```text +/// typedef Hash *optHash1; +/// ``` +/// +#[derive(Clone, Hash, PartialEq, Eq, PartialOrd, Ord)] +#[cfg_attr(feature = "arbitrary", derive(Arbitrary))] +#[cfg_attr(all(feature = "serde", feature = "alloc"), derive(serde::Serialize, serde::Deserialize), serde(rename_all = "snake_case"))] +#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] +#[derive(Debug)] +pub struct OptHash1(pub Option); + +impl From for Option { + #[must_use] + fn from(x: OptHash1) -> Self { + x.0 + } +} + +impl From> for OptHash1 { + #[must_use] + fn from(x: Option) -> Self { + OptHash1(x) + } +} + +impl AsRef> for OptHash1 { + #[must_use] + fn as_ref(&self) -> &Option { + &self.0 + } +} + +impl ReadXdr for OptHash1 { + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result { + r.with_limited_depth(|r| { + let i = Option::::read_xdr(r)?; + let v = OptHash1(i); + Ok(v) + }) + } +} + +impl WriteXdr for OptHash1 { + #[cfg(feature = "std")] + fn write_xdr(&self, w: &mut Limited) -> Result<()> { + w.with_limited_depth(|w|{ self.0.write_xdr(w) }) + } +} + +/// OptHash2 is an XDR Typedef defines as: +/// +/// ```text +/// typedef Hash* optHash2; +/// ``` +/// +#[derive(Clone, Hash, PartialEq, Eq, PartialOrd, Ord)] +#[cfg_attr(feature = "arbitrary", derive(Arbitrary))] +#[cfg_attr(all(feature = "serde", feature = "alloc"), derive(serde::Serialize, serde::Deserialize), serde(rename_all = "snake_case"))] +#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] +#[derive(Debug)] +pub struct OptHash2(pub Option); + +impl From for Option { + #[must_use] + fn from(x: OptHash2) -> Self { + x.0 + } +} + +impl From> for OptHash2 { + #[must_use] + fn from(x: Option) -> Self { + OptHash2(x) + } +} + +impl AsRef> for OptHash2 { + #[must_use] + fn as_ref(&self) -> &Option { + &self.0 + } +} + +impl ReadXdr for OptHash2 { + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result { + r.with_limited_depth(|r| { + let i = Option::::read_xdr(r)?; + let v = OptHash2(i); + Ok(v) + }) + } +} + +impl WriteXdr for OptHash2 { + #[cfg(feature = "std")] + fn write_xdr(&self, w: &mut Limited) -> Result<()> { + w.with_limited_depth(|w|{ self.0.write_xdr(w) }) + } +} + +/// Int1 is an XDR Typedef defines as: +/// +/// ```text +/// typedef int int1; +/// ``` +/// +pub type Int1 = i32; + +/// Int2 is an XDR Typedef defines as: +/// +/// ```text +/// typedef hyper int2; +/// ``` +/// +pub type Int2 = i64; + +/// Int3 is an XDR Typedef defines as: +/// +/// ```text +/// typedef unsigned int int3; +/// ``` +/// +pub type Int3 = u32; + +/// Int4 is an XDR Typedef defines as: +/// +/// 
```text +/// typedef unsigned hyper int4; +/// ``` +/// +pub type Int4 = u64; + +/// MyStruct is an XDR Struct defines as: +/// +/// ```text +/// struct MyStruct +/// { +/// uint512 field1; +/// optHash1 field2; +/// int1 field3; +/// unsigned int field4; +/// float field5; +/// double field6; +/// bool field7; +/// }; +/// ``` +/// +#[derive(Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord)] +#[cfg_attr(feature = "arbitrary", derive(Arbitrary))] +#[cfg_attr(all(feature = "serde", feature = "alloc"), derive(serde::Serialize, serde::Deserialize), serde(rename_all = "snake_case"))] +pub struct MyStruct { + pub field1: Uint512, + pub field2: OptHash1, + pub field3: i32, + pub field4: u32, + pub field5: f32, + pub field6: f64, + pub field7: bool, +} + + impl ReadXdr for MyStruct { + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result { + r.with_limited_depth(|r| { + Ok(Self{ + field1: Uint512::read_xdr(r)?, +field2: OptHash1::read_xdr(r)?, +field3: i32::read_xdr(r)?, +field4: u32::read_xdr(r)?, +field5: f32::read_xdr(r)?, +field6: f64::read_xdr(r)?, +field7: bool::read_xdr(r)?, + }) + }) + } + } + + impl WriteXdr for MyStruct { + #[cfg(feature = "std")] + fn write_xdr(&self, w: &mut Limited) -> Result<()> { + w.with_limited_depth(|w| { + self.field1.write_xdr(w)?; +self.field2.write_xdr(w)?; +self.field3.write_xdr(w)?; +self.field4.write_xdr(w)?; +self.field5.write_xdr(w)?; +self.field6.write_xdr(w)?; +self.field7.write_xdr(w)?; + Ok(()) + }) + } + } + +/// LotsOfMyStructs is an XDR Struct defines as: +/// +/// ```text +/// struct LotsOfMyStructs +/// { +/// MyStruct members<>; +/// }; +/// ``` +/// +#[derive(Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord)] +#[cfg_attr(feature = "arbitrary", derive(Arbitrary))] +#[cfg_attr(all(feature = "serde", feature = "alloc"), derive(serde::Serialize, serde::Deserialize), serde(rename_all = "snake_case"))] +pub struct LotsOfMyStructs { + pub members: VecM::, +} + +impl ReadXdr for LotsOfMyStructs { + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result { + r.with_limited_depth(|r| { + Ok(Self{ + members: VecM::::read_xdr(r)?, + }) + }) + } +} + +impl WriteXdr for LotsOfMyStructs { + #[cfg(feature = "std")] + fn write_xdr(&self, w: &mut Limited) -> Result<()> { + w.with_limited_depth(|w| { + self.members.write_xdr(w)?; + Ok(()) + }) + } +} + +/// HasStuff is an XDR Struct defines as: +/// +/// ```text +/// struct HasStuff +/// { +/// LotsOfMyStructs data; +/// }; +/// ``` +/// +#[derive(Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord)] +#[cfg_attr(feature = "arbitrary", derive(Arbitrary))] +#[cfg_attr(all(feature = "serde", feature = "alloc"), derive(serde::Serialize, serde::Deserialize), serde(rename_all = "snake_case"))] +#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] +pub struct HasStuff { + pub data: LotsOfMyStructs, +} + +impl ReadXdr for HasStuff { + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result { + r.with_limited_depth(|r| { + Ok(Self{ + data: LotsOfMyStructs::read_xdr(r)?, + }) + }) + } +} + +impl WriteXdr for HasStuff { + #[cfg(feature = "std")] + fn write_xdr(&self, w: &mut Limited) -> Result<()> { + w.with_limited_depth(|w| { + self.data.write_xdr(w)?; + Ok(()) + }) + } +} + +/// Color is an XDR Enum defines as: +/// +/// ```text +/// enum Color { +/// RED, +/// BLUE = 5, +/// GREEN +/// }; +/// ``` +/// +// enum +#[derive(Clone, Copy, Debug, Hash, PartialEq, Eq, PartialOrd, Ord)] +#[cfg_attr(feature = "arbitrary", derive(Arbitrary))] +#[cfg_attr(all(feature = "serde", feature 
= "alloc"), derive(serde::Serialize, serde::Deserialize), serde(rename_all = "snake_case"))] +#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] +#[repr(i32)] +pub enum Color { + Red = 0, + Blue = 5, + Green = 6, +} + + impl Color { + pub const VARIANTS: [Color; 3] = [ Color::Red, +Color::Blue, +Color::Green, ]; + pub const VARIANTS_STR: [&'static str; 3] = [ "Red", +"Blue", +"Green", ]; + + #[must_use] + pub const fn name(&self) -> &'static str { + match self { + Self::Red => "Red", +Self::Blue => "Blue", +Self::Green => "Green", + } + } + + #[must_use] + pub const fn variants() -> [Color; 3] { + Self::VARIANTS + } + } + + impl Name for Color { + #[must_use] + fn name(&self) -> &'static str { + Self::name(self) + } + } + + impl Variants for Color { + fn variants() -> slice::Iter<'static, Color> { + Self::VARIANTS.iter() + } + } + + impl Enum for Color {} + + impl fmt::Display for Color { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.write_str(self.name()) + } + } + + impl TryFrom for Color { + type Error = Error; + + fn try_from(i: i32) -> Result { + let e = match i { + 0 => Color::Red, +5 => Color::Blue, +6 => Color::Green, + #[allow(unreachable_patterns)] + _ => return Err(Error::Invalid), + }; + Ok(e) + } + } + + impl From for i32 { + #[must_use] + fn from(e: Color) -> Self { + e as Self + } + } + + impl ReadXdr for Color { + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result { + r.with_limited_depth(|r| { + let e = i32::read_xdr(r)?; + let v: Self = e.try_into()?; + Ok(v) + }) + } + } + + impl WriteXdr for Color { + #[cfg(feature = "std")] + fn write_xdr(&self, w: &mut Limited) -> Result<()> { + w.with_limited_depth(|w| { + let i: i32 = (*self).into(); + i.write_xdr(w) + }) + } + } + +/// Foo is an XDR Const defines as: +/// +/// ```text +/// const FOO = 1244; +/// ``` +/// +pub const FOO: u64 = 1244; + +/// Bar is an XDR Const defines as: +/// +/// ```text +/// const BAR = FOO; +/// ``` +/// +pub const BAR: u64 = FOO; + +/// NesterNestedEnum is an XDR NestedEnum defines as: +/// +/// ```text +/// enum { +/// BLAH_1, +/// BLAH_2 +/// } +/// ``` +/// +// enum +#[derive(Clone, Copy, Debug, Hash, PartialEq, Eq, PartialOrd, Ord)] +#[cfg_attr(feature = "arbitrary", derive(Arbitrary))] +#[cfg_attr(all(feature = "serde", feature = "alloc"), derive(serde::Serialize, serde::Deserialize), serde(rename_all = "snake_case"))] +#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] +#[repr(i32)] +pub enum NesterNestedEnum { + 1 = 0, + 2 = 1, +} + + impl NesterNestedEnum { + pub const VARIANTS: [NesterNestedEnum; 2] = [ NesterNestedEnum::1, +NesterNestedEnum::2, ]; + pub const VARIANTS_STR: [&'static str; 2] = [ "1", +"2", ]; + + #[must_use] + pub const fn name(&self) -> &'static str { + match self { + Self::1 => "1", +Self::2 => "2", + } + } + + #[must_use] + pub const fn variants() -> [NesterNestedEnum; 2] { + Self::VARIANTS + } + } + + impl Name for NesterNestedEnum { + #[must_use] + fn name(&self) -> &'static str { + Self::name(self) + } + } + + impl Variants for NesterNestedEnum { + fn variants() -> slice::Iter<'static, NesterNestedEnum> { + Self::VARIANTS.iter() + } + } + + impl Enum for NesterNestedEnum {} + + impl fmt::Display for NesterNestedEnum { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.write_str(self.name()) + } + } + + impl TryFrom for NesterNestedEnum { + type Error = Error; + + fn try_from(i: i32) -> Result { + let e = match i { + 0 => NesterNestedEnum::1, +1 => NesterNestedEnum::2, + 
#[allow(unreachable_patterns)] + _ => return Err(Error::Invalid), + }; + Ok(e) + } + } + + impl From for i32 { + #[must_use] + fn from(e: NesterNestedEnum) -> Self { + e as Self + } + } + + impl ReadXdr for NesterNestedEnum { + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result { + r.with_limited_depth(|r| { + let e = i32::read_xdr(r)?; + let v: Self = e.try_into()?; + Ok(v) + }) + } + } + + impl WriteXdr for NesterNestedEnum { + #[cfg(feature = "std")] + fn write_xdr(&self, w: &mut Limited) -> Result<()> { + w.with_limited_depth(|w| { + let i: i32 = (*self).into(); + i.write_xdr(w) + }) + } + } + +/// NesterNestedStruct is an XDR NestedStruct defines as: +/// +/// ```text +/// struct { +/// int blah; +/// } +/// ``` +/// +#[derive(Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord)] +#[cfg_attr(feature = "arbitrary", derive(Arbitrary))] +#[cfg_attr(all(feature = "serde", feature = "alloc"), derive(serde::Serialize, serde::Deserialize), serde(rename_all = "snake_case"))] +#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] +pub struct NesterNestedStruct { + pub blah: i32, +} + +impl ReadXdr for NesterNestedStruct { + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result { + r.with_limited_depth(|r| { + Ok(Self{ + blah: i32::read_xdr(r)?, + }) + }) + } +} + +impl WriteXdr for NesterNestedStruct { + #[cfg(feature = "std")] + fn write_xdr(&self, w: &mut Limited) -> Result<()> { + w.with_limited_depth(|w| { + self.blah.write_xdr(w)?; + Ok(()) + }) + } +} + +/// NesterNestedUnion is an XDR NestedUnion defines as: +/// +/// ```text +/// union switch (Color color) { +/// case RED: +/// void; +/// default: +/// int blah2; +/// } +/// ``` +/// +// union with discriminant Color +#[derive(Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord)] +#[cfg_attr(feature = "arbitrary", derive(Arbitrary))] +#[cfg_attr(all(feature = "serde", feature = "alloc"), derive(serde::Serialize, serde::Deserialize), serde(rename_all = "snake_case"))] +#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] +#[allow(clippy::large_enum_variant)] +pub enum NesterNestedUnion { + Red, +} + +impl NesterNestedUnion { + pub const VARIANTS: [Color; 1] = [ + Color::Red, + ]; + pub const VARIANTS_STR: [&'static str; 1] = [ + "Red", + ]; + + #[must_use] + pub const fn name(&self) -> &'static str { + match self { + Self::Red => "Red", + } + } + + #[must_use] + pub const fn discriminant(&self) -> Color { + #[allow(clippy::match_same_arms)] + match self { + Self::Red => Color::Red, + } + } + + #[must_use] + pub const fn variants() -> [Color; 1] { + Self::VARIANTS + } +} + +impl Name for NesterNestedUnion { + #[must_use] + fn name(&self) -> &'static str { + Self::name(self) + } +} + +impl Discriminant for NesterNestedUnion { + #[must_use] + fn discriminant(&self) -> Color { + Self::discriminant(self) + } +} + +impl Variants for NesterNestedUnion { + fn variants() -> slice::Iter<'static, Color> { + Self::VARIANTS.iter() + } +} + +impl Union for NesterNestedUnion {} + +impl ReadXdr for NesterNestedUnion { + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result { + r.with_limited_depth(|r| { + let dv: Color = ::read_xdr(r)?; + #[allow(clippy::match_same_arms, clippy::match_wildcard_for_single_variants)] + let v = match dv { + Color::Red => Self::Red, + #[allow(unreachable_patterns)] + _ => return Err(Error::Invalid), + }; + Ok(v) + }) + } +} + +impl WriteXdr for NesterNestedUnion { + #[cfg(feature = "std")] + fn write_xdr(&self, w: &mut Limited) -> Result<()> { + 
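+        // Illustrative note (not part of the generated output): an XDR union is
+        // written as its discriminant followed by the payload of the selected
+        // arm. `Self::Red` therefore serializes as the 4-byte discriminant
+        // `Color::Red` (0) followed by a void body, i.e. no further bytes.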
w.with_limited_depth(|w| { + self.discriminant().write_xdr(w)?; + #[allow(clippy::match_same_arms)] + match self { + Self::Red => ().write_xdr(w)?, + }; + Ok(()) + }) + } +} + +/// Nester is an XDR Struct defines as: +/// +/// ```text +/// struct Nester +/// { +/// enum { +/// BLAH_1, +/// BLAH_2 +/// } nestedEnum; +/// +/// struct { +/// int blah; +/// } nestedStruct; +/// +/// union switch (Color color) { +/// case RED: +/// void; +/// default: +/// int blah2; +/// } nestedUnion; +/// +/// +/// }; +/// ``` +/// +#[derive(Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord)] +#[cfg_attr(feature = "arbitrary", derive(Arbitrary))] +#[cfg_attr(all(feature = "serde", feature = "alloc"), derive(serde::Serialize, serde::Deserialize), serde(rename_all = "snake_case"))] +#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] +pub struct Nester { + pub nested_enum: NesterNestedEnum, + pub nested_struct: NesterNestedStruct, + pub nested_union: NesterNestedUnion, +} + + impl ReadXdr for Nester { + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result { + r.with_limited_depth(|r| { + Ok(Self{ + nested_enum: NesterNestedEnum::read_xdr(r)?, +nested_struct: NesterNestedStruct::read_xdr(r)?, +nested_union: NesterNestedUnion::read_xdr(r)?, + }) + }) + } + } + + impl WriteXdr for Nester { + #[cfg(feature = "std")] + fn write_xdr(&self, w: &mut Limited) -> Result<()> { + w.with_limited_depth(|w| { + self.nested_enum.write_xdr(w)?; +self.nested_struct.write_xdr(w)?; +self.nested_union.write_xdr(w)?; + Ok(()) + }) + } + } + + #[derive(Clone, Copy, Debug, Hash, PartialEq, Eq, PartialOrd, Ord)] + #[cfg_attr( + all(feature = "serde", feature = "alloc"), + derive(serde::Serialize, serde::Deserialize), + serde(rename_all = "snake_case") + )] + #[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] + pub enum TypeVariant { + Uint512, +Uint513, +Uint514, +Str, +Str2, +Hash, +Hashes1, +Hashes2, +Hashes3, +OptHash1, +OptHash2, +Int1, +Int2, +Int3, +Int4, +MyStruct, +LotsOfMyStructs, +HasStuff, +Color, +Nester, +NesterNestedEnum, +NesterNestedStruct, +NesterNestedUnion, + } + + impl TypeVariant { + pub const VARIANTS: [TypeVariant; 23] = [ TypeVariant::Uint512, +TypeVariant::Uint513, +TypeVariant::Uint514, +TypeVariant::Str, +TypeVariant::Str2, +TypeVariant::Hash, +TypeVariant::Hashes1, +TypeVariant::Hashes2, +TypeVariant::Hashes3, +TypeVariant::OptHash1, +TypeVariant::OptHash2, +TypeVariant::Int1, +TypeVariant::Int2, +TypeVariant::Int3, +TypeVariant::Int4, +TypeVariant::MyStruct, +TypeVariant::LotsOfMyStructs, +TypeVariant::HasStuff, +TypeVariant::Color, +TypeVariant::Nester, +TypeVariant::NesterNestedEnum, +TypeVariant::NesterNestedStruct, +TypeVariant::NesterNestedUnion, ]; + pub const VARIANTS_STR: [&'static str; 23] = [ "Uint512", +"Uint513", +"Uint514", +"Str", +"Str2", +"Hash", +"Hashes1", +"Hashes2", +"Hashes3", +"OptHash1", +"OptHash2", +"Int1", +"Int2", +"Int3", +"Int4", +"MyStruct", +"LotsOfMyStructs", +"HasStuff", +"Color", +"Nester", +"NesterNestedEnum", +"NesterNestedStruct", +"NesterNestedUnion", ]; + + #[must_use] + #[allow(clippy::too_many_lines)] + pub const fn name(&self) -> &'static str { + match self { + Self::Uint512 => "Uint512", +Self::Uint513 => "Uint513", +Self::Uint514 => "Uint514", +Self::Str => "Str", +Self::Str2 => "Str2", +Self::Hash => "Hash", +Self::Hashes1 => "Hashes1", +Self::Hashes2 => "Hashes2", +Self::Hashes3 => "Hashes3", +Self::OptHash1 => "OptHash1", +Self::OptHash2 => "OptHash2", +Self::Int1 => "Int1", +Self::Int2 => "Int2", +Self::Int3 => "Int3", 
+Self::Int4 => "Int4", +Self::MyStruct => "MyStruct", +Self::LotsOfMyStructs => "LotsOfMyStructs", +Self::HasStuff => "HasStuff", +Self::Color => "Color", +Self::Nester => "Nester", +Self::NesterNestedEnum => "NesterNestedEnum", +Self::NesterNestedStruct => "NesterNestedStruct", +Self::NesterNestedUnion => "NesterNestedUnion", + } + } + + #[must_use] + #[allow(clippy::too_many_lines)] + pub const fn variants() -> [TypeVariant; 23] { + Self::VARIANTS + } + + #[cfg(feature = "schemars")] + #[must_use] + #[allow(clippy::too_many_lines)] + pub fn json_schema(&self, gen: schemars::gen::SchemaGenerator) -> schemars::schema::RootSchema { + match self { + Self::Uint512 => gen.into_root_schema_for::(), +Self::Uint513 => gen.into_root_schema_for::(), +Self::Uint514 => gen.into_root_schema_for::(), +Self::Str => gen.into_root_schema_for::(), +Self::Str2 => gen.into_root_schema_for::(), +Self::Hash => gen.into_root_schema_for::(), +Self::Hashes1 => gen.into_root_schema_for::(), +Self::Hashes2 => gen.into_root_schema_for::(), +Self::Hashes3 => gen.into_root_schema_for::(), +Self::OptHash1 => gen.into_root_schema_for::(), +Self::OptHash2 => gen.into_root_schema_for::(), +Self::Int1 => gen.into_root_schema_for::(), +Self::Int2 => gen.into_root_schema_for::(), +Self::Int3 => gen.into_root_schema_for::(), +Self::Int4 => gen.into_root_schema_for::(), +Self::MyStruct => gen.into_root_schema_for::(), +Self::LotsOfMyStructs => gen.into_root_schema_for::(), +Self::HasStuff => gen.into_root_schema_for::(), +Self::Color => gen.into_root_schema_for::(), +Self::Nester => gen.into_root_schema_for::(), +Self::NesterNestedEnum => gen.into_root_schema_for::(), +Self::NesterNestedStruct => gen.into_root_schema_for::(), +Self::NesterNestedUnion => gen.into_root_schema_for::(), + } + } + } + + impl Name for TypeVariant { + #[must_use] + fn name(&self) -> &'static str { + Self::name(self) + } + } + + impl Variants for TypeVariant { + fn variants() -> slice::Iter<'static, TypeVariant> { + Self::VARIANTS.iter() + } + } + + impl core::str::FromStr for TypeVariant { + type Err = Error; + #[allow(clippy::too_many_lines)] + fn from_str(s: &str) -> Result { + match s { + "Uint512" => Ok(Self::Uint512), +"Uint513" => Ok(Self::Uint513), +"Uint514" => Ok(Self::Uint514), +"Str" => Ok(Self::Str), +"Str2" => Ok(Self::Str2), +"Hash" => Ok(Self::Hash), +"Hashes1" => Ok(Self::Hashes1), +"Hashes2" => Ok(Self::Hashes2), +"Hashes3" => Ok(Self::Hashes3), +"OptHash1" => Ok(Self::OptHash1), +"OptHash2" => Ok(Self::OptHash2), +"Int1" => Ok(Self::Int1), +"Int2" => Ok(Self::Int2), +"Int3" => Ok(Self::Int3), +"Int4" => Ok(Self::Int4), +"MyStruct" => Ok(Self::MyStruct), +"LotsOfMyStructs" => Ok(Self::LotsOfMyStructs), +"HasStuff" => Ok(Self::HasStuff), +"Color" => Ok(Self::Color), +"Nester" => Ok(Self::Nester), +"NesterNestedEnum" => Ok(Self::NesterNestedEnum), +"NesterNestedStruct" => Ok(Self::NesterNestedStruct), +"NesterNestedUnion" => Ok(Self::NesterNestedUnion), + _ => Err(Error::Invalid), + } + } + } + + #[derive(Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord)] + #[cfg_attr( + all(feature = "serde", feature = "alloc"), + derive(serde::Serialize, serde::Deserialize), + serde(rename_all = "snake_case"), + serde(untagged), + )] + #[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] + pub enum Type { + Uint512(Box), +Uint513(Box), +Uint514(Box), +Str(Box), +Str2(Box), +Hash(Box), +Hashes1(Box), +Hashes2(Box), +Hashes3(Box), +OptHash1(Box), +OptHash2(Box), +Int1(Box), +Int2(Box), +Int3(Box), +Int4(Box), +MyStruct(Box), 
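+// Illustrative note (not part of the generated output): each variant boxes its
+// payload, so `Type` stays the size of a tag plus one pointer regardless of how
+// large the biggest generated XDR type is, and moving a `Type` never copies the
+// underlying value.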
+LotsOfMyStructs(Box), +HasStuff(Box), +Color(Box), +Nester(Box), +NesterNestedEnum(Box), +NesterNestedStruct(Box), +NesterNestedUnion(Box), + } + + impl Type { + pub const VARIANTS: [TypeVariant; 23] = [ TypeVariant::Uint512, +TypeVariant::Uint513, +TypeVariant::Uint514, +TypeVariant::Str, +TypeVariant::Str2, +TypeVariant::Hash, +TypeVariant::Hashes1, +TypeVariant::Hashes2, +TypeVariant::Hashes3, +TypeVariant::OptHash1, +TypeVariant::OptHash2, +TypeVariant::Int1, +TypeVariant::Int2, +TypeVariant::Int3, +TypeVariant::Int4, +TypeVariant::MyStruct, +TypeVariant::LotsOfMyStructs, +TypeVariant::HasStuff, +TypeVariant::Color, +TypeVariant::Nester, +TypeVariant::NesterNestedEnum, +TypeVariant::NesterNestedStruct, +TypeVariant::NesterNestedUnion, ]; + pub const VARIANTS_STR: [&'static str; 23] = [ "Uint512", +"Uint513", +"Uint514", +"Str", +"Str2", +"Hash", +"Hashes1", +"Hashes2", +"Hashes3", +"OptHash1", +"OptHash2", +"Int1", +"Int2", +"Int3", +"Int4", +"MyStruct", +"LotsOfMyStructs", +"HasStuff", +"Color", +"Nester", +"NesterNestedEnum", +"NesterNestedStruct", +"NesterNestedUnion", ]; + + #[cfg(feature = "std")] + #[allow(clippy::too_many_lines)] + pub fn read_xdr(v: TypeVariant, r: &mut Limited) -> Result { + match v { + TypeVariant::Uint512 => r.with_limited_depth(|r| Ok(Self::Uint512(Box::new(Uint512::read_xdr(r)?)))), +TypeVariant::Uint513 => r.with_limited_depth(|r| Ok(Self::Uint513(Box::new(Uint513::read_xdr(r)?)))), +TypeVariant::Uint514 => r.with_limited_depth(|r| Ok(Self::Uint514(Box::new(Uint514::read_xdr(r)?)))), +TypeVariant::Str => r.with_limited_depth(|r| Ok(Self::Str(Box::new(Str::read_xdr(r)?)))), +TypeVariant::Str2 => r.with_limited_depth(|r| Ok(Self::Str2(Box::new(Str2::read_xdr(r)?)))), +TypeVariant::Hash => r.with_limited_depth(|r| Ok(Self::Hash(Box::new(Hash::read_xdr(r)?)))), +TypeVariant::Hashes1 => r.with_limited_depth(|r| Ok(Self::Hashes1(Box::new(Hashes1::read_xdr(r)?)))), +TypeVariant::Hashes2 => r.with_limited_depth(|r| Ok(Self::Hashes2(Box::new(Hashes2::read_xdr(r)?)))), +TypeVariant::Hashes3 => r.with_limited_depth(|r| Ok(Self::Hashes3(Box::new(Hashes3::read_xdr(r)?)))), +TypeVariant::OptHash1 => r.with_limited_depth(|r| Ok(Self::OptHash1(Box::new(OptHash1::read_xdr(r)?)))), +TypeVariant::OptHash2 => r.with_limited_depth(|r| Ok(Self::OptHash2(Box::new(OptHash2::read_xdr(r)?)))), +TypeVariant::Int1 => r.with_limited_depth(|r| Ok(Self::Int1(Box::new(Int1::read_xdr(r)?)))), +TypeVariant::Int2 => r.with_limited_depth(|r| Ok(Self::Int2(Box::new(Int2::read_xdr(r)?)))), +TypeVariant::Int3 => r.with_limited_depth(|r| Ok(Self::Int3(Box::new(Int3::read_xdr(r)?)))), +TypeVariant::Int4 => r.with_limited_depth(|r| Ok(Self::Int4(Box::new(Int4::read_xdr(r)?)))), +TypeVariant::MyStruct => r.with_limited_depth(|r| Ok(Self::MyStruct(Box::new(MyStruct::read_xdr(r)?)))), +TypeVariant::LotsOfMyStructs => r.with_limited_depth(|r| Ok(Self::LotsOfMyStructs(Box::new(LotsOfMyStructs::read_xdr(r)?)))), +TypeVariant::HasStuff => r.with_limited_depth(|r| Ok(Self::HasStuff(Box::new(HasStuff::read_xdr(r)?)))), +TypeVariant::Color => r.with_limited_depth(|r| Ok(Self::Color(Box::new(Color::read_xdr(r)?)))), +TypeVariant::Nester => r.with_limited_depth(|r| Ok(Self::Nester(Box::new(Nester::read_xdr(r)?)))), +TypeVariant::NesterNestedEnum => r.with_limited_depth(|r| Ok(Self::NesterNestedEnum(Box::new(NesterNestedEnum::read_xdr(r)?)))), +TypeVariant::NesterNestedStruct => r.with_limited_depth(|r| Ok(Self::NesterNestedStruct(Box::new(NesterNestedStruct::read_xdr(r)?)))), +TypeVariant::NesterNestedUnion 
=> r.with_limited_depth(|r| Ok(Self::NesterNestedUnion(Box::new(NesterNestedUnion::read_xdr(r)?)))), + } + } + + #[cfg(feature = "base64")] + pub fn read_xdr_base64(v: TypeVariant, r: &mut Limited) -> Result { + let mut dec = Limited::new(base64::read::DecoderReader::new(&mut r.inner, base64::STANDARD), r.limits.clone()); + let t = Self::read_xdr(v, &mut dec)?; + Ok(t) + } + + #[cfg(feature = "std")] + pub fn read_xdr_to_end(v: TypeVariant, r: &mut Limited) -> Result { + let s = Self::read_xdr(v, r)?; + // Check that any further reads, such as this read of one byte, read no + // data, indicating EOF. If a byte is read the data is invalid. + if r.read(&mut [0u8; 1])? == 0 { + Ok(s) + } else { + Err(Error::Invalid) + } + } + + #[cfg(feature = "base64")] + pub fn read_xdr_base64_to_end(v: TypeVariant, r: &mut Limited) -> Result { + let mut dec = Limited::new(base64::read::DecoderReader::new(&mut r.inner, base64::STANDARD), r.limits.clone()); + let t = Self::read_xdr_to_end(v, &mut dec)?; + Ok(t) + } + + #[cfg(feature = "std")] + #[allow(clippy::too_many_lines)] + pub fn read_xdr_iter(v: TypeVariant, r: &mut Limited) -> Box> + '_> { + match v { + TypeVariant::Uint512 => Box::new(ReadXdrIter::<_, Uint512>::new(&mut r.inner, r.limits.clone()).map(|r| r.map(|t| Self::Uint512(Box::new(t))))), +TypeVariant::Uint513 => Box::new(ReadXdrIter::<_, Uint513>::new(&mut r.inner, r.limits.clone()).map(|r| r.map(|t| Self::Uint513(Box::new(t))))), +TypeVariant::Uint514 => Box::new(ReadXdrIter::<_, Uint514>::new(&mut r.inner, r.limits.clone()).map(|r| r.map(|t| Self::Uint514(Box::new(t))))), +TypeVariant::Str => Box::new(ReadXdrIter::<_, Str>::new(&mut r.inner, r.limits.clone()).map(|r| r.map(|t| Self::Str(Box::new(t))))), +TypeVariant::Str2 => Box::new(ReadXdrIter::<_, Str2>::new(&mut r.inner, r.limits.clone()).map(|r| r.map(|t| Self::Str2(Box::new(t))))), +TypeVariant::Hash => Box::new(ReadXdrIter::<_, Hash>::new(&mut r.inner, r.limits.clone()).map(|r| r.map(|t| Self::Hash(Box::new(t))))), +TypeVariant::Hashes1 => Box::new(ReadXdrIter::<_, Hashes1>::new(&mut r.inner, r.limits.clone()).map(|r| r.map(|t| Self::Hashes1(Box::new(t))))), +TypeVariant::Hashes2 => Box::new(ReadXdrIter::<_, Hashes2>::new(&mut r.inner, r.limits.clone()).map(|r| r.map(|t| Self::Hashes2(Box::new(t))))), +TypeVariant::Hashes3 => Box::new(ReadXdrIter::<_, Hashes3>::new(&mut r.inner, r.limits.clone()).map(|r| r.map(|t| Self::Hashes3(Box::new(t))))), +TypeVariant::OptHash1 => Box::new(ReadXdrIter::<_, OptHash1>::new(&mut r.inner, r.limits.clone()).map(|r| r.map(|t| Self::OptHash1(Box::new(t))))), +TypeVariant::OptHash2 => Box::new(ReadXdrIter::<_, OptHash2>::new(&mut r.inner, r.limits.clone()).map(|r| r.map(|t| Self::OptHash2(Box::new(t))))), +TypeVariant::Int1 => Box::new(ReadXdrIter::<_, Int1>::new(&mut r.inner, r.limits.clone()).map(|r| r.map(|t| Self::Int1(Box::new(t))))), +TypeVariant::Int2 => Box::new(ReadXdrIter::<_, Int2>::new(&mut r.inner, r.limits.clone()).map(|r| r.map(|t| Self::Int2(Box::new(t))))), +TypeVariant::Int3 => Box::new(ReadXdrIter::<_, Int3>::new(&mut r.inner, r.limits.clone()).map(|r| r.map(|t| Self::Int3(Box::new(t))))), +TypeVariant::Int4 => Box::new(ReadXdrIter::<_, Int4>::new(&mut r.inner, r.limits.clone()).map(|r| r.map(|t| Self::Int4(Box::new(t))))), +TypeVariant::MyStruct => Box::new(ReadXdrIter::<_, MyStruct>::new(&mut r.inner, r.limits.clone()).map(|r| r.map(|t| Self::MyStruct(Box::new(t))))), +TypeVariant::LotsOfMyStructs => Box::new(ReadXdrIter::<_, LotsOfMyStructs>::new(&mut r.inner, 
r.limits.clone()).map(|r| r.map(|t| Self::LotsOfMyStructs(Box::new(t))))), +TypeVariant::HasStuff => Box::new(ReadXdrIter::<_, HasStuff>::new(&mut r.inner, r.limits.clone()).map(|r| r.map(|t| Self::HasStuff(Box::new(t))))), +TypeVariant::Color => Box::new(ReadXdrIter::<_, Color>::new(&mut r.inner, r.limits.clone()).map(|r| r.map(|t| Self::Color(Box::new(t))))), +TypeVariant::Nester => Box::new(ReadXdrIter::<_, Nester>::new(&mut r.inner, r.limits.clone()).map(|r| r.map(|t| Self::Nester(Box::new(t))))), +TypeVariant::NesterNestedEnum => Box::new(ReadXdrIter::<_, NesterNestedEnum>::new(&mut r.inner, r.limits.clone()).map(|r| r.map(|t| Self::NesterNestedEnum(Box::new(t))))), +TypeVariant::NesterNestedStruct => Box::new(ReadXdrIter::<_, NesterNestedStruct>::new(&mut r.inner, r.limits.clone()).map(|r| r.map(|t| Self::NesterNestedStruct(Box::new(t))))), +TypeVariant::NesterNestedUnion => Box::new(ReadXdrIter::<_, NesterNestedUnion>::new(&mut r.inner, r.limits.clone()).map(|r| r.map(|t| Self::NesterNestedUnion(Box::new(t))))), + } + } + + #[cfg(feature = "std")] + #[allow(clippy::too_many_lines)] + pub fn read_xdr_framed_iter(v: TypeVariant, r: &mut Limited) -> Box> + '_> { + match v { + TypeVariant::Uint512 => Box::new(ReadXdrIter::<_, Frame>::new(&mut r.inner, r.limits.clone()).map(|r| r.map(|t| Self::Uint512(Box::new(t.0))))), +TypeVariant::Uint513 => Box::new(ReadXdrIter::<_, Frame>::new(&mut r.inner, r.limits.clone()).map(|r| r.map(|t| Self::Uint513(Box::new(t.0))))), +TypeVariant::Uint514 => Box::new(ReadXdrIter::<_, Frame>::new(&mut r.inner, r.limits.clone()).map(|r| r.map(|t| Self::Uint514(Box::new(t.0))))), +TypeVariant::Str => Box::new(ReadXdrIter::<_, Frame>::new(&mut r.inner, r.limits.clone()).map(|r| r.map(|t| Self::Str(Box::new(t.0))))), +TypeVariant::Str2 => Box::new(ReadXdrIter::<_, Frame>::new(&mut r.inner, r.limits.clone()).map(|r| r.map(|t| Self::Str2(Box::new(t.0))))), +TypeVariant::Hash => Box::new(ReadXdrIter::<_, Frame>::new(&mut r.inner, r.limits.clone()).map(|r| r.map(|t| Self::Hash(Box::new(t.0))))), +TypeVariant::Hashes1 => Box::new(ReadXdrIter::<_, Frame>::new(&mut r.inner, r.limits.clone()).map(|r| r.map(|t| Self::Hashes1(Box::new(t.0))))), +TypeVariant::Hashes2 => Box::new(ReadXdrIter::<_, Frame>::new(&mut r.inner, r.limits.clone()).map(|r| r.map(|t| Self::Hashes2(Box::new(t.0))))), +TypeVariant::Hashes3 => Box::new(ReadXdrIter::<_, Frame>::new(&mut r.inner, r.limits.clone()).map(|r| r.map(|t| Self::Hashes3(Box::new(t.0))))), +TypeVariant::OptHash1 => Box::new(ReadXdrIter::<_, Frame>::new(&mut r.inner, r.limits.clone()).map(|r| r.map(|t| Self::OptHash1(Box::new(t.0))))), +TypeVariant::OptHash2 => Box::new(ReadXdrIter::<_, Frame>::new(&mut r.inner, r.limits.clone()).map(|r| r.map(|t| Self::OptHash2(Box::new(t.0))))), +TypeVariant::Int1 => Box::new(ReadXdrIter::<_, Frame>::new(&mut r.inner, r.limits.clone()).map(|r| r.map(|t| Self::Int1(Box::new(t.0))))), +TypeVariant::Int2 => Box::new(ReadXdrIter::<_, Frame>::new(&mut r.inner, r.limits.clone()).map(|r| r.map(|t| Self::Int2(Box::new(t.0))))), +TypeVariant::Int3 => Box::new(ReadXdrIter::<_, Frame>::new(&mut r.inner, r.limits.clone()).map(|r| r.map(|t| Self::Int3(Box::new(t.0))))), +TypeVariant::Int4 => Box::new(ReadXdrIter::<_, Frame>::new(&mut r.inner, r.limits.clone()).map(|r| r.map(|t| Self::Int4(Box::new(t.0))))), +TypeVariant::MyStruct => Box::new(ReadXdrIter::<_, Frame>::new(&mut r.inner, r.limits.clone()).map(|r| r.map(|t| Self::MyStruct(Box::new(t.0))))), +TypeVariant::LotsOfMyStructs => Box::new(ReadXdrIter::<_, 
Frame>::new(&mut r.inner, r.limits.clone()).map(|r| r.map(|t| Self::LotsOfMyStructs(Box::new(t.0))))), +TypeVariant::HasStuff => Box::new(ReadXdrIter::<_, Frame>::new(&mut r.inner, r.limits.clone()).map(|r| r.map(|t| Self::HasStuff(Box::new(t.0))))), +TypeVariant::Color => Box::new(ReadXdrIter::<_, Frame>::new(&mut r.inner, r.limits.clone()).map(|r| r.map(|t| Self::Color(Box::new(t.0))))), +TypeVariant::Nester => Box::new(ReadXdrIter::<_, Frame>::new(&mut r.inner, r.limits.clone()).map(|r| r.map(|t| Self::Nester(Box::new(t.0))))), +TypeVariant::NesterNestedEnum => Box::new(ReadXdrIter::<_, Frame>::new(&mut r.inner, r.limits.clone()).map(|r| r.map(|t| Self::NesterNestedEnum(Box::new(t.0))))), +TypeVariant::NesterNestedStruct => Box::new(ReadXdrIter::<_, Frame>::new(&mut r.inner, r.limits.clone()).map(|r| r.map(|t| Self::NesterNestedStruct(Box::new(t.0))))), +TypeVariant::NesterNestedUnion => Box::new(ReadXdrIter::<_, Frame>::new(&mut r.inner, r.limits.clone()).map(|r| r.map(|t| Self::NesterNestedUnion(Box::new(t.0))))), + } + } + + #[cfg(feature = "base64")] + #[allow(clippy::too_many_lines)] + pub fn read_xdr_base64_iter(v: TypeVariant, r: &mut Limited) -> Box> + '_> { + let dec = base64::read::DecoderReader::new(&mut r.inner, base64::STANDARD); + match v { + TypeVariant::Uint512 => Box::new(ReadXdrIter::<_, Uint512>::new(dec, r.limits.clone()).map(|r| r.map(|t| Self::Uint512(Box::new(t))))), +TypeVariant::Uint513 => Box::new(ReadXdrIter::<_, Uint513>::new(dec, r.limits.clone()).map(|r| r.map(|t| Self::Uint513(Box::new(t))))), +TypeVariant::Uint514 => Box::new(ReadXdrIter::<_, Uint514>::new(dec, r.limits.clone()).map(|r| r.map(|t| Self::Uint514(Box::new(t))))), +TypeVariant::Str => Box::new(ReadXdrIter::<_, Str>::new(dec, r.limits.clone()).map(|r| r.map(|t| Self::Str(Box::new(t))))), +TypeVariant::Str2 => Box::new(ReadXdrIter::<_, Str2>::new(dec, r.limits.clone()).map(|r| r.map(|t| Self::Str2(Box::new(t))))), +TypeVariant::Hash => Box::new(ReadXdrIter::<_, Hash>::new(dec, r.limits.clone()).map(|r| r.map(|t| Self::Hash(Box::new(t))))), +TypeVariant::Hashes1 => Box::new(ReadXdrIter::<_, Hashes1>::new(dec, r.limits.clone()).map(|r| r.map(|t| Self::Hashes1(Box::new(t))))), +TypeVariant::Hashes2 => Box::new(ReadXdrIter::<_, Hashes2>::new(dec, r.limits.clone()).map(|r| r.map(|t| Self::Hashes2(Box::new(t))))), +TypeVariant::Hashes3 => Box::new(ReadXdrIter::<_, Hashes3>::new(dec, r.limits.clone()).map(|r| r.map(|t| Self::Hashes3(Box::new(t))))), +TypeVariant::OptHash1 => Box::new(ReadXdrIter::<_, OptHash1>::new(dec, r.limits.clone()).map(|r| r.map(|t| Self::OptHash1(Box::new(t))))), +TypeVariant::OptHash2 => Box::new(ReadXdrIter::<_, OptHash2>::new(dec, r.limits.clone()).map(|r| r.map(|t| Self::OptHash2(Box::new(t))))), +TypeVariant::Int1 => Box::new(ReadXdrIter::<_, Int1>::new(dec, r.limits.clone()).map(|r| r.map(|t| Self::Int1(Box::new(t))))), +TypeVariant::Int2 => Box::new(ReadXdrIter::<_, Int2>::new(dec, r.limits.clone()).map(|r| r.map(|t| Self::Int2(Box::new(t))))), +TypeVariant::Int3 => Box::new(ReadXdrIter::<_, Int3>::new(dec, r.limits.clone()).map(|r| r.map(|t| Self::Int3(Box::new(t))))), +TypeVariant::Int4 => Box::new(ReadXdrIter::<_, Int4>::new(dec, r.limits.clone()).map(|r| r.map(|t| Self::Int4(Box::new(t))))), +TypeVariant::MyStruct => Box::new(ReadXdrIter::<_, MyStruct>::new(dec, r.limits.clone()).map(|r| r.map(|t| Self::MyStruct(Box::new(t))))), +TypeVariant::LotsOfMyStructs => Box::new(ReadXdrIter::<_, LotsOfMyStructs>::new(dec, r.limits.clone()).map(|r| r.map(|t| 
Self::LotsOfMyStructs(Box::new(t))))), +TypeVariant::HasStuff => Box::new(ReadXdrIter::<_, HasStuff>::new(dec, r.limits.clone()).map(|r| r.map(|t| Self::HasStuff(Box::new(t))))), +TypeVariant::Color => Box::new(ReadXdrIter::<_, Color>::new(dec, r.limits.clone()).map(|r| r.map(|t| Self::Color(Box::new(t))))), +TypeVariant::Nester => Box::new(ReadXdrIter::<_, Nester>::new(dec, r.limits.clone()).map(|r| r.map(|t| Self::Nester(Box::new(t))))), +TypeVariant::NesterNestedEnum => Box::new(ReadXdrIter::<_, NesterNestedEnum>::new(dec, r.limits.clone()).map(|r| r.map(|t| Self::NesterNestedEnum(Box::new(t))))), +TypeVariant::NesterNestedStruct => Box::new(ReadXdrIter::<_, NesterNestedStruct>::new(dec, r.limits.clone()).map(|r| r.map(|t| Self::NesterNestedStruct(Box::new(t))))), +TypeVariant::NesterNestedUnion => Box::new(ReadXdrIter::<_, NesterNestedUnion>::new(dec, r.limits.clone()).map(|r| r.map(|t| Self::NesterNestedUnion(Box::new(t))))), + } + } + + #[cfg(feature = "std")] + pub fn from_xdr>(v: TypeVariant, bytes: B, limits: Limits) -> Result { + let mut cursor = Limited::new(Cursor::new(bytes.as_ref()), limits); + let t = Self::read_xdr_to_end(v, &mut cursor)?; + Ok(t) + } + + #[cfg(feature = "base64")] + pub fn from_xdr_base64(v: TypeVariant, b64: impl AsRef<[u8]>, limits: Limits) -> Result { + let mut b64_reader = Cursor::new(b64); + let mut dec = Limited::new(base64::read::DecoderReader::new(&mut b64_reader, base64::STANDARD), limits); + let t = Self::read_xdr_to_end(v, &mut dec)?; + Ok(t) + } + + #[cfg(all(feature = "std", feature = "serde_json"))] + #[allow(clippy::too_many_lines)] + pub fn read_json(v: TypeVariant, r: impl Read) -> Result { + match v { + TypeVariant::Uint512 => Ok(Self::Uint512(Box::new(serde_json::from_reader(r)?))), +TypeVariant::Uint513 => Ok(Self::Uint513(Box::new(serde_json::from_reader(r)?))), +TypeVariant::Uint514 => Ok(Self::Uint514(Box::new(serde_json::from_reader(r)?))), +TypeVariant::Str => Ok(Self::Str(Box::new(serde_json::from_reader(r)?))), +TypeVariant::Str2 => Ok(Self::Str2(Box::new(serde_json::from_reader(r)?))), +TypeVariant::Hash => Ok(Self::Hash(Box::new(serde_json::from_reader(r)?))), +TypeVariant::Hashes1 => Ok(Self::Hashes1(Box::new(serde_json::from_reader(r)?))), +TypeVariant::Hashes2 => Ok(Self::Hashes2(Box::new(serde_json::from_reader(r)?))), +TypeVariant::Hashes3 => Ok(Self::Hashes3(Box::new(serde_json::from_reader(r)?))), +TypeVariant::OptHash1 => Ok(Self::OptHash1(Box::new(serde_json::from_reader(r)?))), +TypeVariant::OptHash2 => Ok(Self::OptHash2(Box::new(serde_json::from_reader(r)?))), +TypeVariant::Int1 => Ok(Self::Int1(Box::new(serde_json::from_reader(r)?))), +TypeVariant::Int2 => Ok(Self::Int2(Box::new(serde_json::from_reader(r)?))), +TypeVariant::Int3 => Ok(Self::Int3(Box::new(serde_json::from_reader(r)?))), +TypeVariant::Int4 => Ok(Self::Int4(Box::new(serde_json::from_reader(r)?))), +TypeVariant::MyStruct => Ok(Self::MyStruct(Box::new(serde_json::from_reader(r)?))), +TypeVariant::LotsOfMyStructs => Ok(Self::LotsOfMyStructs(Box::new(serde_json::from_reader(r)?))), +TypeVariant::HasStuff => Ok(Self::HasStuff(Box::new(serde_json::from_reader(r)?))), +TypeVariant::Color => Ok(Self::Color(Box::new(serde_json::from_reader(r)?))), +TypeVariant::Nester => Ok(Self::Nester(Box::new(serde_json::from_reader(r)?))), +TypeVariant::NesterNestedEnum => Ok(Self::NesterNestedEnum(Box::new(serde_json::from_reader(r)?))), +TypeVariant::NesterNestedStruct => Ok(Self::NesterNestedStruct(Box::new(serde_json::from_reader(r)?))), +TypeVariant::NesterNestedUnion 
=> Ok(Self::NesterNestedUnion(Box::new(serde_json::from_reader(r)?))), + } + } + + #[cfg(feature = "alloc")] + #[must_use] + #[allow(clippy::too_many_lines)] + pub fn value(&self) -> &dyn core::any::Any { + #[allow(clippy::match_same_arms)] + match self { + Self::Uint512(ref v) => v.as_ref(), +Self::Uint513(ref v) => v.as_ref(), +Self::Uint514(ref v) => v.as_ref(), +Self::Str(ref v) => v.as_ref(), +Self::Str2(ref v) => v.as_ref(), +Self::Hash(ref v) => v.as_ref(), +Self::Hashes1(ref v) => v.as_ref(), +Self::Hashes2(ref v) => v.as_ref(), +Self::Hashes3(ref v) => v.as_ref(), +Self::OptHash1(ref v) => v.as_ref(), +Self::OptHash2(ref v) => v.as_ref(), +Self::Int1(ref v) => v.as_ref(), +Self::Int2(ref v) => v.as_ref(), +Self::Int3(ref v) => v.as_ref(), +Self::Int4(ref v) => v.as_ref(), +Self::MyStruct(ref v) => v.as_ref(), +Self::LotsOfMyStructs(ref v) => v.as_ref(), +Self::HasStuff(ref v) => v.as_ref(), +Self::Color(ref v) => v.as_ref(), +Self::Nester(ref v) => v.as_ref(), +Self::NesterNestedEnum(ref v) => v.as_ref(), +Self::NesterNestedStruct(ref v) => v.as_ref(), +Self::NesterNestedUnion(ref v) => v.as_ref(), + } + } + + #[must_use] + #[allow(clippy::too_many_lines)] + pub const fn name(&self) -> &'static str { + match self { + Self::Uint512(_) => "Uint512", +Self::Uint513(_) => "Uint513", +Self::Uint514(_) => "Uint514", +Self::Str(_) => "Str", +Self::Str2(_) => "Str2", +Self::Hash(_) => "Hash", +Self::Hashes1(_) => "Hashes1", +Self::Hashes2(_) => "Hashes2", +Self::Hashes3(_) => "Hashes3", +Self::OptHash1(_) => "OptHash1", +Self::OptHash2(_) => "OptHash2", +Self::Int1(_) => "Int1", +Self::Int2(_) => "Int2", +Self::Int3(_) => "Int3", +Self::Int4(_) => "Int4", +Self::MyStruct(_) => "MyStruct", +Self::LotsOfMyStructs(_) => "LotsOfMyStructs", +Self::HasStuff(_) => "HasStuff", +Self::Color(_) => "Color", +Self::Nester(_) => "Nester", +Self::NesterNestedEnum(_) => "NesterNestedEnum", +Self::NesterNestedStruct(_) => "NesterNestedStruct", +Self::NesterNestedUnion(_) => "NesterNestedUnion", + } + } + + #[must_use] + #[allow(clippy::too_many_lines)] + pub const fn variants() -> [TypeVariant; 23] { + Self::VARIANTS + } + + #[must_use] + #[allow(clippy::too_many_lines)] + pub const fn variant(&self) -> TypeVariant { + match self { + Self::Uint512(_) => TypeVariant::Uint512, +Self::Uint513(_) => TypeVariant::Uint513, +Self::Uint514(_) => TypeVariant::Uint514, +Self::Str(_) => TypeVariant::Str, +Self::Str2(_) => TypeVariant::Str2, +Self::Hash(_) => TypeVariant::Hash, +Self::Hashes1(_) => TypeVariant::Hashes1, +Self::Hashes2(_) => TypeVariant::Hashes2, +Self::Hashes3(_) => TypeVariant::Hashes3, +Self::OptHash1(_) => TypeVariant::OptHash1, +Self::OptHash2(_) => TypeVariant::OptHash2, +Self::Int1(_) => TypeVariant::Int1, +Self::Int2(_) => TypeVariant::Int2, +Self::Int3(_) => TypeVariant::Int3, +Self::Int4(_) => TypeVariant::Int4, +Self::MyStruct(_) => TypeVariant::MyStruct, +Self::LotsOfMyStructs(_) => TypeVariant::LotsOfMyStructs, +Self::HasStuff(_) => TypeVariant::HasStuff, +Self::Color(_) => TypeVariant::Color, +Self::Nester(_) => TypeVariant::Nester, +Self::NesterNestedEnum(_) => TypeVariant::NesterNestedEnum, +Self::NesterNestedStruct(_) => TypeVariant::NesterNestedStruct, +Self::NesterNestedUnion(_) => TypeVariant::NesterNestedUnion, + } + } + } + + impl Name for Type { + #[must_use] + fn name(&self) -> &'static str { + Self::name(self) + } + } + + impl Variants for Type { + fn variants() -> slice::Iter<'static, TypeVariant> { + Self::VARIANTS.iter() + } + } + + impl WriteXdr for Type { + #[cfg(feature 
= "std")] + #[allow(clippy::too_many_lines)] + fn write_xdr(&self, w: &mut Limited) -> Result<()> { + match self { + Self::Uint512(v) => v.write_xdr(w), +Self::Uint513(v) => v.write_xdr(w), +Self::Uint514(v) => v.write_xdr(w), +Self::Str(v) => v.write_xdr(w), +Self::Str2(v) => v.write_xdr(w), +Self::Hash(v) => v.write_xdr(w), +Self::Hashes1(v) => v.write_xdr(w), +Self::Hashes2(v) => v.write_xdr(w), +Self::Hashes3(v) => v.write_xdr(w), +Self::OptHash1(v) => v.write_xdr(w), +Self::OptHash2(v) => v.write_xdr(w), +Self::Int1(v) => v.write_xdr(w), +Self::Int2(v) => v.write_xdr(w), +Self::Int3(v) => v.write_xdr(w), +Self::Int4(v) => v.write_xdr(w), +Self::MyStruct(v) => v.write_xdr(w), +Self::LotsOfMyStructs(v) => v.write_xdr(w), +Self::HasStuff(v) => v.write_xdr(w), +Self::Color(v) => v.write_xdr(w), +Self::Nester(v) => v.write_xdr(w), +Self::NesterNestedEnum(v) => v.write_xdr(w), +Self::NesterNestedStruct(v) => v.write_xdr(w), +Self::NesterNestedUnion(v) => v.write_xdr(w), + } + } + } diff --git a/spec/output/generator_spec_rust_custom_jsonschema_impls/union.x/MyXDR.rs b/spec/output/generator_spec_rust_custom_jsonschema_impls/union.x/MyXDR.rs new file mode 100644 index 000000000..093cea099 --- /dev/null +++ b/spec/output/generator_spec_rust_custom_jsonschema_impls/union.x/MyXDR.rs @@ -0,0 +1,3478 @@ +// Module is generated from: +// spec/fixtures/generator/union.x + +#![allow(clippy::missing_errors_doc, clippy::unreadable_literal)] + +/// `XDR_FILES_SHA256` is a list of pairs of source files and their SHA256 hashes. +pub const XDR_FILES_SHA256: [(&str, &str); 1] = [ + ("spec/fixtures/generator/union.x", "c251258d967223b341ebcf2d5bb0718e9a039b46232cb743865d9acd0c4bbe41") +]; + +use core::{array::TryFromSliceError, fmt, fmt::Debug, marker::Sized, ops::Deref, slice}; + +#[cfg(feature = "std")] +use core::marker::PhantomData; + +// When feature alloc is turned off use static lifetime Box and Vec types. +#[cfg(not(feature = "alloc"))] +mod noalloc { + pub mod boxed { + pub type Box = &'static T; + } + pub mod vec { + pub type Vec = &'static [T]; + } +} +#[cfg(not(feature = "alloc"))] +use noalloc::{boxed::Box, vec::Vec}; + +// When feature std is turned off, but feature alloc is turned on import the +// alloc crate and use its Box and Vec types. +#[cfg(all(not(feature = "std"), feature = "alloc"))] +extern crate alloc; +#[cfg(all(not(feature = "std"), feature = "alloc"))] +use alloc::{ + borrow::ToOwned, + boxed::Box, + string::{FromUtf8Error, String}, + vec::Vec, +}; +#[cfg(feature = "std")] +use std::string::FromUtf8Error; + +#[cfg(feature = "arbitrary")] +use arbitrary::Arbitrary; + +// TODO: Add support for read/write xdr fns when std not available. + +#[cfg(feature = "std")] +use std::{ + error, io, + io::{BufRead, BufReader, Cursor, Read, Write}, +}; + +/// Error contains all errors returned by functions in this crate. It can be +/// compared via `PartialEq`, however any contained IO errors will only be +/// compared on their `ErrorKind`. 
+#[derive(Debug)] +pub enum Error { + Invalid, + Unsupported, + LengthExceedsMax, + LengthMismatch, + NonZeroPadding, + Utf8Error(core::str::Utf8Error), + #[cfg(feature = "alloc")] + InvalidHex, + #[cfg(feature = "std")] + Io(io::Error), + DepthLimitExceeded, + #[cfg(feature = "serde_json")] + Json(serde_json::Error), + LengthLimitExceeded, +} + +impl PartialEq for Error { + fn eq(&self, other: &Self) -> bool { + match (self, other) { + (Self::Utf8Error(l), Self::Utf8Error(r)) => l == r, + // IO errors cannot be compared, but in the absence of any more + // meaningful way to compare the errors we compare the kind of error + // and ignore the embedded source error or OS error. The main use + // case for comparing errors outputted by the XDR library is for + // error case testing, and a lack of the ability to compare has a + // detrimental affect on failure testing, so this is a tradeoff. + #[cfg(feature = "std")] + (Self::Io(l), Self::Io(r)) => l.kind() == r.kind(), + _ => core::mem::discriminant(self) == core::mem::discriminant(other), + } + } +} + +#[cfg(feature = "std")] +impl error::Error for Error { + #[must_use] + fn source(&self) -> Option<&(dyn error::Error + 'static)> { + match self { + Self::Io(e) => Some(e), + #[cfg(feature = "serde_json")] + Self::Json(e) => Some(e), + _ => None, + } + } +} + +impl fmt::Display for Error { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match self { + Error::Invalid => write!(f, "xdr value invalid"), + Error::Unsupported => write!(f, "xdr value unsupported"), + Error::LengthExceedsMax => write!(f, "xdr value max length exceeded"), + Error::LengthMismatch => write!(f, "xdr value length does not match"), + Error::NonZeroPadding => write!(f, "xdr padding contains non-zero bytes"), + Error::Utf8Error(e) => write!(f, "{e}"), + #[cfg(feature = "alloc")] + Error::InvalidHex => write!(f, "hex invalid"), + #[cfg(feature = "std")] + Error::Io(e) => write!(f, "{e}"), + Error::DepthLimitExceeded => write!(f, "depth limit exceeded"), + #[cfg(feature = "serde_json")] + Error::Json(e) => write!(f, "{e}"), + Error::LengthLimitExceeded => write!(f, "length limit exceeded"), + } + } +} + +impl From for Error { + fn from(_: TryFromSliceError) -> Error { + Error::LengthMismatch + } +} + +impl From for Error { + #[must_use] + fn from(e: core::str::Utf8Error) -> Self { + Error::Utf8Error(e) + } +} + +#[cfg(feature = "alloc")] +impl From for Error { + #[must_use] + fn from(e: FromUtf8Error) -> Self { + Error::Utf8Error(e.utf8_error()) + } +} + +#[cfg(feature = "std")] +impl From for Error { + #[must_use] + fn from(e: io::Error) -> Self { + Error::Io(e) + } +} + +#[cfg(feature = "serde_json")] +impl From for Error { + #[must_use] + fn from(e: serde_json::Error) -> Self { + Error::Json(e) + } +} + +impl From for () { + fn from(_: Error) {} +} + +#[allow(dead_code)] +type Result = core::result::Result; + +/// Name defines types that assign a static name to their value, such as the +/// name given to an identifier in an XDR enum, or the name given to the case in +/// a union. +pub trait Name { + fn name(&self) -> &'static str; +} + +/// Discriminant defines types that may contain a one-of value determined +/// according to the discriminant, and exposes the value of the discriminant for +/// that type, such as in an XDR union. +pub trait Discriminant { + fn discriminant(&self) -> D; +} + +/// Iter defines types that have variants that can be iterated. 
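+// Illustrative note (not part of the generated output): the generated enums
+// expose their cases through this trait and the inherent `variants()` helpers,
+// so callers can enumerate them without matching, e.g.
+//
+//     for color in Color::variants() {
+//         println!("{color}"); // prints Red, Blue, Green
+//     }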
+pub trait Variants<V> {
+    fn variants() -> slice::Iter<'static, V>
+    where
+        V: Sized;
+}
+
+// Enum defines a type that is represented as an XDR enumeration when encoded.
+pub trait Enum: Name + Variants<Self> + Sized {}
+
+// Union defines a type that is represented as an XDR union when encoded.
+pub trait Union<D>: Name + Discriminant<D> + Variants<D>
+where
+    D: Sized,
+{
+}
+
+/// `Limits` contains the limits that a limited reader or writer will be
+/// constrained to.
+#[cfg(feature = "std")]
+#[derive(Debug, Clone, Hash, PartialEq, Eq, PartialOrd, Ord)]
+pub struct Limits {
+    /// Defines the maximum depth for recursive calls in `Read/WriteXdr` to
+    /// prevent stack overflow.
+    ///
+    /// The depth limit is akin to limiting stack depth. Its purpose is to
+    /// prevent the program from hitting the maximum stack size allowed by Rust,
+    /// which would result in an unrecoverable `SIGABRT`. For more information
+    /// about Rust's stack size limit, refer to the [Rust
+    /// documentation](https://doc.rust-lang.org/std/thread/#stack-size).
+    pub depth: u32,
+
+    /// Defines the maximum number of bytes that will be read or written.
+    pub len: usize,
+}
+
+#[cfg(feature = "std")]
+impl Limits {
+    #[must_use]
+    pub fn none() -> Self {
+        Self {
+            depth: u32::MAX,
+            len: usize::MAX,
+        }
+    }
+
+    #[must_use]
+    pub fn depth(depth: u32) -> Self {
+        Limits {
+            depth,
+            ..Limits::none()
+        }
+    }
+
+    #[must_use]
+    pub fn len(len: usize) -> Self {
+        Limits {
+            len,
+            ..Limits::none()
+        }
+    }
+}
+
+/// `Limited` wraps an object and provides functions for enforcing limits.
+///
+/// Intended for use with readers and writers and limiting their reads and
+/// writes.
+#[cfg(feature = "std")]
+pub struct Limited<L> {
+    pub inner: L,
+    pub(crate) limits: Limits,
+}
+
+#[cfg(feature = "std")]
+impl<L> Limited<L> {
+    /// Constructs a new `Limited`.
+    ///
+    /// - `inner`: The value being limited.
+    /// - `limits`: The limits to enforce.
+    pub fn new(inner: L, limits: Limits) -> Self {
+        Limited { inner, limits }
+    }
+
+    /// Consume the given length from the internal remaining length limit.
+    ///
+    /// ### Errors
+    ///
+    /// If the length would consume more length than the remaining length limit
+    /// allows.
+    pub(crate) fn consume_len(&mut self, len: usize) -> Result<()> {
+        if let Some(len) = self.limits.len.checked_sub(len) {
+            self.limits.len = len;
+            Ok(())
+        } else {
+            Err(Error::LengthLimitExceeded)
+        }
+    }
+
+    /// Consumes a single depth for the duration of the given function.
+    ///
+    /// ### Errors
+    ///
+    /// If the depth limit is already exhausted.
+    pub(crate) fn with_limited_depth<T, F>(&mut self, f: F) -> Result<T>
+    where
+        F: FnOnce(&mut Self) -> Result<T>,
+    {
+        if let Some(depth) = self.limits.depth.checked_sub(1) {
+            self.limits.depth = depth;
+            let res = f(self);
+            self.limits.depth = self.limits.depth.saturating_add(1);
+            res
+        } else {
+            Err(Error::DepthLimitExceeded)
+        }
+    }
+}
+
+#[cfg(feature = "std")]
+impl<L: Read> Read for Limited<L> {
+    /// Forwards the read operation to the wrapped object.
+    fn read(&mut self, buf: &mut [u8]) -> std::io::Result<usize> {
+        self.inner.read(buf)
+    }
+}
+
+#[cfg(feature = "std")]
+impl<L: BufRead> BufRead for Limited<L> {
+    /// Forwards the read operation to the wrapped object.
+    fn fill_buf(&mut self) -> std::io::Result<&[u8]> {
+        self.inner.fill_buf()
+    }
+
+    /// Forwards the read operation to the wrapped object.
+    fn consume(&mut self, amt: usize) {
+        self.inner.consume(amt);
+    }
+}
+
+#[cfg(feature = "std")]
+impl<L: Write> Write for Limited<L> {
+    /// Forwards the write operation to the wrapped object.
+ fn write(&mut self, buf: &[u8]) -> std::io::Result { + self.inner.write(buf) + } + + /// Forwards the flush operation to the wrapped object. + fn flush(&mut self) -> std::io::Result<()> { + self.inner.flush() + } +} + +#[cfg(feature = "std")] +pub struct ReadXdrIter { + reader: Limited>, + _s: PhantomData, +} + +#[cfg(feature = "std")] +impl ReadXdrIter { + fn new(r: R, limits: Limits) -> Self { + Self { + reader: Limited { + inner: BufReader::new(r), + limits, + }, + _s: PhantomData, + } + } +} + +#[cfg(feature = "std")] +impl Iterator for ReadXdrIter { + type Item = Result; + + // Next reads the internal reader and XDR decodes it into the Self type. If + // the EOF is reached without reading any new bytes `None` is returned. If + // EOF is reached after reading some bytes a truncated entry is assumed an + // an `Error::Io` containing an `UnexpectedEof`. If any other IO error + // occurs it is returned. Iteration of this iterator stops naturally when + // `None` is returned, but not when a `Some(Err(...))` is returned. The + // caller is responsible for checking each Result. + fn next(&mut self) -> Option { + // Try to fill the buffer to see if the EOF has been reached or not. + // This happens to effectively peek to see if the stream has finished + // and there are no more items. It is necessary to do this because the + // xdr types in this crate heavily use the `std::io::Read::read_exact` + // method that doesn't distinguish between an EOF at the beginning of a + // read and an EOF after a partial fill of a read_exact. + match self.reader.fill_buf() { + // If the reader has no more data and is unable to fill any new data + // into its internal buf, then the EOF has been reached. + Ok([]) => return None, + // If an error occurs filling the buffer, treat that as an error and stop. + Err(e) => return Some(Err(Error::Io(e))), + // If there is data in the buf available for reading, continue. + Ok([..]) => (), + }; + // Read the buf into the type. + let r = self.reader.with_limited_depth(|dlr| S::read_xdr(dlr)); + match r { + Ok(s) => Some(Ok(s)), + Err(e) => Some(Err(e)), + } + } +} + +pub trait ReadXdr +where + Self: Sized, +{ + /// Read the XDR and construct the type. + /// + /// Read bytes from the given read implementation, decoding the bytes as + /// XDR, and construct the type implementing this interface from those + /// bytes. + /// + /// Just enough bytes are read from the read implementation to construct the + /// type. Any residual bytes remain in the read implementation. + /// + /// All implementations should continue if the read implementation returns + /// [`ErrorKind::Interrupted`](std::io::ErrorKind::Interrupted). + /// + /// Use [`ReadXdR: Read_xdr_to_end`] when the intent is for all bytes in the + /// read implementation to be consumed by the read. + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result; + + /// Construct the type from the XDR bytes base64 encoded. + /// + /// An error is returned if the bytes are not completely consumed by the + /// deserialization. + #[cfg(feature = "base64")] + fn read_xdr_base64(r: &mut Limited) -> Result { + let mut dec = Limited::new( + base64::read::DecoderReader::new(&mut r.inner, base64::STANDARD), + r.limits.clone(), + ); + let t = Self::read_xdr(&mut dec)?; + Ok(t) + } + + /// Read the XDR and construct the type, and consider it an error if the + /// read does not completely consume the read implementation. 
+ /// + /// Read bytes from the given read implementation, decoding the bytes as + /// XDR, and construct the type implementing this interface from those + /// bytes. + /// + /// Just enough bytes are read from the read implementation to construct the + /// type, and then confirm that no further bytes remain. To confirm no + /// further bytes remain additional bytes are attempted to be read from the + /// read implementation. If it is possible to read any residual bytes from + /// the read implementation an error is returned. The read implementation + /// may not be exhaustively read if there are residual bytes, and it is + /// considered undefined how many residual bytes or how much of the residual + /// buffer are consumed in this case. + /// + /// All implementations should continue if the read implementation returns + /// [`ErrorKind::Interrupted`](std::io::ErrorKind::Interrupted). + #[cfg(feature = "std")] + fn read_xdr_to_end(r: &mut Limited) -> Result { + let s = Self::read_xdr(r)?; + // Check that any further reads, such as this read of one byte, read no + // data, indicating EOF. If a byte is read the data is invalid. + if r.read(&mut [0u8; 1])? == 0 { + Ok(s) + } else { + Err(Error::Invalid) + } + } + + /// Construct the type from the XDR bytes base64 encoded. + /// + /// An error is returned if the bytes are not completely consumed by the + /// deserialization. + #[cfg(feature = "base64")] + fn read_xdr_base64_to_end(r: &mut Limited) -> Result { + let mut dec = Limited::new( + base64::read::DecoderReader::new(&mut r.inner, base64::STANDARD), + r.limits.clone(), + ); + let t = Self::read_xdr_to_end(&mut dec)?; + Ok(t) + } + + /// Read the XDR and construct the type. + /// + /// Read bytes from the given read implementation, decoding the bytes as + /// XDR, and construct the type implementing this interface from those + /// bytes. + /// + /// Just enough bytes are read from the read implementation to construct the + /// type. Any residual bytes remain in the read implementation. + /// + /// All implementations should continue if the read implementation returns + /// [`ErrorKind::Interrupted`](std::io::ErrorKind::Interrupted). + /// + /// Use [`ReadXdR: Read_xdr_into_to_end`] when the intent is for all bytes + /// in the read implementation to be consumed by the read. + #[cfg(feature = "std")] + fn read_xdr_into(&mut self, r: &mut Limited) -> Result<()> { + *self = Self::read_xdr(r)?; + Ok(()) + } + + /// Read the XDR into the existing value, and consider it an error if the + /// read does not completely consume the read implementation. + /// + /// Read bytes from the given read implementation, decoding the bytes as + /// XDR, and construct the type implementing this interface from those + /// bytes. + /// + /// Just enough bytes are read from the read implementation to construct the + /// type, and then confirm that no further bytes remain. To confirm no + /// further bytes remain additional bytes are attempted to be read from the + /// read implementation. If it is possible to read any residual bytes from + /// the read implementation an error is returned. The read implementation + /// may not be exhaustively read if there are residual bytes, and it is + /// considered undefined how many residual bytes or how much of the residual + /// buffer are consumed in this case. + /// + /// All implementations should continue if the read implementation returns + /// [`ErrorKind::Interrupted`](std::io::ErrorKind::Interrupted). 
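+    // Illustrative note (not part of the generated output): the `*_to_end`
+    // variants treat leftover bytes as an error, so decoding a buffer that is
+    // expected to hold exactly one value looks like
+    //
+    //     let v = Self::from_xdr(bytes, Limits::none())?;
+    //
+    // which returns `Err(Error::Invalid)` if `bytes` carries trailing data.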
+ #[cfg(feature = "std")] + fn read_xdr_into_to_end(&mut self, r: &mut Limited) -> Result<()> { + Self::read_xdr_into(self, r)?; + // Check that any further reads, such as this read of one byte, read no + // data, indicating EOF. If a byte is read the data is invalid. + if r.read(&mut [0u8; 1])? == 0 { + Ok(()) + } else { + Err(Error::Invalid) + } + } + + /// Create an iterator that reads the read implementation as a stream of + /// values that are read into the implementing type. + /// + /// Read bytes from the given read implementation, decoding the bytes as + /// XDR, and construct the type implementing this interface from those + /// bytes. + /// + /// Just enough bytes are read from the read implementation to construct the + /// type, and then confirm that no further bytes remain. To confirm no + /// further bytes remain additional bytes are attempted to be read from the + /// read implementation. If it is possible to read any residual bytes from + /// the read implementation an error is returned. The read implementation + /// may not be exhaustively read if there are residual bytes, and it is + /// considered undefined how many residual bytes or how much of the residual + /// buffer are consumed in this case. + /// + /// All implementations should continue if the read implementation returns + /// [`ErrorKind::Interrupted`](std::io::ErrorKind::Interrupted). + #[cfg(feature = "std")] + fn read_xdr_iter(r: &mut Limited) -> ReadXdrIter<&mut R, Self> { + ReadXdrIter::new(&mut r.inner, r.limits.clone()) + } + + /// Create an iterator that reads the read implementation as a stream of + /// values that are read into the implementing type. + #[cfg(feature = "base64")] + fn read_xdr_base64_iter( + r: &mut Limited, + ) -> ReadXdrIter, Self> { + let dec = base64::read::DecoderReader::new(&mut r.inner, base64::STANDARD); + ReadXdrIter::new(dec, r.limits.clone()) + } + + /// Construct the type from the XDR bytes. + /// + /// An error is returned if the bytes are not completely consumed by the + /// deserialization. + #[cfg(feature = "std")] + fn from_xdr(bytes: impl AsRef<[u8]>, limits: Limits) -> Result { + let mut cursor = Limited::new(Cursor::new(bytes.as_ref()), limits); + let t = Self::read_xdr_to_end(&mut cursor)?; + Ok(t) + } + + /// Construct the type from the XDR bytes base64 encoded. + /// + /// An error is returned if the bytes are not completely consumed by the + /// deserialization. + #[cfg(feature = "base64")] + fn from_xdr_base64(b64: impl AsRef<[u8]>, limits: Limits) -> Result { + let mut b64_reader = Cursor::new(b64); + let mut dec = Limited::new( + base64::read::DecoderReader::new(&mut b64_reader, base64::STANDARD), + limits, + ); + let t = Self::read_xdr_to_end(&mut dec)?; + Ok(t) + } +} + +pub trait WriteXdr { + #[cfg(feature = "std")] + fn write_xdr(&self, w: &mut Limited) -> Result<()>; + + #[cfg(feature = "std")] + fn to_xdr(&self, limits: Limits) -> Result> { + let mut cursor = Limited::new(Cursor::new(vec![]), limits); + self.write_xdr(&mut cursor)?; + let bytes = cursor.inner.into_inner(); + Ok(bytes) + } + + #[cfg(feature = "base64")] + fn to_xdr_base64(&self, limits: Limits) -> Result { + let mut enc = Limited::new( + base64::write::EncoderStringWriter::new(base64::STANDARD), + limits, + ); + self.write_xdr(&mut enc)?; + let b64 = enc.inner.into_inner(); + Ok(b64) + } +} + +/// `Pad_len` returns the number of bytes to pad an XDR value of the given +/// length to make the final serialized size a multiple of 4. 
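+// Illustrative note (not part of the generated output): XDR aligns every item
+// to 4 bytes, so `pad_len(5)` is 3 and `pad_len(8)` is 0; the padding bytes
+// written (and required when reading) are always zero.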
+#[cfg(feature = "std")] +fn pad_len(len: usize) -> usize { + (4 - (len % 4)) % 4 +} + +impl ReadXdr for i32 { + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result { + let mut b = [0u8; 4]; + r.with_limited_depth(|r| { + r.consume_len(b.len())?; + r.read_exact(&mut b)?; + Ok(i32::from_be_bytes(b)) + }) + } +} + +impl WriteXdr for i32 { + #[cfg(feature = "std")] + fn write_xdr(&self, w: &mut Limited) -> Result<()> { + let b: [u8; 4] = self.to_be_bytes(); + w.with_limited_depth(|w| { + w.consume_len(b.len())?; + Ok(w.write_all(&b)?) + }) + } +} + +impl ReadXdr for u32 { + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result { + let mut b = [0u8; 4]; + r.with_limited_depth(|r| { + r.consume_len(b.len())?; + r.read_exact(&mut b)?; + Ok(u32::from_be_bytes(b)) + }) + } +} + +impl WriteXdr for u32 { + #[cfg(feature = "std")] + fn write_xdr(&self, w: &mut Limited) -> Result<()> { + let b: [u8; 4] = self.to_be_bytes(); + w.with_limited_depth(|w| { + w.consume_len(b.len())?; + Ok(w.write_all(&b)?) + }) + } +} + +impl ReadXdr for i64 { + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result { + let mut b = [0u8; 8]; + r.with_limited_depth(|r| { + r.consume_len(b.len())?; + r.read_exact(&mut b)?; + Ok(i64::from_be_bytes(b)) + }) + } +} + +impl WriteXdr for i64 { + #[cfg(feature = "std")] + fn write_xdr(&self, w: &mut Limited) -> Result<()> { + let b: [u8; 8] = self.to_be_bytes(); + w.with_limited_depth(|w| { + w.consume_len(b.len())?; + Ok(w.write_all(&b)?) + }) + } +} + +impl ReadXdr for u64 { + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result { + let mut b = [0u8; 8]; + r.with_limited_depth(|r| { + r.consume_len(b.len())?; + r.read_exact(&mut b)?; + Ok(u64::from_be_bytes(b)) + }) + } +} + +impl WriteXdr for u64 { + #[cfg(feature = "std")] + fn write_xdr(&self, w: &mut Limited) -> Result<()> { + let b: [u8; 8] = self.to_be_bytes(); + w.with_limited_depth(|w| { + w.consume_len(b.len())?; + Ok(w.write_all(&b)?) 
+ }) + } +} + +impl ReadXdr for f32 { + #[cfg(feature = "std")] + fn read_xdr(_r: &mut Limited) -> Result { + todo!() + } +} + +impl WriteXdr for f32 { + #[cfg(feature = "std")] + fn write_xdr(&self, _w: &mut Limited) -> Result<()> { + todo!() + } +} + +impl ReadXdr for f64 { + #[cfg(feature = "std")] + fn read_xdr(_r: &mut Limited) -> Result { + todo!() + } +} + +impl WriteXdr for f64 { + #[cfg(feature = "std")] + fn write_xdr(&self, _w: &mut Limited) -> Result<()> { + todo!() + } +} + +impl ReadXdr for bool { + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result { + r.with_limited_depth(|r| { + let i = u32::read_xdr(r)?; + let b = i == 1; + Ok(b) + }) + } +} + +impl WriteXdr for bool { + #[cfg(feature = "std")] + fn write_xdr(&self, w: &mut Limited) -> Result<()> { + w.with_limited_depth(|w| { + let i = u32::from(*self); // true = 1, false = 0 + i.write_xdr(w) + }) + } +} + +impl ReadXdr for Option { + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result { + r.with_limited_depth(|r| { + let i = u32::read_xdr(r)?; + match i { + 0 => Ok(None), + 1 => { + let t = T::read_xdr(r)?; + Ok(Some(t)) + } + _ => Err(Error::Invalid), + } + }) + } +} + +impl WriteXdr for Option { + #[cfg(feature = "std")] + fn write_xdr(&self, w: &mut Limited) -> Result<()> { + w.with_limited_depth(|w| { + if let Some(t) = self { + 1u32.write_xdr(w)?; + t.write_xdr(w)?; + } else { + 0u32.write_xdr(w)?; + } + Ok(()) + }) + } +} + +impl ReadXdr for Box { + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result { + r.with_limited_depth(|r| Ok(Box::new(T::read_xdr(r)?))) + } +} + +impl WriteXdr for Box { + #[cfg(feature = "std")] + fn write_xdr(&self, w: &mut Limited) -> Result<()> { + w.with_limited_depth(|w| T::write_xdr(self, w)) + } +} + +impl ReadXdr for () { + #[cfg(feature = "std")] + fn read_xdr(_r: &mut Limited) -> Result { + Ok(()) + } +} + +impl WriteXdr for () { + #[cfg(feature = "std")] + fn write_xdr(&self, _w: &mut Limited) -> Result<()> { + Ok(()) + } +} + +impl ReadXdr for [u8; N] { + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result { + r.with_limited_depth(|r| { + r.consume_len(N)?; + let padding = pad_len(N); + r.consume_len(padding)?; + let mut arr = [0u8; N]; + r.read_exact(&mut arr)?; + let pad = &mut [0u8; 3][..padding]; + r.read_exact(pad)?; + if pad.iter().any(|b| *b != 0) { + return Err(Error::NonZeroPadding); + } + Ok(arr) + }) + } +} + +impl WriteXdr for [u8; N] { + #[cfg(feature = "std")] + fn write_xdr(&self, w: &mut Limited) -> Result<()> { + w.with_limited_depth(|w| { + w.consume_len(N)?; + let padding = pad_len(N); + w.consume_len(padding)?; + w.write_all(self)?; + w.write_all(&[0u8; 3][..padding])?; + Ok(()) + }) + } +} + +impl ReadXdr for [T; N] { + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result { + r.with_limited_depth(|r| { + let mut vec = Vec::with_capacity(N); + for _ in 0..N { + let t = T::read_xdr(r)?; + vec.push(t); + } + let arr: [T; N] = vec.try_into().unwrap_or_else(|_: Vec| unreachable!()); + Ok(arr) + }) + } +} + +impl WriteXdr for [T; N] { + #[cfg(feature = "std")] + fn write_xdr(&self, w: &mut Limited) -> Result<()> { + w.with_limited_depth(|w| { + for t in self { + t.write_xdr(w)?; + } + Ok(()) + }) + } +} + +// VecM ------------------------------------------------------------------------ + +#[cfg(feature = "alloc")] +#[derive(Debug, Clone, Hash, PartialEq, Eq, PartialOrd, Ord)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +#[cfg_attr(feature = "arbitrary", 
derive(Arbitrary))] +pub struct VecM(Vec); + +#[cfg(not(feature = "alloc"))] +#[derive(Debug, Clone, Hash, PartialEq, Eq, PartialOrd, Ord)] +#[cfg_attr(feature = "arbitrary", derive(Arbitrary))] +pub struct VecM(Vec) +where + T: 'static; + +impl Deref for VecM { + type Target = Vec; + + fn deref(&self) -> &Self::Target { + &self.0 + } +} + +impl Default for VecM { + fn default() -> Self { + Self(Vec::default()) + } +} + +#[cfg(feature = "schemars")] +impl schemars::JsonSchema for VecM { + fn schema_name() -> String { + format!("VecM<{}, {}>", T::schema_name(), MAX) + } + + fn is_referenceable() -> bool { + false + } + + fn json_schema(gen: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema { + let schema = Vec::::json_schema(gen); + if let schemars::schema::Schema::Object(mut schema) = schema { + if let Some(array) = schema.array.clone() { + schema.array = Some(Box::new(schemars::schema::ArrayValidation { + max_items: Some(MAX), + ..*array + })); + } + schema.into() + } else { + schema + } + } +} + +impl VecM { + pub const MAX_LEN: usize = { MAX as usize }; + + #[must_use] + #[allow(clippy::unused_self)] + pub fn max_len(&self) -> usize { + Self::MAX_LEN + } + + #[must_use] + pub fn as_vec(&self) -> &Vec { + self.as_ref() + } +} + +impl VecM { + #[must_use] + #[cfg(feature = "alloc")] + pub fn to_vec(&self) -> Vec { + self.into() + } + + #[must_use] + pub fn into_vec(self) -> Vec { + self.into() + } +} + +impl VecM { + #[cfg(feature = "alloc")] + pub fn to_string(&self) -> Result { + self.try_into() + } + + #[cfg(feature = "alloc")] + pub fn into_string(self) -> Result { + self.try_into() + } + + #[cfg(feature = "alloc")] + #[must_use] + pub fn to_string_lossy(&self) -> String { + String::from_utf8_lossy(&self.0).into_owned() + } + + #[cfg(feature = "alloc")] + #[must_use] + pub fn into_string_lossy(self) -> String { + String::from_utf8_lossy(&self.0).into_owned() + } +} + +impl VecM { + #[must_use] + pub fn to_option(&self) -> Option { + if self.len() > 0 { + Some(self.0[0].clone()) + } else { + None + } + } +} + +#[cfg(not(feature = "alloc"))] +impl From> for Option { + #[must_use] + fn from(v: VecM) -> Self { + v.to_option() + } +} + +#[cfg(feature = "alloc")] +impl VecM { + #[must_use] + pub fn into_option(mut self) -> Option { + self.0.drain(..).next() + } +} + +#[cfg(feature = "alloc")] +impl From> for Option { + #[must_use] + fn from(v: VecM) -> Self { + v.into_option() + } +} + +impl TryFrom> for VecM { + type Error = Error; + + fn try_from(v: Vec) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(VecM(v)) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +impl From> for Vec { + #[must_use] + fn from(v: VecM) -> Self { + v.0 + } +} + +#[cfg(feature = "alloc")] +impl From<&VecM> for Vec { + #[must_use] + fn from(v: &VecM) -> Self { + v.0.clone() + } +} + +impl AsRef> for VecM { + #[must_use] + fn as_ref(&self) -> &Vec { + &self.0 + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&Vec> for VecM { + type Error = Error; + + fn try_from(v: &Vec) -> Result { + v.as_slice().try_into() + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&[T]> for VecM { + type Error = Error; + + fn try_from(v: &[T]) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(VecM(v.to_vec())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +impl AsRef<[T]> for VecM { + #[cfg(feature = "alloc")] + #[must_use] + fn as_ref(&self) -> &[T] { + self.0.as_ref() + } + 
#[cfg(not(feature = "alloc"))] + #[must_use] + fn as_ref(&self) -> &[T] { + self.0 + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<[T; N]> for VecM { + type Error = Error; + + fn try_from(v: [T; N]) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(VecM(v.to_vec())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +#[cfg(feature = "alloc")] +impl TryFrom> for [T; N] { + type Error = VecM; + + fn try_from(v: VecM) -> core::result::Result { + let s: [T; N] = v.0.try_into().map_err(|v: Vec| VecM::(v))?; + Ok(s) + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&[T; N]> for VecM { + type Error = Error; + + fn try_from(v: &[T; N]) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(VecM(v.to_vec())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +#[cfg(not(feature = "alloc"))] +impl TryFrom<&'static [T; N]> for VecM { + type Error = Error; + + fn try_from(v: &'static [T; N]) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(VecM(v)) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&String> for VecM { + type Error = Error; + + fn try_from(v: &String) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(VecM(v.as_bytes().to_vec())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +#[cfg(feature = "alloc")] +impl TryFrom for VecM { + type Error = Error; + + fn try_from(v: String) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(VecM(v.into())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +#[cfg(feature = "alloc")] +impl TryFrom> for String { + type Error = Error; + + fn try_from(v: VecM) -> Result { + Ok(String::from_utf8(v.0)?) + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&VecM> for String { + type Error = Error; + + fn try_from(v: &VecM) -> Result { + Ok(core::str::from_utf8(v.as_ref())?.to_owned()) + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&str> for VecM { + type Error = Error; + + fn try_from(v: &str) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(VecM(v.into())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +#[cfg(not(feature = "alloc"))] +impl TryFrom<&'static str> for VecM { + type Error = Error; + + fn try_from(v: &'static str) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(VecM(v.as_bytes())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +impl<'a, const MAX: u32> TryFrom<&'a VecM> for &'a str { + type Error = Error; + + fn try_from(v: &'a VecM) -> Result { + Ok(core::str::from_utf8(v.as_ref())?) 
+ } +} + +impl ReadXdr for VecM { + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result { + r.with_limited_depth(|r| { + let len: u32 = u32::read_xdr(r)?; + if len > MAX { + return Err(Error::LengthExceedsMax); + } + + r.consume_len(len as usize)?; + let padding = pad_len(len as usize); + r.consume_len(padding)?; + + let mut vec = vec![0u8; len as usize]; + r.read_exact(&mut vec)?; + + let pad = &mut [0u8; 3][..padding]; + r.read_exact(pad)?; + if pad.iter().any(|b| *b != 0) { + return Err(Error::NonZeroPadding); + } + + Ok(VecM(vec)) + }) + } +} + +impl WriteXdr for VecM { + #[cfg(feature = "std")] + fn write_xdr(&self, w: &mut Limited) -> Result<()> { + w.with_limited_depth(|w| { + let len: u32 = self.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + len.write_xdr(w)?; + + w.consume_len(self.len())?; + let padding = pad_len(self.len()); + w.consume_len(padding)?; + + w.write_all(&self.0)?; + + w.write_all(&[0u8; 3][..padding])?; + + Ok(()) + }) + } +} + +impl ReadXdr for VecM { + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result { + r.with_limited_depth(|r| { + let len = u32::read_xdr(r)?; + if len > MAX { + return Err(Error::LengthExceedsMax); + } + + let mut vec = Vec::new(); + for _ in 0..len { + let t = T::read_xdr(r)?; + vec.push(t); + } + + Ok(VecM(vec)) + }) + } +} + +impl WriteXdr for VecM { + #[cfg(feature = "std")] + fn write_xdr(&self, w: &mut Limited) -> Result<()> { + w.with_limited_depth(|w| { + let len: u32 = self.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + len.write_xdr(w)?; + + for t in &self.0 { + t.write_xdr(w)?; + } + + Ok(()) + }) + } +} + +// BytesM ------------------------------------------------------------------------ + +#[cfg(feature = "alloc")] +#[derive(Clone, Hash, PartialEq, Eq, PartialOrd, Ord)] +#[cfg_attr( + feature = "serde", + derive(serde_with::SerializeDisplay, serde_with::DeserializeFromStr) +)] +#[cfg_attr(feature = "arbitrary", derive(Arbitrary))] +pub struct BytesM(Vec); + +#[cfg(not(feature = "alloc"))] +#[derive(Clone, Hash, PartialEq, Eq, PartialOrd, Ord)] +#[cfg_attr(feature = "arbitrary", derive(Arbitrary))] +pub struct BytesM(Vec); + +impl core::fmt::Display for BytesM { + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + #[cfg(feature = "alloc")] + let v = &self.0; + #[cfg(not(feature = "alloc"))] + let v = self.0; + for b in v { + write!(f, "{b:02x}")?; + } + Ok(()) + } +} + +impl core::fmt::Debug for BytesM { + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + #[cfg(feature = "alloc")] + let v = &self.0; + #[cfg(not(feature = "alloc"))] + let v = self.0; + write!(f, "BytesM(")?; + for b in v { + write!(f, "{b:02x}")?; + } + write!(f, ")")?; + Ok(()) + } +} + +#[cfg(feature = "alloc")] +impl core::str::FromStr for BytesM { + type Err = Error; + fn from_str(s: &str) -> core::result::Result { + hex::decode(s).map_err(|_| Error::InvalidHex)?.try_into() + } +} + +impl Deref for BytesM { + type Target = Vec; + + fn deref(&self) -> &Self::Target { + &self.0 + } +} + +#[cfg(feature = "schemars")] +impl schemars::JsonSchema for BytesM { + fn schema_name() -> String { + format!("BytesM<{MAX}>") + } + + fn is_referenceable() -> bool { + false + } + + fn json_schema(gen: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema { + let schema = String::json_schema(gen); + if let schemars::schema::Schema::Object(mut schema) = schema { + schema.extensions.insert( + "contentEncoding".to_owned(), + serde_json::Value::String("hex".to_string()), + ); + 
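+            // BytesM serializes through its hex Display/FromStr impls, so the
+            // schema records the media type of the decoded content alongside
+            // the hex content encoding set above.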
schema.extensions.insert( + "contentMediaType".to_owned(), + serde_json::Value::String("application/binary".to_string()), + ); + let string = *schema.string.unwrap_or_default().clone(); + schema.string = Some(Box::new(schemars::schema::StringValidation { + max_length: MAX.checked_mul(2).map(Some).unwrap_or_default(), + min_length: None, + ..string + })); + schema.into() + } else { + schema + } + } +} + +impl Default for BytesM { + fn default() -> Self { + Self(Vec::default()) + } +} + +impl BytesM { + pub const MAX_LEN: usize = { MAX as usize }; + + #[must_use] + #[allow(clippy::unused_self)] + pub fn max_len(&self) -> usize { + Self::MAX_LEN + } + + #[must_use] + pub fn as_vec(&self) -> &Vec { + self.as_ref() + } +} + +impl BytesM { + #[must_use] + #[cfg(feature = "alloc")] + pub fn to_vec(&self) -> Vec { + self.into() + } + + #[must_use] + pub fn into_vec(self) -> Vec { + self.into() + } +} + +impl BytesM { + #[cfg(feature = "alloc")] + pub fn to_string(&self) -> Result { + self.try_into() + } + + #[cfg(feature = "alloc")] + pub fn into_string(self) -> Result { + self.try_into() + } + + #[cfg(feature = "alloc")] + #[must_use] + pub fn to_string_lossy(&self) -> String { + String::from_utf8_lossy(&self.0).into_owned() + } + + #[cfg(feature = "alloc")] + #[must_use] + pub fn into_string_lossy(self) -> String { + String::from_utf8_lossy(&self.0).into_owned() + } +} + +impl TryFrom> for BytesM { + type Error = Error; + + fn try_from(v: Vec) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(BytesM(v)) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +impl From> for Vec { + #[must_use] + fn from(v: BytesM) -> Self { + v.0 + } +} + +#[cfg(feature = "alloc")] +impl From<&BytesM> for Vec { + #[must_use] + fn from(v: &BytesM) -> Self { + v.0.clone() + } +} + +impl AsRef> for BytesM { + #[must_use] + fn as_ref(&self) -> &Vec { + &self.0 + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&Vec> for BytesM { + type Error = Error; + + fn try_from(v: &Vec) -> Result { + v.as_slice().try_into() + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&[u8]> for BytesM { + type Error = Error; + + fn try_from(v: &[u8]) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(BytesM(v.to_vec())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +impl AsRef<[u8]> for BytesM { + #[cfg(feature = "alloc")] + #[must_use] + fn as_ref(&self) -> &[u8] { + self.0.as_ref() + } + #[cfg(not(feature = "alloc"))] + #[must_use] + fn as_ref(&self) -> &[u8] { + self.0 + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<[u8; N]> for BytesM { + type Error = Error; + + fn try_from(v: [u8; N]) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(BytesM(v.to_vec())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +#[cfg(feature = "alloc")] +impl TryFrom> for [u8; N] { + type Error = BytesM; + + fn try_from(v: BytesM) -> core::result::Result { + let s: [u8; N] = v.0.try_into().map_err(BytesM::)?; + Ok(s) + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&[u8; N]> for BytesM { + type Error = Error; + + fn try_from(v: &[u8; N]) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(BytesM(v.to_vec())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +#[cfg(not(feature = "alloc"))] +impl TryFrom<&'static [u8; N]> for BytesM { + type Error = Error; + + fn try_from(v: &'static [u8; N]) -> Result { + 
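+        // XDR length prefixes are 32-bit, so the length must fit in a u32 and
+        // be no greater than MAX.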
let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(BytesM(v)) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&String> for BytesM { + type Error = Error; + + fn try_from(v: &String) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(BytesM(v.as_bytes().to_vec())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +#[cfg(feature = "alloc")] +impl TryFrom for BytesM { + type Error = Error; + + fn try_from(v: String) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(BytesM(v.into())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +#[cfg(feature = "alloc")] +impl TryFrom> for String { + type Error = Error; + + fn try_from(v: BytesM) -> Result { + Ok(String::from_utf8(v.0)?) + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&BytesM> for String { + type Error = Error; + + fn try_from(v: &BytesM) -> Result { + Ok(core::str::from_utf8(v.as_ref())?.to_owned()) + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&str> for BytesM { + type Error = Error; + + fn try_from(v: &str) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(BytesM(v.into())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +#[cfg(not(feature = "alloc"))] +impl TryFrom<&'static str> for BytesM { + type Error = Error; + + fn try_from(v: &'static str) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(BytesM(v.as_bytes())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +impl<'a, const MAX: u32> TryFrom<&'a BytesM> for &'a str { + type Error = Error; + + fn try_from(v: &'a BytesM) -> Result { + Ok(core::str::from_utf8(v.as_ref())?) + } +} + +impl ReadXdr for BytesM { + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result { + r.with_limited_depth(|r| { + let len: u32 = u32::read_xdr(r)?; + if len > MAX { + return Err(Error::LengthExceedsMax); + } + + r.consume_len(len as usize)?; + let padding = pad_len(len as usize); + r.consume_len(padding)?; + + let mut vec = vec![0u8; len as usize]; + r.read_exact(&mut vec)?; + + let pad = &mut [0u8; 3][..padding]; + r.read_exact(pad)?; + if pad.iter().any(|b| *b != 0) { + return Err(Error::NonZeroPadding); + } + + Ok(BytesM(vec)) + }) + } +} + +impl WriteXdr for BytesM { + #[cfg(feature = "std")] + fn write_xdr(&self, w: &mut Limited) -> Result<()> { + w.with_limited_depth(|w| { + let len: u32 = self.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + len.write_xdr(w)?; + + w.consume_len(self.len())?; + let padding = pad_len(self.len()); + w.consume_len(padding)?; + + w.write_all(&self.0)?; + + w.write_all(&[0u8; 3][..pad_len(len as usize)])?; + + Ok(()) + }) + } +} + +// StringM ------------------------------------------------------------------------ + +/// A string type that contains arbitrary bytes. +/// +/// Convertible, fallibly, to/from a Rust UTF-8 String using +/// [`TryFrom`]/[`TryInto`]/[`StringM::to_utf8_string`]. +/// +/// Convertible, lossyly, to a Rust UTF-8 String using +/// [`StringM::to_utf8_string_lossy`]. +/// +/// Convertible to/from escaped printable-ASCII using +/// [`Display`]/[`ToString`]/[`FromStr`]. 
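+///
+/// The escaping used by the `Display`/`FromStr` round trip is provided by the
+/// `escape_bytes` crate.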
+ +#[cfg(feature = "alloc")] +#[derive(Clone, Hash, PartialEq, Eq, PartialOrd, Ord)] +#[cfg_attr( + feature = "serde", + derive(serde_with::SerializeDisplay, serde_with::DeserializeFromStr) +)] +#[cfg_attr(feature = "arbitrary", derive(Arbitrary))] +pub struct StringM(Vec); + +#[cfg(not(feature = "alloc"))] +#[derive(Clone, Hash, PartialEq, Eq, PartialOrd, Ord)] +#[cfg_attr(feature = "arbitrary", derive(Arbitrary))] +pub struct StringM(Vec); + +impl core::fmt::Display for StringM { + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + #[cfg(feature = "alloc")] + let v = &self.0; + #[cfg(not(feature = "alloc"))] + let v = self.0; + for b in escape_bytes::Escape::new(v) { + write!(f, "{}", b as char)?; + } + Ok(()) + } +} + +impl core::fmt::Debug for StringM { + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + #[cfg(feature = "alloc")] + let v = &self.0; + #[cfg(not(feature = "alloc"))] + let v = self.0; + write!(f, "StringM(")?; + for b in escape_bytes::Escape::new(v) { + write!(f, "{}", b as char)?; + } + write!(f, ")")?; + Ok(()) + } +} + +#[cfg(feature = "alloc")] +impl core::str::FromStr for StringM { + type Err = Error; + fn from_str(s: &str) -> core::result::Result { + let b = escape_bytes::unescape(s.as_bytes()).map_err(|_| Error::Invalid)?; + Ok(Self(b)) + } +} + +impl Deref for StringM { + type Target = Vec; + + fn deref(&self) -> &Self::Target { + &self.0 + } +} + +impl Default for StringM { + fn default() -> Self { + Self(Vec::default()) + } +} + +#[cfg(feature = "schemars")] +impl schemars::JsonSchema for StringM { + fn schema_name() -> String { + format!("StringM<{MAX}>") + } + + fn json_schema(gen: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema { + let schema = String::json_schema(gen); + if let schemars::schema::Schema::Object(mut schema) = schema { + let string = *schema.string.unwrap_or_default().clone(); + schema.string = Some(Box::new(schemars::schema::StringValidation { + max_length: Some(MAX), + ..string + })); + schema.into() + } else { + schema + } + } +} + +impl StringM { + pub const MAX_LEN: usize = { MAX as usize }; + + #[must_use] + #[allow(clippy::unused_self)] + pub fn max_len(&self) -> usize { + Self::MAX_LEN + } + + #[must_use] + pub fn as_vec(&self) -> &Vec { + self.as_ref() + } +} + +impl StringM { + #[must_use] + #[cfg(feature = "alloc")] + pub fn to_vec(&self) -> Vec { + self.into() + } + + #[must_use] + pub fn into_vec(self) -> Vec { + self.into() + } +} + +impl StringM { + #[cfg(feature = "alloc")] + pub fn to_utf8_string(&self) -> Result { + self.try_into() + } + + #[cfg(feature = "alloc")] + pub fn into_utf8_string(self) -> Result { + self.try_into() + } + + #[cfg(feature = "alloc")] + #[must_use] + pub fn to_utf8_string_lossy(&self) -> String { + String::from_utf8_lossy(&self.0).into_owned() + } + + #[cfg(feature = "alloc")] + #[must_use] + pub fn into_utf8_string_lossy(self) -> String { + String::from_utf8_lossy(&self.0).into_owned() + } +} + +impl TryFrom> for StringM { + type Error = Error; + + fn try_from(v: Vec) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(StringM(v)) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +impl From> for Vec { + #[must_use] + fn from(v: StringM) -> Self { + v.0 + } +} + +#[cfg(feature = "alloc")] +impl From<&StringM> for Vec { + #[must_use] + fn from(v: &StringM) -> Self { + v.0.clone() + } +} + +impl AsRef> for StringM { + #[must_use] + fn as_ref(&self) -> &Vec { + &self.0 + } +} + 
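+// Note that `MAX` bounds the number of bytes, not the number of UTF-8
+// characters: the conversions below compare `v.len()` in bytes against `MAX`.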
+#[cfg(feature = "alloc")] +impl TryFrom<&Vec> for StringM { + type Error = Error; + + fn try_from(v: &Vec) -> Result { + v.as_slice().try_into() + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&[u8]> for StringM { + type Error = Error; + + fn try_from(v: &[u8]) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(StringM(v.to_vec())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +impl AsRef<[u8]> for StringM { + #[cfg(feature = "alloc")] + #[must_use] + fn as_ref(&self) -> &[u8] { + self.0.as_ref() + } + #[cfg(not(feature = "alloc"))] + #[must_use] + fn as_ref(&self) -> &[u8] { + self.0 + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<[u8; N]> for StringM { + type Error = Error; + + fn try_from(v: [u8; N]) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(StringM(v.to_vec())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +#[cfg(feature = "alloc")] +impl TryFrom> for [u8; N] { + type Error = StringM; + + fn try_from(v: StringM) -> core::result::Result { + let s: [u8; N] = v.0.try_into().map_err(StringM::)?; + Ok(s) + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&[u8; N]> for StringM { + type Error = Error; + + fn try_from(v: &[u8; N]) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(StringM(v.to_vec())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +#[cfg(not(feature = "alloc"))] +impl TryFrom<&'static [u8; N]> for StringM { + type Error = Error; + + fn try_from(v: &'static [u8; N]) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(StringM(v)) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&String> for StringM { + type Error = Error; + + fn try_from(v: &String) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(StringM(v.as_bytes().to_vec())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +#[cfg(feature = "alloc")] +impl TryFrom for StringM { + type Error = Error; + + fn try_from(v: String) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(StringM(v.into())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +#[cfg(feature = "alloc")] +impl TryFrom> for String { + type Error = Error; + + fn try_from(v: StringM) -> Result { + Ok(String::from_utf8(v.0)?) + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&StringM> for String { + type Error = Error; + + fn try_from(v: &StringM) -> Result { + Ok(core::str::from_utf8(v.as_ref())?.to_owned()) + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<&str> for StringM { + type Error = Error; + + fn try_from(v: &str) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(StringM(v.into())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +#[cfg(not(feature = "alloc"))] +impl TryFrom<&'static str> for StringM { + type Error = Error; + + fn try_from(v: &'static str) -> Result { + let len: u32 = v.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + if len <= MAX { + Ok(StringM(v.as_bytes())) + } else { + Err(Error::LengthExceedsMax) + } + } +} + +impl<'a, const MAX: u32> TryFrom<&'a StringM> for &'a str { + type Error = Error; + + fn try_from(v: &'a StringM) -> Result { + Ok(core::str::from_utf8(v.as_ref())?) 
+ } +} + +impl ReadXdr for StringM { + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result { + r.with_limited_depth(|r| { + let len: u32 = u32::read_xdr(r)?; + if len > MAX { + return Err(Error::LengthExceedsMax); + } + + r.consume_len(len as usize)?; + let padding = pad_len(len as usize); + r.consume_len(padding)?; + + let mut vec = vec![0u8; len as usize]; + r.read_exact(&mut vec)?; + + let pad = &mut [0u8; 3][..padding]; + r.read_exact(pad)?; + if pad.iter().any(|b| *b != 0) { + return Err(Error::NonZeroPadding); + } + + Ok(StringM(vec)) + }) + } +} + +impl WriteXdr for StringM { + #[cfg(feature = "std")] + fn write_xdr(&self, w: &mut Limited) -> Result<()> { + w.with_limited_depth(|w| { + let len: u32 = self.len().try_into().map_err(|_| Error::LengthExceedsMax)?; + len.write_xdr(w)?; + + w.consume_len(self.len())?; + let padding = pad_len(self.len()); + w.consume_len(padding)?; + + w.write_all(&self.0)?; + + w.write_all(&[0u8; 3][..padding])?; + + Ok(()) + }) + } +} + +// Frame ------------------------------------------------------------------------ + +#[derive(Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord)] +#[cfg_attr( + all(feature = "serde", feature = "alloc"), + derive(serde::Serialize, serde::Deserialize), + serde(rename_all = "snake_case") +)] +pub struct Frame(pub T) +where + T: ReadXdr; + +#[cfg(feature = "schemars")] +impl schemars::JsonSchema for Frame { + fn schema_name() -> String { + format!("Frame<{}>", T::schema_name()) + } + + fn json_schema(gen: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema { + T::json_schema(gen) + } +} + +impl ReadXdr for Frame +where + T: ReadXdr, +{ + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result { + // Read the frame header value that contains 1 flag-bit and a 33-bit length. + // - The 1 flag bit is 0 when there are more frames for the same record. + // - The 31-bit length is the length of the bytes within the frame that + // follow the frame header. + let header = u32::read_xdr(r)?; + // TODO: Use the length and cap the length we'll read from `r`. + let last_record = header >> 31 == 1; + if last_record { + // Read the record in the frame. + Ok(Self(T::read_xdr(r)?)) + } else { + // TODO: Support reading those additional frames for the same + // record. 
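+            // In the RFC 5531 record marking standard a record may span
+            // multiple fragments and only the final fragment has the high bit
+            // of its header set, so non-final fragments are rejected for now.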
+ Err(Error::Unsupported) + } + } +} + +#[cfg(all(test, feature = "std"))] +mod tests { + use std::io::Cursor; + + use super::*; + + #[test] + pub fn vec_u8_read_without_padding() { + let buf = Cursor::new(vec![0, 0, 0, 4, 2, 2, 2, 2]); + let v = VecM::::read_xdr(&mut Limited::new(buf, Limits::none())).unwrap(); + assert_eq!(v.to_vec(), vec![2, 2, 2, 2]); + } + + #[test] + pub fn vec_u8_read_with_padding() { + let buf = Cursor::new(vec![0, 0, 0, 1, 2, 0, 0, 0]); + let v = VecM::::read_xdr(&mut Limited::new(buf, Limits::none())).unwrap(); + assert_eq!(v.to_vec(), vec![2]); + } + + #[test] + pub fn vec_u8_read_with_insufficient_padding() { + let buf = Cursor::new(vec![0, 0, 0, 1, 2, 0, 0]); + let res = VecM::::read_xdr(&mut Limited::new(buf, Limits::none())); + match res { + Err(Error::Io(_)) => (), + _ => panic!("expected IO error got {res:?}"), + } + } + + #[test] + pub fn vec_u8_read_with_non_zero_padding() { + let buf = Cursor::new(vec![0, 0, 0, 1, 2, 3, 0, 0]); + let res = VecM::::read_xdr(&mut Limited::new(buf, Limits::none())); + match res { + Err(Error::NonZeroPadding) => (), + _ => panic!("expected NonZeroPadding got {res:?}"), + } + } + + #[test] + pub fn vec_u8_write_without_padding() { + let mut buf = vec![]; + let v: VecM = vec![2, 2, 2, 2].try_into().unwrap(); + + v.write_xdr(&mut Limited::new(Cursor::new(&mut buf), Limits::none())) + .unwrap(); + assert_eq!(buf, vec![0, 0, 0, 4, 2, 2, 2, 2]); + } + + #[test] + pub fn vec_u8_write_with_padding() { + let mut buf = vec![]; + let v: VecM = vec![2].try_into().unwrap(); + v.write_xdr(&mut Limited::new(Cursor::new(&mut buf), Limits::none())) + .unwrap(); + assert_eq!(buf, vec![0, 0, 0, 1, 2, 0, 0, 0]); + } + + #[test] + pub fn arr_u8_read_without_padding() { + let buf = Cursor::new(vec![2, 2, 2, 2]); + let v = <[u8; 4]>::read_xdr(&mut Limited::new(buf, Limits::none())).unwrap(); + assert_eq!(v, [2, 2, 2, 2]); + } + + #[test] + pub fn arr_u8_read_with_padding() { + let buf = Cursor::new(vec![2, 0, 0, 0]); + let v = <[u8; 1]>::read_xdr(&mut Limited::new(buf, Limits::none())).unwrap(); + assert_eq!(v, [2]); + } + + #[test] + pub fn arr_u8_read_with_insufficient_padding() { + let buf = Cursor::new(vec![2, 0, 0]); + let res = <[u8; 1]>::read_xdr(&mut Limited::new(buf, Limits::none())); + match res { + Err(Error::Io(_)) => (), + _ => panic!("expected IO error got {res:?}"), + } + } + + #[test] + pub fn arr_u8_read_with_non_zero_padding() { + let buf = Cursor::new(vec![2, 3, 0, 0]); + let res = <[u8; 1]>::read_xdr(&mut Limited::new(buf, Limits::none())); + match res { + Err(Error::NonZeroPadding) => (), + _ => panic!("expected NonZeroPadding got {res:?}"), + } + } + + #[test] + pub fn arr_u8_write_without_padding() { + let mut buf = vec![]; + [2u8, 2, 2, 2] + .write_xdr(&mut Limited::new(Cursor::new(&mut buf), Limits::none())) + .unwrap(); + assert_eq!(buf, vec![2, 2, 2, 2]); + } + + #[test] + pub fn arr_u8_write_with_padding() { + let mut buf = vec![]; + [2u8] + .write_xdr(&mut Limited::new(Cursor::new(&mut buf), Limits::none())) + .unwrap(); + assert_eq!(buf, vec![2, 0, 0, 0]); + } +} + +#[cfg(all(test, feature = "std"))] +mod test { + use super::*; + + #[test] + fn into_option_none() { + let v: VecM = vec![].try_into().unwrap(); + assert_eq!(v.into_option(), None); + } + + #[test] + fn into_option_some() { + let v: VecM<_, 1> = vec![1].try_into().unwrap(); + assert_eq!(v.into_option(), Some(1)); + } + + #[test] + fn to_option_none() { + let v: VecM = vec![].try_into().unwrap(); + assert_eq!(v.to_option(), None); + } + + #[test] + fn 
to_option_some() { + let v: VecM<_, 1> = vec![1].try_into().unwrap(); + assert_eq!(v.to_option(), Some(1)); + } + + #[test] + fn depth_limited_read_write_under_the_limit_success() { + let a: Option>> = Some(Some(Some(5))); + let mut buf = Limited::new(Vec::new(), Limits::depth(4)); + a.write_xdr(&mut buf).unwrap(); + + let mut dlr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::depth(4)); + let a_back: Option>> = ReadXdr::read_xdr(&mut dlr).unwrap(); + assert_eq!(a, a_back); + } + + #[test] + fn write_over_depth_limit_fail() { + let a: Option>> = Some(Some(Some(5))); + let mut buf = Limited::new(Vec::new(), Limits::depth(3)); + let res = a.write_xdr(&mut buf); + match res { + Err(Error::DepthLimitExceeded) => (), + _ => panic!("expected DepthLimitExceeded got {res:?}"), + } + } + + #[test] + fn read_over_depth_limit_fail() { + let read_limits = Limits::depth(3); + let write_limits = Limits::depth(5); + let a: Option>> = Some(Some(Some(5))); + let mut buf = Limited::new(Vec::new(), write_limits); + a.write_xdr(&mut buf).unwrap(); + + let mut dlr = Limited::new(Cursor::new(buf.inner.as_slice()), read_limits); + let res: Result>>> = ReadXdr::read_xdr(&mut dlr); + match res { + Err(Error::DepthLimitExceeded) => (), + _ => panic!("expected DepthLimitExceeded got {res:?}"), + } + } + + #[test] + fn length_limited_read_write_i32() { + // Exact limit, success + let v = 123i32; + let mut buf = Limited::new(Vec::new(), Limits::len(4)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(4)); + let v_back: i32 = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 0); + assert_eq!(v, v_back); + + // Over limit, success + let v = 123i32; + let mut buf = Limited::new(Vec::new(), Limits::len(5)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 1); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(5)); + let v_back: i32 = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 1); + assert_eq!(v, v_back); + + // Write under limit, failure + let v = 123i32; + let mut buf = Limited::new(Vec::new(), Limits::len(3)); + assert_eq!(v.write_xdr(&mut buf), Err(Error::LengthLimitExceeded)); + + // Read under limit, failure + let v = 123i32; + let mut buf = Limited::new(Vec::new(), Limits::len(4)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(3)); + assert_eq!( + ::read_xdr(&mut lr), + Err(Error::LengthLimitExceeded) + ); + } + + #[test] + fn length_limited_read_write_u32() { + // Exact limit, success + let v = 123u32; + let mut buf = Limited::new(Vec::new(), Limits::len(4)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(4)); + let v_back: u32 = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 0); + assert_eq!(v, v_back); + + // Over limit, success + let v = 123u32; + let mut buf = Limited::new(Vec::new(), Limits::len(5)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 1); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(5)); + let v_back: u32 = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 1); + assert_eq!(v, v_back); + + // Write under limit, failure + let v = 123u32; + let mut buf = Limited::new(Vec::new(), Limits::len(3)); + assert_eq!(v.write_xdr(&mut buf), 
Err(Error::LengthLimitExceeded)); + + // Read under limit, failure + let v = 123u32; + let mut buf = Limited::new(Vec::new(), Limits::len(4)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(3)); + assert_eq!( + ::read_xdr(&mut lr), + Err(Error::LengthLimitExceeded) + ); + } + + #[test] + fn length_limited_read_write_i64() { + // Exact limit, success + let v = 123i64; + let mut buf = Limited::new(Vec::new(), Limits::len(8)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(8)); + let v_back: i64 = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 0); + assert_eq!(v, v_back); + + // Over limit, success + let v = 123i64; + let mut buf = Limited::new(Vec::new(), Limits::len(9)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 1); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(9)); + let v_back: i64 = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 1); + assert_eq!(v, v_back); + + // Write under limit, failure + let v = 123i64; + let mut buf = Limited::new(Vec::new(), Limits::len(7)); + assert_eq!(v.write_xdr(&mut buf), Err(Error::LengthLimitExceeded)); + + // Read under limit, failure + let v = 123i64; + let mut buf = Limited::new(Vec::new(), Limits::len(8)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(7)); + assert_eq!( + ::read_xdr(&mut lr), + Err(Error::LengthLimitExceeded) + ); + } + + #[test] + fn length_limited_read_write_u64() { + // Exact limit, success + let v = 123u64; + let mut buf = Limited::new(Vec::new(), Limits::len(8)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(8)); + let v_back: u64 = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 0); + assert_eq!(v, v_back); + + // Over limit, success + let v = 123u64; + let mut buf = Limited::new(Vec::new(), Limits::len(9)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 1); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(9)); + let v_back: u64 = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 1); + assert_eq!(v, v_back); + + // Write under limit, failure + let v = 123u64; + let mut buf = Limited::new(Vec::new(), Limits::len(7)); + assert_eq!(v.write_xdr(&mut buf), Err(Error::LengthLimitExceeded)); + + // Read under limit, failure + let v = 123u64; + let mut buf = Limited::new(Vec::new(), Limits::len(8)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(7)); + assert_eq!( + ::read_xdr(&mut lr), + Err(Error::LengthLimitExceeded) + ); + } + + #[test] + fn length_limited_read_write_bool() { + // Exact limit, success + let v = true; + let mut buf = Limited::new(Vec::new(), Limits::len(4)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(4)); + let v_back: bool = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 0); + assert_eq!(v, v_back); + + // Over limit, success + let v = true; + let mut buf = Limited::new(Vec::new(), Limits::len(5)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 1); + let mut 
lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(5)); + let v_back: bool = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 1); + assert_eq!(v, v_back); + + // Write under limit, failure + let v = true; + let mut buf = Limited::new(Vec::new(), Limits::len(3)); + assert_eq!(v.write_xdr(&mut buf), Err(Error::LengthLimitExceeded)); + + // Read under limit, failure + let v = true; + let mut buf = Limited::new(Vec::new(), Limits::len(4)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(3)); + assert_eq!( + ::read_xdr(&mut lr), + Err(Error::LengthLimitExceeded) + ); + } + + #[test] + fn length_limited_read_write_option() { + // Exact limit, success + let v = Some(true); + let mut buf = Limited::new(Vec::new(), Limits::len(8)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(8)); + let v_back: Option = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 0); + assert_eq!(v, v_back); + + // Over limit, success + let v = Some(true); + let mut buf = Limited::new(Vec::new(), Limits::len(9)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 1); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(9)); + let v_back: Option = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 1); + assert_eq!(v, v_back); + + // Write under limit, failure + let v = Some(true); + let mut buf = Limited::new(Vec::new(), Limits::len(7)); + assert_eq!(v.write_xdr(&mut buf), Err(Error::LengthLimitExceeded)); + + // Read under limit, failure + let v = Some(true); + let mut buf = Limited::new(Vec::new(), Limits::len(8)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(7)); + assert_eq!( + as ReadXdr>::read_xdr(&mut lr), + Err(Error::LengthLimitExceeded) + ); + } + + #[test] + fn length_limited_read_write_array_u8() { + // Exact limit, success + let v = [1u8, 2, 3]; + let mut buf = Limited::new(Vec::new(), Limits::len(4)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(4)); + let v_back: [u8; 3] = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 0); + assert_eq!(v, v_back); + + // Over limit, success + let v = [1u8, 2, 3]; + let mut buf = Limited::new(Vec::new(), Limits::len(5)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 1); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(5)); + let v_back: [u8; 3] = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 1); + assert_eq!(v, v_back); + + // Write under limit, failure + let v = [1u8, 2, 3]; + let mut buf = Limited::new(Vec::new(), Limits::len(3)); + assert_eq!(v.write_xdr(&mut buf), Err(Error::LengthLimitExceeded)); + + // Read under limit, failure + let v = [1u8, 2, 3]; + let mut buf = Limited::new(Vec::new(), Limits::len(4)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(3)); + assert_eq!( + <[u8; 3] as ReadXdr>::read_xdr(&mut lr), + Err(Error::LengthLimitExceeded) + ); + } + + #[test] + fn length_limited_read_write_array_type() { + // Exact limit, success + let v = [true, false, true]; + let mut buf = Limited::new(Vec::new(), Limits::len(12)); + 
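+        // Each bool is written as a 4-byte XDR word, so [bool; 3] occupies
+        // exactly 12 bytes.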
v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(12)); + let v_back: [bool; 3] = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 0); + assert_eq!(v, v_back); + + // Over limit, success + let v = [true, false, true]; + let mut buf = Limited::new(Vec::new(), Limits::len(13)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 1); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(13)); + let v_back: [bool; 3] = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 1); + assert_eq!(v, v_back); + + // Write under limit, failure + let v = [true, false, true]; + let mut buf = Limited::new(Vec::new(), Limits::len(11)); + assert_eq!(v.write_xdr(&mut buf), Err(Error::LengthLimitExceeded)); + + // Read under limit, failure + let v = [true, false, true]; + let mut buf = Limited::new(Vec::new(), Limits::len(12)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(11)); + assert_eq!( + <[bool; 3] as ReadXdr>::read_xdr(&mut lr), + Err(Error::LengthLimitExceeded) + ); + } + + #[test] + fn length_limited_read_write_vec() { + // Exact limit, success + let v = VecM::::try_from([1i32, 2, 3]).unwrap(); + let mut buf = Limited::new(Vec::new(), Limits::len(16)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(16)); + let v_back: VecM = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 0); + assert_eq!(v, v_back); + + // Over limit, success + let v = VecM::::try_from([1i32, 2, 3]).unwrap(); + let mut buf = Limited::new(Vec::new(), Limits::len(17)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 1); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(17)); + let v_back: VecM = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 1); + assert_eq!(v, v_back); + + // Write under limit, failure + let v = VecM::::try_from([1i32, 2, 3]).unwrap(); + let mut buf = Limited::new(Vec::new(), Limits::len(15)); + assert_eq!(v.write_xdr(&mut buf), Err(Error::LengthLimitExceeded)); + + // Read under limit, failure + let v = VecM::::try_from([1i32, 2, 3]).unwrap(); + let mut buf = Limited::new(Vec::new(), Limits::len(16)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(15)); + assert_eq!( + as ReadXdr>::read_xdr(&mut lr), + Err(Error::LengthLimitExceeded) + ); + } + + #[test] + fn length_limited_read_write_bytes() { + // Exact limit, success + let v = BytesM::<3>::try_from([1u8, 2, 3]).unwrap(); + let mut buf = Limited::new(Vec::new(), Limits::len(8)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(8)); + let v_back: BytesM<3> = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 0); + assert_eq!(v, v_back); + + // Over limit, success + let v = BytesM::<3>::try_from([1u8, 2, 3]).unwrap(); + let mut buf = Limited::new(Vec::new(), Limits::len(9)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 1); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(9)); + let v_back: BytesM<3> = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 1); + assert_eq!(v, v_back); + + // Write under 
limit, failure + let v = BytesM::<3>::try_from([1u8, 2, 3]).unwrap(); + let mut buf = Limited::new(Vec::new(), Limits::len(7)); + assert_eq!(v.write_xdr(&mut buf), Err(Error::LengthLimitExceeded)); + + // Read under limit, failure + let v = BytesM::<3>::try_from([1u8, 2, 3]).unwrap(); + let mut buf = Limited::new(Vec::new(), Limits::len(8)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(7)); + assert_eq!( + as ReadXdr>::read_xdr(&mut lr), + Err(Error::LengthLimitExceeded) + ); + } + + #[test] + fn length_limited_read_write_string() { + // Exact limit, success + let v = StringM::<3>::try_from("123").unwrap(); + let mut buf = Limited::new(Vec::new(), Limits::len(8)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(8)); + let v_back: StringM<3> = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 0); + assert_eq!(v, v_back); + + // Over limit, success + let v = StringM::<3>::try_from("123").unwrap(); + let mut buf = Limited::new(Vec::new(), Limits::len(9)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 1); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(9)); + let v_back: StringM<3> = ReadXdr::read_xdr(&mut lr).unwrap(); + assert_eq!(buf.limits.len, 1); + assert_eq!(v, v_back); + + // Write under limit, failure + let v = StringM::<3>::try_from("123").unwrap(); + let mut buf = Limited::new(Vec::new(), Limits::len(7)); + assert_eq!(v.write_xdr(&mut buf), Err(Error::LengthLimitExceeded)); + + // Read under limit, failure + let v = StringM::<3>::try_from("123").unwrap(); + let mut buf = Limited::new(Vec::new(), Limits::len(8)); + v.write_xdr(&mut buf).unwrap(); + assert_eq!(buf.limits.len, 0); + let mut lr = Limited::new(Cursor::new(buf.inner.as_slice()), Limits::len(7)); + assert_eq!( + as ReadXdr>::read_xdr(&mut lr), + Err(Error::LengthLimitExceeded) + ); + } +} + +#[cfg(all(test, not(feature = "alloc")))] +mod test { + use super::VecM; + + #[test] + fn to_option_none() { + let v: VecM = (&[]).try_into().unwrap(); + assert_eq!(v.to_option(), None); + } + + #[test] + fn to_option_some() { + let v: VecM<_, 1> = (&[1]).try_into().unwrap(); + assert_eq!(v.to_option(), Some(1)); + } +} + +/// SError is an XDR Typedef defines as: +/// +/// ```text +/// typedef int Error; +/// ``` +/// +pub type SError = i32; + +/// Multi is an XDR Typedef defines as: +/// +/// ```text +/// typedef int Multi; +/// ``` +/// +pub type Multi = i32; + +/// UnionKey is an XDR Enum defines as: +/// +/// ```text +/// enum UnionKey { +/// ERROR, +/// MULTI +/// }; +/// ``` +/// +// enum +#[derive(Clone, Copy, Debug, Hash, PartialEq, Eq, PartialOrd, Ord)] +#[cfg_attr(feature = "arbitrary", derive(Arbitrary))] +#[cfg_attr(all(feature = "serde", feature = "alloc"), derive(serde::Serialize, serde::Deserialize), serde(rename_all = "snake_case"))] +#[repr(i32)] +pub enum UnionKey { + Error = 0, + Multi = 1, +} + + impl UnionKey { + pub const VARIANTS: [UnionKey; 2] = [ UnionKey::Error, +UnionKey::Multi, ]; + pub const VARIANTS_STR: [&'static str; 2] = [ "Error", +"Multi", ]; + + #[must_use] + pub const fn name(&self) -> &'static str { + match self { + Self::Error => "Error", +Self::Multi => "Multi", + } + } + + #[must_use] + pub const fn variants() -> [UnionKey; 2] { + Self::VARIANTS + } + } + + impl Name for UnionKey { + #[must_use] + fn name(&self) -> &'static str { + Self::name(self) 
+ } + } + + impl Variants for UnionKey { + fn variants() -> slice::Iter<'static, UnionKey> { + Self::VARIANTS.iter() + } + } + + impl Enum for UnionKey {} + + impl fmt::Display for UnionKey { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.write_str(self.name()) + } + } + + impl TryFrom for UnionKey { + type Error = Error; + + fn try_from(i: i32) -> Result { + let e = match i { + 0 => UnionKey::Error, +1 => UnionKey::Multi, + #[allow(unreachable_patterns)] + _ => return Err(Error::Invalid), + }; + Ok(e) + } + } + + impl From for i32 { + #[must_use] + fn from(e: UnionKey) -> Self { + e as Self + } + } + + impl ReadXdr for UnionKey { + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result { + r.with_limited_depth(|r| { + let e = i32::read_xdr(r)?; + let v: Self = e.try_into()?; + Ok(v) + }) + } + } + + impl WriteXdr for UnionKey { + #[cfg(feature = "std")] + fn write_xdr(&self, w: &mut Limited) -> Result<()> { + w.with_limited_depth(|w| { + let i: i32 = (*self).into(); + i.write_xdr(w) + }) + } + } + +/// MyUnion is an XDR Union defines as: +/// +/// ```text +/// union MyUnion switch (UnionKey type) +/// { +/// case ERROR: +/// Error error; +/// case MULTI: +/// Multi things<>; +/// +/// +/// }; +/// ``` +/// +// union with discriminant UnionKey +#[derive(Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord)] +#[cfg_attr(feature = "arbitrary", derive(Arbitrary))] +#[cfg_attr(all(feature = "serde", feature = "alloc"), derive(serde::Serialize, serde::Deserialize), serde(rename_all = "snake_case"))] +#[allow(clippy::large_enum_variant)] +pub enum MyUnion { + Error(i32), + Multi(VecM::), +} + + impl MyUnion { + pub const VARIANTS: [UnionKey; 2] = [ + UnionKey::Error, +UnionKey::Multi, + ]; + pub const VARIANTS_STR: [&'static str; 2] = [ + "Error", +"Multi", + ]; + + #[must_use] + pub const fn name(&self) -> &'static str { + match self { + Self::Error(_) => "Error", +Self::Multi(_) => "Multi", + } + } + + #[must_use] + pub const fn discriminant(&self) -> UnionKey { + #[allow(clippy::match_same_arms)] + match self { + Self::Error(_) => UnionKey::Error, +Self::Multi(_) => UnionKey::Multi, + } + } + + #[must_use] + pub const fn variants() -> [UnionKey; 2] { + Self::VARIANTS + } + } + + impl Name for MyUnion { + #[must_use] + fn name(&self) -> &'static str { + Self::name(self) + } + } + + impl Discriminant for MyUnion { + #[must_use] + fn discriminant(&self) -> UnionKey { + Self::discriminant(self) + } + } + + impl Variants for MyUnion { + fn variants() -> slice::Iter<'static, UnionKey> { + Self::VARIANTS.iter() + } + } + + impl Union for MyUnion {} + + impl ReadXdr for MyUnion { + #[cfg(feature = "std")] + fn read_xdr(r: &mut Limited) -> Result { + r.with_limited_depth(|r| { + let dv: UnionKey = ::read_xdr(r)?; + #[allow(clippy::match_same_arms, clippy::match_wildcard_for_single_variants)] + let v = match dv { + UnionKey::Error => Self::Error(i32::read_xdr(r)?), +UnionKey::Multi => Self::Multi(VecM::::read_xdr(r)?), + #[allow(unreachable_patterns)] + _ => return Err(Error::Invalid), + }; + Ok(v) + }) + } + } + + impl WriteXdr for MyUnion { + #[cfg(feature = "std")] + fn write_xdr(&self, w: &mut Limited) -> Result<()> { + w.with_limited_depth(|w| { + self.discriminant().write_xdr(w)?; + #[allow(clippy::match_same_arms)] + match self { + Self::Error(v) => v.write_xdr(w)?, +Self::Multi(v) => v.write_xdr(w)?, + }; + Ok(()) + }) + } + } + +/// IntUnion is an XDR Union defines as: +/// +/// ```text +/// union IntUnion switch (int type) +/// { +/// case 0: +/// Error error; +/// 
case 1:
+/// Multi things<>;
+///
+/// };
+/// ```
+///
+// union with discriminant i32
+#[derive(Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord)]
+#[cfg_attr(feature = "arbitrary", derive(Arbitrary))]
+#[cfg_attr(all(feature = "serde", feature = "alloc"), derive(serde::Serialize, serde::Deserialize), serde(rename_all = "snake_case"))]
+#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
+#[allow(clippy::large_enum_variant)]
+pub enum IntUnion {
+    V0(i32),
+    V1(VecM::<Multi>),
+}
+
+impl IntUnion {
+    pub const VARIANTS: [i32; 2] = [ 0,
+        1,
+    ];
+    pub const VARIANTS_STR: [&'static str; 2] = [ "V0",
+        "V1",
+    ];
+
+    #[must_use]
+    pub const fn name(&self) -> &'static str {
+        match self {
+            Self::V0(_) => "V0",
+            Self::V1(_) => "V1",
+        }
+    }
+
+    #[must_use]
+    pub const fn discriminant(&self) -> i32 {
+        #[allow(clippy::match_same_arms)]
+        match self {
+            Self::V0(_) => 0,
+            Self::V1(_) => 1,
+        }
+    }
+
+    #[must_use]
+    pub const fn variants() -> [i32; 2] {
+        Self::VARIANTS
+    }
+}
+
+impl Name for IntUnion {
+    #[must_use]
+    fn name(&self) -> &'static str {
+        Self::name(self)
+    }
+}
+
+impl Discriminant<i32> for IntUnion {
+    #[must_use]
+    fn discriminant(&self) -> i32 {
+        Self::discriminant(self)
+    }
+}
+
+impl Variants<i32> for IntUnion {
+    fn variants() -> slice::Iter<'static, i32> {
+        Self::VARIANTS.iter()
+    }
+}
+
+impl Union<i32> for IntUnion {}
+
+impl ReadXdr for IntUnion {
+    #[cfg(feature = "std")]
+    fn read_xdr<R: Read>(r: &mut Limited<R>) -> Result<Self> {
+        r.with_limited_depth(|r| {
+            let dv: i32 = <i32 as ReadXdr>::read_xdr(r)?;
+            #[allow(clippy::match_same_arms, clippy::match_wildcard_for_single_variants)]
+            let v = match dv {
+                0 => Self::V0(i32::read_xdr(r)?),
+                1 => Self::V1(VecM::<Multi>::read_xdr(r)?),
+                #[allow(unreachable_patterns)]
+                _ => return Err(Error::Invalid),
+            };
+            Ok(v)
+        })
+    }
+}
+
+impl WriteXdr for IntUnion {
+    #[cfg(feature = "std")]
+    fn write_xdr<W: Write>(&self, w: &mut Limited<W>) -> Result<()> {
+        w.with_limited_depth(|w| {
+            self.discriminant().write_xdr(w)?;
+            #[allow(clippy::match_same_arms)]
+            match self {
+                Self::V0(v) => v.write_xdr(w)?,
+                Self::V1(v) => v.write_xdr(w)?,
+            };
+            Ok(())
+        })
+    }
+}
+
+/// IntUnion2 is an XDR Typedef defines as:
+///
+/// ```text
+/// typedef IntUnion IntUnion2;
+/// ```
+///
+#[derive(Clone, Hash, PartialEq, Eq, PartialOrd, Ord)]
+#[cfg_attr(feature = "arbitrary", derive(Arbitrary))]
+#[cfg_attr(all(feature = "serde", feature = "alloc"), derive(serde::Serialize, serde::Deserialize), serde(rename_all = "snake_case"))]
+#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
+#[derive(Debug)]
+pub struct IntUnion2(pub IntUnion);
+
+impl From<IntUnion2> for IntUnion {
+    #[must_use]
+    fn from(x: IntUnion2) -> Self {
+        x.0
+    }
+}
+
+impl From<IntUnion> for IntUnion2 {
+    #[must_use]
+    fn from(x: IntUnion) -> Self {
+        IntUnion2(x)
+    }
+}
+
+impl AsRef<IntUnion> for IntUnion2 {
+    #[must_use]
+    fn as_ref(&self) -> &IntUnion {
+        &self.0
+    }
+}
+
+impl ReadXdr for IntUnion2 {
+    #[cfg(feature = "std")]
+    fn read_xdr<R: Read>(r: &mut Limited<R>) -> Result<Self> {
+        r.with_limited_depth(|r| {
+            let i = IntUnion::read_xdr(r)?;
+            let v = IntUnion2(i);
+            Ok(v)
+        })
+    }
+}
+
+impl WriteXdr for IntUnion2 {
+    #[cfg(feature = "std")]
+    fn write_xdr<W: Write>(&self, w: &mut Limited<W>) -> Result<()> {
+        w.with_limited_depth(|w|{ self.0.write_xdr(w) })
+    }
+}
+
+#[derive(Clone, Copy, Debug, Hash, PartialEq, Eq, PartialOrd, Ord)]
+#[cfg_attr(
+    all(feature = "serde", feature = "alloc"),
+    derive(serde::Serialize, serde::Deserialize),
+    serde(rename_all = "snake_case")
+)]
+#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
+pub enum TypeVariant {
+    SError,
+    Multi,
+    UnionKey,
+    MyUnion,
+    IntUnion,
+    IntUnion2,
+}
+
+impl TypeVariant {
+    pub const VARIANTS: [TypeVariant; 6] = [ TypeVariant::SError,
+        TypeVariant::Multi,
+        TypeVariant::UnionKey,
+        TypeVariant::MyUnion,
+        TypeVariant::IntUnion,
+        TypeVariant::IntUnion2, ];
+    pub const VARIANTS_STR: [&'static str; 6] = [ "SError",
+        "Multi",
+        "UnionKey",
+        "MyUnion",
+        "IntUnion",
+        "IntUnion2", ];
+
+    #[must_use]
+    #[allow(clippy::too_many_lines)]
+    pub const fn name(&self) -> &'static str {
+        match self {
+            Self::SError => "SError",
+            Self::Multi => "Multi",
+            Self::UnionKey => "UnionKey",
+            Self::MyUnion => "MyUnion",
+            Self::IntUnion => "IntUnion",
+            Self::IntUnion2 => "IntUnion2",
+        }
+    }
+
+    #[must_use]
+    #[allow(clippy::too_many_lines)]
+    pub const fn variants() -> [TypeVariant; 6] {
+        Self::VARIANTS
+    }
+
+    #[cfg(feature = "schemars")]
+    #[must_use]
+    #[allow(clippy::too_many_lines)]
+    pub fn json_schema(&self, gen: schemars::gen::SchemaGenerator) -> schemars::schema::RootSchema {
+        match self {
+            Self::SError => gen.into_root_schema_for::<SError>(),
+            Self::Multi => gen.into_root_schema_for::<Multi>(),
+            Self::UnionKey => gen.into_root_schema_for::<UnionKey>(),
+            Self::MyUnion => gen.into_root_schema_for::<MyUnion>(),
+            Self::IntUnion => gen.into_root_schema_for::<IntUnion>(),
+            Self::IntUnion2 => gen.into_root_schema_for::<IntUnion2>(),
+        }
+    }
+}
+
+impl Name for TypeVariant {
+    #[must_use]
+    fn name(&self) -> &'static str {
+        Self::name(self)
+    }
+}
+
+impl Variants<TypeVariant> for TypeVariant {
+    fn variants() -> slice::Iter<'static, TypeVariant> {
+        Self::VARIANTS.iter()
+    }
+}
+
+impl core::str::FromStr for TypeVariant {
+    type Err = Error;
+    #[allow(clippy::too_many_lines)]
+    fn from_str(s: &str) -> Result<Self, Error> {
+        match s {
+            "SError" => Ok(Self::SError),
+            "Multi" => Ok(Self::Multi),
+            "UnionKey" => Ok(Self::UnionKey),
+            "MyUnion" => Ok(Self::MyUnion),
+            "IntUnion" => Ok(Self::IntUnion),
+            "IntUnion2" => Ok(Self::IntUnion2),
+            _ => Err(Error::Invalid),
+        }
+    }
+}
+
+#[derive(Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord)]
+#[cfg_attr(
+    all(feature = "serde", feature = "alloc"),
+    derive(serde::Serialize, serde::Deserialize),
+    serde(rename_all = "snake_case"),
+    serde(untagged),
+)]
+#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
+pub enum Type {
+    SError(Box<SError>),
+    Multi(Box<Multi>),
+    UnionKey(Box<UnionKey>),
+    MyUnion(Box<MyUnion>),
+    IntUnion(Box<IntUnion>),
+    IntUnion2(Box<IntUnion2>),
+}
+
+impl Type {
+    pub const VARIANTS: [TypeVariant; 6] = [ TypeVariant::SError,
+        TypeVariant::Multi,
+        TypeVariant::UnionKey,
+        TypeVariant::MyUnion,
+        TypeVariant::IntUnion,
+        TypeVariant::IntUnion2, ];
+    pub const VARIANTS_STR: [&'static str; 6] = [ "SError",
+        "Multi",
+        "UnionKey",
+        "MyUnion",
+        "IntUnion",
+        "IntUnion2", ];
+
+    #[cfg(feature = "std")]
+    #[allow(clippy::too_many_lines)]
+    pub fn read_xdr<R: Read>(v: TypeVariant, r: &mut Limited<R>) -> Result<Self> {
+        match v {
+            TypeVariant::SError => r.with_limited_depth(|r| Ok(Self::SError(Box::new(SError::read_xdr(r)?)))),
+            TypeVariant::Multi => r.with_limited_depth(|r| Ok(Self::Multi(Box::new(Multi::read_xdr(r)?)))),
+            TypeVariant::UnionKey => r.with_limited_depth(|r| Ok(Self::UnionKey(Box::new(UnionKey::read_xdr(r)?)))),
+            TypeVariant::MyUnion => r.with_limited_depth(|r| Ok(Self::MyUnion(Box::new(MyUnion::read_xdr(r)?)))),
+            TypeVariant::IntUnion => r.with_limited_depth(|r| Ok(Self::IntUnion(Box::new(IntUnion::read_xdr(r)?)))),
+            TypeVariant::IntUnion2 => r.with_limited_depth(|r| Ok(Self::IntUnion2(Box::new(IntUnion2::read_xdr(r)?)))),
+        }
+    }
+
+    #[cfg(feature = "base64")]
+    pub fn read_xdr_base64<R: Read>(v: TypeVariant, r: &mut Limited<R>) -> Result<Self> {
+ let mut dec = Limited::new(base64::read::DecoderReader::new(&mut r.inner, base64::STANDARD), r.limits.clone()); + let t = Self::read_xdr(v, &mut dec)?; + Ok(t) + } + + #[cfg(feature = "std")] + pub fn read_xdr_to_end(v: TypeVariant, r: &mut Limited) -> Result { + let s = Self::read_xdr(v, r)?; + // Check that any further reads, such as this read of one byte, read no + // data, indicating EOF. If a byte is read the data is invalid. + if r.read(&mut [0u8; 1])? == 0 { + Ok(s) + } else { + Err(Error::Invalid) + } + } + + #[cfg(feature = "base64")] + pub fn read_xdr_base64_to_end(v: TypeVariant, r: &mut Limited) -> Result { + let mut dec = Limited::new(base64::read::DecoderReader::new(&mut r.inner, base64::STANDARD), r.limits.clone()); + let t = Self::read_xdr_to_end(v, &mut dec)?; + Ok(t) + } + + #[cfg(feature = "std")] + #[allow(clippy::too_many_lines)] + pub fn read_xdr_iter(v: TypeVariant, r: &mut Limited) -> Box> + '_> { + match v { + TypeVariant::SError => Box::new(ReadXdrIter::<_, SError>::new(&mut r.inner, r.limits.clone()).map(|r| r.map(|t| Self::SError(Box::new(t))))), +TypeVariant::Multi => Box::new(ReadXdrIter::<_, Multi>::new(&mut r.inner, r.limits.clone()).map(|r| r.map(|t| Self::Multi(Box::new(t))))), +TypeVariant::UnionKey => Box::new(ReadXdrIter::<_, UnionKey>::new(&mut r.inner, r.limits.clone()).map(|r| r.map(|t| Self::UnionKey(Box::new(t))))), +TypeVariant::MyUnion => Box::new(ReadXdrIter::<_, MyUnion>::new(&mut r.inner, r.limits.clone()).map(|r| r.map(|t| Self::MyUnion(Box::new(t))))), +TypeVariant::IntUnion => Box::new(ReadXdrIter::<_, IntUnion>::new(&mut r.inner, r.limits.clone()).map(|r| r.map(|t| Self::IntUnion(Box::new(t))))), +TypeVariant::IntUnion2 => Box::new(ReadXdrIter::<_, IntUnion2>::new(&mut r.inner, r.limits.clone()).map(|r| r.map(|t| Self::IntUnion2(Box::new(t))))), + } + } + + #[cfg(feature = "std")] + #[allow(clippy::too_many_lines)] + pub fn read_xdr_framed_iter(v: TypeVariant, r: &mut Limited) -> Box> + '_> { + match v { + TypeVariant::SError => Box::new(ReadXdrIter::<_, Frame>::new(&mut r.inner, r.limits.clone()).map(|r| r.map(|t| Self::SError(Box::new(t.0))))), +TypeVariant::Multi => Box::new(ReadXdrIter::<_, Frame>::new(&mut r.inner, r.limits.clone()).map(|r| r.map(|t| Self::Multi(Box::new(t.0))))), +TypeVariant::UnionKey => Box::new(ReadXdrIter::<_, Frame>::new(&mut r.inner, r.limits.clone()).map(|r| r.map(|t| Self::UnionKey(Box::new(t.0))))), +TypeVariant::MyUnion => Box::new(ReadXdrIter::<_, Frame>::new(&mut r.inner, r.limits.clone()).map(|r| r.map(|t| Self::MyUnion(Box::new(t.0))))), +TypeVariant::IntUnion => Box::new(ReadXdrIter::<_, Frame>::new(&mut r.inner, r.limits.clone()).map(|r| r.map(|t| Self::IntUnion(Box::new(t.0))))), +TypeVariant::IntUnion2 => Box::new(ReadXdrIter::<_, Frame>::new(&mut r.inner, r.limits.clone()).map(|r| r.map(|t| Self::IntUnion2(Box::new(t.0))))), + } + } + + #[cfg(feature = "base64")] + #[allow(clippy::too_many_lines)] + pub fn read_xdr_base64_iter(v: TypeVariant, r: &mut Limited) -> Box> + '_> { + let dec = base64::read::DecoderReader::new(&mut r.inner, base64::STANDARD); + match v { + TypeVariant::SError => Box::new(ReadXdrIter::<_, SError>::new(dec, r.limits.clone()).map(|r| r.map(|t| Self::SError(Box::new(t))))), +TypeVariant::Multi => Box::new(ReadXdrIter::<_, Multi>::new(dec, r.limits.clone()).map(|r| r.map(|t| Self::Multi(Box::new(t))))), +TypeVariant::UnionKey => Box::new(ReadXdrIter::<_, UnionKey>::new(dec, r.limits.clone()).map(|r| r.map(|t| Self::UnionKey(Box::new(t))))), +TypeVariant::MyUnion => 
Box::new(ReadXdrIter::<_, MyUnion>::new(dec, r.limits.clone()).map(|r| r.map(|t| Self::MyUnion(Box::new(t))))), +TypeVariant::IntUnion => Box::new(ReadXdrIter::<_, IntUnion>::new(dec, r.limits.clone()).map(|r| r.map(|t| Self::IntUnion(Box::new(t))))), +TypeVariant::IntUnion2 => Box::new(ReadXdrIter::<_, IntUnion2>::new(dec, r.limits.clone()).map(|r| r.map(|t| Self::IntUnion2(Box::new(t))))), + } + } + + #[cfg(feature = "std")] + pub fn from_xdr>(v: TypeVariant, bytes: B, limits: Limits) -> Result { + let mut cursor = Limited::new(Cursor::new(bytes.as_ref()), limits); + let t = Self::read_xdr_to_end(v, &mut cursor)?; + Ok(t) + } + + #[cfg(feature = "base64")] + pub fn from_xdr_base64(v: TypeVariant, b64: impl AsRef<[u8]>, limits: Limits) -> Result { + let mut b64_reader = Cursor::new(b64); + let mut dec = Limited::new(base64::read::DecoderReader::new(&mut b64_reader, base64::STANDARD), limits); + let t = Self::read_xdr_to_end(v, &mut dec)?; + Ok(t) + } + + #[cfg(all(feature = "std", feature = "serde_json"))] + #[allow(clippy::too_many_lines)] + pub fn read_json(v: TypeVariant, r: impl Read) -> Result { + match v { + TypeVariant::SError => Ok(Self::SError(Box::new(serde_json::from_reader(r)?))), +TypeVariant::Multi => Ok(Self::Multi(Box::new(serde_json::from_reader(r)?))), +TypeVariant::UnionKey => Ok(Self::UnionKey(Box::new(serde_json::from_reader(r)?))), +TypeVariant::MyUnion => Ok(Self::MyUnion(Box::new(serde_json::from_reader(r)?))), +TypeVariant::IntUnion => Ok(Self::IntUnion(Box::new(serde_json::from_reader(r)?))), +TypeVariant::IntUnion2 => Ok(Self::IntUnion2(Box::new(serde_json::from_reader(r)?))), + } + } + + #[cfg(feature = "alloc")] + #[must_use] + #[allow(clippy::too_many_lines)] + pub fn value(&self) -> &dyn core::any::Any { + #[allow(clippy::match_same_arms)] + match self { + Self::SError(ref v) => v.as_ref(), +Self::Multi(ref v) => v.as_ref(), +Self::UnionKey(ref v) => v.as_ref(), +Self::MyUnion(ref v) => v.as_ref(), +Self::IntUnion(ref v) => v.as_ref(), +Self::IntUnion2(ref v) => v.as_ref(), + } + } + + #[must_use] + #[allow(clippy::too_many_lines)] + pub const fn name(&self) -> &'static str { + match self { + Self::SError(_) => "SError", +Self::Multi(_) => "Multi", +Self::UnionKey(_) => "UnionKey", +Self::MyUnion(_) => "MyUnion", +Self::IntUnion(_) => "IntUnion", +Self::IntUnion2(_) => "IntUnion2", + } + } + + #[must_use] + #[allow(clippy::too_many_lines)] + pub const fn variants() -> [TypeVariant; 6] { + Self::VARIANTS + } + + #[must_use] + #[allow(clippy::too_many_lines)] + pub const fn variant(&self) -> TypeVariant { + match self { + Self::SError(_) => TypeVariant::SError, +Self::Multi(_) => TypeVariant::Multi, +Self::UnionKey(_) => TypeVariant::UnionKey, +Self::MyUnion(_) => TypeVariant::MyUnion, +Self::IntUnion(_) => TypeVariant::IntUnion, +Self::IntUnion2(_) => TypeVariant::IntUnion2, + } + } + } + + impl Name for Type { + #[must_use] + fn name(&self) -> &'static str { + Self::name(self) + } + } + + impl Variants for Type { + fn variants() -> slice::Iter<'static, TypeVariant> { + Self::VARIANTS.iter() + } + } + + impl WriteXdr for Type { + #[cfg(feature = "std")] + #[allow(clippy::too_many_lines)] + fn write_xdr(&self, w: &mut Limited) -> Result<()> { + match self { + Self::SError(v) => v.write_xdr(w), +Self::Multi(v) => v.write_xdr(w), +Self::UnionKey(v) => v.write_xdr(w), +Self::MyUnion(v) => v.write_xdr(w), +Self::IntUnion(v) => v.write_xdr(w), +Self::IntUnion2(v) => v.write_xdr(w), + } + } + } diff --git 
a/spec/output/generator_spec_rust_custom_str_impls/block_comments.x/MyXDR.rs b/spec/output/generator_spec_rust_custom_str_impls/block_comments.x/MyXDR.rs index 70fa480fb..c7f9cf95b 100644 --- a/spec/output/generator_spec_rust_custom_str_impls/block_comments.x/MyXDR.rs +++ b/spec/output/generator_spec_rust_custom_str_impls/block_comments.x/MyXDR.rs @@ -898,6 +898,32 @@ impl Default for VecM { } } +#[cfg(feature = "schemars")] +impl schemars::JsonSchema for VecM { + fn schema_name() -> String { + format!("VecM<{}, {}>", T::schema_name(), MAX) + } + + fn is_referenceable() -> bool { + false + } + + fn json_schema(gen: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema { + let schema = Vec::::json_schema(gen); + if let schemars::schema::Schema::Object(mut schema) = schema { + if let Some(array) = schema.array.clone() { + schema.array = Some(Box::new(schemars::schema::ArrayValidation { + max_items: Some(MAX), + ..*array + })); + } + schema.into() + } else { + schema + } + } +} + impl VecM { pub const MAX_LEN: usize = { MAX as usize }; @@ -1333,6 +1359,40 @@ impl Deref for BytesM { } } +#[cfg(feature = "schemars")] +impl schemars::JsonSchema for BytesM { + fn schema_name() -> String { + format!("BytesM<{MAX}>") + } + + fn is_referenceable() -> bool { + false + } + + fn json_schema(gen: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema { + let schema = String::json_schema(gen); + if let schemars::schema::Schema::Object(mut schema) = schema { + schema.extensions.insert( + "contentEncoding".to_owned(), + serde_json::Value::String("hex".to_string()), + ); + schema.extensions.insert( + "contentMediaType".to_owned(), + serde_json::Value::String("application/binary".to_string()), + ); + let string = *schema.string.unwrap_or_default().clone(); + schema.string = Some(Box::new(schemars::schema::StringValidation { + max_length: MAX.checked_mul(2).map(Some).unwrap_or_default(), + min_length: None, + ..string + })); + schema.into() + } else { + schema + } + } +} + impl Default for BytesM { fn default() -> Self { Self(Vec::default()) @@ -1721,6 +1781,27 @@ impl Default for StringM { } } +#[cfg(feature = "schemars")] +impl schemars::JsonSchema for StringM { + fn schema_name() -> String { + format!("StringM<{MAX}>") + } + + fn json_schema(gen: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema { + let schema = String::json_schema(gen); + if let schemars::schema::Schema::Object(mut schema) = schema { + let string = *schema.string.unwrap_or_default().clone(); + schema.string = Some(Box::new(schemars::schema::StringValidation { + max_length: Some(MAX), + ..string + })); + schema.into() + } else { + schema + } + } +} + impl StringM { pub const MAX_LEN: usize = { MAX as usize }; @@ -2037,6 +2118,17 @@ pub struct Frame(pub T) where T: ReadXdr; +#[cfg(feature = "schemars")] +impl schemars::JsonSchema for Frame { + fn schema_name() -> String { + format!("Frame<{}>", T::schema_name()) + } + + fn json_schema(gen: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema { + T::json_schema(gen) + } +} + impl ReadXdr for Frame where T: ReadXdr, @@ -2699,6 +2791,7 @@ mod test { #[derive(Clone, Copy, Debug, Hash, PartialEq, Eq, PartialOrd, Ord)] #[cfg_attr(feature = "arbitrary", derive(Arbitrary))] #[cfg_attr(all(feature = "serde", feature = "alloc"), derive(serde::Serialize, serde::Deserialize), serde(rename_all = "snake_case"))] +#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] #[repr(i32)] pub enum AccountFlags { AuthRequiredFlag = 1, @@ -2789,6 +2882,7 @@ 
impl WriteXdr for AccountFlags { derive(serde::Serialize, serde::Deserialize), serde(rename_all = "snake_case") )] +#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] pub enum TypeVariant { AccountFlags, } @@ -2810,6 +2904,15 @@ impl TypeVariant { pub const fn variants() -> [TypeVariant; 1] { Self::VARIANTS } + + #[cfg(feature = "schemars")] + #[must_use] + #[allow(clippy::too_many_lines)] + pub fn json_schema(&self, gen: schemars::gen::SchemaGenerator) -> schemars::schema::RootSchema { + match self { + Self::AccountFlags => gen.into_root_schema_for::(), + } + } } impl Name for TypeVariant { @@ -2843,6 +2946,7 @@ impl core::str::FromStr for TypeVariant { serde(rename_all = "snake_case"), serde(untagged), )] +#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] pub enum Type { AccountFlags(Box), } diff --git a/spec/output/generator_spec_rust_custom_str_impls/const.x/MyXDR.rs b/spec/output/generator_spec_rust_custom_str_impls/const.x/MyXDR.rs index 459077778..80670d3ce 100644 --- a/spec/output/generator_spec_rust_custom_str_impls/const.x/MyXDR.rs +++ b/spec/output/generator_spec_rust_custom_str_impls/const.x/MyXDR.rs @@ -898,6 +898,32 @@ impl Default for VecM { } } +#[cfg(feature = "schemars")] +impl schemars::JsonSchema for VecM { + fn schema_name() -> String { + format!("VecM<{}, {}>", T::schema_name(), MAX) + } + + fn is_referenceable() -> bool { + false + } + + fn json_schema(gen: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema { + let schema = Vec::::json_schema(gen); + if let schemars::schema::Schema::Object(mut schema) = schema { + if let Some(array) = schema.array.clone() { + schema.array = Some(Box::new(schemars::schema::ArrayValidation { + max_items: Some(MAX), + ..*array + })); + } + schema.into() + } else { + schema + } + } +} + impl VecM { pub const MAX_LEN: usize = { MAX as usize }; @@ -1333,6 +1359,40 @@ impl Deref for BytesM { } } +#[cfg(feature = "schemars")] +impl schemars::JsonSchema for BytesM { + fn schema_name() -> String { + format!("BytesM<{MAX}>") + } + + fn is_referenceable() -> bool { + false + } + + fn json_schema(gen: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema { + let schema = String::json_schema(gen); + if let schemars::schema::Schema::Object(mut schema) = schema { + schema.extensions.insert( + "contentEncoding".to_owned(), + serde_json::Value::String("hex".to_string()), + ); + schema.extensions.insert( + "contentMediaType".to_owned(), + serde_json::Value::String("application/binary".to_string()), + ); + let string = *schema.string.unwrap_or_default().clone(); + schema.string = Some(Box::new(schemars::schema::StringValidation { + max_length: MAX.checked_mul(2).map(Some).unwrap_or_default(), + min_length: None, + ..string + })); + schema.into() + } else { + schema + } + } +} + impl Default for BytesM { fn default() -> Self { Self(Vec::default()) @@ -1721,6 +1781,27 @@ impl Default for StringM { } } +#[cfg(feature = "schemars")] +impl schemars::JsonSchema for StringM { + fn schema_name() -> String { + format!("StringM<{MAX}>") + } + + fn json_schema(gen: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema { + let schema = String::json_schema(gen); + if let schemars::schema::Schema::Object(mut schema) = schema { + let string = *schema.string.unwrap_or_default().clone(); + schema.string = Some(Box::new(schemars::schema::StringValidation { + max_length: Some(MAX), + ..string + })); + schema.into() + } else { + schema + } + } +} + impl StringM { pub const MAX_LEN: usize = { MAX as usize }; @@ 
-2037,6 +2118,17 @@ pub struct Frame(pub T) where T: ReadXdr; +#[cfg(feature = "schemars")] +impl schemars::JsonSchema for Frame { + fn schema_name() -> String { + format!("Frame<{}>", T::schema_name()) + } + + fn json_schema(gen: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema { + T::json_schema(gen) + } +} + impl ReadXdr for Frame where T: ReadXdr, @@ -2716,6 +2808,7 @@ pub type TestArray2 = VecM::; derive(serde::Serialize, serde::Deserialize), serde(rename_all = "snake_case") )] + #[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] pub enum TypeVariant { TestArray, TestArray2, @@ -2741,6 +2834,16 @@ Self::TestArray2 => "TestArray2", pub const fn variants() -> [TypeVariant; 2] { Self::VARIANTS } + + #[cfg(feature = "schemars")] + #[must_use] + #[allow(clippy::too_many_lines)] + pub fn json_schema(&self, gen: schemars::gen::SchemaGenerator) -> schemars::schema::RootSchema { + match self { + Self::TestArray => gen.into_root_schema_for::(), +Self::TestArray2 => gen.into_root_schema_for::(), + } + } } impl Name for TypeVariant { @@ -2775,6 +2878,7 @@ Self::TestArray2 => "TestArray2", serde(rename_all = "snake_case"), serde(untagged), )] + #[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] pub enum Type { TestArray(Box), TestArray2(Box), diff --git a/spec/output/generator_spec_rust_custom_str_impls/enum.x/MyXDR.rs b/spec/output/generator_spec_rust_custom_str_impls/enum.x/MyXDR.rs index 3e9b8dd70..650ffec70 100644 --- a/spec/output/generator_spec_rust_custom_str_impls/enum.x/MyXDR.rs +++ b/spec/output/generator_spec_rust_custom_str_impls/enum.x/MyXDR.rs @@ -898,6 +898,32 @@ impl Default for VecM { } } +#[cfg(feature = "schemars")] +impl schemars::JsonSchema for VecM { + fn schema_name() -> String { + format!("VecM<{}, {}>", T::schema_name(), MAX) + } + + fn is_referenceable() -> bool { + false + } + + fn json_schema(gen: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema { + let schema = Vec::::json_schema(gen); + if let schemars::schema::Schema::Object(mut schema) = schema { + if let Some(array) = schema.array.clone() { + schema.array = Some(Box::new(schemars::schema::ArrayValidation { + max_items: Some(MAX), + ..*array + })); + } + schema.into() + } else { + schema + } + } +} + impl VecM { pub const MAX_LEN: usize = { MAX as usize }; @@ -1333,6 +1359,40 @@ impl Deref for BytesM { } } +#[cfg(feature = "schemars")] +impl schemars::JsonSchema for BytesM { + fn schema_name() -> String { + format!("BytesM<{MAX}>") + } + + fn is_referenceable() -> bool { + false + } + + fn json_schema(gen: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema { + let schema = String::json_schema(gen); + if let schemars::schema::Schema::Object(mut schema) = schema { + schema.extensions.insert( + "contentEncoding".to_owned(), + serde_json::Value::String("hex".to_string()), + ); + schema.extensions.insert( + "contentMediaType".to_owned(), + serde_json::Value::String("application/binary".to_string()), + ); + let string = *schema.string.unwrap_or_default().clone(); + schema.string = Some(Box::new(schemars::schema::StringValidation { + max_length: MAX.checked_mul(2).map(Some).unwrap_or_default(), + min_length: None, + ..string + })); + schema.into() + } else { + schema + } + } +} + impl Default for BytesM { fn default() -> Self { Self(Vec::default()) @@ -1721,6 +1781,27 @@ impl Default for StringM { } } +#[cfg(feature = "schemars")] +impl schemars::JsonSchema for StringM { + fn schema_name() -> String { + format!("StringM<{MAX}>") + } + + fn 
json_schema(gen: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema { + let schema = String::json_schema(gen); + if let schemars::schema::Schema::Object(mut schema) = schema { + let string = *schema.string.unwrap_or_default().clone(); + schema.string = Some(Box::new(schemars::schema::StringValidation { + max_length: Some(MAX), + ..string + })); + schema.into() + } else { + schema + } + } +} + impl StringM { pub const MAX_LEN: usize = { MAX as usize }; @@ -2037,6 +2118,17 @@ pub struct Frame(pub T) where T: ReadXdr; +#[cfg(feature = "schemars")] +impl schemars::JsonSchema for Frame { + fn schema_name() -> String { + format!("Frame<{}>", T::schema_name()) + } + + fn json_schema(gen: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema { + T::json_schema(gen) + } +} + impl ReadXdr for Frame where T: ReadXdr, @@ -2718,6 +2810,7 @@ mod test { #[derive(Clone, Copy, Debug, Hash, PartialEq, Eq, PartialOrd, Ord)] #[cfg_attr(feature = "arbitrary", derive(Arbitrary))] #[cfg_attr(all(feature = "serde", feature = "alloc"), derive(serde::Serialize, serde::Deserialize), serde(rename_all = "snake_case"))] +#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] #[repr(i32)] pub enum MessageType { ErrorMsg = 0, @@ -2881,6 +2974,7 @@ Self::FbaMessage => "FbaMessage", #[derive(Clone, Copy, Debug, Hash, PartialEq, Eq, PartialOrd, Ord)] #[cfg_attr(feature = "arbitrary", derive(Arbitrary))] #[cfg_attr(all(feature = "serde", feature = "alloc"), derive(serde::Serialize, serde::Deserialize), serde(rename_all = "snake_case"))] +#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] #[repr(i32)] pub enum Color { Red = 0, @@ -2989,6 +3083,7 @@ Self::Blue => "Blue", #[derive(Clone, Copy, Debug, Hash, PartialEq, Eq, PartialOrd, Ord)] #[cfg_attr(feature = "arbitrary", derive(Arbitrary))] #[cfg_attr(all(feature = "serde", feature = "alloc"), derive(serde_with::SerializeDisplay, serde_with::DeserializeFromStr))] +#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] #[repr(i32)] pub enum Color2 { Red2 = 0, @@ -3089,6 +3184,7 @@ Self::Blue2 => "Blue2", derive(serde::Serialize, serde::Deserialize), serde(rename_all = "snake_case") )] + #[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] pub enum TypeVariant { MessageType, Color, @@ -3118,6 +3214,17 @@ Self::Color2 => "Color2", pub const fn variants() -> [TypeVariant; 3] { Self::VARIANTS } + + #[cfg(feature = "schemars")] + #[must_use] + #[allow(clippy::too_many_lines)] + pub fn json_schema(&self, gen: schemars::gen::SchemaGenerator) -> schemars::schema::RootSchema { + match self { + Self::MessageType => gen.into_root_schema_for::(), +Self::Color => gen.into_root_schema_for::(), +Self::Color2 => gen.into_root_schema_for::(), + } + } } impl Name for TypeVariant { @@ -3153,6 +3260,7 @@ Self::Color2 => "Color2", serde(rename_all = "snake_case"), serde(untagged), )] + #[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] pub enum Type { MessageType(Box), Color(Box), diff --git a/spec/output/generator_spec_rust_custom_str_impls/nesting.x/MyXDR.rs b/spec/output/generator_spec_rust_custom_str_impls/nesting.x/MyXDR.rs index 3b9486e32..cf7a2d6a0 100644 --- a/spec/output/generator_spec_rust_custom_str_impls/nesting.x/MyXDR.rs +++ b/spec/output/generator_spec_rust_custom_str_impls/nesting.x/MyXDR.rs @@ -898,6 +898,32 @@ impl Default for VecM { } } +#[cfg(feature = "schemars")] +impl schemars::JsonSchema for VecM { + fn schema_name() -> String { + format!("VecM<{}, {}>", T::schema_name(), MAX) + } + + fn 
is_referenceable() -> bool { + false + } + + fn json_schema(gen: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema { + let schema = Vec::::json_schema(gen); + if let schemars::schema::Schema::Object(mut schema) = schema { + if let Some(array) = schema.array.clone() { + schema.array = Some(Box::new(schemars::schema::ArrayValidation { + max_items: Some(MAX), + ..*array + })); + } + schema.into() + } else { + schema + } + } +} + impl VecM { pub const MAX_LEN: usize = { MAX as usize }; @@ -1333,6 +1359,40 @@ impl Deref for BytesM { } } +#[cfg(feature = "schemars")] +impl schemars::JsonSchema for BytesM { + fn schema_name() -> String { + format!("BytesM<{MAX}>") + } + + fn is_referenceable() -> bool { + false + } + + fn json_schema(gen: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema { + let schema = String::json_schema(gen); + if let schemars::schema::Schema::Object(mut schema) = schema { + schema.extensions.insert( + "contentEncoding".to_owned(), + serde_json::Value::String("hex".to_string()), + ); + schema.extensions.insert( + "contentMediaType".to_owned(), + serde_json::Value::String("application/binary".to_string()), + ); + let string = *schema.string.unwrap_or_default().clone(); + schema.string = Some(Box::new(schemars::schema::StringValidation { + max_length: MAX.checked_mul(2).map(Some).unwrap_or_default(), + min_length: None, + ..string + })); + schema.into() + } else { + schema + } + } +} + impl Default for BytesM { fn default() -> Self { Self(Vec::default()) @@ -1721,6 +1781,27 @@ impl Default for StringM { } } +#[cfg(feature = "schemars")] +impl schemars::JsonSchema for StringM { + fn schema_name() -> String { + format!("StringM<{MAX}>") + } + + fn json_schema(gen: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema { + let schema = String::json_schema(gen); + if let schemars::schema::Schema::Object(mut schema) = schema { + let string = *schema.string.unwrap_or_default().clone(); + schema.string = Some(Box::new(schemars::schema::StringValidation { + max_length: Some(MAX), + ..string + })); + schema.into() + } else { + schema + } + } +} + impl StringM { pub const MAX_LEN: usize = { MAX as usize }; @@ -2037,6 +2118,17 @@ pub struct Frame(pub T) where T: ReadXdr; +#[cfg(feature = "schemars")] +impl schemars::JsonSchema for Frame { + fn schema_name() -> String { + format!("Frame<{}>", T::schema_name()) + } + + fn json_schema(gen: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema { + T::json_schema(gen) + } +} + impl ReadXdr for Frame where T: ReadXdr, @@ -2700,6 +2792,7 @@ mod test { #[derive(Clone, Copy, Debug, Hash, PartialEq, Eq, PartialOrd, Ord)] #[cfg_attr(feature = "arbitrary", derive(Arbitrary))] #[cfg_attr(all(feature = "serde", feature = "alloc"), derive(serde_with::SerializeDisplay, serde_with::DeserializeFromStr))] +#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] #[repr(i32)] pub enum UnionKey { One = 1, @@ -2813,6 +2906,7 @@ pub type Foo = i32; #[derive(Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord)] #[cfg_attr(feature = "arbitrary", derive(Arbitrary))] #[cfg_attr(all(feature = "serde", feature = "alloc"), derive(serde::Serialize, serde::Deserialize), serde(rename_all = "snake_case"))] +#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] pub struct MyUnionOne { pub some_int: i32, } @@ -2850,6 +2944,7 @@ impl WriteXdr for MyUnionOne { #[derive(Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord)] #[cfg_attr(feature = "arbitrary", derive(Arbitrary))] #[cfg_attr(all(feature = "serde", 
feature = "alloc"), derive(serde::Serialize, serde::Deserialize), serde(rename_all = "snake_case"))] +#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] pub struct MyUnionTwo { pub some_int: i32, pub foo: i32, @@ -2903,6 +2998,7 @@ self.foo.write_xdr(w)?; #[derive(Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord)] #[cfg_attr(feature = "arbitrary", derive(Arbitrary))] #[cfg_attr(all(feature = "serde", feature = "alloc"), derive(serde_with::SerializeDisplay, serde_with::DeserializeFromStr))] +#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] #[allow(clippy::large_enum_variant)] pub enum MyUnion { One(MyUnionOne), @@ -3009,6 +3105,7 @@ Self::Offer => ().write_xdr(w)?, derive(serde::Serialize, serde::Deserialize), serde(rename_all = "snake_case") )] + #[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] pub enum TypeVariant { UnionKey, Foo, @@ -3046,6 +3143,19 @@ Self::MyUnionTwo => "MyUnionTwo", pub const fn variants() -> [TypeVariant; 5] { Self::VARIANTS } + + #[cfg(feature = "schemars")] + #[must_use] + #[allow(clippy::too_many_lines)] + pub fn json_schema(&self, gen: schemars::gen::SchemaGenerator) -> schemars::schema::RootSchema { + match self { + Self::UnionKey => gen.into_root_schema_for::(), +Self::Foo => gen.into_root_schema_for::(), +Self::MyUnion => gen.into_root_schema_for::(), +Self::MyUnionOne => gen.into_root_schema_for::(), +Self::MyUnionTwo => gen.into_root_schema_for::(), + } + } } impl Name for TypeVariant { @@ -3083,6 +3193,7 @@ Self::MyUnionTwo => "MyUnionTwo", serde(rename_all = "snake_case"), serde(untagged), )] + #[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] pub enum Type { UnionKey(Box), Foo(Box), diff --git a/spec/output/generator_spec_rust_custom_str_impls/optional.x/MyXDR.rs b/spec/output/generator_spec_rust_custom_str_impls/optional.x/MyXDR.rs index 735992c52..4def34ed7 100644 --- a/spec/output/generator_spec_rust_custom_str_impls/optional.x/MyXDR.rs +++ b/spec/output/generator_spec_rust_custom_str_impls/optional.x/MyXDR.rs @@ -898,6 +898,32 @@ impl Default for VecM { } } +#[cfg(feature = "schemars")] +impl schemars::JsonSchema for VecM { + fn schema_name() -> String { + format!("VecM<{}, {}>", T::schema_name(), MAX) + } + + fn is_referenceable() -> bool { + false + } + + fn json_schema(gen: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema { + let schema = Vec::::json_schema(gen); + if let schemars::schema::Schema::Object(mut schema) = schema { + if let Some(array) = schema.array.clone() { + schema.array = Some(Box::new(schemars::schema::ArrayValidation { + max_items: Some(MAX), + ..*array + })); + } + schema.into() + } else { + schema + } + } +} + impl VecM { pub const MAX_LEN: usize = { MAX as usize }; @@ -1333,6 +1359,40 @@ impl Deref for BytesM { } } +#[cfg(feature = "schemars")] +impl schemars::JsonSchema for BytesM { + fn schema_name() -> String { + format!("BytesM<{MAX}>") + } + + fn is_referenceable() -> bool { + false + } + + fn json_schema(gen: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema { + let schema = String::json_schema(gen); + if let schemars::schema::Schema::Object(mut schema) = schema { + schema.extensions.insert( + "contentEncoding".to_owned(), + serde_json::Value::String("hex".to_string()), + ); + schema.extensions.insert( + "contentMediaType".to_owned(), + serde_json::Value::String("application/binary".to_string()), + ); + let string = *schema.string.unwrap_or_default().clone(); + schema.string = Some(Box::new(schemars::schema::StringValidation { + 
max_length: MAX.checked_mul(2).map(Some).unwrap_or_default(), + min_length: None, + ..string + })); + schema.into() + } else { + schema + } + } +} + impl Default for BytesM { fn default() -> Self { Self(Vec::default()) @@ -1721,6 +1781,27 @@ impl Default for StringM { } } +#[cfg(feature = "schemars")] +impl schemars::JsonSchema for StringM { + fn schema_name() -> String { + format!("StringM<{MAX}>") + } + + fn json_schema(gen: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema { + let schema = String::json_schema(gen); + if let schemars::schema::Schema::Object(mut schema) = schema { + let string = *schema.string.unwrap_or_default().clone(); + schema.string = Some(Box::new(schemars::schema::StringValidation { + max_length: Some(MAX), + ..string + })); + schema.into() + } else { + schema + } + } +} + impl StringM { pub const MAX_LEN: usize = { MAX as usize }; @@ -2037,6 +2118,17 @@ pub struct Frame(pub T) where T: ReadXdr; +#[cfg(feature = "schemars")] +impl schemars::JsonSchema for Frame { + fn schema_name() -> String { + format!("Frame<{}>", T::schema_name()) + } + + fn json_schema(gen: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema { + T::json_schema(gen) + } +} + impl ReadXdr for Frame where T: ReadXdr, @@ -2708,6 +2800,7 @@ pub type Arr = [i32; 2]; #[derive(Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord)] #[cfg_attr(feature = "arbitrary", derive(Arbitrary))] #[cfg_attr(all(feature = "serde", feature = "alloc"), derive(serde_with::SerializeDisplay, serde_with::DeserializeFromStr))] +#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] pub struct HasOptions { pub first_option: Option, pub second_option: Option, @@ -2745,6 +2838,7 @@ self.third_option.write_xdr(w)?; derive(serde::Serialize, serde::Deserialize), serde(rename_all = "snake_case") )] + #[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] pub enum TypeVariant { Arr, HasOptions, @@ -2770,6 +2864,16 @@ Self::HasOptions => "HasOptions", pub const fn variants() -> [TypeVariant; 2] { Self::VARIANTS } + + #[cfg(feature = "schemars")] + #[must_use] + #[allow(clippy::too_many_lines)] + pub fn json_schema(&self, gen: schemars::gen::SchemaGenerator) -> schemars::schema::RootSchema { + match self { + Self::Arr => gen.into_root_schema_for::(), +Self::HasOptions => gen.into_root_schema_for::(), + } + } } impl Name for TypeVariant { @@ -2804,6 +2908,7 @@ Self::HasOptions => "HasOptions", serde(rename_all = "snake_case"), serde(untagged), )] + #[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] pub enum Type { Arr(Box), HasOptions(Box), diff --git a/spec/output/generator_spec_rust_custom_str_impls/struct.x/MyXDR.rs b/spec/output/generator_spec_rust_custom_str_impls/struct.x/MyXDR.rs index ace8f79b3..6278822dd 100644 --- a/spec/output/generator_spec_rust_custom_str_impls/struct.x/MyXDR.rs +++ b/spec/output/generator_spec_rust_custom_str_impls/struct.x/MyXDR.rs @@ -898,6 +898,32 @@ impl Default for VecM { } } +#[cfg(feature = "schemars")] +impl schemars::JsonSchema for VecM { + fn schema_name() -> String { + format!("VecM<{}, {}>", T::schema_name(), MAX) + } + + fn is_referenceable() -> bool { + false + } + + fn json_schema(gen: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema { + let schema = Vec::::json_schema(gen); + if let schemars::schema::Schema::Object(mut schema) = schema { + if let Some(array) = schema.array.clone() { + schema.array = Some(Box::new(schemars::schema::ArrayValidation { + max_items: Some(MAX), + ..*array + })); + } + schema.into() + } 
else { + schema + } + } +} + impl VecM { pub const MAX_LEN: usize = { MAX as usize }; @@ -1333,6 +1359,40 @@ impl Deref for BytesM { } } +#[cfg(feature = "schemars")] +impl schemars::JsonSchema for BytesM { + fn schema_name() -> String { + format!("BytesM<{MAX}>") + } + + fn is_referenceable() -> bool { + false + } + + fn json_schema(gen: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema { + let schema = String::json_schema(gen); + if let schemars::schema::Schema::Object(mut schema) = schema { + schema.extensions.insert( + "contentEncoding".to_owned(), + serde_json::Value::String("hex".to_string()), + ); + schema.extensions.insert( + "contentMediaType".to_owned(), + serde_json::Value::String("application/binary".to_string()), + ); + let string = *schema.string.unwrap_or_default().clone(); + schema.string = Some(Box::new(schemars::schema::StringValidation { + max_length: MAX.checked_mul(2).map(Some).unwrap_or_default(), + min_length: None, + ..string + })); + schema.into() + } else { + schema + } + } +} + impl Default for BytesM { fn default() -> Self { Self(Vec::default()) @@ -1721,6 +1781,27 @@ impl Default for StringM { } } +#[cfg(feature = "schemars")] +impl schemars::JsonSchema for StringM { + fn schema_name() -> String { + format!("StringM<{MAX}>") + } + + fn json_schema(gen: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema { + let schema = String::json_schema(gen); + if let schemars::schema::Schema::Object(mut schema) = schema { + let string = *schema.string.unwrap_or_default().clone(); + schema.string = Some(Box::new(schemars::schema::StringValidation { + max_length: Some(MAX), + ..string + })); + schema.into() + } else { + schema + } + } +} + impl StringM { pub const MAX_LEN: usize = { MAX as usize }; @@ -2037,6 +2118,17 @@ pub struct Frame(pub T) where T: ReadXdr; +#[cfg(feature = "schemars")] +impl schemars::JsonSchema for Frame { + fn schema_name() -> String { + format!("Frame<{}>", T::schema_name()) + } + + fn json_schema(gen: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema { + T::json_schema(gen) + } +} + impl ReadXdr for Frame where T: ReadXdr, @@ -2710,6 +2802,7 @@ pub type Int64 = i64; #[derive(Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord)] #[cfg_attr(feature = "arbitrary", derive(Arbitrary))] #[cfg_attr(all(feature = "serde", feature = "alloc"), derive(serde_with::SerializeDisplay, serde_with::DeserializeFromStr))] +#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] pub struct MyStruct { pub some_int: i32, pub a_big_int: i64, @@ -2753,6 +2846,7 @@ self.max_string.write_xdr(w)?; derive(serde::Serialize, serde::Deserialize), serde(rename_all = "snake_case") )] + #[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] pub enum TypeVariant { Int64, MyStruct, @@ -2778,6 +2872,16 @@ Self::MyStruct => "MyStruct", pub const fn variants() -> [TypeVariant; 2] { Self::VARIANTS } + + #[cfg(feature = "schemars")] + #[must_use] + #[allow(clippy::too_many_lines)] + pub fn json_schema(&self, gen: schemars::gen::SchemaGenerator) -> schemars::schema::RootSchema { + match self { + Self::Int64 => gen.into_root_schema_for::(), +Self::MyStruct => gen.into_root_schema_for::(), + } + } } impl Name for TypeVariant { @@ -2812,6 +2916,7 @@ Self::MyStruct => "MyStruct", serde(rename_all = "snake_case"), serde(untagged), )] + #[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] pub enum Type { Int64(Box), MyStruct(Box), diff --git a/spec/output/generator_spec_rust_custom_str_impls/test.x/MyXDR.rs 
b/spec/output/generator_spec_rust_custom_str_impls/test.x/MyXDR.rs index a4b4224a9..1f8261303 100644 --- a/spec/output/generator_spec_rust_custom_str_impls/test.x/MyXDR.rs +++ b/spec/output/generator_spec_rust_custom_str_impls/test.x/MyXDR.rs @@ -898,6 +898,32 @@ impl Default for VecM { } } +#[cfg(feature = "schemars")] +impl schemars::JsonSchema for VecM { + fn schema_name() -> String { + format!("VecM<{}, {}>", T::schema_name(), MAX) + } + + fn is_referenceable() -> bool { + false + } + + fn json_schema(gen: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema { + let schema = Vec::::json_schema(gen); + if let schemars::schema::Schema::Object(mut schema) = schema { + if let Some(array) = schema.array.clone() { + schema.array = Some(Box::new(schemars::schema::ArrayValidation { + max_items: Some(MAX), + ..*array + })); + } + schema.into() + } else { + schema + } + } +} + impl VecM { pub const MAX_LEN: usize = { MAX as usize }; @@ -1333,6 +1359,40 @@ impl Deref for BytesM { } } +#[cfg(feature = "schemars")] +impl schemars::JsonSchema for BytesM { + fn schema_name() -> String { + format!("BytesM<{MAX}>") + } + + fn is_referenceable() -> bool { + false + } + + fn json_schema(gen: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema { + let schema = String::json_schema(gen); + if let schemars::schema::Schema::Object(mut schema) = schema { + schema.extensions.insert( + "contentEncoding".to_owned(), + serde_json::Value::String("hex".to_string()), + ); + schema.extensions.insert( + "contentMediaType".to_owned(), + serde_json::Value::String("application/binary".to_string()), + ); + let string = *schema.string.unwrap_or_default().clone(); + schema.string = Some(Box::new(schemars::schema::StringValidation { + max_length: MAX.checked_mul(2).map(Some).unwrap_or_default(), + min_length: None, + ..string + })); + schema.into() + } else { + schema + } + } +} + impl Default for BytesM { fn default() -> Self { Self(Vec::default()) @@ -1721,6 +1781,27 @@ impl Default for StringM { } } +#[cfg(feature = "schemars")] +impl schemars::JsonSchema for StringM { + fn schema_name() -> String { + format!("StringM<{MAX}>") + } + + fn json_schema(gen: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema { + let schema = String::json_schema(gen); + if let schemars::schema::Schema::Object(mut schema) = schema { + let string = *schema.string.unwrap_or_default().clone(); + schema.string = Some(Box::new(schemars::schema::StringValidation { + max_length: Some(MAX), + ..string + })); + schema.into() + } else { + schema + } + } +} + impl StringM { pub const MAX_LEN: usize = { MAX as usize }; @@ -2037,6 +2118,17 @@ pub struct Frame(pub T) where T: ReadXdr; +#[cfg(feature = "schemars")] +impl schemars::JsonSchema for Frame { + fn schema_name() -> String { + format!("Frame<{}>", T::schema_name()) + } + + fn json_schema(gen: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema { + T::json_schema(gen) + } +} + impl ReadXdr for Frame where T: ReadXdr, @@ -2725,6 +2817,39 @@ impl core::str::FromStr for Uint512 { hex::decode(s).map_err(|_| Error::InvalidHex)?.try_into() } } +#[cfg(feature = "schemars")] +impl schemars::JsonSchema for Uint512 { + fn schema_name() -> String { + "Uint512".to_string() + } + + fn is_referenceable() -> bool { + false + } + + fn json_schema(gen: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema { + let schema = String::json_schema(gen); + if let schemars::schema::Schema::Object(mut schema) = schema { + schema.extensions.insert( + 
"contentEncoding".to_owned(), + serde_json::Value::String("hex".to_string()), + ); + schema.extensions.insert( + "contentMediaType".to_owned(), + serde_json::Value::String("application/binary".to_string()), + ); + let string = *schema.string.unwrap_or_default().clone(); + schema.string = Some(Box::new(schemars::schema::StringValidation { + max_length: 64_u32.checked_mul(2).map(Some).unwrap_or_default(), + min_length: 64_u32.checked_mul(2).map(Some).unwrap_or_default(), + ..string + })); + schema.into() + } else { + schema + } + } +} impl From for [u8; 64] { #[must_use] fn from(x: Uint512) -> Self { @@ -2811,6 +2936,7 @@ impl AsRef<[u8]> for Uint512 { #[cfg_attr(feature = "arbitrary", derive(Arbitrary))] #[derive(Default)] #[cfg_attr(all(feature = "serde", feature = "alloc"), derive(serde::Serialize, serde::Deserialize), serde(rename_all = "snake_case"))] +#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] #[derive(Debug)] pub struct Uint513(pub BytesM::<64>); @@ -2912,6 +3038,7 @@ impl AsRef<[u8]> for Uint513 { #[cfg_attr(feature = "arbitrary", derive(Arbitrary))] #[derive(Default)] #[cfg_attr(all(feature = "serde", feature = "alloc"), derive(serde::Serialize, serde::Deserialize), serde(rename_all = "snake_case"))] +#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] #[derive(Debug)] pub struct Uint514(pub BytesM); @@ -3013,6 +3140,7 @@ impl AsRef<[u8]> for Uint514 { #[cfg_attr(feature = "arbitrary", derive(Arbitrary))] #[derive(Default)] #[cfg_attr(all(feature = "serde", feature = "alloc"), derive(serde::Serialize, serde::Deserialize), serde(rename_all = "snake_case"))] +#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] #[derive(Debug)] pub struct Str(pub StringM::<64>); @@ -3114,6 +3242,7 @@ impl AsRef<[u8]> for Str { #[cfg_attr(feature = "arbitrary", derive(Arbitrary))] #[derive(Default)] #[cfg_attr(all(feature = "serde", feature = "alloc"), derive(serde::Serialize, serde::Deserialize), serde(rename_all = "snake_case"))] +#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] #[derive(Debug)] pub struct Str2(pub StringM); @@ -3244,6 +3373,39 @@ impl core::str::FromStr for Hash { hex::decode(s).map_err(|_| Error::InvalidHex)?.try_into() } } +#[cfg(feature = "schemars")] +impl schemars::JsonSchema for Hash { + fn schema_name() -> String { + "Hash".to_string() + } + + fn is_referenceable() -> bool { + false + } + + fn json_schema(gen: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema { + let schema = String::json_schema(gen); + if let schemars::schema::Schema::Object(mut schema) = schema { + schema.extensions.insert( + "contentEncoding".to_owned(), + serde_json::Value::String("hex".to_string()), + ); + schema.extensions.insert( + "contentMediaType".to_owned(), + serde_json::Value::String("application/binary".to_string()), + ); + let string = *schema.string.unwrap_or_default().clone(); + schema.string = Some(Box::new(schemars::schema::StringValidation { + max_length: 32_u32.checked_mul(2).map(Some).unwrap_or_default(), + min_length: 32_u32.checked_mul(2).map(Some).unwrap_or_default(), + ..string + })); + schema.into() + } else { + schema + } + } +} impl From for [u8; 32] { #[must_use] fn from(x: Hash) -> Self { @@ -3329,6 +3491,7 @@ impl AsRef<[u8]> for Hash { #[derive(Clone, Hash, PartialEq, Eq, PartialOrd, Ord)] #[cfg_attr(feature = "arbitrary", derive(Arbitrary))] #[cfg_attr(all(feature = "serde", feature = "alloc"), derive(serde::Serialize, serde::Deserialize), serde(rename_all = "snake_case"))] +#[cfg_attr(feature = 
"schemars", derive(schemars::JsonSchema))] #[derive(Debug)] pub struct Hashes1(pub [Hash; 12]); @@ -3418,6 +3581,7 @@ impl AsRef<[Hash]> for Hashes1 { #[cfg_attr(feature = "arbitrary", derive(Arbitrary))] #[derive(Default)] #[cfg_attr(all(feature = "serde", feature = "alloc"), derive(serde::Serialize, serde::Deserialize), serde(rename_all = "snake_case"))] +#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] #[derive(Debug)] pub struct Hashes2(pub VecM::); @@ -3519,6 +3683,7 @@ impl AsRef<[Hash]> for Hashes2 { #[cfg_attr(feature = "arbitrary", derive(Arbitrary))] #[derive(Default)] #[cfg_attr(all(feature = "serde", feature = "alloc"), derive(serde::Serialize, serde::Deserialize), serde(rename_all = "snake_case"))] +#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] #[derive(Debug)] pub struct Hashes3(pub VecM::); @@ -3619,6 +3784,7 @@ impl AsRef<[Hash]> for Hashes3 { #[derive(Clone, Hash, PartialEq, Eq, PartialOrd, Ord)] #[cfg_attr(feature = "arbitrary", derive(Arbitrary))] #[cfg_attr(all(feature = "serde", feature = "alloc"), derive(serde::Serialize, serde::Deserialize), serde(rename_all = "snake_case"))] +#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] #[derive(Debug)] pub struct OptHash1(pub Option); @@ -3670,6 +3836,7 @@ impl WriteXdr for OptHash1 { #[derive(Clone, Hash, PartialEq, Eq, PartialOrd, Ord)] #[cfg_attr(feature = "arbitrary", derive(Arbitrary))] #[cfg_attr(all(feature = "serde", feature = "alloc"), derive(serde::Serialize, serde::Deserialize), serde(rename_all = "snake_case"))] +#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] #[derive(Debug)] pub struct OptHash2(pub Option); @@ -3762,6 +3929,7 @@ pub type Int4 = u64; #[derive(Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord)] #[cfg_attr(feature = "arbitrary", derive(Arbitrary))] #[cfg_attr(all(feature = "serde", feature = "alloc"), derive(serde_with::SerializeDisplay, serde_with::DeserializeFromStr))] +#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] pub struct MyStruct { pub field1: Uint512, pub field2: OptHash1, @@ -3817,6 +3985,7 @@ self.field7.write_xdr(w)?; #[derive(Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord)] #[cfg_attr(feature = "arbitrary", derive(Arbitrary))] #[cfg_attr(all(feature = "serde", feature = "alloc"), derive(serde_with::SerializeDisplay, serde_with::DeserializeFromStr))] +#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] pub struct LotsOfMyStructs { pub members: VecM::, } @@ -3854,6 +4023,7 @@ impl WriteXdr for LotsOfMyStructs { #[derive(Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord)] #[cfg_attr(feature = "arbitrary", derive(Arbitrary))] #[cfg_attr(all(feature = "serde", feature = "alloc"), derive(serde::Serialize, serde::Deserialize), serde(rename_all = "snake_case"))] +#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] pub struct HasStuff { pub data: LotsOfMyStructs, } @@ -3893,6 +4063,7 @@ impl WriteXdr for HasStuff { #[derive(Clone, Copy, Debug, Hash, PartialEq, Eq, PartialOrd, Ord)] #[cfg_attr(feature = "arbitrary", derive(Arbitrary))] #[cfg_attr(all(feature = "serde", feature = "alloc"), derive(serde::Serialize, serde::Deserialize), serde(rename_all = "snake_case"))] +#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] #[repr(i32)] pub enum Color { Red = 0, @@ -4016,6 +4187,7 @@ pub const BAR: u64 = FOO; #[derive(Clone, Copy, Debug, Hash, PartialEq, Eq, PartialOrd, Ord)] #[cfg_attr(feature = "arbitrary", derive(Arbitrary))] #[cfg_attr(all(feature = "serde", feature = 
"alloc"), derive(serde::Serialize, serde::Deserialize), serde(rename_all = "snake_case"))] +#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] #[repr(i32)] pub enum NesterNestedEnum { 1 = 0, @@ -4116,6 +4288,7 @@ Self::2 => "2", #[derive(Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord)] #[cfg_attr(feature = "arbitrary", derive(Arbitrary))] #[cfg_attr(all(feature = "serde", feature = "alloc"), derive(serde::Serialize, serde::Deserialize), serde(rename_all = "snake_case"))] +#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] pub struct NesterNestedStruct { pub blah: i32, } @@ -4156,6 +4329,7 @@ impl WriteXdr for NesterNestedStruct { #[derive(Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord)] #[cfg_attr(feature = "arbitrary", derive(Arbitrary))] #[cfg_attr(all(feature = "serde", feature = "alloc"), derive(serde::Serialize, serde::Deserialize), serde(rename_all = "snake_case"))] +#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] #[allow(clippy::large_enum_variant)] pub enum NesterNestedUnion { Red, @@ -4270,6 +4444,7 @@ impl WriteXdr for NesterNestedUnion { #[derive(Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord)] #[cfg_attr(feature = "arbitrary", derive(Arbitrary))] #[cfg_attr(all(feature = "serde", feature = "alloc"), derive(serde::Serialize, serde::Deserialize), serde(rename_all = "snake_case"))] +#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] pub struct Nester { pub nested_enum: NesterNestedEnum, pub nested_struct: NesterNestedStruct, @@ -4307,6 +4482,7 @@ self.nested_union.write_xdr(w)?; derive(serde::Serialize, serde::Deserialize), serde(rename_all = "snake_case") )] + #[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] pub enum TypeVariant { Uint512, Uint513, @@ -4416,6 +4592,37 @@ Self::NesterNestedUnion => "NesterNestedUnion", pub const fn variants() -> [TypeVariant; 23] { Self::VARIANTS } + + #[cfg(feature = "schemars")] + #[must_use] + #[allow(clippy::too_many_lines)] + pub fn json_schema(&self, gen: schemars::gen::SchemaGenerator) -> schemars::schema::RootSchema { + match self { + Self::Uint512 => gen.into_root_schema_for::(), +Self::Uint513 => gen.into_root_schema_for::(), +Self::Uint514 => gen.into_root_schema_for::(), +Self::Str => gen.into_root_schema_for::(), +Self::Str2 => gen.into_root_schema_for::(), +Self::Hash => gen.into_root_schema_for::(), +Self::Hashes1 => gen.into_root_schema_for::(), +Self::Hashes2 => gen.into_root_schema_for::(), +Self::Hashes3 => gen.into_root_schema_for::(), +Self::OptHash1 => gen.into_root_schema_for::(), +Self::OptHash2 => gen.into_root_schema_for::(), +Self::Int1 => gen.into_root_schema_for::(), +Self::Int2 => gen.into_root_schema_for::(), +Self::Int3 => gen.into_root_schema_for::(), +Self::Int4 => gen.into_root_schema_for::(), +Self::MyStruct => gen.into_root_schema_for::(), +Self::LotsOfMyStructs => gen.into_root_schema_for::(), +Self::HasStuff => gen.into_root_schema_for::(), +Self::Color => gen.into_root_schema_for::(), +Self::Nester => gen.into_root_schema_for::(), +Self::NesterNestedEnum => gen.into_root_schema_for::(), +Self::NesterNestedStruct => gen.into_root_schema_for::(), +Self::NesterNestedUnion => gen.into_root_schema_for::(), + } + } } impl Name for TypeVariant { @@ -4471,6 +4678,7 @@ Self::NesterNestedUnion => "NesterNestedUnion", serde(rename_all = "snake_case"), serde(untagged), )] + #[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] pub enum Type { Uint512(Box), Uint513(Box), diff --git 
a/spec/output/generator_spec_rust_custom_str_impls/union.x/MyXDR.rs b/spec/output/generator_spec_rust_custom_str_impls/union.x/MyXDR.rs index 54787a07d..3f8552c43 100644 --- a/spec/output/generator_spec_rust_custom_str_impls/union.x/MyXDR.rs +++ b/spec/output/generator_spec_rust_custom_str_impls/union.x/MyXDR.rs @@ -898,6 +898,32 @@ impl Default for VecM { } } +#[cfg(feature = "schemars")] +impl schemars::JsonSchema for VecM { + fn schema_name() -> String { + format!("VecM<{}, {}>", T::schema_name(), MAX) + } + + fn is_referenceable() -> bool { + false + } + + fn json_schema(gen: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema { + let schema = Vec::::json_schema(gen); + if let schemars::schema::Schema::Object(mut schema) = schema { + if let Some(array) = schema.array.clone() { + schema.array = Some(Box::new(schemars::schema::ArrayValidation { + max_items: Some(MAX), + ..*array + })); + } + schema.into() + } else { + schema + } + } +} + impl VecM { pub const MAX_LEN: usize = { MAX as usize }; @@ -1333,6 +1359,40 @@ impl Deref for BytesM { } } +#[cfg(feature = "schemars")] +impl schemars::JsonSchema for BytesM { + fn schema_name() -> String { + format!("BytesM<{MAX}>") + } + + fn is_referenceable() -> bool { + false + } + + fn json_schema(gen: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema { + let schema = String::json_schema(gen); + if let schemars::schema::Schema::Object(mut schema) = schema { + schema.extensions.insert( + "contentEncoding".to_owned(), + serde_json::Value::String("hex".to_string()), + ); + schema.extensions.insert( + "contentMediaType".to_owned(), + serde_json::Value::String("application/binary".to_string()), + ); + let string = *schema.string.unwrap_or_default().clone(); + schema.string = Some(Box::new(schemars::schema::StringValidation { + max_length: MAX.checked_mul(2).map(Some).unwrap_or_default(), + min_length: None, + ..string + })); + schema.into() + } else { + schema + } + } +} + impl Default for BytesM { fn default() -> Self { Self(Vec::default()) @@ -1721,6 +1781,27 @@ impl Default for StringM { } } +#[cfg(feature = "schemars")] +impl schemars::JsonSchema for StringM { + fn schema_name() -> String { + format!("StringM<{MAX}>") + } + + fn json_schema(gen: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema { + let schema = String::json_schema(gen); + if let schemars::schema::Schema::Object(mut schema) = schema { + let string = *schema.string.unwrap_or_default().clone(); + schema.string = Some(Box::new(schemars::schema::StringValidation { + max_length: Some(MAX), + ..string + })); + schema.into() + } else { + schema + } + } +} + impl StringM { pub const MAX_LEN: usize = { MAX as usize }; @@ -2037,6 +2118,17 @@ pub struct Frame(pub T) where T: ReadXdr; +#[cfg(feature = "schemars")] +impl schemars::JsonSchema for Frame { + fn schema_name() -> String { + format!("Frame<{}>", T::schema_name()) + } + + fn json_schema(gen: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema { + T::json_schema(gen) + } +} + impl ReadXdr for Frame where T: ReadXdr, @@ -2715,6 +2807,7 @@ pub type Multi = i32; #[derive(Clone, Copy, Debug, Hash, PartialEq, Eq, PartialOrd, Ord)] #[cfg_attr(feature = "arbitrary", derive(Arbitrary))] #[cfg_attr(all(feature = "serde", feature = "alloc"), derive(serde_with::SerializeDisplay, serde_with::DeserializeFromStr))] +#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] #[repr(i32)] pub enum UnionKey { Error = 0, @@ -2822,6 +2915,7 @@ Self::Multi => "Multi", #[derive(Clone, Debug, 
Hash, PartialEq, Eq, PartialOrd, Ord)] #[cfg_attr(feature = "arbitrary", derive(Arbitrary))] #[cfg_attr(all(feature = "serde", feature = "alloc"), derive(serde_with::SerializeDisplay, serde_with::DeserializeFromStr))] +#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] #[allow(clippy::large_enum_variant)] pub enum MyUnion { Error(i32), @@ -2932,6 +3026,7 @@ Self::Multi(v) => v.write_xdr(w)?, #[derive(Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord)] #[cfg_attr(feature = "arbitrary", derive(Arbitrary))] #[cfg_attr(all(feature = "serde", feature = "alloc"), derive(serde::Serialize, serde::Deserialize), serde(rename_all = "snake_case"))] +#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] #[allow(clippy::large_enum_variant)] pub enum IntUnion { V0(i32), @@ -3034,6 +3129,7 @@ Self::V1(v) => v.write_xdr(w)?, #[derive(Clone, Hash, PartialEq, Eq, PartialOrd, Ord)] #[cfg_attr(feature = "arbitrary", derive(Arbitrary))] #[cfg_attr(all(feature = "serde", feature = "alloc"), derive(serde::Serialize, serde::Deserialize), serde(rename_all = "snake_case"))] +#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] #[derive(Debug)] pub struct IntUnion2(pub IntUnion); @@ -3082,6 +3178,7 @@ impl WriteXdr for IntUnion2 { derive(serde::Serialize, serde::Deserialize), serde(rename_all = "snake_case") )] + #[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] pub enum TypeVariant { SError, Multi, @@ -3123,6 +3220,20 @@ Self::IntUnion2 => "IntUnion2", pub const fn variants() -> [TypeVariant; 6] { Self::VARIANTS } + + #[cfg(feature = "schemars")] + #[must_use] + #[allow(clippy::too_many_lines)] + pub fn json_schema(&self, gen: schemars::gen::SchemaGenerator) -> schemars::schema::RootSchema { + match self { + Self::SError => gen.into_root_schema_for::(), +Self::Multi => gen.into_root_schema_for::(), +Self::UnionKey => gen.into_root_schema_for::(), +Self::MyUnion => gen.into_root_schema_for::(), +Self::IntUnion => gen.into_root_schema_for::(), +Self::IntUnion2 => gen.into_root_schema_for::(), + } + } } impl Name for TypeVariant { @@ -3161,6 +3272,7 @@ Self::IntUnion2 => "IntUnion2", serde(rename_all = "snake_case"), serde(untagged), )] + #[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] pub enum Type { SError(Box), Multi(Box), diff --git a/spec/spec_helper.rb b/spec/spec_helper.rb index 64a088134..b38c824c9 100644 --- a/spec/spec_helper.rb +++ b/spec/spec_helper.rb @@ -4,7 +4,7 @@ require 'pry' require 'xdrgen' -SPEC_ROOT = __dir__ +SPEC_ROOT = 'spec' Dir["#{__dir__}/support/**/*.rb"].each { |f| require f }
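
For context on how the generated `schemars` support above is meant to be consumed, here is a minimal sketch. It assumes a crate that depends on one of the generated modules (called `my_xdr` here purely for illustration, with `schemars` and `serde_json` available) and that the module was compiled with its `schemars` feature enabled; the `TypeVariant::json_schema` method and its signature are taken from the generated code shown in this diff, everything else is an assumption for the example.

```rust
// Minimal sketch, not part of the generated output. `my_xdr` is an
// illustrative crate name for one of the generated modules, built with
// the `schemars` feature enabled.
use my_xdr::TypeVariant;

fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Each TypeVariant can produce a JSON Schema for the Rust type it
    // names, via the generated `json_schema` method added in this change.
    let generator = schemars::gen::SchemaGenerator::default();
    let root_schema = TypeVariant::IntUnion.json_schema(generator);

    // RootSchema is serde-serializable, so it can be written out as JSON.
    println!("{}", serde_json::to_string_pretty(&root_schema)?);
    Ok(())
}
```

Because every generated impl sits behind `#[cfg_attr(feature = "schemars", ...)]` or `#[cfg(feature = "schemars")]`, none of this code exists unless the `schemars` feature is turned on in the generated crate.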