diff --git a/.gitignore b/.gitignore index 3fac22c..c7301c2 100644 --- a/.gitignore +++ b/.gitignore @@ -20,3 +20,5 @@ Cargo.lock .pre-commit-config.yaml config.toml + +**/*.swagger.yaml diff --git a/actix-prost-build/src/conversions.rs b/actix-prost-build/src/conversions.rs index e18fdbd..445d679 100644 --- a/actix-prost-build/src/conversions.rs +++ b/actix-prost-build/src/conversions.rs @@ -9,7 +9,10 @@ use std::{ use crate::helpers::extract_type_from_option; use proc_macro2::{Ident, TokenStream}; use prost_build::Service; -use prost_reflect::{DescriptorPool, DynamicMessage, MessageDescriptor}; +use prost_reflect::{ + Cardinality, DescriptorPool, DynamicMessage, ExtensionDescriptor, FieldDescriptor, Kind, + MessageDescriptor, +}; use quote::quote; use syn::{punctuated::Punctuated, Expr, Field, Fields, Lit, Meta, MetaNameValue, Token, Type}; @@ -20,6 +23,7 @@ pub struct ExtraFieldOptions { } #[derive(Debug)] pub struct ConvertFieldOptions { + pub field: FieldDescriptor, pub ty: Option, pub val_override: Option, pub required: bool, @@ -66,11 +70,7 @@ impl TryFrom<(&DescriptorPool, &MessageDescriptor)> for ConvertOptions { let fields = message .fields() .map(|f| { - let options = f.options(); - let ext_val = options.get_extension(&fields_extension); - let ext_val = ext_val.as_message().unwrap(); - - let convert_options = ConvertFieldOptions::from(ext_val); + let convert_options = ConvertFieldOptions::from((&f, &fields_extension)); (String::from(f.name()), convert_options) }) @@ -79,12 +79,17 @@ impl TryFrom<(&DescriptorPool, &MessageDescriptor)> for ConvertOptions { } } -impl From<&DynamicMessage> for ConvertFieldOptions { - fn from(value: &DynamicMessage) -> Self { +impl From<(&FieldDescriptor, &ExtensionDescriptor)> for ConvertFieldOptions { + fn from((f, ext): (&FieldDescriptor, &ExtensionDescriptor)) -> Self { + let options = f.options(); + let ext_val = options.get_extension(ext); + let ext_val = ext_val.as_message().unwrap(); + Self { - ty: get_string_field(value, "type"), - val_override: get_string_field(value, "override"), - required: match value.get_field_by_name("required") { + field: f.clone(), + ty: get_string_field(ext_val, "type"), + val_override: get_string_field(ext_val, "override"), + required: match ext_val.get_field_by_name("required") { Some(v) => v.as_bool().unwrap(), None => false, }, @@ -226,9 +231,13 @@ impl ConversionsGenerator { _ => { let convert = &self.convert_prefix; + let from = match field_conversions.len() + extra_field_conversions.len() { + 0 => quote!(_from), + _ => quote!(from), + }; quote!( impl #convert<#from_struct_ident> for #to_struct_ident { - fn try_convert(from: #from_struct_ident) -> Result { + fn try_convert(#from: #from_struct_ident) -> Result { Ok(Self { #(#field_conversions,)* #(#extra_field_conversions,)* @@ -297,43 +306,114 @@ impl ConversionsGenerator { f: &Field, convert_field: Option<&ConvertFieldOptions>, res: &mut Vec, + ) -> Option { + self.try_process_option(m_type, f, convert_field, res) + .or(self.try_process_map(m_type, f, convert_field, res)) + } + + fn try_process_option( + &mut self, + m_type: MessageType, + f: &Field, + convert_field: Option<&ConvertFieldOptions>, + res: &mut Vec, ) -> Option { let name = f.ident.as_ref().unwrap(); - // Check if the field contains a nested message - let internal_struct = match extract_type_from_option(&f.ty) { - Some(Type::Path(ty)) => ty - .path - .segments - .first() - .and_then(|ty| self.messages.get(&ty.ident.to_string())), + match extract_type_from_option(&f.ty) { + Some(Type::Path(ty)) 
=> { + let ty = ty.path.segments.first()?; + let rust_struct_name = self.messages.get(&ty.ident.to_string())?.ident.clone(); + let new_struct_name = + self.build_internal_nested_struct(m_type, &rust_struct_name, res); + let convert = &self.convert_prefix; + let (ty, conversion) = match convert_field { + Some(ConvertFieldOptions { required: true, .. }) => { + let require_message = format!("field {} is required", name); + ( + quote!(#new_struct_name), + quote!(#convert::try_convert(from.#name.ok_or(#require_message)?)?), + ) + } + _ => ( + quote!(::core::option::Option<#new_struct_name>), + quote!(#convert::try_convert(from.#name)?), + ), + }; + Some((ty, conversion)) + } + _ => None, + } + } + + fn try_process_map( + &mut self, + m_type: MessageType, + f: &Field, + convert_field: Option<&ConvertFieldOptions>, + res: &mut Vec, + ) -> Option { + let name = f.ident.as_ref().unwrap(); + + let field_desc = convert_field.map(|cf| &cf.field)?; + let map_type = match (field_desc.cardinality(), field_desc.kind()) { + (Cardinality::Repeated, Kind::Message(m)) => Some(m), + _ => None, + }?; + // Map keys can only be of scalar types, so we search for nested messages only in values + let map_value_type = match map_type.map_entry_value_field().kind() { + Kind::Message(m) => Some(m), _ => None, }?; + let map_key_type = map_type.map_entry_key_field().kind(); + let map_key_rust_type = match map_key_type { + Kind::String => quote!(::prost::alloc::string::String), + Kind::Int32 => quote!(i32), + Kind::Int64 => quote!(i64), + Kind::Uint32 => quote!(u32), + Kind::Uint64 => quote!(u64), + Kind::Sint32 => quote!(i32), + Kind::Sint64 => quote!(i64), + Kind::Fixed32 => quote!(u32), + Kind::Fixed64 => quote!(u64), + Kind::Sfixed32 => quote!(i32), + Kind::Sfixed64 => quote!(i64), + Kind::Bool => quote!(bool), + _ => panic!("Map key type not supported {:?}", map_key_type), + }; + // TODO: Proto name might not be the same as Rust struct name + let rust_struct_name = self.messages.get(map_value_type.name())?.ident.clone(); + + let new_struct_name = self.build_internal_nested_struct(m_type, &rust_struct_name, res); - // Process the nested message - let ident = &internal_struct.ident; + let convert = &self.convert_prefix; + let map_collection = if let Type::Path(p) = &f.ty { + match p.path.segments.iter().find(|s| s.ident == "HashMap") { + Some(_) => quote!(::std::collections::HashMap), + None => quote!(::std::collections::BTreeMap), + } + } else { + panic!("Type of map field is not a path") + }; + let ty = quote!(#map_collection<#map_key_rust_type, #new_struct_name>); + let conversion = quote!(#convert::try_convert(from.#name)?); + Some((ty, conversion)) + } + + fn build_internal_nested_struct( + &mut self, + m_type: MessageType, + nested_struct_name: &Ident, + res: &mut Vec, + ) -> Ident { // TODO: could incorrectly detect messages with same name in different packages let message = self .descriptors .all_messages() - .find(|m| *ident == m.name()) + .find(|m| *nested_struct_name == m.name()) .unwrap(); - let new_struct_name = self.create_convert_struct(m_type, &message, &ident.to_string(), res); - let convert = &self.convert_prefix; - Some(match convert_field { - Some(ConvertFieldOptions { required: true, .. 
}) => { - let require_message = format!("field {} is required", name); - ( - quote!(#new_struct_name), - quote!(#convert::try_convert(from.#name.ok_or(#require_message)?)?), - ) - } - _ => ( - quote!(::core::option::Option<#new_struct_name>), - quote!(#convert::try_convert(from.#name)?), - ), - }) + self.create_convert_struct(m_type, &message, &nested_struct_name.to_string(), res) } fn process_enum(m_type: MessageType, f: &Field) -> Option { diff --git a/actix-prost-build/src/generator.rs b/actix-prost-build/src/generator.rs index d7cd110..5f0802a 100644 --- a/actix-prost-build/src/generator.rs +++ b/actix-prost-build/src/generator.rs @@ -1,4 +1,4 @@ -use crate::{config::HttpRule, conversions::ConversionsGenerator, method::Method, Config}; +use crate::{config::HttpRule, method::Method, Config}; use proc_macro2::TokenStream; use prost_build::{Service, ServiceGenerator}; use quote::quote; @@ -135,6 +135,7 @@ impl ServiceGenerator for ActixGenerator { #[cfg(feature = "conversions")] { + use crate::conversions::ConversionsGenerator; let conversions = ConversionsGenerator::new().ok().map(|mut g| { g.messages = Rc::clone(&self.messages); g.create_conversions(&service) diff --git a/convert-trait/src/impls.rs b/convert-trait/src/impls.rs index f267dbd..a166820 100644 --- a/convert-trait/src/impls.rs +++ b/convert-trait/src/impls.rs @@ -1,4 +1,5 @@ use crate::TryConvert; +use std::collections::{BTreeMap, HashMap, HashSet}; impl> TryConvert> for Option { fn try_convert(input: Option) -> Result { @@ -15,10 +16,26 @@ impl> TryConvert> for Vec { } } -impl + std::hash::Hash + Eq> TryConvert> - for std::collections::HashSet -{ +impl + std::hash::Hash + Eq> TryConvert> for HashSet { fn try_convert(input: Vec) -> Result { input.into_iter().map(TryConvert::try_convert).collect() } } + +impl> TryConvert> for HashMap { + fn try_convert(input: HashMap) -> Result { + input + .into_iter() + .map(|(k, v)| Ok((k, TryConvert::try_convert(v)?))) + .collect() + } +} + +impl> TryConvert> for BTreeMap { + fn try_convert(input: BTreeMap) -> Result { + input + .into_iter() + .map(|(k, v)| Ok((k, TryConvert::try_convert(v)?))) + .collect() + } +} diff --git a/tests/Cargo.toml b/tests/Cargo.toml index a162b8d..24b557e 100644 --- a/tests/Cargo.toml +++ b/tests/Cargo.toml @@ -7,6 +7,7 @@ edition = "2021" actix-prost = { path = "../actix-prost" } actix-prost-macros = { path = "../actix-prost-macros" } async-trait = "0.1" +convert-trait ={ path = "../convert-trait" } tonic = "0.8" prost = "0.11" tokio = { version = "1", features = ["rt-multi-thread", "macros"] } @@ -15,9 +16,10 @@ actix-web = "4" http = "0.2" serde_json = "1.0" serde_with = { version = "2.0", features = ["base64"] } +ethers = "2.0.14" [build-dependencies] -actix-prost-build = { path = "../actix-prost-build" } +actix-prost-build = { path = "../actix-prost-build", features = ["conversions"]} tonic-build = "0.8" prost-build = "0.11" diff --git a/tests/build.rs b/tests/build.rs index 1092978..adbf570 100644 --- a/tests/build.rs +++ b/tests/build.rs @@ -1,6 +1,9 @@ use actix_prost_build::{ActixGenerator, GeneratorList}; use prost_build::{Config, ServiceGenerator}; -use std::path::Path; +use std::{ + env, + path::{Path, PathBuf}, +}; // custom function to include custom generator fn compile( @@ -11,6 +14,10 @@ fn compile( let mut config = Config::new(); config .service_generator(generator) + .file_descriptor_set_path( + PathBuf::from(env::var("OUT_DIR").expect("OUT_DIR environment variable not set")) + .join("file_descriptor_set.bin"), + ) .out_dir("src/proto") 
.bytes(["."]) .compile_well_known_types() @@ -19,17 +26,6 @@ fn compile( .protoc_arg("grpc_api_configuration=proto/http_api.yaml,output_format=yaml") .type_attribute(".", "#[actix_prost_macros::serde]"); - // for path in protos.iter() { - // println!("cargo:rerun-if-changed={}", path.as_ref().display()) - // } - - // for path in includes.iter() { - // // Cargo will watch the **entire** directory recursively. If we - // // could figure out which files are imported by our protos we - // // could specify only those files instead. - // println!("cargo:rerun-if-changed={}", path.as_ref().display()) - // } - config.compile_protos(protos, includes)?; Ok(()) } @@ -42,8 +38,10 @@ fn main() -> Result<(), Box> { compile( &[ "proto/rest.proto", + "proto/simple.proto", "proto/types.proto", "proto/errors.proto", + "proto/conversions.proto", ], &["proto/", "proto/googleapis", "proto/grpc-gateway"], gens, diff --git a/tests/proto/conversions.proto b/tests/proto/conversions.proto new file mode 100644 index 0000000..c6544cb --- /dev/null +++ b/tests/proto/conversions.proto @@ -0,0 +1,38 @@ +syntax = "proto3"; +package conversions; + +import "convert_options.proto"; + +option go_package = "github.com/blockscout/actix-prost/tests"; + +service ConversionsRPC { rpc ConvertRPC(ConversionsRequest) returns (ConversionsResponse); } + +message Nested { + string address = 3 [ (convert_options.convert) = { type : "ethers::types::Address" } ]; +} + +message MapValue { + string address = 1 [ (convert_options.convert) = { type : "ethers::types::Address" } ]; +} + +message ConversionsRequest { + option (convert_options.extra_fields) = { name: "field1", type: "String" }; + option (convert_options.extra_fields) = { name: "field2", type: "i32" }; + map map_field = 1; + + enum NestedEnum { + NESTED_OK = 0; + NESTED_ERROR = 1; + } + + string query = 2 [ (convert_options.convert) = { override : "Default::default()" } ]; + repeated string addresses = 3 [ (convert_options.convert) = { type : "std::collections::HashSet" } ]; + NestedEnum nested_enum = 4; + Nested nested = 5 [ (convert_options.convert) = { required : true } ]; +} + +message ConversionsResponse { + string address = 1 [ (convert_options.convert) = { type : "ethers::types::Address" } ]; + Nested nested = 2; + map map_field = 3; +} diff --git a/tests/proto/convert_options.proto b/tests/proto/convert_options.proto new file mode 100644 index 0000000..aa1ae88 --- /dev/null +++ b/tests/proto/convert_options.proto @@ -0,0 +1,20 @@ +syntax = "proto3"; + +package convert_options; +option go_package = "github.com/blockscout/actix-prost/convert_options"; + +import "google/protobuf/descriptor.proto"; + +message ConvertOptions { + string type = 1; + string override = 2; + bool required = 3; +} + +message ExtraFieldOptions { + string name = 1; + string type = 2; +} + +extend google.protobuf.MessageOptions { repeated ExtraFieldOptions extra_fields = 50000; } +extend google.protobuf.FieldOptions { optional ConvertOptions convert = 50001; } \ No newline at end of file diff --git a/tests/proto/errors.swagger.yaml b/tests/proto/errors.swagger.yaml deleted file mode 100644 index e8bc201..0000000 --- a/tests/proto/errors.swagger.yaml +++ /dev/null @@ -1,172 +0,0 @@ -swagger: "2.0" -info: - title: errors.proto - version: version not set -tags: - - name: ErrorsRPC -consumes: - - application/json -produces: - - application/json -paths: - /errors/{code}: - post: - operationId: ErrorsRPC_Error - responses: - "200": - description: A successful response. 
- schema: - $ref: '#/definitions/errorsErrorResponse' - default: - description: An unexpected error response. - schema: - $ref: '#/definitions/rpcStatus' - parameters: - - name: code - in: path - required: true - type: integer - format: int32 - - name: message - in: body - required: true - schema: - type: string - - name: query - in: query - required: false - type: string - tags: - - ErrorsRPC -definitions: - errorsErrorResponse: - type: object - protobufAny: - type: object - properties: - '@type': - type: string - description: |- - A URL/resource name that uniquely identifies the type of the serialized - protocol buffer message. This string must contain at least - one "/" character. The last segment of the URL's path must represent - the fully qualified name of the type (as in - `path/google.protobuf.Duration`). The name should be in a canonical form - (e.g., leading "." is not accepted). - - In practice, teams usually precompile into the binary all types that they - expect it to use in the context of Any. However, for URLs which use the - scheme `http`, `https`, or no scheme, one can optionally set up a type - server that maps type URLs to message definitions as follows: - - * If no scheme is provided, `https` is assumed. - * An HTTP GET on the URL must yield a [google.protobuf.Type][] - value in binary format, or produce an error. - * Applications are allowed to cache lookup results based on the - URL, or have them precompiled into a binary to avoid any - lookup. Therefore, binary compatibility needs to be preserved - on changes to types. (Use versioned type names to manage - breaking changes.) - - Note: this functionality is not currently available in the official - protobuf release, and it is not used for type URLs beginning with - type.googleapis.com. - - Schemes other than `http`, `https` (or the empty scheme) might be - used with implementation specific semantics. - additionalProperties: {} - description: |- - `Any` contains an arbitrary serialized protocol buffer message along with a - URL that describes the type of the serialized message. - - Protobuf library provides support to pack/unpack Any values in the form - of utility functions or additional generated methods of the Any type. - - Example 1: Pack and unpack a message in C++. - - Foo foo = ...; - Any any; - any.PackFrom(foo); - ... - if (any.UnpackTo(&foo)) { - ... - } - - Example 2: Pack and unpack a message in Java. - - Foo foo = ...; - Any any = Any.pack(foo); - ... - if (any.is(Foo.class)) { - foo = any.unpack(Foo.class); - } - - Example 3: Pack and unpack a message in Python. - - foo = Foo(...) - any = Any() - any.Pack(foo) - ... - if any.Is(Foo.DESCRIPTOR): - any.Unpack(foo) - ... - - Example 4: Pack and unpack a message in Go - - foo := &pb.Foo{...} - any, err := anypb.New(foo) - if err != nil { - ... - } - ... - foo := &pb.Foo{} - if err := any.UnmarshalTo(foo); err != nil { - ... - } - - The pack methods provided by protobuf library will by default use - 'type.googleapis.com/full.type.name' as the type URL and the unpack - methods only use the fully qualified type name after the last '/' - in the type URL, for example "foo.bar.com/x/y.z" will yield type - name "y.z". - - - JSON - - The JSON representation of an `Any` value uses the regular - representation of the deserialized, embedded message, with an - additional field `@type` which contains the type URL. 
Example: - - package google.profile; - message Person { - string first_name = 1; - string last_name = 2; - } - - { - "@type": "type.googleapis.com/google.profile.Person", - "firstName": , - "lastName": - } - - If the embedded message type is well-known and has a custom JSON - representation, that representation will be embedded adding a field - `value` which holds the custom JSON in addition to the `@type` - field. Example (for message [google.protobuf.Duration][]): - - { - "@type": "type.googleapis.com/google.protobuf.Duration", - "value": "1.212s" - } - rpcStatus: - type: object - properties: - code: - type: integer - format: int32 - details: - type: array - items: - $ref: '#/definitions/protobufAny' - message: - type: string diff --git a/tests/proto/http_api.yaml b/tests/proto/http_api.yaml index e5aa476..1274f6c 100644 --- a/tests/proto/http_api.yaml +++ b/tests/proto/http_api.yaml @@ -32,7 +32,7 @@ http: body: "*" response_body: "foo" - - selector: "rest.SimpleRPC.PostRPC" + - selector: "simple.SimpleRPC.PostRPC" post: /rest/post/{foo} body: long_name @@ -64,3 +64,7 @@ http: - selector: "errors.ErrorsRPC.Error" post: /errors/{code} body: "message" + + - selector: "conversions.ConversionsRPC.ConvertRPC" + post: /conversions + body: "*" \ No newline at end of file diff --git a/tests/proto/rest.proto b/tests/proto/rest.proto index 085a3b3..ed4bb71 100644 --- a/tests/proto/rest.proto +++ b/tests/proto/rest.proto @@ -18,8 +18,6 @@ service RestRPC { rpc PostResponseGetRPC(Post) returns (Get); } -service SimpleRPC { rpc PostRPC(Post) returns (Post); } - message Get { string foo = 1; int64 bar = 2; diff --git a/tests/proto/rest.swagger.yaml b/tests/proto/rest.swagger.yaml deleted file mode 100644 index ee75ab8..0000000 --- a/tests/proto/rest.swagger.yaml +++ /dev/null @@ -1,404 +0,0 @@ -swagger: "2.0" -info: - title: rest.proto - version: version not set -tags: - - name: RestRPC - - name: SimpleRPC -consumes: - - application/json -produces: - - application/json -paths: - /rest/get/{foo}: - get: - operationId: RestRPC_GetQueryRPC - responses: - "200": - description: A successful response. - schema: - $ref: '#/definitions/restGet' - default: - description: An unexpected error response. - schema: - $ref: '#/definitions/rpcStatus' - parameters: - - name: foo - in: path - required: true - type: string - - name: bar - in: query - required: false - type: string - format: int64 - tags: - - RestRPC - /rest/get/{foo}/{bar}: - get: - operationId: RestRPC_GetRPC - responses: - "200": - description: A successful response. - schema: - $ref: '#/definitions/restGet' - default: - description: An unexpected error response. - schema: - $ref: '#/definitions/rpcStatus' - parameters: - - name: foo - in: path - required: true - type: string - - name: bar - in: path - required: true - type: string - format: int64 - tags: - - RestRPC - /rest/post: - post: - operationId: RestRPC_PostNoPathRPC - responses: - "200": - description: A successful response. - schema: - $ref: '#/definitions/restPost' - default: - description: An unexpected error response. - schema: - $ref: '#/definitions/rpcStatus' - parameters: - - name: body - in: body - required: true - schema: - $ref: '#/definitions/restPost' - tags: - - RestRPC - /rest/post/{foo}: - post: - operationId: SimpleRPC_PostRPC - responses: - "200": - description: A successful response. - schema: - $ref: '#/definitions/restPost' - default: - description: An unexpected error response. 
- schema: - $ref: '#/definitions/rpcStatus' - parameters: - - name: foo - in: path - required: true - type: string - - name: longName - in: body - required: true - schema: - type: number - format: double - - name: bar - in: query - required: false - type: string - format: int64 - tags: - - SimpleRPC - /rest/post/{foo}/{bar}: - post: - operationId: RestRPC_PostRPC - responses: - "200": - description: A successful response. - schema: - $ref: '#/definitions/restPost' - default: - description: An unexpected error response. - schema: - $ref: '#/definitions/rpcStatus' - parameters: - - name: foo - in: path - required: true - type: string - - name: bar - in: path - required: true - type: string - format: int64 - - name: longName - in: body - required: true - schema: - type: number - format: double - tags: - - RestRPC - /rest/post/{longName}: - post: - operationId: RestRPC_PostQueryRPC - responses: - "200": - description: A successful response. - schema: - $ref: '#/definitions/restPost' - default: - description: An unexpected error response. - schema: - $ref: '#/definitions/rpcStatus' - parameters: - - name: longName - in: path - required: true - type: number - format: double - - name: foo - in: body - required: true - schema: - type: string - - name: bar - in: query - required: false - type: string - format: int64 - tags: - - RestRPC - /rest/post_get: - post: - operationId: RestRPC_PostGetRPC - responses: - "200": - description: A successful response. - schema: - $ref: '#/definitions/restGet' - default: - description: An unexpected error response. - schema: - $ref: '#/definitions/rpcStatus' - parameters: - - name: body - in: body - required: true - schema: - $ref: '#/definitions/restPost' - tags: - - RestRPC - /rest/response/get/{foo}/{bar}: - get: - operationId: RestRPC_GetResponseRPC - responses: - "200": - description: "" - schema: - type: string - default: - description: An unexpected error response. - schema: - $ref: '#/definitions/rpcStatus' - parameters: - - name: foo - in: path - required: true - type: string - - name: bar - in: path - required: true - type: string - format: int64 - tags: - - RestRPC - /rest/response/post: - post: - operationId: RestRPC_PostResponseRPC - responses: - "200": - description: "" - schema: - type: string - format: int64 - default: - description: An unexpected error response. - schema: - $ref: '#/definitions/rpcStatus' - parameters: - - name: body - in: body - required: true - schema: - $ref: '#/definitions/restPost' - tags: - - RestRPC - /rest/response/post_get: - post: - operationId: RestRPC_PostResponseGetRPC - responses: - "200": - description: "" - schema: - type: string - default: - description: An unexpected error response. - schema: - $ref: '#/definitions/rpcStatus' - parameters: - - name: body - in: body - required: true - schema: - $ref: '#/definitions/restPost' - tags: - - RestRPC -definitions: - protobufAny: - type: object - properties: - '@type': - type: string - description: |- - A URL/resource name that uniquely identifies the type of the serialized - protocol buffer message. This string must contain at least - one "/" character. The last segment of the URL's path must represent - the fully qualified name of the type (as in - `path/google.protobuf.Duration`). The name should be in a canonical form - (e.g., leading "." is not accepted). - - In practice, teams usually precompile into the binary all types that they - expect it to use in the context of Any. 
However, for URLs which use the - scheme `http`, `https`, or no scheme, one can optionally set up a type - server that maps type URLs to message definitions as follows: - - * If no scheme is provided, `https` is assumed. - * An HTTP GET on the URL must yield a [google.protobuf.Type][] - value in binary format, or produce an error. - * Applications are allowed to cache lookup results based on the - URL, or have them precompiled into a binary to avoid any - lookup. Therefore, binary compatibility needs to be preserved - on changes to types. (Use versioned type names to manage - breaking changes.) - - Note: this functionality is not currently available in the official - protobuf release, and it is not used for type URLs beginning with - type.googleapis.com. - - Schemes other than `http`, `https` (or the empty scheme) might be - used with implementation specific semantics. - additionalProperties: {} - description: |- - `Any` contains an arbitrary serialized protocol buffer message along with a - URL that describes the type of the serialized message. - - Protobuf library provides support to pack/unpack Any values in the form - of utility functions or additional generated methods of the Any type. - - Example 1: Pack and unpack a message in C++. - - Foo foo = ...; - Any any; - any.PackFrom(foo); - ... - if (any.UnpackTo(&foo)) { - ... - } - - Example 2: Pack and unpack a message in Java. - - Foo foo = ...; - Any any = Any.pack(foo); - ... - if (any.is(Foo.class)) { - foo = any.unpack(Foo.class); - } - - Example 3: Pack and unpack a message in Python. - - foo = Foo(...) - any = Any() - any.Pack(foo) - ... - if any.Is(Foo.DESCRIPTOR): - any.Unpack(foo) - ... - - Example 4: Pack and unpack a message in Go - - foo := &pb.Foo{...} - any, err := anypb.New(foo) - if err != nil { - ... - } - ... - foo := &pb.Foo{} - if err := any.UnmarshalTo(foo); err != nil { - ... - } - - The pack methods provided by protobuf library will by default use - 'type.googleapis.com/full.type.name' as the type URL and the unpack - methods only use the fully qualified type name after the last '/' - in the type URL, for example "foo.bar.com/x/y.z" will yield type - name "y.z". - - - JSON - - The JSON representation of an `Any` value uses the regular - representation of the deserialized, embedded message, with an - additional field `@type` which contains the type URL. Example: - - package google.profile; - message Person { - string first_name = 1; - string last_name = 2; - } - - { - "@type": "type.googleapis.com/google.profile.Person", - "firstName": , - "lastName": - } - - If the embedded message type is well-known and has a custom JSON - representation, that representation will be embedded adding a field - `value` which holds the custom JSON in addition to the `@type` - field. 
Example (for message [google.protobuf.Duration][]): - - { - "@type": "type.googleapis.com/google.protobuf.Duration", - "value": "1.212s" - } - restGet: - type: object - properties: - bar: - type: string - format: int64 - foo: - type: string - restPost: - type: object - properties: - bar: - type: string - format: int64 - foo: - type: string - longName: - type: number - format: double - rpcStatus: - type: object - properties: - code: - type: integer - format: int32 - details: - type: array - items: - $ref: '#/definitions/protobufAny' - message: - type: string diff --git a/tests/proto/simple.proto b/tests/proto/simple.proto new file mode 100644 index 0000000..c719139 --- /dev/null +++ b/tests/proto/simple.proto @@ -0,0 +1,12 @@ +syntax = "proto3"; +package simple; + +option go_package = "github.com/blockscout/actix-prost/tests"; + +service SimpleRPC { rpc PostRPC(Post) returns (Post); } + +message Post { + string foo = 1; + int64 bar = 2; + double long_name = 3; +} diff --git a/tests/proto/types.swagger.yaml b/tests/proto/types.swagger.yaml deleted file mode 100644 index 0356e32..0000000 --- a/tests/proto/types.swagger.yaml +++ /dev/null @@ -1,364 +0,0 @@ -swagger: "2.0" -info: - title: types.proto - version: version not set -tags: - - name: TypesRPC -consumes: - - application/json -produces: - - application/json -paths: - /types/complex: - post: - summary: rpc GoogleRPC(Google) returns (Google); - operationId: TypesRPC_ComplexRPC - responses: - "200": - description: A successful response. - schema: - $ref: '#/definitions/typesComplex' - default: - description: An unexpected error response. - schema: - $ref: '#/definitions/rpcStatus' - parameters: - - name: body - in: body - required: true - schema: - $ref: '#/definitions/typesComplex' - tags: - - TypesRPC - /types/enums: - post: - operationId: TypesRPC_EnumsRPC - responses: - "200": - description: A successful response. - schema: - $ref: '#/definitions/typesEnums' - default: - description: An unexpected error response. - schema: - $ref: '#/definitions/rpcStatus' - parameters: - - name: body - in: body - required: true - schema: - $ref: '#/definitions/typesEnums' - tags: - - TypesRPC - /types/maps: - post: - operationId: TypesRPC_MapsRPC - responses: - "200": - description: A successful response. - schema: - $ref: '#/definitions/typesMaps' - default: - description: An unexpected error response. - schema: - $ref: '#/definitions/rpcStatus' - parameters: - - name: body - in: body - required: true - schema: - $ref: '#/definitions/typesMaps' - tags: - - TypesRPC - /types/oneofs: - post: - operationId: TypesRPC_OneOfsRPC - responses: - "200": - description: A successful response. - schema: - $ref: '#/definitions/typesOneOfs' - default: - description: An unexpected error response. - schema: - $ref: '#/definitions/rpcStatus' - parameters: - - name: body - in: body - required: true - schema: - $ref: '#/definitions/typesOneOfs' - tags: - - TypesRPC - /types/optional_scalars: - post: - operationId: TypesRPC_OptionalScalarsRPC - responses: - "200": - description: A successful response. - schema: - $ref: '#/definitions/typesOptionalScalars' - default: - description: An unexpected error response. - schema: - $ref: '#/definitions/rpcStatus' - parameters: - - name: body - in: body - required: true - schema: - $ref: '#/definitions/typesOptionalScalars' - tags: - - TypesRPC - /types/repeated: - post: - operationId: TypesRPC_RepeatedRPC - responses: - "200": - description: A successful response. 
- schema: - $ref: '#/definitions/typesRepeated' - default: - description: An unexpected error response. - schema: - $ref: '#/definitions/rpcStatus' - parameters: - - name: body - in: body - required: true - schema: - $ref: '#/definitions/typesRepeated' - tags: - - TypesRPC - /types/scalars: - post: - operationId: TypesRPC_ScalarsRPC - responses: - "200": - description: A successful response. - schema: - $ref: '#/definitions/typesScalars' - default: - description: An unexpected error response. - schema: - $ref: '#/definitions/rpcStatus' - parameters: - - name: body - in: body - required: true - schema: - $ref: '#/definitions/typesScalars' - tags: - - TypesRPC -definitions: - protobufAny: - type: object - properties: - '@type': - type: string - description: |- - A URL/resource name that uniquely identifies the type of the serialized - protocol buffer message. This string must contain at least - one "/" character. The last segment of the URL's path must represent - the fully qualified name of the type (as in - `path/google.protobuf.Duration`). The name should be in a canonical form - (e.g., leading "." is not accepted). - - In practice, teams usually precompile into the binary all types that they - expect it to use in the context of Any. However, for URLs which use the - scheme `http`, `https`, or no scheme, one can optionally set up a type - server that maps type URLs to message definitions as follows: - - * If no scheme is provided, `https` is assumed. - * An HTTP GET on the URL must yield a [google.protobuf.Type][] - value in binary format, or produce an error. - * Applications are allowed to cache lookup results based on the - URL, or have them precompiled into a binary to avoid any - lookup. Therefore, binary compatibility needs to be preserved - on changes to types. (Use versioned type names to manage - breaking changes.) - - Note: this functionality is not currently available in the official - protobuf release, and it is not used for type URLs beginning with - type.googleapis.com. - - Schemes other than `http`, `https` (or the empty scheme) might be - used with implementation specific semantics. - additionalProperties: {} - description: |- - `Any` contains an arbitrary serialized protocol buffer message along with a - URL that describes the type of the serialized message. - - Protobuf library provides support to pack/unpack Any values in the form - of utility functions or additional generated methods of the Any type. - - Example 1: Pack and unpack a message in C++. - - Foo foo = ...; - Any any; - any.PackFrom(foo); - ... - if (any.UnpackTo(&foo)) { - ... - } - - Example 2: Pack and unpack a message in Java. - - Foo foo = ...; - Any any = Any.pack(foo); - ... - if (any.is(Foo.class)) { - foo = any.unpack(Foo.class); - } - - Example 3: Pack and unpack a message in Python. - - foo = Foo(...) - any = Any() - any.Pack(foo) - ... - if any.Is(Foo.DESCRIPTOR): - any.Unpack(foo) - ... - - Example 4: Pack and unpack a message in Go - - foo := &pb.Foo{...} - any, err := anypb.New(foo) - if err != nil { - ... - } - ... - foo := &pb.Foo{} - if err := any.UnmarshalTo(foo); err != nil { - ... - } - - The pack methods provided by protobuf library will by default use - 'type.googleapis.com/full.type.name' as the type URL and the unpack - methods only use the fully qualified type name after the last '/' - in the type URL, for example "foo.bar.com/x/y.z" will yield type - name "y.z". 
- - - JSON - - The JSON representation of an `Any` value uses the regular - representation of the deserialized, embedded message, with an - additional field `@type` which contains the type URL. Example: - - package google.profile; - message Person { - string first_name = 1; - string last_name = 2; - } - - { - "@type": "type.googleapis.com/google.profile.Person", - "firstName": , - "lastName": - } - - If the embedded message type is well-known and has a custom JSON - representation, that representation will be embedded adding a field - `value` which holds the custom JSON in addition to the `@type` - field. Example (for message [google.protobuf.Duration][]): - - { - "@type": "type.googleapis.com/google.protobuf.Duration", - "value": "1.212s" - } - rpcStatus: - type: object - properties: - code: - type: integer - format: int32 - details: - type: array - items: - $ref: '#/definitions/protobufAny' - message: - type: string - typesComplex: - type: object - properties: - enums: - $ref: '#/definitions/typesEnums' - maps: - $ref: '#/definitions/typesMaps' - oneofs: - $ref: '#/definitions/typesOneOfs' - repeated: - $ref: '#/definitions/typesRepeated' - scalars: - $ref: '#/definitions/typesScalars' - typesEnums: - type: object - properties: - values: - $ref: '#/definitions/typesValues' - typesMaps: - type: object - properties: - foo: - type: object - additionalProperties: - type: integer - format: int32 - typesOneOfs: - type: object - properties: - bar: - type: string - format: byte - baz: - type: string - format: int64 - foo: - type: string - typesOptionalScalars: - type: object - properties: - a: - type: number - format: double - b: - type: string - format: int64 - c: - type: string - d: - type: string - format: byte - e: - type: boolean - typesRepeated: - type: object - properties: - foo: - type: array - items: - type: string - typesScalars: - type: object - properties: - a: - type: number - format: double - b: - type: string - format: int64 - c: - type: string - d: - type: string - format: byte - e: - type: boolean - typesValues: - type: string - enum: - - FOO - - BAR - default: FOO diff --git a/tests/src/conversions.rs b/tests/src/conversions.rs new file mode 100644 index 0000000..f9a928c --- /dev/null +++ b/tests/src/conversions.rs @@ -0,0 +1,113 @@ +use std::{collections::HashMap, net::SocketAddr, sync::Arc}; + +use crate::{ + proto::conversions::{ + conversions_rpc_actix::route_conversions_rpc, conversions_rpc_server::ConversionsRpc, + ConversionsRequest, ConversionsRequestInternal, ConversionsResponse, + ConversionsResponseInternal, MapValue, Nested, + }, + test, +}; +use actix_web::{App, HttpServer}; +use convert_trait::TryConvert; +use ethers::types::Address; +use serde_json::Value; +use tonic::{Request, Response, Status}; + +#[derive(Default)] +struct ConversionsServer {} + +#[async_trait::async_trait] +impl ConversionsRpc for ConversionsServer { + async fn convert_rpc( + &self, + request: Request, + ) -> Result, Status> { + let internal_request = ConversionsRequestInternal::try_convert(request.into_inner()) + .map_err(|err| Status::invalid_argument(format!("invalid request: {}", err)))?; + + let internal_response = ConversionsResponseInternal { + address: Address::from_low_u64_be(0), + nested: Some(internal_request.nested), + map_field: internal_request.map_field, + }; + + let response = ConversionsResponse::try_convert(internal_response) + .map_err(|err| Status::internal(format!("internal error: {}", err)))?; + + Ok(Response::new(response)) + } +} + +async fn send_post(addr: 
&SocketAddr, path: &str, request: Value) -> String { + reqwest::Client::new() + .post(format!("http://localhost:{}{}", addr.port(), path)) + .json(&request) + .header("Content-Type", "application/json") + .send() + .await + .expect("Failed to send request") + .text() + .await + .unwrap() +} + +#[tokio::test] +async fn conversions() { + let server = Arc::new(ConversionsServer::default()); + let addr = test::get_test_addr(); + let http = HttpServer::new(move || { + App::new().configure(|config| route_conversions_rpc(config, server.clone())) + }) + .bind(addr) + .unwrap(); + + tokio::spawn(http.run()); + + // Invalid request + let req = ConversionsRequest { + map_field: HashMap::from([( + "key".to_string(), + MapValue { + address: "".to_string(), + }, + )]), + query: "some_string".to_string(), + addresses: vec!["".to_string()], + nested_enum: 1, + nested: Some(Nested { + address: "".to_string(), + }), + }; + + let res = send_post(&addr, "/conversions", serde_json::to_value(req).unwrap()).await; + + let res: Value = serde_json::from_str(&res).unwrap(); + assert_eq!( + &res["message"], + "invalid request: Invalid address: Invalid input length" + ); + + // Valid request + let test_address = "0x000000000000000000000000000000000000dEaD".to_string(); + let req = ConversionsRequest { + map_field: HashMap::from([( + "key".to_string(), + MapValue { + address: test_address.clone(), + }, + )]), + query: "some_string".to_string(), + addresses: vec![test_address.clone()], + nested_enum: 1, + nested: Some(Nested { + address: test_address.clone(), + }), + }; + + let res = send_post(&addr, "/conversions", serde_json::to_value(req).unwrap()).await; + + let res: ConversionsResponse = serde_json::from_str(&res).unwrap(); + assert_eq!(res.nested.unwrap().address, test_address); + assert_eq!(res.map_field.get("key").unwrap().address, test_address); +} diff --git a/tests/src/errors.rs b/tests/src/errors.rs index e28fcb1..1f52877 100644 --- a/tests/src/errors.rs +++ b/tests/src/errors.rs @@ -53,7 +53,7 @@ async fn send_post( async fn send_code(addr: &SocketAddr, code: Code) { assert_eq!( send_post::( - &addr, + addr, &format!("/errors/{}?query=something", i32::from(code)), format!(r#"{{"message":"status {}"}}"#, code), ) @@ -61,7 +61,7 @@ async fn send_code(addr: &SocketAddr, code: Code) { ( Error { code, - message: format!("status {}", code).into() + message: format!("status {}", code) }, Error::map_tonic_code(code) ) diff --git a/tests/src/lib.rs b/tests/src/lib.rs index 22307e4..dba571e 100644 --- a/tests/src/lib.rs +++ b/tests/src/lib.rs @@ -15,3 +15,6 @@ mod types; #[cfg(test)] mod errors; + +#[cfg(test)] +mod conversions; diff --git a/tests/src/proto/conversions.rs b/tests/src/proto/conversions.rs new file mode 100644 index 0000000..3e6190e --- /dev/null +++ b/tests/src/proto/conversions.rs @@ -0,0 +1,458 @@ +#[actix_prost_macros::serde] +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct Nested { + #[prost(string, tag = "3")] + pub address: ::prost::alloc::string::String, +} +#[actix_prost_macros::serde] +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct MapValue { + #[prost(string, tag = "1")] + pub address: ::prost::alloc::string::String, +} +#[actix_prost_macros::serde] +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct ConversionsRequest { + #[prost(map = "string, message", tag = "1")] + pub map_field: 
::std::collections::HashMap<::prost::alloc::string::String, MapValue>, + #[prost(string, tag = "2")] + pub query: ::prost::alloc::string::String, + #[prost(string, repeated, tag = "3")] + pub addresses: ::prost::alloc::vec::Vec<::prost::alloc::string::String>, + #[prost(enumeration = "conversions_request::NestedEnum", tag = "4")] + pub nested_enum: i32, + #[prost(message, optional, tag = "5")] + pub nested: ::core::option::Option, +} +/// Nested message and enum types in `ConversionsRequest`. +pub mod conversions_request { + #[actix_prost_macros::serde] + #[derive( + Clone, + Copy, + Debug, + PartialEq, + Eq, + Hash, + PartialOrd, + Ord, + ::prost::Enumeration + )] + #[repr(i32)] + pub enum NestedEnum { + NestedOk = 0, + NestedError = 1, + } + impl NestedEnum { + /// String value of the enum field names used in the ProtoBuf definition. + /// + /// The values are not transformed in any way and thus are considered stable + /// (if the ProtoBuf definition does not change) and safe for programmatic use. + pub fn as_str_name(&self) -> &'static str { + match self { + NestedEnum::NestedOk => "NESTED_OK", + NestedEnum::NestedError => "NESTED_ERROR", + } + } + /// Creates an enum from field names used in the ProtoBuf definition. + pub fn from_str_name(value: &str) -> ::core::option::Option { + match value { + "NESTED_OK" => Some(Self::NestedOk), + "NESTED_ERROR" => Some(Self::NestedError), + _ => None, + } + } + } +} +#[actix_prost_macros::serde] +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct ConversionsResponse { + #[prost(string, tag = "1")] + pub address: ::prost::alloc::string::String, + #[prost(message, optional, tag = "2")] + pub nested: ::core::option::Option, + #[prost(map = "string, message", tag = "3")] + pub map_field: ::std::collections::HashMap<::prost::alloc::string::String, MapValue>, +} +pub mod conversions_rpc_actix { + #![allow(unused_variables, dead_code, missing_docs)] + use super::*; + use super::conversions_rpc_server::ConversionsRpc; + use std::sync::Arc; + #[actix_prost_macros::serde] + #[allow(clippy::derive_partial_eq_without_eq)] + #[derive(Clone, PartialEq, ::prost::Message)] + pub struct ConvertRPCJson { + #[prost(map = "string, message", tag = "1")] + pub map_field: ::std::collections::HashMap< + ::prost::alloc::string::String, + MapValue, + >, + #[prost(string, tag = "2")] + pub query: ::prost::alloc::string::String, + #[prost(string, repeated, tag = "3")] + pub addresses: ::prost::alloc::vec::Vec<::prost::alloc::string::String>, + #[prost(enumeration = "conversions_request::NestedEnum", tag = "4")] + pub nested_enum: i32, + #[prost(message, optional, tag = "5")] + pub nested: ::core::option::Option, + } + async fn call_convert_rpc( + service: ::actix_web::web::Data, + http_request: ::actix_web::HttpRequest, + payload: ::actix_web::web::Payload, + ) -> Result<::actix_web::web::Json, ::actix_prost::Error> { + let mut payload = payload.into_inner(); + let json = <::actix_web::web::Json< + ConvertRPCJson, + > as ::actix_web::FromRequest>::from_request(&http_request, &mut payload) + .await + .map_err(|err| ::actix_prost::Error::from_actix( + err, + ::tonic::Code::InvalidArgument, + ))? 
+ .into_inner(); + let request = ConversionsRequest { + map_field: json.map_field, + query: json.query, + addresses: json.addresses, + nested_enum: json.nested_enum, + nested: json.nested, + }; + let request = ::actix_prost::new_request(request, &http_request); + let response = service.convert_rpc(request).await?; + let response = response.into_inner(); + Ok(::actix_web::web::Json(response)) + } + pub fn route_conversions_rpc( + config: &mut ::actix_web::web::ServiceConfig, + service: Arc, + ) { + config.app_data(::actix_web::web::Data::from(service)); + config.route("/conversions", ::actix_web::web::post().to(call_convert_rpc)); + } +} +#[derive(Debug)] +pub struct MapValueInternal { + pub address: ethers::types::Address, +} +impl convert_trait::TryConvert for MapValueInternal { + fn try_convert(from: MapValue) -> Result { + Ok(Self { + address: convert_trait::TryConvert::try_convert(from.address)?, + }) + } +} +#[derive(Debug)] +pub struct NestedInternal { + pub address: ethers::types::Address, +} +impl convert_trait::TryConvert for NestedInternal { + fn try_convert(from: Nested) -> Result { + Ok(Self { + address: convert_trait::TryConvert::try_convert(from.address)?, + }) + } +} +#[derive(Debug)] +pub struct ConversionsRequestInternal { + pub map_field: ::std::collections::HashMap< + ::prost::alloc::string::String, + MapValueInternal, + >, + pub query: ::prost::alloc::string::String, + pub addresses: std::collections::HashSet, + pub nested_enum: conversions_request::NestedEnum, + pub nested: NestedInternal, + pub field1: Option, + pub field2: Option, +} +impl convert_trait::TryConvert for ConversionsRequestInternal { + fn try_convert(from: ConversionsRequest) -> Result { + Ok(Self { + map_field: convert_trait::TryConvert::try_convert(from.map_field)?, + query: Default::default(), + addresses: convert_trait::TryConvert::try_convert(from.addresses)?, + nested_enum: conversions_request::NestedEnum::try_from(from.nested_enum)?, + nested: convert_trait::TryConvert::try_convert( + from.nested.ok_or("field nested is required")?, + )?, + field1: None, + field2: None, + }) + } +} +impl convert_trait::TryConvert for Nested { + fn try_convert(from: NestedInternal) -> Result { + Ok(Self { + address: convert_trait::TryConvert::try_convert(from.address)?, + }) + } +} +impl convert_trait::TryConvert for MapValue { + fn try_convert(from: MapValueInternal) -> Result { + Ok(Self { + address: convert_trait::TryConvert::try_convert(from.address)?, + }) + } +} +#[derive(Debug)] +pub struct ConversionsResponseInternal { + pub address: ethers::types::Address, + pub nested: ::core::option::Option, + pub map_field: ::std::collections::HashMap< + ::prost::alloc::string::String, + MapValueInternal, + >, +} +impl convert_trait::TryConvert for ConversionsResponse { + fn try_convert(from: ConversionsResponseInternal) -> Result { + Ok(Self { + address: convert_trait::TryConvert::try_convert(from.address)?, + nested: convert_trait::TryConvert::try_convert(from.nested)?, + map_field: convert_trait::TryConvert::try_convert(from.map_field)?, + }) + } +} +/// Generated client implementations. +pub mod conversions_rpc_client { + #![allow(unused_variables, dead_code, missing_docs, clippy::let_unit_value)] + use tonic::codegen::*; + use tonic::codegen::http::Uri; + #[derive(Debug, Clone)] + pub struct ConversionsRpcClient { + inner: tonic::client::Grpc, + } + impl ConversionsRpcClient { + /// Attempt to create a new client by connecting to a given endpoint. 
+ pub async fn connect(dst: D) -> Result + where + D: std::convert::TryInto, + D::Error: Into, + { + let conn = tonic::transport::Endpoint::new(dst)?.connect().await?; + Ok(Self::new(conn)) + } + } + impl ConversionsRpcClient + where + T: tonic::client::GrpcService, + T::Error: Into, + T::ResponseBody: Body + Send + 'static, + ::Error: Into + Send, + { + pub fn new(inner: T) -> Self { + let inner = tonic::client::Grpc::new(inner); + Self { inner } + } + pub fn with_origin(inner: T, origin: Uri) -> Self { + let inner = tonic::client::Grpc::with_origin(inner, origin); + Self { inner } + } + pub fn with_interceptor( + inner: T, + interceptor: F, + ) -> ConversionsRpcClient> + where + F: tonic::service::Interceptor, + T::ResponseBody: Default, + T: tonic::codegen::Service< + http::Request, + Response = http::Response< + >::ResponseBody, + >, + >, + , + >>::Error: Into + Send + Sync, + { + ConversionsRpcClient::new(InterceptedService::new(inner, interceptor)) + } + /// Compress requests with the given encoding. + /// + /// This requires the server to support it otherwise it might respond with an + /// error. + #[must_use] + pub fn send_compressed(mut self, encoding: CompressionEncoding) -> Self { + self.inner = self.inner.send_compressed(encoding); + self + } + /// Enable decompressing responses. + #[must_use] + pub fn accept_compressed(mut self, encoding: CompressionEncoding) -> Self { + self.inner = self.inner.accept_compressed(encoding); + self + } + pub async fn convert_rpc( + &mut self, + request: impl tonic::IntoRequest, + ) -> Result, tonic::Status> { + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::new( + tonic::Code::Unknown, + format!("Service was not ready: {}", e.into()), + ) + })?; + let codec = tonic::codec::ProstCodec::default(); + let path = http::uri::PathAndQuery::from_static( + "/conversions.ConversionsRPC/ConvertRPC", + ); + self.inner.unary(request.into_request(), path, codec).await + } + } +} +/// Generated server implementations. +pub mod conversions_rpc_server { + #![allow(unused_variables, dead_code, missing_docs, clippy::let_unit_value)] + use tonic::codegen::*; + /// Generated trait containing gRPC methods that should be implemented for use with ConversionsRpcServer. + #[async_trait] + pub trait ConversionsRpc: Send + Sync + 'static { + async fn convert_rpc( + &self, + request: tonic::Request, + ) -> Result, tonic::Status>; + } + #[derive(Debug)] + pub struct ConversionsRpcServer { + inner: _Inner, + accept_compression_encodings: EnabledCompressionEncodings, + send_compression_encodings: EnabledCompressionEncodings, + } + struct _Inner(Arc); + impl ConversionsRpcServer { + pub fn new(inner: T) -> Self { + Self::from_arc(Arc::new(inner)) + } + pub fn from_arc(inner: Arc) -> Self { + let inner = _Inner(inner); + Self { + inner, + accept_compression_encodings: Default::default(), + send_compression_encodings: Default::default(), + } + } + pub fn with_interceptor( + inner: T, + interceptor: F, + ) -> InterceptedService + where + F: tonic::service::Interceptor, + { + InterceptedService::new(Self::new(inner), interceptor) + } + /// Enable decompressing requests with the given encoding. + #[must_use] + pub fn accept_compressed(mut self, encoding: CompressionEncoding) -> Self { + self.accept_compression_encodings.enable(encoding); + self + } + /// Compress responses with the given encoding, if the client supports it. 
+ #[must_use] + pub fn send_compressed(mut self, encoding: CompressionEncoding) -> Self { + self.send_compression_encodings.enable(encoding); + self + } + } + impl tonic::codegen::Service> for ConversionsRpcServer + where + T: ConversionsRpc, + B: Body + Send + 'static, + B::Error: Into + Send + 'static, + { + type Response = http::Response; + type Error = std::convert::Infallible; + type Future = BoxFuture; + fn poll_ready( + &mut self, + _cx: &mut Context<'_>, + ) -> Poll> { + Poll::Ready(Ok(())) + } + fn call(&mut self, req: http::Request) -> Self::Future { + let inner = self.inner.clone(); + match req.uri().path() { + "/conversions.ConversionsRPC/ConvertRPC" => { + #[allow(non_camel_case_types)] + struct ConvertRPCSvc(pub Arc); + impl< + T: ConversionsRpc, + > tonic::server::UnaryService + for ConvertRPCSvc { + type Response = super::ConversionsResponse; + type Future = BoxFuture< + tonic::Response, + tonic::Status, + >; + fn call( + &mut self, + request: tonic::Request, + ) -> Self::Future { + let inner = self.0.clone(); + let fut = async move { (*inner).convert_rpc(request).await }; + Box::pin(fut) + } + } + let accept_compression_encodings = self.accept_compression_encodings; + let send_compression_encodings = self.send_compression_encodings; + let inner = self.inner.clone(); + let fut = async move { + let inner = inner.0; + let method = ConvertRPCSvc(inner); + let codec = tonic::codec::ProstCodec::default(); + let mut grpc = tonic::server::Grpc::new(codec) + .apply_compression_config( + accept_compression_encodings, + send_compression_encodings, + ); + let res = grpc.unary(method, req).await; + Ok(res) + }; + Box::pin(fut) + } + _ => { + Box::pin(async move { + Ok( + http::Response::builder() + .status(200) + .header("grpc-status", "12") + .header("content-type", "application/grpc") + .body(empty_body()) + .unwrap(), + ) + }) + } + } + } + } + impl Clone for ConversionsRpcServer { + fn clone(&self) -> Self { + let inner = self.inner.clone(); + Self { + inner, + accept_compression_encodings: self.accept_compression_encodings, + send_compression_encodings: self.send_compression_encodings, + } + } + } + impl Clone for _Inner { + fn clone(&self) -> Self { + Self(self.0.clone()) + } + } + impl std::fmt::Debug for _Inner { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{:?}", self.0) + } + } + impl tonic::server::NamedService for ConversionsRpcServer { + const NAME: &'static str = "conversions.ConversionsRPC"; + } +} diff --git a/tests/src/proto/convert_options.rs b/tests/src/proto/convert_options.rs new file mode 100644 index 0000000..5697656 --- /dev/null +++ b/tests/src/proto/convert_options.rs @@ -0,0 +1,20 @@ +#[actix_prost_macros::serde] +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct ConvertOptions { + #[prost(string, tag = "1")] + pub r#type: ::prost::alloc::string::String, + #[prost(string, tag = "2")] + pub r#override: ::prost::alloc::string::String, + #[prost(bool, tag = "3")] + pub required: bool, +} +#[actix_prost_macros::serde] +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct ExtraFieldOptions { + #[prost(string, tag = "1")] + pub name: ::prost::alloc::string::String, + #[prost(string, tag = "2")] + pub r#type: ::prost::alloc::string::String, +} diff --git a/tests/src/proto/errors.rs b/tests/src/proto/errors.rs index ee0854f..bda1816 100644 --- a/tests/src/proto/errors.rs +++ b/tests/src/proto/errors.rs @@ -15,7 +15,8 @@ 
pub struct ErrorRequest { pub struct ErrorResponse {} pub mod errors_rpc_actix { #![allow(unused_variables, dead_code, missing_docs)] - use super::{errors_rpc_server::ErrorsRpc, *}; + use super::*; + use super::errors_rpc_server::ErrorsRpc; use std::sync::Arc; #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] @@ -24,16 +25,16 @@ pub mod errors_rpc_actix { #[prost(int32, tag = "1")] pub code: i32, } + #[actix_prost_macros::serde] #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] - #[actix_prost_macros::serde] pub struct ErrorQuery { #[prost(string, tag = "2")] pub query: ::prost::alloc::string::String, } + #[actix_prost_macros::serde] #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] - #[actix_prost_macros::serde] pub struct ErrorJson { #[prost(string, tag = "3")] pub message: ::prost::alloc::string::String, @@ -44,26 +45,33 @@ pub mod errors_rpc_actix { payload: ::actix_web::web::Payload, ) -> Result<::actix_web::web::Json, ::actix_prost::Error> { let mut payload = payload.into_inner(); - let path = - <::actix_web::web::Path as ::actix_web::FromRequest>::extract(&http_request) - .await - .map_err(|err| { - ::actix_prost::Error::from_actix(err, ::tonic::Code::InvalidArgument) - })? - .into_inner(); - let query = <::actix_web::web::Query as ::actix_web::FromRequest>::extract( - &http_request, - ) - .await - .map_err(|err| ::actix_prost::Error::from_actix(err, ::tonic::Code::InvalidArgument))? - .into_inner(); - let json = <::actix_web::web::Json as ::actix_web::FromRequest>::from_request( - &http_request, - &mut payload, - ) - .await - .map_err(|err| ::actix_prost::Error::from_actix(err, ::tonic::Code::InvalidArgument))? - .into_inner(); + let path = <::actix_web::web::Path< + ErrorPath, + > as ::actix_web::FromRequest>::extract(&http_request) + .await + .map_err(|err| ::actix_prost::Error::from_actix( + err, + ::tonic::Code::InvalidArgument, + ))? + .into_inner(); + let query = <::actix_web::web::Query< + ErrorQuery, + > as ::actix_web::FromRequest>::extract(&http_request) + .await + .map_err(|err| ::actix_prost::Error::from_actix( + err, + ::tonic::Code::InvalidArgument, + ))? + .into_inner(); + let json = <::actix_web::web::Json< + ErrorJson, + > as ::actix_web::FromRequest>::from_request(&http_request, &mut payload) + .await + .map_err(|err| ::actix_prost::Error::from_actix( + err, + ::tonic::Code::InvalidArgument, + ))? + .into_inner(); let request = ErrorRequest { code: path.code, query: query.query, @@ -82,10 +90,33 @@ pub mod errors_rpc_actix { config.route("/errors/{code}", ::actix_web::web::post().to(call_error)); } } +#[derive(Debug)] +pub struct ErrorRequestInternal { + pub code: i32, + pub query: ::prost::alloc::string::String, + pub message: ::prost::alloc::string::String, +} +impl convert_trait::TryConvert for ErrorRequestInternal { + fn try_convert(from: ErrorRequest) -> Result { + Ok(Self { + code: from.code, + query: from.query, + message: from.message, + }) + } +} +#[derive(Debug)] +pub struct ErrorResponseInternal {} +impl convert_trait::TryConvert for ErrorResponse { + fn try_convert(_from: ErrorResponseInternal) -> Result { + Ok(Self {}) + } +} /// Generated client implementations. 
pub mod errors_rpc_client { #![allow(unused_variables, dead_code, missing_docs, clippy::let_unit_value)] - use tonic::codegen::{http::Uri, *}; + use tonic::codegen::*; + use tonic::codegen::http::Uri; #[derive(Debug, Clone)] pub struct ErrorsRpcClient { inner: tonic::client::Grpc, @@ -129,8 +160,9 @@ pub mod errors_rpc_client { >::ResponseBody, >, >, - >>::Error: - Into + Send + Sync, + , + >>::Error: Into + Send + Sync, { ErrorsRpcClient::new(InterceptedService::new(inner, interceptor)) } @@ -153,12 +185,15 @@ pub mod errors_rpc_client { &mut self, request: impl tonic::IntoRequest, ) -> Result, tonic::Status> { - self.inner.ready().await.map_err(|e| { - tonic::Status::new( - tonic::Code::Unknown, - format!("Service was not ready: {}", e.into()), - ) - })?; + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::new( + tonic::Code::Unknown, + format!("Service was not ready: {}", e.into()), + ) + })?; let codec = tonic::codec::ProstCodec::default(); let path = http::uri::PathAndQuery::from_static("/errors.ErrorsRPC/Error"); self.inner.unary(request.into_request(), path, codec).await @@ -196,7 +231,10 @@ pub mod errors_rpc_server { send_compression_encodings: Default::default(), } } - pub fn with_interceptor(inner: T, interceptor: F) -> InterceptedService + pub fn with_interceptor( + inner: T, + interceptor: F, + ) -> InterceptedService where F: tonic::service::Interceptor, { @@ -224,7 +262,10 @@ pub mod errors_rpc_server { type Response = http::Response; type Error = std::convert::Infallible; type Future = BoxFuture; - fn poll_ready(&mut self, _cx: &mut Context<'_>) -> Poll> { + fn poll_ready( + &mut self, + _cx: &mut Context<'_>, + ) -> Poll> { Poll::Ready(Ok(())) } fn call(&mut self, req: http::Request) -> Self::Future { @@ -233,9 +274,13 @@ pub mod errors_rpc_server { "/errors.ErrorsRPC/Error" => { #[allow(non_camel_case_types)] struct ErrorSvc(pub Arc); - impl tonic::server::UnaryService for ErrorSvc { + impl tonic::server::UnaryService + for ErrorSvc { type Response = super::ErrorResponse; - type Future = BoxFuture, tonic::Status>; + type Future = BoxFuture< + tonic::Response, + tonic::Status, + >; fn call( &mut self, request: tonic::Request, @@ -252,23 +297,28 @@ pub mod errors_rpc_server { let inner = inner.0; let method = ErrorSvc(inner); let codec = tonic::codec::ProstCodec::default(); - let mut grpc = tonic::server::Grpc::new(codec).apply_compression_config( - accept_compression_encodings, - send_compression_encodings, - ); + let mut grpc = tonic::server::Grpc::new(codec) + .apply_compression_config( + accept_compression_encodings, + send_compression_encodings, + ); let res = grpc.unary(method, req).await; Ok(res) }; Box::pin(fut) } - _ => Box::pin(async move { - Ok(http::Response::builder() - .status(200) - .header("grpc-status", "12") - .header("content-type", "application/grpc") - .body(empty_body()) - .unwrap()) - }), + _ => { + Box::pin(async move { + Ok( + http::Response::builder() + .status(200) + .header("grpc-status", "12") + .header("content-type", "application/grpc") + .body(empty_body()) + .unwrap(), + ) + }) + } } } } diff --git a/tests/src/proto/google.protobuf.rs b/tests/src/proto/google.protobuf.rs index d7f81e8..609de37 100644 --- a/tests/src/proto/google.protobuf.rs +++ b/tests/src/proto/google.protobuf.rs @@ -48,7 +48,6 @@ /// Timestamp timestamp = Timestamp.newBuilder().setSeconds(millis / 1000) /// .setNanos((int) ((millis % 1000) * 1000000)).build(); /// -/// /// Example 5: Compute Timestamp from Java `Instant.now()`. 
/// /// Instant now = Instant.now(); @@ -57,7 +56,6 @@ /// Timestamp.newBuilder().setSeconds(now.getEpochSecond()) /// .setNanos(now.getNano()).build(); /// -/// /// Example 6: Compute Timestamp from current time in Python. /// /// timestamp = Timestamp() @@ -87,10 +85,9 @@ /// \[`strftime`\]() with /// the time format spec '%Y-%m-%dT%H:%M:%S.%fZ'. Likewise, in Java, one can use /// the Joda Time's \[`ISODateTimeFormat.dateTime()`\]( -/// +/// ) /// ) to obtain a formatter capable of generating timestamps in this format. /// -/// #[actix_prost_macros::serde] #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] @@ -131,8 +128,12 @@ pub struct Timestamp { /// if (any.is(Foo.class)) { /// foo = any.unpack(Foo.class); /// } +/// // or ... +/// if (any.isSameTypeAs(Foo.getDefaultInstance())) { +/// foo = any.unpack(Foo.getDefaultInstance()); +/// } /// -/// Example 3: Pack and unpack a message in Python. +/// Example 3: Pack and unpack a message in Python. /// /// foo = Foo(...) /// any = Any() @@ -142,7 +143,7 @@ pub struct Timestamp { /// any.Unpack(foo) /// ... /// -/// Example 4: Pack and unpack a message in Go +/// Example 4: Pack and unpack a message in Go /// /// foo := &pb.Foo{...} /// any, err := anypb.New(foo) @@ -161,9 +162,8 @@ pub struct Timestamp { /// in the type URL, for example "foo.bar.com/x/y.z" will yield type /// name "y.z". /// -/// /// JSON -/// +/// ==== /// The JSON representation of an `Any` value uses the regular /// representation of the deserialized, embedded message, with an /// additional field `@type` which contains the type URL. Example: @@ -217,7 +217,8 @@ pub struct Any { /// /// Note: this functionality is not currently available in the official /// protobuf release, and it is not used for type URLs beginning with - /// type.googleapis.com. + /// type.googleapis.com. As of May 2023, there are no widely used type server + /// implementations and no plans to implement one. /// /// Schemes other than `http`, `https` (or the empty scheme) might be /// used with implementation specific semantics. @@ -228,3 +229,1891 @@ pub struct Any { #[prost(bytes = "bytes", tag = "2")] pub value: ::prost::bytes::Bytes, } +/// The protocol compiler can output a FileDescriptorSet containing the .proto +/// files it parses. +#[actix_prost_macros::serde] +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct FileDescriptorSet { + #[prost(message, repeated, tag = "1")] + pub file: ::prost::alloc::vec::Vec, +} +/// Describes a complete .proto file. +#[actix_prost_macros::serde] +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct FileDescriptorProto { + /// file name, relative to root of source tree + #[prost(string, optional, tag = "1")] + pub name: ::core::option::Option<::prost::alloc::string::String>, + /// e.g. "foo", "foo.bar", etc. + #[prost(string, optional, tag = "2")] + pub package: ::core::option::Option<::prost::alloc::string::String>, + /// Names of files imported by this file. + #[prost(string, repeated, tag = "3")] + pub dependency: ::prost::alloc::vec::Vec<::prost::alloc::string::String>, + /// Indexes of the public imported files in the dependency list above. + #[prost(int32, repeated, packed = "false", tag = "10")] + pub public_dependency: ::prost::alloc::vec::Vec, + /// Indexes of the weak imported files in the dependency list. + /// For Google-internal migration only. Do not use. 
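    // Illustrative sketch (not from this diff): because these descriptor types derive
    // `::prost::Message`, a descriptor set produced by `protoc --descriptor_set_out=...`
    // can be decoded and inspected with prost; the file path below is hypothetical.
    //
    //     use prost::Message;
    //     let bytes = std::fs::read("descriptor.bin").unwrap();
    //     let set = FileDescriptorSet::decode(bytes.as_slice()).unwrap();
    //     for file in &set.file {
    //         println!("{:?} (package {:?})", file.name, file.package);
    //     }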
+ #[prost(int32, repeated, packed = "false", tag = "11")] + pub weak_dependency: ::prost::alloc::vec::Vec, + /// All top-level definitions in this file. + #[prost(message, repeated, tag = "4")] + pub message_type: ::prost::alloc::vec::Vec, + #[prost(message, repeated, tag = "5")] + pub enum_type: ::prost::alloc::vec::Vec, + #[prost(message, repeated, tag = "6")] + pub service: ::prost::alloc::vec::Vec, + #[prost(message, repeated, tag = "7")] + pub extension: ::prost::alloc::vec::Vec, + #[prost(message, optional, tag = "8")] + pub options: ::core::option::Option, + /// This field contains optional information about the original source code. + /// You may safely remove this entire field without harming runtime + /// functionality of the descriptors -- the information is needed only by + /// development tools. + #[prost(message, optional, tag = "9")] + pub source_code_info: ::core::option::Option, + /// The syntax of the proto file. + /// The supported values are "proto2", "proto3", and "editions". + /// + /// If `edition` is present, this value must be "editions". + #[prost(string, optional, tag = "12")] + pub syntax: ::core::option::Option<::prost::alloc::string::String>, + /// The edition of the proto file. + #[prost(enumeration = "Edition", optional, tag = "14")] + pub edition: ::core::option::Option, +} +/// Describes a message type. +#[actix_prost_macros::serde] +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct DescriptorProto { + #[prost(string, optional, tag = "1")] + pub name: ::core::option::Option<::prost::alloc::string::String>, + #[prost(message, repeated, tag = "2")] + pub field: ::prost::alloc::vec::Vec, + #[prost(message, repeated, tag = "6")] + pub extension: ::prost::alloc::vec::Vec, + #[prost(message, repeated, tag = "3")] + pub nested_type: ::prost::alloc::vec::Vec, + #[prost(message, repeated, tag = "4")] + pub enum_type: ::prost::alloc::vec::Vec, + #[prost(message, repeated, tag = "5")] + pub extension_range: ::prost::alloc::vec::Vec, + #[prost(message, repeated, tag = "8")] + pub oneof_decl: ::prost::alloc::vec::Vec, + #[prost(message, optional, tag = "7")] + pub options: ::core::option::Option, + #[prost(message, repeated, tag = "9")] + pub reserved_range: ::prost::alloc::vec::Vec, + /// Reserved field names, which may not be used by fields in the same message. + /// A given name may only be reserved once. + #[prost(string, repeated, tag = "10")] + pub reserved_name: ::prost::alloc::vec::Vec<::prost::alloc::string::String>, +} +/// Nested message and enum types in `DescriptorProto`. +pub mod descriptor_proto { + #[actix_prost_macros::serde] + #[allow(clippy::derive_partial_eq_without_eq)] + #[derive(Clone, PartialEq, ::prost::Message)] + pub struct ExtensionRange { + /// Inclusive. + #[prost(int32, optional, tag = "1")] + pub start: ::core::option::Option, + /// Exclusive. + #[prost(int32, optional, tag = "2")] + pub end: ::core::option::Option, + #[prost(message, optional, tag = "3")] + pub options: ::core::option::Option, + } + /// Range of reserved tag numbers. Reserved tag numbers may not be used by + /// fields or extension ranges in the same message. Reserved ranges may + /// not overlap. + #[actix_prost_macros::serde] + #[allow(clippy::derive_partial_eq_without_eq)] + #[derive(Clone, PartialEq, ::prost::Message)] + pub struct ReservedRange { + /// Inclusive. + #[prost(int32, optional, tag = "1")] + pub start: ::core::option::Option, + /// Exclusive. 
+ #[prost(int32, optional, tag = "2")] + pub end: ::core::option::Option, + } +} +#[actix_prost_macros::serde] +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct ExtensionRangeOptions { + /// The parser stores options it doesn't recognize here. See above. + #[prost(message, repeated, tag = "999")] + pub uninterpreted_option: ::prost::alloc::vec::Vec, + /// For external users: DO NOT USE. We are in the process of open sourcing + /// extension declaration and executing internal cleanups before it can be + /// used externally. + #[prost(message, repeated, tag = "2")] + pub declaration: ::prost::alloc::vec::Vec, + /// Any features defined in the specific edition. + #[prost(message, optional, tag = "50")] + pub features: ::core::option::Option, + /// The verification state of the range. + /// TODO: flip the default to DECLARATION once all empty ranges + /// are marked as UNVERIFIED. + #[prost( + enumeration = "extension_range_options::VerificationState", + optional, + tag = "3", + default = "Unverified" + )] + pub verification: ::core::option::Option, +} +/// Nested message and enum types in `ExtensionRangeOptions`. +pub mod extension_range_options { + #[actix_prost_macros::serde] + #[allow(clippy::derive_partial_eq_without_eq)] + #[derive(Clone, PartialEq, ::prost::Message)] + pub struct Declaration { + /// The extension number declared within the extension range. + #[prost(int32, optional, tag = "1")] + pub number: ::core::option::Option, + /// The fully-qualified name of the extension field. There must be a leading + /// dot in front of the full name. + #[prost(string, optional, tag = "2")] + pub full_name: ::core::option::Option<::prost::alloc::string::String>, + /// The fully-qualified type name of the extension field. Unlike + /// Metadata.type, Declaration.type must have a leading dot for messages + /// and enums. + #[prost(string, optional, tag = "3")] + pub r#type: ::core::option::Option<::prost::alloc::string::String>, + /// If true, indicates that the number is reserved in the extension range, + /// and any extension field with the number will fail to compile. Set this + /// when a declared extension field is deleted. + #[prost(bool, optional, tag = "5")] + pub reserved: ::core::option::Option, + /// If true, indicates that the extension must be defined as repeated. + /// Otherwise the extension must be defined as optional. + #[prost(bool, optional, tag = "6")] + pub repeated: ::core::option::Option, + } + /// The verification state of the extension range. + #[actix_prost_macros::serde] + #[derive( + Clone, + Copy, + Debug, + PartialEq, + Eq, + Hash, + PartialOrd, + Ord, + ::prost::Enumeration + )] + #[repr(i32)] + pub enum VerificationState { + /// All the extensions of the range must be declared. + Declaration = 0, + Unverified = 1, + } + impl VerificationState { + /// String value of the enum field names used in the ProtoBuf definition. + /// + /// The values are not transformed in any way and thus are considered stable + /// (if the ProtoBuf definition does not change) and safe for programmatic use. + pub fn as_str_name(&self) -> &'static str { + match self { + VerificationState::Declaration => "DECLARATION", + VerificationState::Unverified => "UNVERIFIED", + } + } + /// Creates an enum from field names used in the ProtoBuf definition. 
+ pub fn from_str_name(value: &str) -> ::core::option::Option { + match value { + "DECLARATION" => Some(Self::Declaration), + "UNVERIFIED" => Some(Self::Unverified), + _ => None, + } + } + } +} +/// Describes a field within a message. +#[actix_prost_macros::serde] +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct FieldDescriptorProto { + #[prost(string, optional, tag = "1")] + pub name: ::core::option::Option<::prost::alloc::string::String>, + #[prost(int32, optional, tag = "3")] + pub number: ::core::option::Option, + #[prost(enumeration = "field_descriptor_proto::Label", optional, tag = "4")] + pub label: ::core::option::Option, + /// If type_name is set, this need not be set. If both this and type_name + /// are set, this must be one of TYPE_ENUM, TYPE_MESSAGE or TYPE_GROUP. + #[prost(enumeration = "field_descriptor_proto::Type", optional, tag = "5")] + pub r#type: ::core::option::Option, + /// For message and enum types, this is the name of the type. If the name + /// starts with a '.', it is fully-qualified. Otherwise, C++-like scoping + /// rules are used to find the type (i.e. first the nested types within this + /// message are searched, then within the parent, on up to the root + /// namespace). + #[prost(string, optional, tag = "6")] + pub type_name: ::core::option::Option<::prost::alloc::string::String>, + /// For extensions, this is the name of the type being extended. It is + /// resolved in the same manner as type_name. + #[prost(string, optional, tag = "2")] + pub extendee: ::core::option::Option<::prost::alloc::string::String>, + /// For numeric types, contains the original text representation of the value. + /// For booleans, "true" or "false". + /// For strings, contains the default text contents (not escaped in any way). + /// For bytes, contains the C escaped value. All bytes >= 128 are escaped. + #[prost(string, optional, tag = "7")] + pub default_value: ::core::option::Option<::prost::alloc::string::String>, + /// If set, gives the index of a oneof in the containing type's oneof_decl + /// list. This field is a member of that oneof. + #[prost(int32, optional, tag = "9")] + pub oneof_index: ::core::option::Option, + /// JSON name of this field. The value is set by protocol compiler. If the + /// user has set a "json_name" option on this field, that option's value + /// will be used. Otherwise, it's deduced from the field's name by converting + /// it to camelCase. + #[prost(string, optional, tag = "10")] + pub json_name: ::core::option::Option<::prost::alloc::string::String>, + #[prost(message, optional, tag = "8")] + pub options: ::core::option::Option, + /// If true, this is a proto3 "optional". When a proto3 field is optional, it + /// tracks presence regardless of field type. + /// + /// When proto3_optional is true, this field must be belong to a oneof to + /// signal to old proto3 clients that presence is tracked for this field. This + /// oneof is known as a "synthetic" oneof, and this field must be its sole + /// member (each proto3 optional field gets its own synthetic oneof). Synthetic + /// oneofs exist in the descriptor only, and do not generate any API. Synthetic + /// oneofs must be ordered after all "real" oneofs. + /// + /// For message fields, proto3_optional doesn't create any semantic change, + /// since non-repeated message fields always track presence. However it still + /// indicates the semantic detail of whether the user wrote "optional" or not. 
+ /// This can be useful for round-tripping the .proto file. For consistency we + /// give message fields a synthetic oneof also, even though it is not required + /// to track presence. This is especially important because the parser can't + /// tell if a field is a message or an enum, so it must always create a + /// synthetic oneof. + /// + /// Proto2 optional fields do not set this flag, because they already indicate + /// optional with `LABEL_OPTIONAL`. + #[prost(bool, optional, tag = "17")] + pub proto3_optional: ::core::option::Option, +} +/// Nested message and enum types in `FieldDescriptorProto`. +pub mod field_descriptor_proto { + #[actix_prost_macros::serde] + #[derive( + Clone, + Copy, + Debug, + PartialEq, + Eq, + Hash, + PartialOrd, + Ord, + ::prost::Enumeration + )] + #[repr(i32)] + pub enum Type { + /// 0 is reserved for errors. + /// Order is weird for historical reasons. + Double = 1, + Float = 2, + /// Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT64 if + /// negative values are likely. + Int64 = 3, + Uint64 = 4, + /// Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT32 if + /// negative values are likely. + Int32 = 5, + Fixed64 = 6, + Fixed32 = 7, + Bool = 8, + String = 9, + /// Tag-delimited aggregate. + /// Group type is deprecated and not supported after google.protobuf. However, Proto3 + /// implementations should still be able to parse the group wire format and + /// treat group fields as unknown fields. In Editions, the group wire format + /// can be enabled via the `message_encoding` feature. + Group = 10, + /// Length-delimited aggregate. + Message = 11, + /// New in version 2. + Bytes = 12, + Uint32 = 13, + Enum = 14, + Sfixed32 = 15, + Sfixed64 = 16, + /// Uses ZigZag encoding. + Sint32 = 17, + /// Uses ZigZag encoding. + Sint64 = 18, + } + impl Type { + /// String value of the enum field names used in the ProtoBuf definition. + /// + /// The values are not transformed in any way and thus are considered stable + /// (if the ProtoBuf definition does not change) and safe for programmatic use. + pub fn as_str_name(&self) -> &'static str { + match self { + Type::Double => "TYPE_DOUBLE", + Type::Float => "TYPE_FLOAT", + Type::Int64 => "TYPE_INT64", + Type::Uint64 => "TYPE_UINT64", + Type::Int32 => "TYPE_INT32", + Type::Fixed64 => "TYPE_FIXED64", + Type::Fixed32 => "TYPE_FIXED32", + Type::Bool => "TYPE_BOOL", + Type::String => "TYPE_STRING", + Type::Group => "TYPE_GROUP", + Type::Message => "TYPE_MESSAGE", + Type::Bytes => "TYPE_BYTES", + Type::Uint32 => "TYPE_UINT32", + Type::Enum => "TYPE_ENUM", + Type::Sfixed32 => "TYPE_SFIXED32", + Type::Sfixed64 => "TYPE_SFIXED64", + Type::Sint32 => "TYPE_SINT32", + Type::Sint64 => "TYPE_SINT64", + } + } + /// Creates an enum from field names used in the ProtoBuf definition. 
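        // Illustrative note (not from this diff): `as_str_name` and `from_str_name`
        // round-trip the proto enum value names, e.g.:
        //
        //     assert_eq!(Type::String.as_str_name(), "TYPE_STRING");
        //     assert_eq!(Type::from_str_name("TYPE_STRING"), Some(Type::String));
        //     assert_eq!(Type::from_str_name("TYPE_UNKNOWN"), None);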
+ pub fn from_str_name(value: &str) -> ::core::option::Option { + match value { + "TYPE_DOUBLE" => Some(Self::Double), + "TYPE_FLOAT" => Some(Self::Float), + "TYPE_INT64" => Some(Self::Int64), + "TYPE_UINT64" => Some(Self::Uint64), + "TYPE_INT32" => Some(Self::Int32), + "TYPE_FIXED64" => Some(Self::Fixed64), + "TYPE_FIXED32" => Some(Self::Fixed32), + "TYPE_BOOL" => Some(Self::Bool), + "TYPE_STRING" => Some(Self::String), + "TYPE_GROUP" => Some(Self::Group), + "TYPE_MESSAGE" => Some(Self::Message), + "TYPE_BYTES" => Some(Self::Bytes), + "TYPE_UINT32" => Some(Self::Uint32), + "TYPE_ENUM" => Some(Self::Enum), + "TYPE_SFIXED32" => Some(Self::Sfixed32), + "TYPE_SFIXED64" => Some(Self::Sfixed64), + "TYPE_SINT32" => Some(Self::Sint32), + "TYPE_SINT64" => Some(Self::Sint64), + _ => None, + } + } + } + #[actix_prost_macros::serde] + #[derive( + Clone, + Copy, + Debug, + PartialEq, + Eq, + Hash, + PartialOrd, + Ord, + ::prost::Enumeration + )] + #[repr(i32)] + pub enum Label { + /// 0 is reserved for errors + Optional = 1, + Repeated = 3, + /// The required label is only allowed in google.protobuf. In proto3 and Editions + /// it's explicitly prohibited. In Editions, the `field_presence` feature + /// can be used to get this behavior. + Required = 2, + } + impl Label { + /// String value of the enum field names used in the ProtoBuf definition. + /// + /// The values are not transformed in any way and thus are considered stable + /// (if the ProtoBuf definition does not change) and safe for programmatic use. + pub fn as_str_name(&self) -> &'static str { + match self { + Label::Optional => "LABEL_OPTIONAL", + Label::Repeated => "LABEL_REPEATED", + Label::Required => "LABEL_REQUIRED", + } + } + /// Creates an enum from field names used in the ProtoBuf definition. + pub fn from_str_name(value: &str) -> ::core::option::Option { + match value { + "LABEL_OPTIONAL" => Some(Self::Optional), + "LABEL_REPEATED" => Some(Self::Repeated), + "LABEL_REQUIRED" => Some(Self::Required), + _ => None, + } + } + } +} +/// Describes a oneof. +#[actix_prost_macros::serde] +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct OneofDescriptorProto { + #[prost(string, optional, tag = "1")] + pub name: ::core::option::Option<::prost::alloc::string::String>, + #[prost(message, optional, tag = "2")] + pub options: ::core::option::Option, +} +/// Describes an enum type. +#[actix_prost_macros::serde] +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct EnumDescriptorProto { + #[prost(string, optional, tag = "1")] + pub name: ::core::option::Option<::prost::alloc::string::String>, + #[prost(message, repeated, tag = "2")] + pub value: ::prost::alloc::vec::Vec, + #[prost(message, optional, tag = "3")] + pub options: ::core::option::Option, + /// Range of reserved numeric values. Reserved numeric values may not be used + /// by enum values in the same enum declaration. Reserved ranges may not + /// overlap. + #[prost(message, repeated, tag = "4")] + pub reserved_range: ::prost::alloc::vec::Vec< + enum_descriptor_proto::EnumReservedRange, + >, + /// Reserved enum value names, which may not be reused. A given name may only + /// be reserved once. + #[prost(string, repeated, tag = "5")] + pub reserved_name: ::prost::alloc::vec::Vec<::prost::alloc::string::String>, +} +/// Nested message and enum types in `EnumDescriptorProto`. +pub mod enum_descriptor_proto { + /// Range of reserved numeric values. 
Reserved values may not be used by + /// entries in the same enum. Reserved ranges may not overlap. + /// + /// Note that this is distinct from DescriptorProto.ReservedRange in that it + /// is inclusive such that it can appropriately represent the entire int32 + /// domain. + #[actix_prost_macros::serde] + #[allow(clippy::derive_partial_eq_without_eq)] + #[derive(Clone, PartialEq, ::prost::Message)] + pub struct EnumReservedRange { + /// Inclusive. + #[prost(int32, optional, tag = "1")] + pub start: ::core::option::Option, + /// Inclusive. + #[prost(int32, optional, tag = "2")] + pub end: ::core::option::Option, + } +} +/// Describes a value within an enum. +#[actix_prost_macros::serde] +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct EnumValueDescriptorProto { + #[prost(string, optional, tag = "1")] + pub name: ::core::option::Option<::prost::alloc::string::String>, + #[prost(int32, optional, tag = "2")] + pub number: ::core::option::Option, + #[prost(message, optional, tag = "3")] + pub options: ::core::option::Option, +} +/// Describes a service. +#[actix_prost_macros::serde] +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct ServiceDescriptorProto { + #[prost(string, optional, tag = "1")] + pub name: ::core::option::Option<::prost::alloc::string::String>, + #[prost(message, repeated, tag = "2")] + pub method: ::prost::alloc::vec::Vec, + #[prost(message, optional, tag = "3")] + pub options: ::core::option::Option, +} +/// Describes a method of a service. +#[actix_prost_macros::serde] +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct MethodDescriptorProto { + #[prost(string, optional, tag = "1")] + pub name: ::core::option::Option<::prost::alloc::string::String>, + /// Input and output type names. These are resolved in the same way as + /// FieldDescriptorProto.type_name, but must refer to a message type. + #[prost(string, optional, tag = "2")] + pub input_type: ::core::option::Option<::prost::alloc::string::String>, + #[prost(string, optional, tag = "3")] + pub output_type: ::core::option::Option<::prost::alloc::string::String>, + #[prost(message, optional, tag = "4")] + pub options: ::core::option::Option, + /// Identifies if client streams multiple client messages + #[prost(bool, optional, tag = "5", default = "false")] + pub client_streaming: ::core::option::Option, + /// Identifies if server streams multiple server messages + #[prost(bool, optional, tag = "6", default = "false")] + pub server_streaming: ::core::option::Option, +} +#[actix_prost_macros::serde] +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct FileOptions { + /// Sets the Java package where classes generated from this .proto will be + /// placed. By default, the proto package is used, but this is often + /// inappropriate because proto packages do not normally start with backwards + /// domain names. + #[prost(string, optional, tag = "1")] + pub java_package: ::core::option::Option<::prost::alloc::string::String>, + /// Controls the name of the wrapper Java class generated for the .proto file. + /// That class will always contain the .proto file's getDescriptor() method as + /// well as any top-level extensions defined in the .proto file. + /// If java_multiple_files is disabled, then all the other classes from the + /// .proto file will be nested inside the single wrapper outer class. 
+ #[prost(string, optional, tag = "8")] + pub java_outer_classname: ::core::option::Option<::prost::alloc::string::String>, + /// If enabled, then the Java code generator will generate a separate .java + /// file for each top-level message, enum, and service defined in the .proto + /// file. Thus, these types will *not* be nested inside the wrapper class + /// named by java_outer_classname. However, the wrapper class will still be + /// generated to contain the file's getDescriptor() method as well as any + /// top-level extensions defined in the file. + #[prost(bool, optional, tag = "10", default = "false")] + pub java_multiple_files: ::core::option::Option, + /// This option does nothing. + #[deprecated] + #[prost(bool, optional, tag = "20")] + pub java_generate_equals_and_hash: ::core::option::Option, + /// If set true, then the Java2 code generator will generate code that + /// throws an exception whenever an attempt is made to assign a non-UTF-8 + /// byte sequence to a string field. + /// Message reflection will do the same. + /// However, an extension field still accepts non-UTF-8 byte sequences. + /// This option has no effect on when used with the lite runtime. + #[prost(bool, optional, tag = "27", default = "false")] + pub java_string_check_utf8: ::core::option::Option, + #[prost( + enumeration = "file_options::OptimizeMode", + optional, + tag = "9", + default = "Speed" + )] + pub optimize_for: ::core::option::Option, + /// Sets the Go package where structs generated from this .proto will be + /// placed. If omitted, the Go package will be derived from the following: + /// - The basename of the package import path, if provided. + /// - Otherwise, the package statement in the .proto file, if present. + /// - Otherwise, the basename of the .proto file, without extension. + #[prost(string, optional, tag = "11")] + pub go_package: ::core::option::Option<::prost::alloc::string::String>, + /// Should generic services be generated in each language? "Generic" services + /// are not specific to any particular RPC system. They are generated by the + /// main code generators in each language (without additional plugins). + /// Generic services were the only kind of service generation supported by + /// early versions of google.protobuf. + /// + /// Generic services are now considered deprecated in favor of using plugins + /// that generate code specific to your particular RPC system. Therefore, + /// these default to false. Old code which depends on generic services should + /// explicitly set them to true. + #[prost(bool, optional, tag = "16", default = "false")] + pub cc_generic_services: ::core::option::Option, + #[prost(bool, optional, tag = "17", default = "false")] + pub java_generic_services: ::core::option::Option, + #[prost(bool, optional, tag = "18", default = "false")] + pub py_generic_services: ::core::option::Option, + #[prost(bool, optional, tag = "42", default = "false")] + pub php_generic_services: ::core::option::Option, + /// Is this file deprecated? + /// Depending on the target platform, this can emit Deprecated annotations + /// for everything in the file, or it will be completely ignored; in the very + /// least, this is a formalization for deprecating files. + #[prost(bool, optional, tag = "23", default = "false")] + pub deprecated: ::core::option::Option, + /// Enables the use of arenas for the proto messages in this file. This applies + /// only to generated classes for C++. 
+ #[prost(bool, optional, tag = "31", default = "true")] + pub cc_enable_arenas: ::core::option::Option, + /// Sets the objective c class prefix which is prepended to all objective c + /// generated classes from this .proto. There is no default. + #[prost(string, optional, tag = "36")] + pub objc_class_prefix: ::core::option::Option<::prost::alloc::string::String>, + /// Namespace for generated classes; defaults to the package. + #[prost(string, optional, tag = "37")] + pub csharp_namespace: ::core::option::Option<::prost::alloc::string::String>, + /// By default Swift generators will take the proto package and CamelCase it + /// replacing '.' with underscore and use that to prefix the types/symbols + /// defined. When this options is provided, they will use this value instead + /// to prefix the types/symbols defined. + #[prost(string, optional, tag = "39")] + pub swift_prefix: ::core::option::Option<::prost::alloc::string::String>, + /// Sets the php class prefix which is prepended to all php generated classes + /// from this .proto. Default is empty. + #[prost(string, optional, tag = "40")] + pub php_class_prefix: ::core::option::Option<::prost::alloc::string::String>, + /// Use this option to change the namespace of php generated classes. Default + /// is empty. When this option is empty, the package name will be used for + /// determining the namespace. + #[prost(string, optional, tag = "41")] + pub php_namespace: ::core::option::Option<::prost::alloc::string::String>, + /// Use this option to change the namespace of php generated metadata classes. + /// Default is empty. When this option is empty, the proto file name will be + /// used for determining the namespace. + #[prost(string, optional, tag = "44")] + pub php_metadata_namespace: ::core::option::Option<::prost::alloc::string::String>, + /// Use this option to change the package of ruby generated classes. Default + /// is empty. When this option is not set, the package name will be used for + /// determining the ruby package. + #[prost(string, optional, tag = "45")] + pub ruby_package: ::core::option::Option<::prost::alloc::string::String>, + /// Any features defined in the specific edition. + #[prost(message, optional, tag = "50")] + pub features: ::core::option::Option, + /// The parser stores options it doesn't recognize here. + /// See the documentation for the "Options" section above. + #[prost(message, repeated, tag = "999")] + pub uninterpreted_option: ::prost::alloc::vec::Vec, +} +/// Nested message and enum types in `FileOptions`. +pub mod file_options { + /// Generated classes can be optimized for speed or code size. + #[actix_prost_macros::serde] + #[derive( + Clone, + Copy, + Debug, + PartialEq, + Eq, + Hash, + PartialOrd, + Ord, + ::prost::Enumeration + )] + #[repr(i32)] + pub enum OptimizeMode { + /// Generate complete code for parsing, serialization, + Speed = 1, + /// etc. + /// + /// Use ReflectionOps to implement these methods. + CodeSize = 2, + /// Generate code using MessageLite and the lite runtime. + LiteRuntime = 3, + } + impl OptimizeMode { + /// String value of the enum field names used in the ProtoBuf definition. + /// + /// The values are not transformed in any way and thus are considered stable + /// (if the ProtoBuf definition does not change) and safe for programmatic use. 
+ pub fn as_str_name(&self) -> &'static str { + match self { + OptimizeMode::Speed => "SPEED", + OptimizeMode::CodeSize => "CODE_SIZE", + OptimizeMode::LiteRuntime => "LITE_RUNTIME", + } + } + /// Creates an enum from field names used in the ProtoBuf definition. + pub fn from_str_name(value: &str) -> ::core::option::Option { + match value { + "SPEED" => Some(Self::Speed), + "CODE_SIZE" => Some(Self::CodeSize), + "LITE_RUNTIME" => Some(Self::LiteRuntime), + _ => None, + } + } + } +} +#[actix_prost_macros::serde] +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct MessageOptions { + /// Set true to use the old proto1 MessageSet wire format for extensions. + /// This is provided for backwards-compatibility with the MessageSet wire + /// format. You should not use this for any other reason: It's less + /// efficient, has fewer features, and is more complicated. + /// + /// The message must be defined exactly as follows: + /// message Foo { + /// option message_set_wire_format = true; + /// extensions 4 to max; + /// } + /// Note that the message cannot have any defined fields; MessageSets only + /// have extensions. + /// + /// All extensions of your type must be singular messages; e.g. they cannot + /// be int32s, enums, or repeated messages. + /// + /// Because this is an option, the above two restrictions are not enforced by + /// the protocol compiler. + #[prost(bool, optional, tag = "1", default = "false")] + pub message_set_wire_format: ::core::option::Option, + /// Disables the generation of the standard "descriptor()" accessor, which can + /// conflict with a field of the same name. This is meant to make migration + /// from proto1 easier; new code should avoid fields named "descriptor". + #[prost(bool, optional, tag = "2", default = "false")] + pub no_standard_descriptor_accessor: ::core::option::Option, + /// Is this message deprecated? + /// Depending on the target platform, this can emit Deprecated annotations + /// for the message, or it will be completely ignored; in the very least, + /// this is a formalization for deprecating messages. + #[prost(bool, optional, tag = "3", default = "false")] + pub deprecated: ::core::option::Option, + /// NOTE: Do not set the option in .proto files. Always use the maps syntax + /// instead. The option should only be implicitly set by the proto compiler + /// parser. + /// + /// Whether the message is an automatically generated map entry type for the + /// maps field. + /// + /// For maps fields: + /// map map_field = 1; + /// The parsed descriptor looks like: + /// message MapFieldEntry { + /// option map_entry = true; + /// optional KeyType key = 1; + /// optional ValueType value = 2; + /// } + /// repeated MapFieldEntry map_field = 1; + /// + /// Implementations may choose not to generate the map_entry=true message, but + /// use a native map in the target language to hold the keys and values. + /// The reflection APIs in such implementations still need to work as + /// if the field is a repeated message field. + #[prost(bool, optional, tag = "7")] + pub map_entry: ::core::option::Option, + /// Enable the legacy handling of JSON field name conflicts. This lowercases + /// and strips underscored from the fields before comparison in proto3 only. + /// The new behavior takes `json_name` into account and applies to proto2 as + /// well. + /// + /// This should only be used as a temporary measure against broken builds due + /// to the change in behavior for JSON field name conflicts. 
+ /// + /// TODO This is legacy behavior we plan to remove once downstream + /// teams have had time to migrate. + #[deprecated] + #[prost(bool, optional, tag = "11")] + pub deprecated_legacy_json_field_conflicts: ::core::option::Option, + /// Any features defined in the specific edition. + #[prost(message, optional, tag = "12")] + pub features: ::core::option::Option, + /// The parser stores options it doesn't recognize here. See above. + #[prost(message, repeated, tag = "999")] + pub uninterpreted_option: ::prost::alloc::vec::Vec, +} +#[actix_prost_macros::serde] +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct FieldOptions { + /// The ctype option instructs the C++ code generator to use a different + /// representation of the field than it normally would. See the specific + /// options below. This option is only implemented to support use of + /// \[ctype=CORD\] and \[ctype=STRING\] (the default) on non-repeated fields of + /// type "bytes" in the open source release -- sorry, we'll try to include + /// other types in a future version! + #[prost( + enumeration = "field_options::CType", + optional, + tag = "1", + default = "String" + )] + pub ctype: ::core::option::Option, + /// The packed option can be enabled for repeated primitive fields to enable + /// a more efficient representation on the wire. Rather than repeatedly + /// writing the tag and type for each element, the entire array is encoded as + /// a single length-delimited blob. In proto3, only explicit setting it to + /// false will avoid using packed encoding. This option is prohibited in + /// Editions, but the `repeated_field_encoding` feature can be used to control + /// the behavior. + #[prost(bool, optional, tag = "2")] + pub packed: ::core::option::Option, + /// The jstype option determines the JavaScript type used for values of the + /// field. The option is permitted only for 64 bit integral and fixed types + /// (int64, uint64, sint64, fixed64, sfixed64). A field with jstype JS_STRING + /// is represented as JavaScript string, which avoids loss of precision that + /// can happen when a large value is converted to a floating point JavaScript. + /// Specifying JS_NUMBER for the jstype causes the generated JavaScript code to + /// use the JavaScript "number" type. The behavior of the default option + /// JS_NORMAL is implementation dependent. + /// + /// This option is an enum to permit additional types to be added, e.g. + /// goog.math.Integer. + #[prost( + enumeration = "field_options::JsType", + optional, + tag = "6", + default = "JsNormal" + )] + pub jstype: ::core::option::Option, + /// Should this field be parsed lazily? Lazy applies only to message-type + /// fields. It means that when the outer message is initially parsed, the + /// inner message's contents will not be parsed but instead stored in encoded + /// form. The inner message will actually be parsed when it is first accessed. + /// + /// This is only a hint. Implementations are free to choose whether to use + /// eager or lazy parsing regardless of the value of this option. However, + /// setting this option true suggests that the protocol author believes that + /// using lazy parsing on this field is worth the additional bookkeeping + /// overhead typically needed to implement it. + /// + /// This option does not affect the public interface of any generated code; + /// all method signatures remain the same. 
Furthermore, thread-safety of the + /// interface is not affected by this option; const methods remain safe to + /// call from multiple threads concurrently, while non-const methods continue + /// to require exclusive access. + /// + /// Note that implementations may choose not to check required fields within + /// a lazy sub-message. That is, calling IsInitialized() on the outer message + /// may return true even if the inner message has missing required fields. + /// This is necessary because otherwise the inner message would have to be + /// parsed in order to perform the check, defeating the purpose of lazy + /// parsing. An implementation which chooses not to check required fields + /// must be consistent about it. That is, for any particular sub-message, the + /// implementation must either *always* check its required fields, or *never* + /// check its required fields, regardless of whether or not the message has + /// been parsed. + /// + /// As of May 2022, lazy verifies the contents of the byte stream during + /// parsing. An invalid byte stream will cause the overall parsing to fail. + #[prost(bool, optional, tag = "5", default = "false")] + pub lazy: ::core::option::Option, + /// unverified_lazy does no correctness checks on the byte stream. This should + /// only be used where lazy with verification is prohibitive for performance + /// reasons. + #[prost(bool, optional, tag = "15", default = "false")] + pub unverified_lazy: ::core::option::Option, + /// Is this field deprecated? + /// Depending on the target platform, this can emit Deprecated annotations + /// for accessors, or it will be completely ignored; in the very least, this + /// is a formalization for deprecating fields. + #[prost(bool, optional, tag = "3", default = "false")] + pub deprecated: ::core::option::Option, + /// For Google-internal migration only. Do not use. + #[prost(bool, optional, tag = "10", default = "false")] + pub weak: ::core::option::Option, + /// Indicate that the field value should not be printed out when using debug + /// formats, e.g. when the field contains sensitive credentials. + #[prost(bool, optional, tag = "16", default = "false")] + pub debug_redact: ::core::option::Option, + #[prost(enumeration = "field_options::OptionRetention", optional, tag = "17")] + pub retention: ::core::option::Option, + #[prost( + enumeration = "field_options::OptionTargetType", + repeated, + packed = "false", + tag = "19" + )] + pub targets: ::prost::alloc::vec::Vec, + #[prost(message, repeated, tag = "20")] + pub edition_defaults: ::prost::alloc::vec::Vec, + /// Any features defined in the specific edition. + #[prost(message, optional, tag = "21")] + pub features: ::core::option::Option, + /// The parser stores options it doesn't recognize here. See above. + #[prost(message, repeated, tag = "999")] + pub uninterpreted_option: ::prost::alloc::vec::Vec, +} +/// Nested message and enum types in `FieldOptions`. +pub mod field_options { + #[actix_prost_macros::serde] + #[allow(clippy::derive_partial_eq_without_eq)] + #[derive(Clone, PartialEq, ::prost::Message)] + pub struct EditionDefault { + #[prost(enumeration = "super::Edition", optional, tag = "3")] + pub edition: ::core::option::Option, + /// Textproto value. 
+ #[prost(string, optional, tag = "2")] + pub value: ::core::option::Option<::prost::alloc::string::String>, + } + #[actix_prost_macros::serde] + #[derive( + Clone, + Copy, + Debug, + PartialEq, + Eq, + Hash, + PartialOrd, + Ord, + ::prost::Enumeration + )] + #[repr(i32)] + pub enum CType { + /// Default mode. + String = 0, + /// The option \[ctype=CORD\] may be applied to a non-repeated field of type + /// "bytes". It indicates that in C++, the data should be stored in a Cord + /// instead of a string. For very large strings, this may reduce memory + /// fragmentation. It may also allow better performance when parsing from a + /// Cord, or when parsing with aliasing enabled, as the parsed Cord may then + /// alias the original buffer. + Cord = 1, + StringPiece = 2, + } + impl CType { + /// String value of the enum field names used in the ProtoBuf definition. + /// + /// The values are not transformed in any way and thus are considered stable + /// (if the ProtoBuf definition does not change) and safe for programmatic use. + pub fn as_str_name(&self) -> &'static str { + match self { + CType::String => "STRING", + CType::Cord => "CORD", + CType::StringPiece => "STRING_PIECE", + } + } + /// Creates an enum from field names used in the ProtoBuf definition. + pub fn from_str_name(value: &str) -> ::core::option::Option { + match value { + "STRING" => Some(Self::String), + "CORD" => Some(Self::Cord), + "STRING_PIECE" => Some(Self::StringPiece), + _ => None, + } + } + } + #[actix_prost_macros::serde] + #[derive( + Clone, + Copy, + Debug, + PartialEq, + Eq, + Hash, + PartialOrd, + Ord, + ::prost::Enumeration + )] + #[repr(i32)] + pub enum JsType { + /// Use the default type. + JsNormal = 0, + /// Use JavaScript strings. + JsString = 1, + /// Use JavaScript numbers. + JsNumber = 2, + } + impl JsType { + /// String value of the enum field names used in the ProtoBuf definition. + /// + /// The values are not transformed in any way and thus are considered stable + /// (if the ProtoBuf definition does not change) and safe for programmatic use. + pub fn as_str_name(&self) -> &'static str { + match self { + JsType::JsNormal => "JS_NORMAL", + JsType::JsString => "JS_STRING", + JsType::JsNumber => "JS_NUMBER", + } + } + /// Creates an enum from field names used in the ProtoBuf definition. + pub fn from_str_name(value: &str) -> ::core::option::Option { + match value { + "JS_NORMAL" => Some(Self::JsNormal), + "JS_STRING" => Some(Self::JsString), + "JS_NUMBER" => Some(Self::JsNumber), + _ => None, + } + } + } + /// If set to RETENTION_SOURCE, the option will be omitted from the binary. + /// Note: as of January 2023, support for this is in progress and does not yet + /// have an effect (b/264593489). + #[actix_prost_macros::serde] + #[derive( + Clone, + Copy, + Debug, + PartialEq, + Eq, + Hash, + PartialOrd, + Ord, + ::prost::Enumeration + )] + #[repr(i32)] + pub enum OptionRetention { + RetentionUnknown = 0, + RetentionRuntime = 1, + RetentionSource = 2, + } + impl OptionRetention { + /// String value of the enum field names used in the ProtoBuf definition. + /// + /// The values are not transformed in any way and thus are considered stable + /// (if the ProtoBuf definition does not change) and safe for programmatic use. 
+ pub fn as_str_name(&self) -> &'static str { + match self { + OptionRetention::RetentionUnknown => "RETENTION_UNKNOWN", + OptionRetention::RetentionRuntime => "RETENTION_RUNTIME", + OptionRetention::RetentionSource => "RETENTION_SOURCE", + } + } + /// Creates an enum from field names used in the ProtoBuf definition. + pub fn from_str_name(value: &str) -> ::core::option::Option { + match value { + "RETENTION_UNKNOWN" => Some(Self::RetentionUnknown), + "RETENTION_RUNTIME" => Some(Self::RetentionRuntime), + "RETENTION_SOURCE" => Some(Self::RetentionSource), + _ => None, + } + } + } + /// This indicates the types of entities that the field may apply to when used + /// as an option. If it is unset, then the field may be freely used as an + /// option on any kind of entity. Note: as of January 2023, support for this is + /// in progress and does not yet have an effect (b/264593489). + #[actix_prost_macros::serde] + #[derive( + Clone, + Copy, + Debug, + PartialEq, + Eq, + Hash, + PartialOrd, + Ord, + ::prost::Enumeration + )] + #[repr(i32)] + pub enum OptionTargetType { + TargetTypeUnknown = 0, + TargetTypeFile = 1, + TargetTypeExtensionRange = 2, + TargetTypeMessage = 3, + TargetTypeField = 4, + TargetTypeOneof = 5, + TargetTypeEnum = 6, + TargetTypeEnumEntry = 7, + TargetTypeService = 8, + TargetTypeMethod = 9, + } + impl OptionTargetType { + /// String value of the enum field names used in the ProtoBuf definition. + /// + /// The values are not transformed in any way and thus are considered stable + /// (if the ProtoBuf definition does not change) and safe for programmatic use. + pub fn as_str_name(&self) -> &'static str { + match self { + OptionTargetType::TargetTypeUnknown => "TARGET_TYPE_UNKNOWN", + OptionTargetType::TargetTypeFile => "TARGET_TYPE_FILE", + OptionTargetType::TargetTypeExtensionRange => { + "TARGET_TYPE_EXTENSION_RANGE" + } + OptionTargetType::TargetTypeMessage => "TARGET_TYPE_MESSAGE", + OptionTargetType::TargetTypeField => "TARGET_TYPE_FIELD", + OptionTargetType::TargetTypeOneof => "TARGET_TYPE_ONEOF", + OptionTargetType::TargetTypeEnum => "TARGET_TYPE_ENUM", + OptionTargetType::TargetTypeEnumEntry => "TARGET_TYPE_ENUM_ENTRY", + OptionTargetType::TargetTypeService => "TARGET_TYPE_SERVICE", + OptionTargetType::TargetTypeMethod => "TARGET_TYPE_METHOD", + } + } + /// Creates an enum from field names used in the ProtoBuf definition. + pub fn from_str_name(value: &str) -> ::core::option::Option { + match value { + "TARGET_TYPE_UNKNOWN" => Some(Self::TargetTypeUnknown), + "TARGET_TYPE_FILE" => Some(Self::TargetTypeFile), + "TARGET_TYPE_EXTENSION_RANGE" => Some(Self::TargetTypeExtensionRange), + "TARGET_TYPE_MESSAGE" => Some(Self::TargetTypeMessage), + "TARGET_TYPE_FIELD" => Some(Self::TargetTypeField), + "TARGET_TYPE_ONEOF" => Some(Self::TargetTypeOneof), + "TARGET_TYPE_ENUM" => Some(Self::TargetTypeEnum), + "TARGET_TYPE_ENUM_ENTRY" => Some(Self::TargetTypeEnumEntry), + "TARGET_TYPE_SERVICE" => Some(Self::TargetTypeService), + "TARGET_TYPE_METHOD" => Some(Self::TargetTypeMethod), + _ => None, + } + } + } +} +#[actix_prost_macros::serde] +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct OneofOptions { + /// Any features defined in the specific edition. + #[prost(message, optional, tag = "1")] + pub features: ::core::option::Option, + /// The parser stores options it doesn't recognize here. See above. 
+ #[prost(message, repeated, tag = "999")] + pub uninterpreted_option: ::prost::alloc::vec::Vec, +} +#[actix_prost_macros::serde] +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct EnumOptions { + /// Set this option to true to allow mapping different tag names to the same + /// value. + #[prost(bool, optional, tag = "2")] + pub allow_alias: ::core::option::Option, + /// Is this enum deprecated? + /// Depending on the target platform, this can emit Deprecated annotations + /// for the enum, or it will be completely ignored; in the very least, this + /// is a formalization for deprecating enums. + #[prost(bool, optional, tag = "3", default = "false")] + pub deprecated: ::core::option::Option, + /// Enable the legacy handling of JSON field name conflicts. This lowercases + /// and strips underscored from the fields before comparison in proto3 only. + /// The new behavior takes `json_name` into account and applies to proto2 as + /// well. + /// TODO Remove this legacy behavior once downstream teams have + /// had time to migrate. + #[deprecated] + #[prost(bool, optional, tag = "6")] + pub deprecated_legacy_json_field_conflicts: ::core::option::Option, + /// Any features defined in the specific edition. + #[prost(message, optional, tag = "7")] + pub features: ::core::option::Option, + /// The parser stores options it doesn't recognize here. See above. + #[prost(message, repeated, tag = "999")] + pub uninterpreted_option: ::prost::alloc::vec::Vec, +} +#[actix_prost_macros::serde] +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct EnumValueOptions { + /// Is this enum value deprecated? + /// Depending on the target platform, this can emit Deprecated annotations + /// for the enum value, or it will be completely ignored; in the very least, + /// this is a formalization for deprecating enum values. + #[prost(bool, optional, tag = "1", default = "false")] + pub deprecated: ::core::option::Option, + /// Any features defined in the specific edition. + #[prost(message, optional, tag = "2")] + pub features: ::core::option::Option, + /// Indicate that fields annotated with this enum value should not be printed + /// out when using debug formats, e.g. when the field contains sensitive + /// credentials. + #[prost(bool, optional, tag = "3", default = "false")] + pub debug_redact: ::core::option::Option, + /// The parser stores options it doesn't recognize here. See above. + #[prost(message, repeated, tag = "999")] + pub uninterpreted_option: ::prost::alloc::vec::Vec, +} +#[actix_prost_macros::serde] +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct ServiceOptions { + /// Any features defined in the specific edition. + #[prost(message, optional, tag = "34")] + pub features: ::core::option::Option, + /// Is this service deprecated? + /// Depending on the target platform, this can emit Deprecated annotations + /// for the service, or it will be completely ignored; in the very least, + /// this is a formalization for deprecating services. + #[prost(bool, optional, tag = "33", default = "false")] + pub deprecated: ::core::option::Option, + /// The parser stores options it doesn't recognize here. See above. 
+ #[prost(message, repeated, tag = "999")] + pub uninterpreted_option: ::prost::alloc::vec::Vec, +} +#[actix_prost_macros::serde] +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct MethodOptions { + /// Is this method deprecated? + /// Depending on the target platform, this can emit Deprecated annotations + /// for the method, or it will be completely ignored; in the very least, + /// this is a formalization for deprecating methods. + #[prost(bool, optional, tag = "33", default = "false")] + pub deprecated: ::core::option::Option, + #[prost( + enumeration = "method_options::IdempotencyLevel", + optional, + tag = "34", + default = "IdempotencyUnknown" + )] + pub idempotency_level: ::core::option::Option, + /// Any features defined in the specific edition. + #[prost(message, optional, tag = "35")] + pub features: ::core::option::Option, + /// The parser stores options it doesn't recognize here. See above. + #[prost(message, repeated, tag = "999")] + pub uninterpreted_option: ::prost::alloc::vec::Vec, +} +/// Nested message and enum types in `MethodOptions`. +pub mod method_options { + /// Is this method side-effect-free (or safe in HTTP parlance), or idempotent, + /// or neither? HTTP based RPC implementation may choose GET verb for safe + /// methods, and PUT verb for idempotent methods instead of the default POST. + #[actix_prost_macros::serde] + #[derive( + Clone, + Copy, + Debug, + PartialEq, + Eq, + Hash, + PartialOrd, + Ord, + ::prost::Enumeration + )] + #[repr(i32)] + pub enum IdempotencyLevel { + IdempotencyUnknown = 0, + /// implies idempotent + NoSideEffects = 1, + /// idempotent, but may have side effects + Idempotent = 2, + } + impl IdempotencyLevel { + /// String value of the enum field names used in the ProtoBuf definition. + /// + /// The values are not transformed in any way and thus are considered stable + /// (if the ProtoBuf definition does not change) and safe for programmatic use. + pub fn as_str_name(&self) -> &'static str { + match self { + IdempotencyLevel::IdempotencyUnknown => "IDEMPOTENCY_UNKNOWN", + IdempotencyLevel::NoSideEffects => "NO_SIDE_EFFECTS", + IdempotencyLevel::Idempotent => "IDEMPOTENT", + } + } + /// Creates an enum from field names used in the ProtoBuf definition. + pub fn from_str_name(value: &str) -> ::core::option::Option { + match value { + "IDEMPOTENCY_UNKNOWN" => Some(Self::IdempotencyUnknown), + "NO_SIDE_EFFECTS" => Some(Self::NoSideEffects), + "IDEMPOTENT" => Some(Self::Idempotent), + _ => None, + } + } + } +} +/// A message representing a option the parser does not recognize. This only +/// appears in options protos created by the compiler::Parser class. +/// DescriptorPool resolves these when building Descriptor objects. Therefore, +/// options protos in descriptor objects (e.g. returned by Descriptor::options(), +/// or produced by Descriptor::CopyTo()) will never have UninterpretedOptions +/// in them. +#[actix_prost_macros::serde] +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct UninterpretedOption { + #[prost(message, repeated, tag = "2")] + pub name: ::prost::alloc::vec::Vec, + /// The value of the uninterpreted option, in whatever type the tokenizer + /// identified it as during parsing. Exactly one of these should be set. 
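    // Illustrative sketch (not from this diff): for an unrecognized custom option such
    // as `option (my.ext).threshold = 42;` (names hypothetical), the parser keeps the
    // dotted name as `NamePart` segments and sets exactly one of the value fields below:
    //
    //     UninterpretedOption {
    //         name: vec![
    //             uninterpreted_option::NamePart { name_part: "my.ext".into(), is_extension: true },
    //             uninterpreted_option::NamePart { name_part: "threshold".into(), is_extension: false },
    //         ],
    //         positive_int_value: Some(42),
    //         ..Default::default()
    //     }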
+ #[prost(string, optional, tag = "3")] + pub identifier_value: ::core::option::Option<::prost::alloc::string::String>, + #[prost(uint64, optional, tag = "4")] + pub positive_int_value: ::core::option::Option, + #[prost(int64, optional, tag = "5")] + pub negative_int_value: ::core::option::Option, + #[prost(double, optional, tag = "6")] + pub double_value: ::core::option::Option, + #[prost(bytes = "bytes", optional, tag = "7")] + pub string_value: ::core::option::Option<::prost::bytes::Bytes>, + #[prost(string, optional, tag = "8")] + pub aggregate_value: ::core::option::Option<::prost::alloc::string::String>, +} +/// Nested message and enum types in `UninterpretedOption`. +pub mod uninterpreted_option { + /// The name of the uninterpreted option. Each string represents a segment in + /// a dot-separated name. is_extension is true iff a segment represents an + /// extension (denoted with parentheses in options specs in .proto files). + /// E.g.,{ ["foo", false], ["bar.baz", true], ["moo", false] } represents + /// "foo.(bar.baz).moo". + #[actix_prost_macros::serde] + #[allow(clippy::derive_partial_eq_without_eq)] + #[derive(Clone, PartialEq, ::prost::Message)] + pub struct NamePart { + #[prost(string, required, tag = "1")] + pub name_part: ::prost::alloc::string::String, + #[prost(bool, required, tag = "2")] + pub is_extension: bool, + } +} +/// TODO Enums in C++ gencode (and potentially other languages) are +/// not well scoped. This means that each of the feature enums below can clash +/// with each other. The short names we've chosen maximize call-site +/// readability, but leave us very open to this scenario. A future feature will +/// be designed and implemented to handle this, hopefully before we ever hit a +/// conflict here. +#[actix_prost_macros::serde] +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct FeatureSet { + #[prost(enumeration = "feature_set::FieldPresence", optional, tag = "1")] + pub field_presence: ::core::option::Option, + #[prost(enumeration = "feature_set::EnumType", optional, tag = "2")] + pub enum_type: ::core::option::Option, + #[prost(enumeration = "feature_set::RepeatedFieldEncoding", optional, tag = "3")] + pub repeated_field_encoding: ::core::option::Option, + #[prost(enumeration = "feature_set::Utf8Validation", optional, tag = "4")] + pub utf8_validation: ::core::option::Option, + #[prost(enumeration = "feature_set::MessageEncoding", optional, tag = "5")] + pub message_encoding: ::core::option::Option, + #[prost(enumeration = "feature_set::JsonFormat", optional, tag = "6")] + pub json_format: ::core::option::Option, +} +/// Nested message and enum types in `FeatureSet`. +pub mod feature_set { + #[actix_prost_macros::serde] + #[derive( + Clone, + Copy, + Debug, + PartialEq, + Eq, + Hash, + PartialOrd, + Ord, + ::prost::Enumeration + )] + #[repr(i32)] + pub enum FieldPresence { + Unknown = 0, + Explicit = 1, + Implicit = 2, + LegacyRequired = 3, + } + impl FieldPresence { + /// String value of the enum field names used in the ProtoBuf definition. + /// + /// The values are not transformed in any way and thus are considered stable + /// (if the ProtoBuf definition does not change) and safe for programmatic use. 
+ pub fn as_str_name(&self) -> &'static str { + match self { + FieldPresence::Unknown => "FIELD_PRESENCE_UNKNOWN", + FieldPresence::Explicit => "EXPLICIT", + FieldPresence::Implicit => "IMPLICIT", + FieldPresence::LegacyRequired => "LEGACY_REQUIRED", + } + } + /// Creates an enum from field names used in the ProtoBuf definition. + pub fn from_str_name(value: &str) -> ::core::option::Option { + match value { + "FIELD_PRESENCE_UNKNOWN" => Some(Self::Unknown), + "EXPLICIT" => Some(Self::Explicit), + "IMPLICIT" => Some(Self::Implicit), + "LEGACY_REQUIRED" => Some(Self::LegacyRequired), + _ => None, + } + } + } + #[actix_prost_macros::serde] + #[derive( + Clone, + Copy, + Debug, + PartialEq, + Eq, + Hash, + PartialOrd, + Ord, + ::prost::Enumeration + )] + #[repr(i32)] + pub enum EnumType { + Unknown = 0, + Open = 1, + Closed = 2, + } + impl EnumType { + /// String value of the enum field names used in the ProtoBuf definition. + /// + /// The values are not transformed in any way and thus are considered stable + /// (if the ProtoBuf definition does not change) and safe for programmatic use. + pub fn as_str_name(&self) -> &'static str { + match self { + EnumType::Unknown => "ENUM_TYPE_UNKNOWN", + EnumType::Open => "OPEN", + EnumType::Closed => "CLOSED", + } + } + /// Creates an enum from field names used in the ProtoBuf definition. + pub fn from_str_name(value: &str) -> ::core::option::Option { + match value { + "ENUM_TYPE_UNKNOWN" => Some(Self::Unknown), + "OPEN" => Some(Self::Open), + "CLOSED" => Some(Self::Closed), + _ => None, + } + } + } + #[actix_prost_macros::serde] + #[derive( + Clone, + Copy, + Debug, + PartialEq, + Eq, + Hash, + PartialOrd, + Ord, + ::prost::Enumeration + )] + #[repr(i32)] + pub enum RepeatedFieldEncoding { + Unknown = 0, + Packed = 1, + Expanded = 2, + } + impl RepeatedFieldEncoding { + /// String value of the enum field names used in the ProtoBuf definition. + /// + /// The values are not transformed in any way and thus are considered stable + /// (if the ProtoBuf definition does not change) and safe for programmatic use. + pub fn as_str_name(&self) -> &'static str { + match self { + RepeatedFieldEncoding::Unknown => "REPEATED_FIELD_ENCODING_UNKNOWN", + RepeatedFieldEncoding::Packed => "PACKED", + RepeatedFieldEncoding::Expanded => "EXPANDED", + } + } + /// Creates an enum from field names used in the ProtoBuf definition. + pub fn from_str_name(value: &str) -> ::core::option::Option { + match value { + "REPEATED_FIELD_ENCODING_UNKNOWN" => Some(Self::Unknown), + "PACKED" => Some(Self::Packed), + "EXPANDED" => Some(Self::Expanded), + _ => None, + } + } + } + #[actix_prost_macros::serde] + #[derive( + Clone, + Copy, + Debug, + PartialEq, + Eq, + Hash, + PartialOrd, + Ord, + ::prost::Enumeration + )] + #[repr(i32)] + pub enum Utf8Validation { + Unknown = 0, + None = 1, + Verify = 2, + } + impl Utf8Validation { + /// String value of the enum field names used in the ProtoBuf definition. + /// + /// The values are not transformed in any way and thus are considered stable + /// (if the ProtoBuf definition does not change) and safe for programmatic use. + pub fn as_str_name(&self) -> &'static str { + match self { + Utf8Validation::Unknown => "UTF8_VALIDATION_UNKNOWN", + Utf8Validation::None => "NONE", + Utf8Validation::Verify => "VERIFY", + } + } + /// Creates an enum from field names used in the ProtoBuf definition. 
+ pub fn from_str_name(value: &str) -> ::core::option::Option { + match value { + "UTF8_VALIDATION_UNKNOWN" => Some(Self::Unknown), + "NONE" => Some(Self::None), + "VERIFY" => Some(Self::Verify), + _ => None, + } + } + } + #[actix_prost_macros::serde] + #[derive( + Clone, + Copy, + Debug, + PartialEq, + Eq, + Hash, + PartialOrd, + Ord, + ::prost::Enumeration + )] + #[repr(i32)] + pub enum MessageEncoding { + Unknown = 0, + LengthPrefixed = 1, + Delimited = 2, + } + impl MessageEncoding { + /// String value of the enum field names used in the ProtoBuf definition. + /// + /// The values are not transformed in any way and thus are considered stable + /// (if the ProtoBuf definition does not change) and safe for programmatic use. + pub fn as_str_name(&self) -> &'static str { + match self { + MessageEncoding::Unknown => "MESSAGE_ENCODING_UNKNOWN", + MessageEncoding::LengthPrefixed => "LENGTH_PREFIXED", + MessageEncoding::Delimited => "DELIMITED", + } + } + /// Creates an enum from field names used in the ProtoBuf definition. + pub fn from_str_name(value: &str) -> ::core::option::Option { + match value { + "MESSAGE_ENCODING_UNKNOWN" => Some(Self::Unknown), + "LENGTH_PREFIXED" => Some(Self::LengthPrefixed), + "DELIMITED" => Some(Self::Delimited), + _ => None, + } + } + } + #[actix_prost_macros::serde] + #[derive( + Clone, + Copy, + Debug, + PartialEq, + Eq, + Hash, + PartialOrd, + Ord, + ::prost::Enumeration + )] + #[repr(i32)] + pub enum JsonFormat { + Unknown = 0, + Allow = 1, + LegacyBestEffort = 2, + } + impl JsonFormat { + /// String value of the enum field names used in the ProtoBuf definition. + /// + /// The values are not transformed in any way and thus are considered stable + /// (if the ProtoBuf definition does not change) and safe for programmatic use. + pub fn as_str_name(&self) -> &'static str { + match self { + JsonFormat::Unknown => "JSON_FORMAT_UNKNOWN", + JsonFormat::Allow => "ALLOW", + JsonFormat::LegacyBestEffort => "LEGACY_BEST_EFFORT", + } + } + /// Creates an enum from field names used in the ProtoBuf definition. + pub fn from_str_name(value: &str) -> ::core::option::Option { + match value { + "JSON_FORMAT_UNKNOWN" => Some(Self::Unknown), + "ALLOW" => Some(Self::Allow), + "LEGACY_BEST_EFFORT" => Some(Self::LegacyBestEffort), + _ => None, + } + } + } +} +/// A compiled specification for the defaults of a set of features. These +/// messages are generated from FeatureSet extensions and can be used to seed +/// feature resolution. The resolution with this object becomes a simple search +/// for the closest matching edition, followed by proto merges. +#[actix_prost_macros::serde] +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct FeatureSetDefaults { + #[prost(message, repeated, tag = "1")] + pub defaults: ::prost::alloc::vec::Vec< + feature_set_defaults::FeatureSetEditionDefault, + >, + /// The minimum supported edition (inclusive) when this was constructed. + /// Editions before this will not have defaults. + #[prost(enumeration = "Edition", optional, tag = "4")] + pub minimum_edition: ::core::option::Option, + /// The maximum known edition (inclusive) when this was constructed. Editions + /// after this will not have reliable defaults. + #[prost(enumeration = "Edition", optional, tag = "5")] + pub maximum_edition: ::core::option::Option, +} +/// Nested message and enum types in `FeatureSetDefaults`. +pub mod feature_set_defaults { + /// A map from every known edition with a unique set of defaults to its + /// defaults. 
Not all editions may be contained here. For a given edition, + /// the defaults at the closest matching edition ordered at or before it should + /// be used. This field must be in strict ascending order by edition. + #[actix_prost_macros::serde] + #[allow(clippy::derive_partial_eq_without_eq)] + #[derive(Clone, PartialEq, ::prost::Message)] + pub struct FeatureSetEditionDefault { + #[prost(enumeration = "super::Edition", optional, tag = "3")] + pub edition: ::core::option::Option, + #[prost(message, optional, tag = "2")] + pub features: ::core::option::Option, + } +} +/// Encapsulates information about the original source file from which a +/// FileDescriptorProto was generated. +#[actix_prost_macros::serde] +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct SourceCodeInfo { + /// A Location identifies a piece of source code in a .proto file which + /// corresponds to a particular definition. This information is intended + /// to be useful to IDEs, code indexers, documentation generators, and similar + /// tools. + /// + /// For example, say we have a file like: + /// message Foo { + /// optional string foo = 1; + /// } + /// Let's look at just the field definition: + /// optional string foo = 1; + /// ^ ^^ ^^ ^ ^^^ + /// a bc de f ghi + /// We have the following locations: + /// span path represents + /// [a,i) [ 4, 0, 2, 0 ] The whole field definition. + /// [a,b) [ 4, 0, 2, 0, 4 ] The label (optional). + /// [c,d) [ 4, 0, 2, 0, 5 ] The type (string). + /// [e,f) [ 4, 0, 2, 0, 1 ] The name (foo). + /// [g,h) [ 4, 0, 2, 0, 3 ] The number (1). + /// + /// Notes: + /// - A location may refer to a repeated field itself (i.e. not to any + /// particular index within it). This is used whenever a set of elements are + /// logically enclosed in a single code segment. For example, an entire + /// extend block (possibly containing multiple extension definitions) will + /// have an outer location whose path refers to the "extensions" repeated + /// field without an index. + /// - Multiple locations may have the same path. This happens when a single + /// logical declaration is spread out across multiple places. The most + /// obvious example is the "extend" block again -- there may be multiple + /// extend blocks in the same scope, each of which will have the same path. + /// - A location's span is not always a subset of its parent's span. For + /// example, the "extendee" of an extension declaration appears at the + /// beginning of the "extend" block and is shared by all extensions within + /// the block. + /// - Just because a location's span is a subset of some other location's span + /// does not mean that it is a descendant. For example, a "group" defines + /// both a type and a field in a single declaration. Thus, the locations + /// corresponding to the type and field and their components will overlap. + /// - Code which tries to interpret locations should probably be designed to + /// ignore those that it doesn't understand, as more types of locations could + /// be recorded in the future. + #[prost(message, repeated, tag = "1")] + pub location: ::prost::alloc::vec::Vec, +} +/// Nested message and enum types in `SourceCodeInfo`. +pub mod source_code_info { + #[actix_prost_macros::serde] + #[allow(clippy::derive_partial_eq_without_eq)] + #[derive(Clone, PartialEq, ::prost::Message)] + pub struct Location { + /// Identifies which part of the FileDescriptorProto was defined at this + /// location. 
+ /// + /// Each element is a field number or an index. They form a path from + /// the root FileDescriptorProto to the place where the definition occurs. + /// For example, this path: + /// [ 4, 3, 2, 7, 1 ] + /// refers to: + /// file.message_type(3) // 4, 3 + /// .field(7) // 2, 7 + /// .name() // 1 + /// This is because FileDescriptorProto.message_type has field number 4: + /// repeated DescriptorProto message_type = 4; + /// and DescriptorProto.field has field number 2: + /// repeated FieldDescriptorProto field = 2; + /// and FieldDescriptorProto.name has field number 1: + /// optional string name = 1; + /// + /// Thus, the above path gives the location of a field name. If we removed + /// the last element: + /// [ 4, 3, 2, 7 ] + /// this path refers to the whole field declaration (from the beginning + /// of the label to the terminating semicolon). + #[prost(int32, repeated, tag = "1")] + pub path: ::prost::alloc::vec::Vec, + /// Always has exactly three or four elements: start line, start column, + /// end line (optional, otherwise assumed same as start line), end column. + /// These are packed into a single field for efficiency. Note that line + /// and column numbers are zero-based -- typically you will want to add + /// 1 to each before displaying to a user. + #[prost(int32, repeated, tag = "2")] + pub span: ::prost::alloc::vec::Vec, + /// If this SourceCodeInfo represents a complete declaration, these are any + /// comments appearing before and after the declaration which appear to be + /// attached to the declaration. + /// + /// A series of line comments appearing on consecutive lines, with no other + /// tokens appearing on those lines, will be treated as a single comment. + /// + /// leading_detached_comments will keep paragraphs of comments that appear + /// before (but not connected to) the current element. Each paragraph, + /// separated by empty lines, will be one comment element in the repeated + /// field. + /// + /// Only the comment content is provided; comment markers (e.g. //) are + /// stripped out. For block comments, leading whitespace and an asterisk + /// will be stripped from the beginning of each line other than the first. + /// Newlines are included in the output. + /// + /// Examples: + /// + /// optional int32 foo = 1; // Comment attached to foo. + /// // Comment attached to bar. + /// optional int32 bar = 2; + /// + /// optional string baz = 3; + /// // Comment attached to baz. + /// // Another line attached to baz. + /// + /// // Comment attached to moo. + /// // + /// // Another line attached to moo. + /// optional double moo = 4; + /// + /// // Detached comment for corge. This is not leading or trailing comments + /// // to moo or corge because there are blank lines separating it from + /// // both. + /// + /// // Detached comment for corge paragraph 2. + /// + /// optional string corge = 5; + /// /* Block comment attached + /// * to corge. Leading asterisks + /// * will be removed. */ + /// /* Block comment attached to + /// * grault. */ + /// optional int32 grault = 6; + /// + /// // ignored detached comments. 
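// Editor's illustrative note (not part of the prost-generated output): the
// `span` documented above always holds three or four zero-based integers
// (start line, start column, optional end line, end column). A minimal,
// hedged sketch of rendering such a span 1-based for display; the helper name
// and the plain `&[i32]` input are assumptions for illustration only:
//
//     fn format_span(span: &[i32]) -> Option<String> {
//         match *span {
//             // Three elements: the end line is the same as the start line.
//             [line, start_col, end_col] => Some(format!(
//                 "{}:{}-{}:{}",
//                 line + 1, start_col + 1, line + 1, end_col + 1
//             )),
//             // Four elements: start line, start column, end line, end column.
//             [start_line, start_col, end_line, end_col] => Some(format!(
//                 "{}:{}-{}:{}",
//                 start_line + 1, start_col + 1, end_line + 1, end_col + 1
//             )),
//             _ => None,
//         }
//     }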
+ #[prost(string, optional, tag = "3")] + pub leading_comments: ::core::option::Option<::prost::alloc::string::String>, + #[prost(string, optional, tag = "4")] + pub trailing_comments: ::core::option::Option<::prost::alloc::string::String>, + #[prost(string, repeated, tag = "6")] + pub leading_detached_comments: ::prost::alloc::vec::Vec< + ::prost::alloc::string::String, + >, + } +} +/// Describes the relationship between generated code and its original source +/// file. A GeneratedCodeInfo message is associated with only one generated +/// source file, but may contain references to different source .proto files. +#[actix_prost_macros::serde] +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct GeneratedCodeInfo { + /// An Annotation connects some span of text in generated code to an element + /// of its generating .proto file. + #[prost(message, repeated, tag = "1")] + pub annotation: ::prost::alloc::vec::Vec, +} +/// Nested message and enum types in `GeneratedCodeInfo`. +pub mod generated_code_info { + #[actix_prost_macros::serde] + #[allow(clippy::derive_partial_eq_without_eq)] + #[derive(Clone, PartialEq, ::prost::Message)] + pub struct Annotation { + /// Identifies the element in the original source .proto file. This field + /// is formatted the same as SourceCodeInfo.Location.path. + #[prost(int32, repeated, tag = "1")] + pub path: ::prost::alloc::vec::Vec, + /// Identifies the filesystem path to the original source .proto. + #[prost(string, optional, tag = "2")] + pub source_file: ::core::option::Option<::prost::alloc::string::String>, + /// Identifies the starting offset in bytes in the generated code + /// that relates to the identified object. + #[prost(int32, optional, tag = "3")] + pub begin: ::core::option::Option, + /// Identifies the ending offset in bytes in the generated code that + /// relates to the identified object. The end offset should be one past + /// the last relevant byte (so the length of the text = end - begin). + #[prost(int32, optional, tag = "4")] + pub end: ::core::option::Option, + #[prost(enumeration = "annotation::Semantic", optional, tag = "5")] + pub semantic: ::core::option::Option, + } + /// Nested message and enum types in `Annotation`. + pub mod annotation { + /// Represents the identified object's effect on the element in the original + /// .proto file. + #[actix_prost_macros::serde] + #[derive( + Clone, + Copy, + Debug, + PartialEq, + Eq, + Hash, + PartialOrd, + Ord, + ::prost::Enumeration + )] + #[repr(i32)] + pub enum Semantic { + /// There is no effect or the effect is indescribable. + None = 0, + /// The element is set or otherwise mutated. + Set = 1, + /// An alias to the element is returned. + Alias = 2, + } + impl Semantic { + /// String value of the enum field names used in the ProtoBuf definition. + /// + /// The values are not transformed in any way and thus are considered stable + /// (if the ProtoBuf definition does not change) and safe for programmatic use. + pub fn as_str_name(&self) -> &'static str { + match self { + Semantic::None => "NONE", + Semantic::Set => "SET", + Semantic::Alias => "ALIAS", + } + } + /// Creates an enum from field names used in the ProtoBuf definition. + pub fn from_str_name(value: &str) -> ::core::option::Option { + match value { + "NONE" => Some(Self::None), + "SET" => Some(Self::Set), + "ALIAS" => Some(Self::Alias), + _ => None, + } + } + } + } +} +/// The full set of known editions. 
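// Editor's illustrative note (not part of the prost-generated output): per the
// FeatureSetDefaults documentation above, resolution picks the closest default
// whose edition is at or before the target edition, relying on the `defaults`
// list being in strict ascending edition order. A hedged sketch of that
// lookup; the function name is an assumption, and editions are compared as the
// raw i32 values used by the `Edition` enum below:
//
//     fn closest_defaults(
//         defaults: &[feature_set_defaults::FeatureSetEditionDefault],
//         target_edition: i32,
//     ) -> Option<&feature_set_defaults::FeatureSetEditionDefault> {
//         defaults
//             .iter()
//             .take_while(|d| d.edition.unwrap_or(0) <= target_edition)
//             .last()
//     }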
+#[actix_prost_macros::serde] +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)] +#[repr(i32)] +pub enum Edition { + /// A placeholder for an unknown edition value. + Unknown = 0, + /// Legacy syntax "editions". These pre-date editions, but behave much like + /// distinct editions. These can't be used to specify the edition of proto + /// files, but feature definitions must supply proto2/proto3 defaults for + /// backwards compatibility. + Proto2 = 998, + Proto3 = 999, + /// Editions that have been released. The specific values are arbitrary and + /// should not be depended on, but they will always be time-ordered for easy + /// comparison. + Edition2023 = 1000, + /// Placeholder editions for testing feature resolution. These should not be + /// used or relied on outside of tests. + Edition1TestOnly = 1, + Edition2TestOnly = 2, + Edition99997TestOnly = 99997, + Edition99998TestOnly = 99998, + Edition99999TestOnly = 99999, +} +impl Edition { + /// String value of the enum field names used in the ProtoBuf definition. + /// + /// The values are not transformed in any way and thus are considered stable + /// (if the ProtoBuf definition does not change) and safe for programmatic use. + pub fn as_str_name(&self) -> &'static str { + match self { + Edition::Unknown => "EDITION_UNKNOWN", + Edition::Proto2 => "EDITION_PROTO2", + Edition::Proto3 => "EDITION_PROTO3", + Edition::Edition2023 => "EDITION_2023", + Edition::Edition1TestOnly => "EDITION_1_TEST_ONLY", + Edition::Edition2TestOnly => "EDITION_2_TEST_ONLY", + Edition::Edition99997TestOnly => "EDITION_99997_TEST_ONLY", + Edition::Edition99998TestOnly => "EDITION_99998_TEST_ONLY", + Edition::Edition99999TestOnly => "EDITION_99999_TEST_ONLY", + } + } + /// Creates an enum from field names used in the ProtoBuf definition.
+ pub fn from_str_name(value: &str) -> ::core::option::Option { + match value { + "EDITION_UNKNOWN" => Some(Self::Unknown), + "EDITION_PROTO2" => Some(Self::Proto2), + "EDITION_PROTO3" => Some(Self::Proto3), + "EDITION_2023" => Some(Self::Edition2023), + "EDITION_1_TEST_ONLY" => Some(Self::Edition1TestOnly), + "EDITION_2_TEST_ONLY" => Some(Self::Edition2TestOnly), + "EDITION_99997_TEST_ONLY" => Some(Self::Edition99997TestOnly), + "EDITION_99998_TEST_ONLY" => Some(Self::Edition99998TestOnly), + "EDITION_99999_TEST_ONLY" => Some(Self::Edition99999TestOnly), + _ => None, + } + } +} diff --git a/tests/src/proto/mod.rs b/tests/src/proto/mod.rs index 81d6aa9..93d4b51 100644 --- a/tests/src/proto/mod.rs +++ b/tests/src/proto/mod.rs @@ -1,3 +1,5 @@ +pub mod conversions; pub mod errors; pub mod rest; +pub mod simple; pub mod types; diff --git a/tests/src/proto/rest.rs b/tests/src/proto/rest.rs index d2b1b45..1cc8aa3 100644 --- a/tests/src/proto/rest.rs +++ b/tests/src/proto/rest.rs @@ -20,7 +20,8 @@ pub struct Post { } pub mod rest_rpc_actix { #![allow(unused_variables, dead_code, missing_docs)] - use super::{rest_rpc_server::RestRpc, *}; + use super::*; + use super::rest_rpc_server::RestRpc; use std::sync::Arc; #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] @@ -38,9 +39,9 @@ pub mod rest_rpc_actix { #[prost(string, tag = "1")] pub foo: ::prost::alloc::string::String, } + #[actix_prost_macros::serde] #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] - #[actix_prost_macros::serde] pub struct GetQueryRPCQuery { #[prost(int64, tag = "2")] pub bar: i64, @@ -54,9 +55,9 @@ pub mod rest_rpc_actix { #[prost(int64, tag = "2")] pub bar: i64, } + #[actix_prost_macros::serde] #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] - #[actix_prost_macros::serde] pub struct PostRPCJson { #[prost(double, tag = "3")] pub long_name: f64, @@ -68,23 +69,23 @@ pub mod rest_rpc_actix { #[prost(double, tag = "3")] pub long_name: f64, } + #[actix_prost_macros::serde] #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] - #[actix_prost_macros::serde] pub struct PostQueryRPCQuery { #[prost(int64, tag = "2")] pub bar: i64, } + #[actix_prost_macros::serde] #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] - #[actix_prost_macros::serde] pub struct PostQueryRPCJson { #[prost(string, tag = "1")] pub foo: ::prost::alloc::string::String, } + #[actix_prost_macros::serde] #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] - #[actix_prost_macros::serde] pub struct PostNoPathRPCJson { #[prost(string, tag = "1")] pub foo: ::prost::alloc::string::String, @@ -93,9 +94,9 @@ pub mod rest_rpc_actix { #[prost(double, tag = "3")] pub long_name: f64, } + #[actix_prost_macros::serde] #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] - #[actix_prost_macros::serde] pub struct PostGetRPCJson { #[prost(string, tag = "1")] pub foo: ::prost::alloc::string::String, @@ -113,9 +114,9 @@ pub mod rest_rpc_actix { #[prost(int64, tag = "2")] pub bar: i64, } + #[actix_prost_macros::serde] #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] - #[actix_prost_macros::serde] pub struct PostResponseRPCJson { #[prost(string, tag = "1")] pub foo: ::prost::alloc::string::String, @@ -124,9 +125,9 @@ pub mod rest_rpc_actix { 
#[prost(double, tag = "3")] pub long_name: f64, } + #[actix_prost_macros::serde] #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] - #[actix_prost_macros::serde] pub struct PostResponseGetRPCJson { #[prost(string, tag = "1")] pub foo: ::prost::alloc::string::String, @@ -139,12 +140,15 @@ pub mod rest_rpc_actix { service: ::actix_web::web::Data, http_request: ::actix_web::HttpRequest, ) -> Result<::actix_web::web::Json, ::actix_prost::Error> { - let path = <::actix_web::web::Path as ::actix_web::FromRequest>::extract( - &http_request, - ) - .await - .map_err(|err| ::actix_prost::Error::from_actix(err, ::tonic::Code::InvalidArgument))? - .into_inner(); + let path = <::actix_web::web::Path< + GetRPCPath, + > as ::actix_web::FromRequest>::extract(&http_request) + .await + .map_err(|err| ::actix_prost::Error::from_actix( + err, + ::tonic::Code::InvalidArgument, + ))? + .into_inner(); let request = Get { foo: path.foo, bar: path.bar, @@ -158,18 +162,23 @@ pub mod rest_rpc_actix { service: ::actix_web::web::Data, http_request: ::actix_web::HttpRequest, ) -> Result<::actix_web::web::Json, ::actix_prost::Error> { - let path = <::actix_web::web::Path as ::actix_web::FromRequest>::extract( - &http_request, - ) - .await - .map_err(|err| ::actix_prost::Error::from_actix(err, ::tonic::Code::InvalidArgument))? - .into_inner(); - let query = - <::actix_web::web::Query as ::actix_web::FromRequest>::extract( - &http_request, - ) + let path = <::actix_web::web::Path< + GetQueryRPCPath, + > as ::actix_web::FromRequest>::extract(&http_request) + .await + .map_err(|err| ::actix_prost::Error::from_actix( + err, + ::tonic::Code::InvalidArgument, + ))? + .into_inner(); + let query = <::actix_web::web::Query< + GetQueryRPCQuery, + > as ::actix_web::FromRequest>::extract(&http_request) .await - .map_err(|err| ::actix_prost::Error::from_actix(err, ::tonic::Code::InvalidArgument))? + .map_err(|err| ::actix_prost::Error::from_actix( + err, + ::tonic::Code::InvalidArgument, + ))? .into_inner(); let request = Get { foo: path.foo, @@ -186,19 +195,24 @@ pub mod rest_rpc_actix { payload: ::actix_web::web::Payload, ) -> Result<::actix_web::web::Json, ::actix_prost::Error> { let mut payload = payload.into_inner(); - let path = <::actix_web::web::Path as ::actix_web::FromRequest>::extract( - &http_request, - ) - .await - .map_err(|err| ::actix_prost::Error::from_actix(err, ::tonic::Code::InvalidArgument))? - .into_inner(); - let json = <::actix_web::web::Json as ::actix_web::FromRequest>::from_request( - &http_request, - &mut payload, - ) - .await - .map_err(|err| ::actix_prost::Error::from_actix(err, ::tonic::Code::InvalidArgument))? - .into_inner(); + let path = <::actix_web::web::Path< + PostRPCPath, + > as ::actix_web::FromRequest>::extract(&http_request) + .await + .map_err(|err| ::actix_prost::Error::from_actix( + err, + ::tonic::Code::InvalidArgument, + ))? + .into_inner(); + let json = <::actix_web::web::Json< + PostRPCJson, + > as ::actix_web::FromRequest>::from_request(&http_request, &mut payload) + .await + .map_err(|err| ::actix_prost::Error::from_actix( + err, + ::tonic::Code::InvalidArgument, + ))? 
+ .into_inner(); let request = Post { foo: path.foo, bar: path.bar, @@ -215,26 +229,32 @@ pub mod rest_rpc_actix { payload: ::actix_web::web::Payload, ) -> Result<::actix_web::web::Json, ::actix_prost::Error> { let mut payload = payload.into_inner(); - let path = <::actix_web::web::Path as ::actix_web::FromRequest>::extract( - &http_request, - ) - .await - .map_err(|err| ::actix_prost::Error::from_actix(err, ::tonic::Code::InvalidArgument))? - .into_inner(); - let query = - <::actix_web::web::Query as ::actix_web::FromRequest>::extract( - &http_request, - ) + let path = <::actix_web::web::Path< + PostQueryRPCPath, + > as ::actix_web::FromRequest>::extract(&http_request) .await - .map_err(|err| ::actix_prost::Error::from_actix(err, ::tonic::Code::InvalidArgument))? + .map_err(|err| ::actix_prost::Error::from_actix( + err, + ::tonic::Code::InvalidArgument, + ))? .into_inner(); - let json = - <::actix_web::web::Json as ::actix_web::FromRequest>::from_request( - &http_request, - &mut payload, - ) + let query = <::actix_web::web::Query< + PostQueryRPCQuery, + > as ::actix_web::FromRequest>::extract(&http_request) .await - .map_err(|err| ::actix_prost::Error::from_actix(err, ::tonic::Code::InvalidArgument))? + .map_err(|err| ::actix_prost::Error::from_actix( + err, + ::tonic::Code::InvalidArgument, + ))? + .into_inner(); + let json = <::actix_web::web::Json< + PostQueryRPCJson, + > as ::actix_web::FromRequest>::from_request(&http_request, &mut payload) + .await + .map_err(|err| ::actix_prost::Error::from_actix( + err, + ::tonic::Code::InvalidArgument, + ))? .into_inner(); let request = Post { long_name: path.long_name, @@ -252,13 +272,14 @@ pub mod rest_rpc_actix { payload: ::actix_web::web::Payload, ) -> Result<::actix_web::web::Json, ::actix_prost::Error> { let mut payload = payload.into_inner(); - let json = - <::actix_web::web::Json as ::actix_web::FromRequest>::from_request( - &http_request, - &mut payload, - ) + let json = <::actix_web::web::Json< + PostNoPathRPCJson, + > as ::actix_web::FromRequest>::from_request(&http_request, &mut payload) .await - .map_err(|err| ::actix_prost::Error::from_actix(err, ::tonic::Code::InvalidArgument))? + .map_err(|err| ::actix_prost::Error::from_actix( + err, + ::tonic::Code::InvalidArgument, + ))? .into_inner(); let request = Post { foo: json.foo, @@ -276,13 +297,14 @@ pub mod rest_rpc_actix { payload: ::actix_web::web::Payload, ) -> Result<::actix_web::web::Json, ::actix_prost::Error> { let mut payload = payload.into_inner(); - let json = - <::actix_web::web::Json as ::actix_web::FromRequest>::from_request( - &http_request, - &mut payload, - ) + let json = <::actix_web::web::Json< + PostGetRPCJson, + > as ::actix_web::FromRequest>::from_request(&http_request, &mut payload) .await - .map_err(|err| ::actix_prost::Error::from_actix(err, ::tonic::Code::InvalidArgument))? + .map_err(|err| ::actix_prost::Error::from_actix( + err, + ::tonic::Code::InvalidArgument, + ))? 
.into_inner(); let request = Post { foo: json.foo, @@ -297,13 +319,18 @@ pub mod rest_rpc_actix { async fn call_get_response_rpc( service: ::actix_web::web::Data, http_request: ::actix_web::HttpRequest, - ) -> Result<::actix_web::web::Json<::prost::alloc::string::String>, ::actix_prost::Error> { - let path = - <::actix_web::web::Path as ::actix_web::FromRequest>::extract( - &http_request, - ) + ) -> Result< + ::actix_web::web::Json<::prost::alloc::string::String>, + ::actix_prost::Error, + > { + let path = <::actix_web::web::Path< + GetResponseRPCPath, + > as ::actix_web::FromRequest>::extract(&http_request) .await - .map_err(|err| ::actix_prost::Error::from_actix(err, ::tonic::Code::InvalidArgument))? + .map_err(|err| ::actix_prost::Error::from_actix( + err, + ::tonic::Code::InvalidArgument, + ))? .into_inner(); let request = Get { foo: path.foo, @@ -321,7 +348,7 @@ pub mod rest_rpc_actix { payload: ::actix_web::web::Payload, ) -> Result<::actix_web::web::Json, ::actix_prost::Error> { let mut payload = payload.into_inner(); - let json = <::actix_web::web::Json::< + let json = <::actix_web::web::Json< PostResponseRPCJson, > as ::actix_web::FromRequest>::from_request(&http_request, &mut payload) .await @@ -345,9 +372,12 @@ pub mod rest_rpc_actix { service: ::actix_web::web::Data, http_request: ::actix_web::HttpRequest, payload: ::actix_web::web::Payload, - ) -> Result<::actix_web::web::Json<::prost::alloc::string::String>, ::actix_prost::Error> { + ) -> Result< + ::actix_web::web::Json<::prost::alloc::string::String>, + ::actix_prost::Error, + > { let mut payload = payload.into_inner(); - let json = <::actix_web::web::Json::< + let json = <::actix_web::web::Json< PostResponseGetRPCJson, > as ::actix_web::FromRequest>::from_request(&http_request, &mut payload) .await @@ -372,119 +402,84 @@ pub mod rest_rpc_actix { service: Arc, ) { config.app_data(::actix_web::web::Data::from(service)); - config.route( - "/rest/get/{foo}/{bar}", - ::actix_web::web::get().to(call_get_rpc), - ); - config.route( - "/rest/get/{foo}", - ::actix_web::web::get().to(call_get_query_rpc), - ); - config.route( - "/rest/post/{foo}/{bar}", - ::actix_web::web::post().to(call_post_rpc), - ); - config.route( - "/rest/post/{long_name}", - ::actix_web::web::post().to(call_post_query_rpc), - ); - config.route( - "/rest/post", - ::actix_web::web::post().to(call_post_no_path_rpc), - ); - config.route( - "/rest/post_get", - ::actix_web::web::post().to(call_post_get_rpc), - ); - config.route( - "/rest/response/get/{foo}/{bar}", - ::actix_web::web::get().to(call_get_response_rpc), - ); - config.route( - "/rest/response/post", - ::actix_web::web::post().to(call_post_response_rpc), - ); - config.route( - "/rest/response/post_get", - ::actix_web::web::post().to(call_post_response_get_rpc), - ); + config.route("/rest/get/{foo}/{bar}", ::actix_web::web::get().to(call_get_rpc)); + config.route("/rest/get/{foo}", ::actix_web::web::get().to(call_get_query_rpc)); + config + .route("/rest/post/{foo}/{bar}", ::actix_web::web::post().to(call_post_rpc)); + config + .route( + "/rest/post/{long_name}", + ::actix_web::web::post().to(call_post_query_rpc), + ); + config.route("/rest/post", ::actix_web::web::post().to(call_post_no_path_rpc)); + config.route("/rest/post_get", ::actix_web::web::post().to(call_post_get_rpc)); + config + .route( + "/rest/response/get/{foo}/{bar}", + ::actix_web::web::get().to(call_get_response_rpc), + ); + config + .route( + "/rest/response/post", + ::actix_web::web::post().to(call_post_response_rpc), + ); + config + 
.route( + "/rest/response/post_get", + ::actix_web::web::post().to(call_post_response_get_rpc), + ); } } -pub mod simple_rpc_actix { - #![allow(unused_variables, dead_code, missing_docs)] - use super::{simple_rpc_server::SimpleRpc, *}; - use std::sync::Arc; - #[allow(clippy::derive_partial_eq_without_eq)] - #[derive(Clone, PartialEq, ::prost::Message)] - #[actix_prost_macros::serde(rename_all = "snake_case")] - pub struct PostRPCPath { - #[prost(string, tag = "1")] - pub foo: ::prost::alloc::string::String, - } - #[allow(clippy::derive_partial_eq_without_eq)] - #[derive(Clone, PartialEq, ::prost::Message)] - #[actix_prost_macros::serde] - pub struct PostRPCQuery { - #[prost(int64, tag = "2")] - pub bar: i64, +#[derive(Debug)] +pub struct GetInternal { + pub foo: ::prost::alloc::string::String, + pub bar: i64, +} +impl convert_trait::TryConvert for GetInternal { + fn try_convert(from: Get) -> Result { + Ok(Self { + foo: from.foo, + bar: from.bar, + }) } - #[allow(clippy::derive_partial_eq_without_eq)] - #[derive(Clone, PartialEq, ::prost::Message)] - #[actix_prost_macros::serde] - pub struct PostRPCJson { - #[prost(double, tag = "3")] - pub long_name: f64, +} +impl convert_trait::TryConvert for Get { + fn try_convert(from: GetInternal) -> Result { + Ok(Self { + foo: from.foo, + bar: from.bar, + }) } - async fn call_post_rpc( - service: ::actix_web::web::Data, - http_request: ::actix_web::HttpRequest, - payload: ::actix_web::web::Payload, - ) -> Result<::actix_web::web::Json, ::actix_prost::Error> { - let mut payload = payload.into_inner(); - let path = <::actix_web::web::Path as ::actix_web::FromRequest>::extract( - &http_request, - ) - .await - .map_err(|err| ::actix_prost::Error::from_actix(err, ::tonic::Code::InvalidArgument))? - .into_inner(); - let query = <::actix_web::web::Query as ::actix_web::FromRequest>::extract( - &http_request, - ) - .await - .map_err(|err| ::actix_prost::Error::from_actix(err, ::tonic::Code::InvalidArgument))? - .into_inner(); - let json = <::actix_web::web::Json as ::actix_web::FromRequest>::from_request( - &http_request, - &mut payload, - ) - .await - .map_err(|err| ::actix_prost::Error::from_actix(err, ::tonic::Code::InvalidArgument))? - .into_inner(); - let request = Post { - foo: path.foo, - bar: query.bar, - long_name: json.long_name, - }; - let request = ::actix_prost::new_request(request, &http_request); - let response = service.post_rpc(request).await?; - let response = response.into_inner(); - Ok(::actix_web::web::Json(response)) +} +#[derive(Debug)] +pub struct PostInternal { + pub foo: ::prost::alloc::string::String, + pub bar: i64, + pub long_name: f64, +} +impl convert_trait::TryConvert for PostInternal { + fn try_convert(from: Post) -> Result { + Ok(Self { + foo: from.foo, + bar: from.bar, + long_name: from.long_name, + }) } - pub fn route_simple_rpc( - config: &mut ::actix_web::web::ServiceConfig, - service: Arc, - ) { - config.app_data(::actix_web::web::Data::from(service)); - config.route( - "/rest/post/{foo}", - ::actix_web::web::post().to(call_post_rpc), - ); +} +impl convert_trait::TryConvert for Post { + fn try_convert(from: PostInternal) -> Result { + Ok(Self { + foo: from.foo, + bar: from.bar, + long_name: from.long_name, + }) } } /// Generated client implementations. 
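// Editor's illustrative note (not part of the generated output): the
// TryConvert impls above are symmetric, so a transport type can be mapped into
// its *Internal counterpart and back. A hedged usage sketch; the generic
// parameters and the error type of `convert_trait::TryConvert` are flattened
// away in this diff, so the `String` error below is an assumption:
//
//     fn get_roundtrip(get: Get) -> Result<Get, String> {
//         let internal: GetInternal = convert_trait::TryConvert::try_convert(get)?;
//         convert_trait::TryConvert::try_convert(internal)
//     }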
pub mod rest_rpc_client { #![allow(unused_variables, dead_code, missing_docs, clippy::let_unit_value)] - use tonic::codegen::{http::Uri, *}; + use tonic::codegen::*; + use tonic::codegen::http::Uri; #[derive(Debug, Clone)] pub struct RestRpcClient { inner: tonic::client::Grpc, @@ -528,8 +523,9 @@ pub mod rest_rpc_client { >::ResponseBody, >, >, - >>::Error: - Into + Send + Sync, + , + >>::Error: Into + Send + Sync, { RestRpcClient::new(InterceptedService::new(inner, interceptor)) } @@ -552,12 +548,15 @@ pub mod rest_rpc_client { &mut self, request: impl tonic::IntoRequest, ) -> Result, tonic::Status> { - self.inner.ready().await.map_err(|e| { - tonic::Status::new( - tonic::Code::Unknown, - format!("Service was not ready: {}", e.into()), - ) - })?; + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::new( + tonic::Code::Unknown, + format!("Service was not ready: {}", e.into()), + ) + })?; let codec = tonic::codec::ProstCodec::default(); let path = http::uri::PathAndQuery::from_static("/rest.RestRPC/GetRPC"); self.inner.unary(request.into_request(), path, codec).await @@ -566,12 +565,15 @@ pub mod rest_rpc_client { &mut self, request: impl tonic::IntoRequest, ) -> Result, tonic::Status> { - self.inner.ready().await.map_err(|e| { - tonic::Status::new( - tonic::Code::Unknown, - format!("Service was not ready: {}", e.into()), - ) - })?; + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::new( + tonic::Code::Unknown, + format!("Service was not ready: {}", e.into()), + ) + })?; let codec = tonic::codec::ProstCodec::default(); let path = http::uri::PathAndQuery::from_static("/rest.RestRPC/GetQueryRPC"); self.inner.unary(request.into_request(), path, codec).await @@ -580,12 +582,15 @@ pub mod rest_rpc_client { &mut self, request: impl tonic::IntoRequest, ) -> Result, tonic::Status> { - self.inner.ready().await.map_err(|e| { - tonic::Status::new( - tonic::Code::Unknown, - format!("Service was not ready: {}", e.into()), - ) - })?; + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::new( + tonic::Code::Unknown, + format!("Service was not ready: {}", e.into()), + ) + })?; let codec = tonic::codec::ProstCodec::default(); let path = http::uri::PathAndQuery::from_static("/rest.RestRPC/PostRPC"); self.inner.unary(request.into_request(), path, codec).await @@ -594,40 +599,53 @@ pub mod rest_rpc_client { &mut self, request: impl tonic::IntoRequest, ) -> Result, tonic::Status> { - self.inner.ready().await.map_err(|e| { - tonic::Status::new( - tonic::Code::Unknown, - format!("Service was not ready: {}", e.into()), - ) - })?; + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::new( + tonic::Code::Unknown, + format!("Service was not ready: {}", e.into()), + ) + })?; let codec = tonic::codec::ProstCodec::default(); - let path = http::uri::PathAndQuery::from_static("/rest.RestRPC/PostQueryRPC"); + let path = http::uri::PathAndQuery::from_static( + "/rest.RestRPC/PostQueryRPC", + ); self.inner.unary(request.into_request(), path, codec).await } pub async fn post_no_path_rpc( &mut self, request: impl tonic::IntoRequest, ) -> Result, tonic::Status> { - self.inner.ready().await.map_err(|e| { - tonic::Status::new( - tonic::Code::Unknown, - format!("Service was not ready: {}", e.into()), - ) - })?; + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::new( + tonic::Code::Unknown, + format!("Service was not ready: {}", e.into()), + ) + })?; let codec = tonic::codec::ProstCodec::default(); - let path = 
http::uri::PathAndQuery::from_static("/rest.RestRPC/PostNoPathRPC"); + let path = http::uri::PathAndQuery::from_static( + "/rest.RestRPC/PostNoPathRPC", + ); self.inner.unary(request.into_request(), path, codec).await } pub async fn post_get_rpc( &mut self, request: impl tonic::IntoRequest, ) -> Result, tonic::Status> { - self.inner.ready().await.map_err(|e| { - tonic::Status::new( - tonic::Code::Unknown, - format!("Service was not ready: {}", e.into()), - ) - })?; + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::new( + tonic::Code::Unknown, + format!("Service was not ready: {}", e.into()), + ) + })?; let codec = tonic::codec::ProstCodec::default(); let path = http::uri::PathAndQuery::from_static("/rest.RestRPC/PostGetRPC"); self.inner.unary(request.into_request(), path, codec).await @@ -636,125 +654,57 @@ pub mod rest_rpc_client { &mut self, request: impl tonic::IntoRequest, ) -> Result, tonic::Status> { - self.inner.ready().await.map_err(|e| { - tonic::Status::new( - tonic::Code::Unknown, - format!("Service was not ready: {}", e.into()), - ) - })?; + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::new( + tonic::Code::Unknown, + format!("Service was not ready: {}", e.into()), + ) + })?; let codec = tonic::codec::ProstCodec::default(); - let path = http::uri::PathAndQuery::from_static("/rest.RestRPC/GetResponseRPC"); + let path = http::uri::PathAndQuery::from_static( + "/rest.RestRPC/GetResponseRPC", + ); self.inner.unary(request.into_request(), path, codec).await } pub async fn post_response_rpc( &mut self, request: impl tonic::IntoRequest, ) -> Result, tonic::Status> { - self.inner.ready().await.map_err(|e| { - tonic::Status::new( - tonic::Code::Unknown, - format!("Service was not ready: {}", e.into()), - ) - })?; + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::new( + tonic::Code::Unknown, + format!("Service was not ready: {}", e.into()), + ) + })?; let codec = tonic::codec::ProstCodec::default(); - let path = http::uri::PathAndQuery::from_static("/rest.RestRPC/PostResponseRPC"); + let path = http::uri::PathAndQuery::from_static( + "/rest.RestRPC/PostResponseRPC", + ); self.inner.unary(request.into_request(), path, codec).await } pub async fn post_response_get_rpc( &mut self, request: impl tonic::IntoRequest, ) -> Result, tonic::Status> { - self.inner.ready().await.map_err(|e| { - tonic::Status::new( - tonic::Code::Unknown, - format!("Service was not ready: {}", e.into()), - ) - })?; + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::new( + tonic::Code::Unknown, + format!("Service was not ready: {}", e.into()), + ) + })?; let codec = tonic::codec::ProstCodec::default(); - let path = http::uri::PathAndQuery::from_static("/rest.RestRPC/PostResponseGetRPC"); - self.inner.unary(request.into_request(), path, codec).await - } - } -} -/// Generated client implementations. -pub mod simple_rpc_client { - #![allow(unused_variables, dead_code, missing_docs, clippy::let_unit_value)] - use tonic::codegen::{http::Uri, *}; - #[derive(Debug, Clone)] - pub struct SimpleRpcClient { - inner: tonic::client::Grpc, - } - impl SimpleRpcClient { - /// Attempt to create a new client by connecting to a given endpoint. 
- pub async fn connect(dst: D) -> Result - where - D: std::convert::TryInto, - D::Error: Into, - { - let conn = tonic::transport::Endpoint::new(dst)?.connect().await?; - Ok(Self::new(conn)) - } - } - impl SimpleRpcClient - where - T: tonic::client::GrpcService, - T::Error: Into, - T::ResponseBody: Body + Send + 'static, - ::Error: Into + Send, - { - pub fn new(inner: T) -> Self { - let inner = tonic::client::Grpc::new(inner); - Self { inner } - } - pub fn with_origin(inner: T, origin: Uri) -> Self { - let inner = tonic::client::Grpc::with_origin(inner, origin); - Self { inner } - } - pub fn with_interceptor( - inner: T, - interceptor: F, - ) -> SimpleRpcClient> - where - F: tonic::service::Interceptor, - T::ResponseBody: Default, - T: tonic::codegen::Service< - http::Request, - Response = http::Response< - >::ResponseBody, - >, - >, - >>::Error: - Into + Send + Sync, - { - SimpleRpcClient::new(InterceptedService::new(inner, interceptor)) - } - /// Compress requests with the given encoding. - /// - /// This requires the server to support it otherwise it might respond with an - /// error. - #[must_use] - pub fn send_compressed(mut self, encoding: CompressionEncoding) -> Self { - self.inner = self.inner.send_compressed(encoding); - self - } - /// Enable decompressing responses. - #[must_use] - pub fn accept_compressed(mut self, encoding: CompressionEncoding) -> Self { - self.inner = self.inner.accept_compressed(encoding); - self - } - pub async fn post_rpc( - &mut self, - request: impl tonic::IntoRequest, - ) -> Result, tonic::Status> { - self.inner.ready().await.map_err(|e| { - tonic::Status::new( - tonic::Code::Unknown, - format!("Service was not ready: {}", e.into()), - ) - })?; - let codec = tonic::codec::ProstCodec::default(); - let path = http::uri::PathAndQuery::from_static("/rest.SimpleRPC/PostRPC"); + let path = http::uri::PathAndQuery::from_static( + "/rest.RestRPC/PostResponseGetRPC", + ); self.inner.unary(request.into_request(), path, codec).await } } @@ -822,7 +772,10 @@ pub mod rest_rpc_server { send_compression_encodings: Default::default(), } } - pub fn with_interceptor(inner: T, interceptor: F) -> InterceptedService + pub fn with_interceptor( + inner: T, + interceptor: F, + ) -> InterceptedService where F: tonic::service::Interceptor, { @@ -850,7 +803,10 @@ pub mod rest_rpc_server { type Response = http::Response; type Error = std::convert::Infallible; type Future = BoxFuture; - fn poll_ready(&mut self, _cx: &mut Context<'_>) -> Poll> { + fn poll_ready( + &mut self, + _cx: &mut Context<'_>, + ) -> Poll> { Poll::Ready(Ok(())) } fn call(&mut self, req: http::Request) -> Self::Future { @@ -859,10 +815,17 @@ pub mod rest_rpc_server { "/rest.RestRPC/GetRPC" => { #[allow(non_camel_case_types)] struct GetRPCSvc(pub Arc); - impl tonic::server::UnaryService for GetRPCSvc { + impl tonic::server::UnaryService + for GetRPCSvc { type Response = super::Get; - type Future = BoxFuture, tonic::Status>; - fn call(&mut self, request: tonic::Request) -> Self::Future { + type Future = BoxFuture< + tonic::Response, + tonic::Status, + >; + fn call( + &mut self, + request: tonic::Request, + ) -> Self::Future { let inner = self.0.clone(); let fut = async move { (*inner).get_rpc(request).await }; Box::pin(fut) @@ -875,10 +838,11 @@ pub mod rest_rpc_server { let inner = inner.0; let method = GetRPCSvc(inner); let codec = tonic::codec::ProstCodec::default(); - let mut grpc = tonic::server::Grpc::new(codec).apply_compression_config( - accept_compression_encodings, - send_compression_encodings, - ); + let 
mut grpc = tonic::server::Grpc::new(codec) + .apply_compression_config( + accept_compression_encodings, + send_compression_encodings, + ); let res = grpc.unary(method, req).await; Ok(res) }; @@ -887,12 +851,21 @@ pub mod rest_rpc_server { "/rest.RestRPC/GetQueryRPC" => { #[allow(non_camel_case_types)] struct GetQueryRPCSvc(pub Arc); - impl tonic::server::UnaryService for GetQueryRPCSvc { + impl tonic::server::UnaryService + for GetQueryRPCSvc { type Response = super::Get; - type Future = BoxFuture, tonic::Status>; - fn call(&mut self, request: tonic::Request) -> Self::Future { + type Future = BoxFuture< + tonic::Response, + tonic::Status, + >; + fn call( + &mut self, + request: tonic::Request, + ) -> Self::Future { let inner = self.0.clone(); - let fut = async move { (*inner).get_query_rpc(request).await }; + let fut = async move { + (*inner).get_query_rpc(request).await + }; Box::pin(fut) } } @@ -903,10 +876,11 @@ pub mod rest_rpc_server { let inner = inner.0; let method = GetQueryRPCSvc(inner); let codec = tonic::codec::ProstCodec::default(); - let mut grpc = tonic::server::Grpc::new(codec).apply_compression_config( - accept_compression_encodings, - send_compression_encodings, - ); + let mut grpc = tonic::server::Grpc::new(codec) + .apply_compression_config( + accept_compression_encodings, + send_compression_encodings, + ); let res = grpc.unary(method, req).await; Ok(res) }; @@ -915,10 +889,17 @@ pub mod rest_rpc_server { "/rest.RestRPC/PostRPC" => { #[allow(non_camel_case_types)] struct PostRPCSvc(pub Arc); - impl tonic::server::UnaryService for PostRPCSvc { + impl tonic::server::UnaryService + for PostRPCSvc { type Response = super::Post; - type Future = BoxFuture, tonic::Status>; - fn call(&mut self, request: tonic::Request) -> Self::Future { + type Future = BoxFuture< + tonic::Response, + tonic::Status, + >; + fn call( + &mut self, + request: tonic::Request, + ) -> Self::Future { let inner = self.0.clone(); let fut = async move { (*inner).post_rpc(request).await }; Box::pin(fut) @@ -931,10 +912,11 @@ pub mod rest_rpc_server { let inner = inner.0; let method = PostRPCSvc(inner); let codec = tonic::codec::ProstCodec::default(); - let mut grpc = tonic::server::Grpc::new(codec).apply_compression_config( - accept_compression_encodings, - send_compression_encodings, - ); + let mut grpc = tonic::server::Grpc::new(codec) + .apply_compression_config( + accept_compression_encodings, + send_compression_encodings, + ); let res = grpc.unary(method, req).await; Ok(res) }; @@ -943,12 +925,21 @@ pub mod rest_rpc_server { "/rest.RestRPC/PostQueryRPC" => { #[allow(non_camel_case_types)] struct PostQueryRPCSvc(pub Arc); - impl tonic::server::UnaryService for PostQueryRPCSvc { + impl tonic::server::UnaryService + for PostQueryRPCSvc { type Response = super::Post; - type Future = BoxFuture, tonic::Status>; - fn call(&mut self, request: tonic::Request) -> Self::Future { + type Future = BoxFuture< + tonic::Response, + tonic::Status, + >; + fn call( + &mut self, + request: tonic::Request, + ) -> Self::Future { let inner = self.0.clone(); - let fut = async move { (*inner).post_query_rpc(request).await }; + let fut = async move { + (*inner).post_query_rpc(request).await + }; Box::pin(fut) } } @@ -959,10 +950,11 @@ pub mod rest_rpc_server { let inner = inner.0; let method = PostQueryRPCSvc(inner); let codec = tonic::codec::ProstCodec::default(); - let mut grpc = tonic::server::Grpc::new(codec).apply_compression_config( - accept_compression_encodings, - send_compression_encodings, - ); + let mut grpc = 
tonic::server::Grpc::new(codec) + .apply_compression_config( + accept_compression_encodings, + send_compression_encodings, + ); let res = grpc.unary(method, req).await; Ok(res) }; @@ -971,12 +963,21 @@ pub mod rest_rpc_server { "/rest.RestRPC/PostNoPathRPC" => { #[allow(non_camel_case_types)] struct PostNoPathRPCSvc(pub Arc); - impl tonic::server::UnaryService for PostNoPathRPCSvc { + impl tonic::server::UnaryService + for PostNoPathRPCSvc { type Response = super::Post; - type Future = BoxFuture, tonic::Status>; - fn call(&mut self, request: tonic::Request) -> Self::Future { + type Future = BoxFuture< + tonic::Response, + tonic::Status, + >; + fn call( + &mut self, + request: tonic::Request, + ) -> Self::Future { let inner = self.0.clone(); - let fut = async move { (*inner).post_no_path_rpc(request).await }; + let fut = async move { + (*inner).post_no_path_rpc(request).await + }; Box::pin(fut) } } @@ -987,10 +988,11 @@ pub mod rest_rpc_server { let inner = inner.0; let method = PostNoPathRPCSvc(inner); let codec = tonic::codec::ProstCodec::default(); - let mut grpc = tonic::server::Grpc::new(codec).apply_compression_config( - accept_compression_encodings, - send_compression_encodings, - ); + let mut grpc = tonic::server::Grpc::new(codec) + .apply_compression_config( + accept_compression_encodings, + send_compression_encodings, + ); let res = grpc.unary(method, req).await; Ok(res) }; @@ -999,12 +1001,21 @@ pub mod rest_rpc_server { "/rest.RestRPC/PostGetRPC" => { #[allow(non_camel_case_types)] struct PostGetRPCSvc(pub Arc); - impl tonic::server::UnaryService for PostGetRPCSvc { + impl tonic::server::UnaryService + for PostGetRPCSvc { type Response = super::Get; - type Future = BoxFuture, tonic::Status>; - fn call(&mut self, request: tonic::Request) -> Self::Future { + type Future = BoxFuture< + tonic::Response, + tonic::Status, + >; + fn call( + &mut self, + request: tonic::Request, + ) -> Self::Future { let inner = self.0.clone(); - let fut = async move { (*inner).post_get_rpc(request).await }; + let fut = async move { + (*inner).post_get_rpc(request).await + }; Box::pin(fut) } } @@ -1015,10 +1026,11 @@ pub mod rest_rpc_server { let inner = inner.0; let method = PostGetRPCSvc(inner); let codec = tonic::codec::ProstCodec::default(); - let mut grpc = tonic::server::Grpc::new(codec).apply_compression_config( - accept_compression_encodings, - send_compression_encodings, - ); + let mut grpc = tonic::server::Grpc::new(codec) + .apply_compression_config( + accept_compression_encodings, + send_compression_encodings, + ); let res = grpc.unary(method, req).await; Ok(res) }; @@ -1027,12 +1039,21 @@ pub mod rest_rpc_server { "/rest.RestRPC/GetResponseRPC" => { #[allow(non_camel_case_types)] struct GetResponseRPCSvc(pub Arc); - impl tonic::server::UnaryService for GetResponseRPCSvc { + impl tonic::server::UnaryService + for GetResponseRPCSvc { type Response = super::Get; - type Future = BoxFuture, tonic::Status>; - fn call(&mut self, request: tonic::Request) -> Self::Future { + type Future = BoxFuture< + tonic::Response, + tonic::Status, + >; + fn call( + &mut self, + request: tonic::Request, + ) -> Self::Future { let inner = self.0.clone(); - let fut = async move { (*inner).get_response_rpc(request).await }; + let fut = async move { + (*inner).get_response_rpc(request).await + }; Box::pin(fut) } } @@ -1043,10 +1064,11 @@ pub mod rest_rpc_server { let inner = inner.0; let method = GetResponseRPCSvc(inner); let codec = tonic::codec::ProstCodec::default(); - let mut grpc = 
tonic::server::Grpc::new(codec).apply_compression_config( - accept_compression_encodings, - send_compression_encodings, - ); + let mut grpc = tonic::server::Grpc::new(codec) + .apply_compression_config( + accept_compression_encodings, + send_compression_encodings, + ); let res = grpc.unary(method, req).await; Ok(res) }; @@ -1055,12 +1077,21 @@ pub mod rest_rpc_server { "/rest.RestRPC/PostResponseRPC" => { #[allow(non_camel_case_types)] struct PostResponseRPCSvc(pub Arc); - impl tonic::server::UnaryService for PostResponseRPCSvc { + impl tonic::server::UnaryService + for PostResponseRPCSvc { type Response = super::Post; - type Future = BoxFuture, tonic::Status>; - fn call(&mut self, request: tonic::Request) -> Self::Future { + type Future = BoxFuture< + tonic::Response, + tonic::Status, + >; + fn call( + &mut self, + request: tonic::Request, + ) -> Self::Future { let inner = self.0.clone(); - let fut = async move { (*inner).post_response_rpc(request).await }; + let fut = async move { + (*inner).post_response_rpc(request).await + }; Box::pin(fut) } } @@ -1071,10 +1102,11 @@ pub mod rest_rpc_server { let inner = inner.0; let method = PostResponseRPCSvc(inner); let codec = tonic::codec::ProstCodec::default(); - let mut grpc = tonic::server::Grpc::new(codec).apply_compression_config( - accept_compression_encodings, - send_compression_encodings, - ); + let mut grpc = tonic::server::Grpc::new(codec) + .apply_compression_config( + accept_compression_encodings, + send_compression_encodings, + ); let res = grpc.unary(method, req).await; Ok(res) }; @@ -1083,12 +1115,21 @@ pub mod rest_rpc_server { "/rest.RestRPC/PostResponseGetRPC" => { #[allow(non_camel_case_types)] struct PostResponseGetRPCSvc(pub Arc); - impl tonic::server::UnaryService for PostResponseGetRPCSvc { + impl tonic::server::UnaryService + for PostResponseGetRPCSvc { type Response = super::Get; - type Future = BoxFuture, tonic::Status>; - fn call(&mut self, request: tonic::Request) -> Self::Future { + type Future = BoxFuture< + tonic::Response, + tonic::Status, + >; + fn call( + &mut self, + request: tonic::Request, + ) -> Self::Future { let inner = self.0.clone(); - let fut = async move { (*inner).post_response_get_rpc(request).await }; + let fut = async move { + (*inner).post_response_get_rpc(request).await + }; Box::pin(fut) } } @@ -1099,23 +1140,28 @@ pub mod rest_rpc_server { let inner = inner.0; let method = PostResponseGetRPCSvc(inner); let codec = tonic::codec::ProstCodec::default(); - let mut grpc = tonic::server::Grpc::new(codec).apply_compression_config( - accept_compression_encodings, - send_compression_encodings, - ); + let mut grpc = tonic::server::Grpc::new(codec) + .apply_compression_config( + accept_compression_encodings, + send_compression_encodings, + ); let res = grpc.unary(method, req).await; Ok(res) }; Box::pin(fut) } - _ => Box::pin(async move { - Ok(http::Response::builder() - .status(200) - .header("grpc-status", "12") - .header("content-type", "application/grpc") - .body(empty_body()) - .unwrap()) - }), + _ => { + Box::pin(async move { + Ok( + http::Response::builder() + .status(200) + .header("grpc-status", "12") + .header("content-type", "application/grpc") + .body(empty_body()) + .unwrap(), + ) + }) + } } } } @@ -1143,131 +1189,3 @@ pub mod rest_rpc_server { const NAME: &'static str = "rest.RestRPC"; } } -/// Generated server implementations. 
-pub mod simple_rpc_server { - #![allow(unused_variables, dead_code, missing_docs, clippy::let_unit_value)] - use tonic::codegen::*; - /// Generated trait containing gRPC methods that should be implemented for use with SimpleRpcServer. - #[async_trait] - pub trait SimpleRpc: Send + Sync + 'static { - async fn post_rpc( - &self, - request: tonic::Request, - ) -> Result, tonic::Status>; - } - #[derive(Debug)] - pub struct SimpleRpcServer { - inner: _Inner, - accept_compression_encodings: EnabledCompressionEncodings, - send_compression_encodings: EnabledCompressionEncodings, - } - struct _Inner(Arc); - impl SimpleRpcServer { - pub fn new(inner: T) -> Self { - Self::from_arc(Arc::new(inner)) - } - pub fn from_arc(inner: Arc) -> Self { - let inner = _Inner(inner); - Self { - inner, - accept_compression_encodings: Default::default(), - send_compression_encodings: Default::default(), - } - } - pub fn with_interceptor(inner: T, interceptor: F) -> InterceptedService - where - F: tonic::service::Interceptor, - { - InterceptedService::new(Self::new(inner), interceptor) - } - /// Enable decompressing requests with the given encoding. - #[must_use] - pub fn accept_compressed(mut self, encoding: CompressionEncoding) -> Self { - self.accept_compression_encodings.enable(encoding); - self - } - /// Compress responses with the given encoding, if the client supports it. - #[must_use] - pub fn send_compressed(mut self, encoding: CompressionEncoding) -> Self { - self.send_compression_encodings.enable(encoding); - self - } - } - impl tonic::codegen::Service> for SimpleRpcServer - where - T: SimpleRpc, - B: Body + Send + 'static, - B::Error: Into + Send + 'static, - { - type Response = http::Response; - type Error = std::convert::Infallible; - type Future = BoxFuture; - fn poll_ready(&mut self, _cx: &mut Context<'_>) -> Poll> { - Poll::Ready(Ok(())) - } - fn call(&mut self, req: http::Request) -> Self::Future { - let inner = self.inner.clone(); - match req.uri().path() { - "/rest.SimpleRPC/PostRPC" => { - #[allow(non_camel_case_types)] - struct PostRPCSvc(pub Arc); - impl tonic::server::UnaryService for PostRPCSvc { - type Response = super::Post; - type Future = BoxFuture, tonic::Status>; - fn call(&mut self, request: tonic::Request) -> Self::Future { - let inner = self.0.clone(); - let fut = async move { (*inner).post_rpc(request).await }; - Box::pin(fut) - } - } - let accept_compression_encodings = self.accept_compression_encodings; - let send_compression_encodings = self.send_compression_encodings; - let inner = self.inner.clone(); - let fut = async move { - let inner = inner.0; - let method = PostRPCSvc(inner); - let codec = tonic::codec::ProstCodec::default(); - let mut grpc = tonic::server::Grpc::new(codec).apply_compression_config( - accept_compression_encodings, - send_compression_encodings, - ); - let res = grpc.unary(method, req).await; - Ok(res) - }; - Box::pin(fut) - } - _ => Box::pin(async move { - Ok(http::Response::builder() - .status(200) - .header("grpc-status", "12") - .header("content-type", "application/grpc") - .body(empty_body()) - .unwrap()) - }), - } - } - } - impl Clone for SimpleRpcServer { - fn clone(&self) -> Self { - let inner = self.inner.clone(); - Self { - inner, - accept_compression_encodings: self.accept_compression_encodings, - send_compression_encodings: self.send_compression_encodings, - } - } - } - impl Clone for _Inner { - fn clone(&self) -> Self { - Self(self.0.clone()) - } - } - impl std::fmt::Debug for _Inner { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> 
std::fmt::Result { - write!(f, "{:?}", self.0) - } - } - impl tonic::server::NamedService for SimpleRpcServer { - const NAME: &'static str = "rest.SimpleRPC"; - } -} diff --git a/tests/src/proto/simple.rs b/tests/src/proto/simple.rs new file mode 100644 index 0000000..a1c5ba7 --- /dev/null +++ b/tests/src/proto/simple.rs @@ -0,0 +1,346 @@ +#[actix_prost_macros::serde] +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct Post { + #[prost(string, tag = "1")] + pub foo: ::prost::alloc::string::String, + #[prost(int64, tag = "2")] + pub bar: i64, + #[prost(double, tag = "3")] + pub long_name: f64, +} +pub mod simple_rpc_actix { + #![allow(unused_variables, dead_code, missing_docs)] + use super::*; + use super::simple_rpc_server::SimpleRpc; + use std::sync::Arc; + #[allow(clippy::derive_partial_eq_without_eq)] + #[derive(Clone, PartialEq, ::prost::Message)] + #[actix_prost_macros::serde(rename_all = "snake_case")] + pub struct PostRPCPath { + #[prost(string, tag = "1")] + pub foo: ::prost::alloc::string::String, + } + #[actix_prost_macros::serde] + #[allow(clippy::derive_partial_eq_without_eq)] + #[derive(Clone, PartialEq, ::prost::Message)] + pub struct PostRPCQuery { + #[prost(int64, tag = "2")] + pub bar: i64, + } + #[actix_prost_macros::serde] + #[allow(clippy::derive_partial_eq_without_eq)] + #[derive(Clone, PartialEq, ::prost::Message)] + pub struct PostRPCJson { + #[prost(double, tag = "3")] + pub long_name: f64, + } + async fn call_post_rpc( + service: ::actix_web::web::Data, + http_request: ::actix_web::HttpRequest, + payload: ::actix_web::web::Payload, + ) -> Result<::actix_web::web::Json, ::actix_prost::Error> { + let mut payload = payload.into_inner(); + let path = <::actix_web::web::Path< + PostRPCPath, + > as ::actix_web::FromRequest>::extract(&http_request) + .await + .map_err(|err| ::actix_prost::Error::from_actix( + err, + ::tonic::Code::InvalidArgument, + ))? + .into_inner(); + let query = <::actix_web::web::Query< + PostRPCQuery, + > as ::actix_web::FromRequest>::extract(&http_request) + .await + .map_err(|err| ::actix_prost::Error::from_actix( + err, + ::tonic::Code::InvalidArgument, + ))? + .into_inner(); + let json = <::actix_web::web::Json< + PostRPCJson, + > as ::actix_web::FromRequest>::from_request(&http_request, &mut payload) + .await + .map_err(|err| ::actix_prost::Error::from_actix( + err, + ::tonic::Code::InvalidArgument, + ))? + .into_inner(); + let request = Post { + foo: path.foo, + bar: query.bar, + long_name: json.long_name, + }; + let request = ::actix_prost::new_request(request, &http_request); + let response = service.post_rpc(request).await?; + let response = response.into_inner(); + Ok(::actix_web::web::Json(response)) + } + pub fn route_simple_rpc( + config: &mut ::actix_web::web::ServiceConfig, + service: Arc, + ) { + config.app_data(::actix_web::web::Data::from(service)); + config.route("/rest/post/{foo}", ::actix_web::web::post().to(call_post_rpc)); + } +} +#[derive(Debug)] +pub struct PostInternal { + pub foo: ::prost::alloc::string::String, + pub bar: i64, + pub long_name: f64, +} +impl convert_trait::TryConvert for PostInternal { + fn try_convert(from: Post) -> Result { + Ok(Self { + foo: from.foo, + bar: from.bar, + long_name: from.long_name, + }) + } +} +impl convert_trait::TryConvert for Post { + fn try_convert(from: PostInternal) -> Result { + Ok(Self { + foo: from.foo, + bar: from.bar, + long_name: from.long_name, + }) + } +} +/// Generated client implementations. 
+pub mod simple_rpc_client { + #![allow(unused_variables, dead_code, missing_docs, clippy::let_unit_value)] + use tonic::codegen::*; + use tonic::codegen::http::Uri; + #[derive(Debug, Clone)] + pub struct SimpleRpcClient { + inner: tonic::client::Grpc, + } + impl SimpleRpcClient { + /// Attempt to create a new client by connecting to a given endpoint. + pub async fn connect(dst: D) -> Result + where + D: std::convert::TryInto, + D::Error: Into, + { + let conn = tonic::transport::Endpoint::new(dst)?.connect().await?; + Ok(Self::new(conn)) + } + } + impl SimpleRpcClient + where + T: tonic::client::GrpcService, + T::Error: Into, + T::ResponseBody: Body + Send + 'static, + ::Error: Into + Send, + { + pub fn new(inner: T) -> Self { + let inner = tonic::client::Grpc::new(inner); + Self { inner } + } + pub fn with_origin(inner: T, origin: Uri) -> Self { + let inner = tonic::client::Grpc::with_origin(inner, origin); + Self { inner } + } + pub fn with_interceptor( + inner: T, + interceptor: F, + ) -> SimpleRpcClient> + where + F: tonic::service::Interceptor, + T::ResponseBody: Default, + T: tonic::codegen::Service< + http::Request, + Response = http::Response< + >::ResponseBody, + >, + >, + , + >>::Error: Into + Send + Sync, + { + SimpleRpcClient::new(InterceptedService::new(inner, interceptor)) + } + /// Compress requests with the given encoding. + /// + /// This requires the server to support it otherwise it might respond with an + /// error. + #[must_use] + pub fn send_compressed(mut self, encoding: CompressionEncoding) -> Self { + self.inner = self.inner.send_compressed(encoding); + self + } + /// Enable decompressing responses. + #[must_use] + pub fn accept_compressed(mut self, encoding: CompressionEncoding) -> Self { + self.inner = self.inner.accept_compressed(encoding); + self + } + pub async fn post_rpc( + &mut self, + request: impl tonic::IntoRequest, + ) -> Result, tonic::Status> { + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::new( + tonic::Code::Unknown, + format!("Service was not ready: {}", e.into()), + ) + })?; + let codec = tonic::codec::ProstCodec::default(); + let path = http::uri::PathAndQuery::from_static("/simple.SimpleRPC/PostRPC"); + self.inner.unary(request.into_request(), path, codec).await + } + } +} +/// Generated server implementations. +pub mod simple_rpc_server { + #![allow(unused_variables, dead_code, missing_docs, clippy::let_unit_value)] + use tonic::codegen::*; + /// Generated trait containing gRPC methods that should be implemented for use with SimpleRpcServer. + #[async_trait] + pub trait SimpleRpc: Send + Sync + 'static { + async fn post_rpc( + &self, + request: tonic::Request, + ) -> Result, tonic::Status>; + } + #[derive(Debug)] + pub struct SimpleRpcServer { + inner: _Inner, + accept_compression_encodings: EnabledCompressionEncodings, + send_compression_encodings: EnabledCompressionEncodings, + } + struct _Inner(Arc); + impl SimpleRpcServer { + pub fn new(inner: T) -> Self { + Self::from_arc(Arc::new(inner)) + } + pub fn from_arc(inner: Arc) -> Self { + let inner = _Inner(inner); + Self { + inner, + accept_compression_encodings: Default::default(), + send_compression_encodings: Default::default(), + } + } + pub fn with_interceptor( + inner: T, + interceptor: F, + ) -> InterceptedService + where + F: tonic::service::Interceptor, + { + InterceptedService::new(Self::new(inner), interceptor) + } + /// Enable decompressing requests with the given encoding. 
+ #[must_use] + pub fn accept_compressed(mut self, encoding: CompressionEncoding) -> Self { + self.accept_compression_encodings.enable(encoding); + self + } + /// Compress responses with the given encoding, if the client supports it. + #[must_use] + pub fn send_compressed(mut self, encoding: CompressionEncoding) -> Self { + self.send_compression_encodings.enable(encoding); + self + } + } + impl tonic::codegen::Service> for SimpleRpcServer + where + T: SimpleRpc, + B: Body + Send + 'static, + B::Error: Into + Send + 'static, + { + type Response = http::Response; + type Error = std::convert::Infallible; + type Future = BoxFuture; + fn poll_ready( + &mut self, + _cx: &mut Context<'_>, + ) -> Poll> { + Poll::Ready(Ok(())) + } + fn call(&mut self, req: http::Request) -> Self::Future { + let inner = self.inner.clone(); + match req.uri().path() { + "/simple.SimpleRPC/PostRPC" => { + #[allow(non_camel_case_types)] + struct PostRPCSvc(pub Arc); + impl tonic::server::UnaryService + for PostRPCSvc { + type Response = super::Post; + type Future = BoxFuture< + tonic::Response, + tonic::Status, + >; + fn call( + &mut self, + request: tonic::Request, + ) -> Self::Future { + let inner = self.0.clone(); + let fut = async move { (*inner).post_rpc(request).await }; + Box::pin(fut) + } + } + let accept_compression_encodings = self.accept_compression_encodings; + let send_compression_encodings = self.send_compression_encodings; + let inner = self.inner.clone(); + let fut = async move { + let inner = inner.0; + let method = PostRPCSvc(inner); + let codec = tonic::codec::ProstCodec::default(); + let mut grpc = tonic::server::Grpc::new(codec) + .apply_compression_config( + accept_compression_encodings, + send_compression_encodings, + ); + let res = grpc.unary(method, req).await; + Ok(res) + }; + Box::pin(fut) + } + _ => { + Box::pin(async move { + Ok( + http::Response::builder() + .status(200) + .header("grpc-status", "12") + .header("content-type", "application/grpc") + .body(empty_body()) + .unwrap(), + ) + }) + } + } + } + } + impl Clone for SimpleRpcServer { + fn clone(&self) -> Self { + let inner = self.inner.clone(); + Self { + inner, + accept_compression_encodings: self.accept_compression_encodings, + send_compression_encodings: self.send_compression_encodings, + } + } + } + impl Clone for _Inner { + fn clone(&self) -> Self { + Self(self.0.clone()) + } + } + impl std::fmt::Debug for _Inner { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{:?}", self.0) + } + } + impl tonic::server::NamedService for SimpleRpcServer { + const NAME: &'static str = "simple.SimpleRPC"; + } +} diff --git a/tests/src/proto/types.rs b/tests/src/proto/types.rs index bdef0fb..1ac8bb9 100644 --- a/tests/src/proto/types.rs +++ b/tests/src/proto/types.rs @@ -115,11 +115,12 @@ impl Values { } pub mod types_rpc_actix { #![allow(unused_variables, dead_code, missing_docs)] - use super::{types_rpc_server::TypesRpc, *}; + use super::*; + use super::types_rpc_server::TypesRpc; use std::sync::Arc; + #[actix_prost_macros::serde] #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] - #[actix_prost_macros::serde] pub struct ScalarsRPCJson { #[prost(double, tag = "1")] pub a: f64, @@ -132,9 +133,9 @@ pub mod types_rpc_actix { #[prost(bool, tag = "5")] pub e: bool, } + #[actix_prost_macros::serde] #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] - #[actix_prost_macros::serde] pub struct OptionalScalarsRPCJson { #[prost(double, 
optional, tag = "1")] pub a: ::core::option::Option, @@ -147,37 +148,37 @@ pub mod types_rpc_actix { #[prost(bool, optional, tag = "5")] pub e: ::core::option::Option, } + #[actix_prost_macros::serde] #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] - #[actix_prost_macros::serde] pub struct EnumsRPCJson { #[prost(enumeration = "Values", tag = "1")] pub values: i32, } + #[actix_prost_macros::serde] #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] - #[actix_prost_macros::serde] pub struct RepeatedRPCJson { #[prost(string, repeated, tag = "1")] pub foo: ::prost::alloc::vec::Vec<::prost::alloc::string::String>, } + #[actix_prost_macros::serde] #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] - #[actix_prost_macros::serde] pub struct MapsRPCJson { #[prost(map = "string, int32", tag = "1")] pub foo: ::std::collections::HashMap<::prost::alloc::string::String, i32>, } + #[actix_prost_macros::serde] #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] - #[actix_prost_macros::serde] pub struct OneOfsRPCJson { #[prost(oneof = "one_ofs::Values", tags = "1, 2, 3")] pub values: ::core::option::Option, } + #[actix_prost_macros::serde] #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] - #[actix_prost_macros::serde] pub struct ComplexRPCJson { #[prost(message, optional, tag = "1")] pub scalars: ::core::option::Option, @@ -197,13 +198,14 @@ pub mod types_rpc_actix { payload: ::actix_web::web::Payload, ) -> Result<::actix_web::web::Json, ::actix_prost::Error> { let mut payload = payload.into_inner(); - let json = - <::actix_web::web::Json as ::actix_web::FromRequest>::from_request( - &http_request, - &mut payload, - ) + let json = <::actix_web::web::Json< + ScalarsRPCJson, + > as ::actix_web::FromRequest>::from_request(&http_request, &mut payload) .await - .map_err(|err| ::actix_prost::Error::from_actix(err, ::tonic::Code::InvalidArgument))? + .map_err(|err| ::actix_prost::Error::from_actix( + err, + ::tonic::Code::InvalidArgument, + ))? .into_inner(); let request = Scalars { a: json.a, @@ -223,7 +225,7 @@ pub mod types_rpc_actix { payload: ::actix_web::web::Payload, ) -> Result<::actix_web::web::Json, ::actix_prost::Error> { let mut payload = payload.into_inner(); - let json = <::actix_web::web::Json::< + let json = <::actix_web::web::Json< OptionalScalarsRPCJson, > as ::actix_web::FromRequest>::from_request(&http_request, &mut payload) .await @@ -250,17 +252,16 @@ pub mod types_rpc_actix { payload: ::actix_web::web::Payload, ) -> Result<::actix_web::web::Json, ::actix_prost::Error> { let mut payload = payload.into_inner(); - let json = - <::actix_web::web::Json as ::actix_web::FromRequest>::from_request( - &http_request, - &mut payload, - ) + let json = <::actix_web::web::Json< + EnumsRPCJson, + > as ::actix_web::FromRequest>::from_request(&http_request, &mut payload) .await - .map_err(|err| ::actix_prost::Error::from_actix(err, ::tonic::Code::InvalidArgument))? + .map_err(|err| ::actix_prost::Error::from_actix( + err, + ::tonic::Code::InvalidArgument, + ))? 
.into_inner(); - let request = Enums { - values: json.values, - }; + let request = Enums { values: json.values }; let request = ::actix_prost::new_request(request, &http_request); let response = service.enums_rpc(request).await?; let response = response.into_inner(); @@ -272,13 +273,14 @@ pub mod types_rpc_actix { payload: ::actix_web::web::Payload, ) -> Result<::actix_web::web::Json, ::actix_prost::Error> { let mut payload = payload.into_inner(); - let json = - <::actix_web::web::Json as ::actix_web::FromRequest>::from_request( - &http_request, - &mut payload, - ) + let json = <::actix_web::web::Json< + RepeatedRPCJson, + > as ::actix_web::FromRequest>::from_request(&http_request, &mut payload) .await - .map_err(|err| ::actix_prost::Error::from_actix(err, ::tonic::Code::InvalidArgument))? + .map_err(|err| ::actix_prost::Error::from_actix( + err, + ::tonic::Code::InvalidArgument, + ))? .into_inner(); let request = Repeated { foo: json.foo }; let request = ::actix_prost::new_request(request, &http_request); @@ -292,13 +294,15 @@ pub mod types_rpc_actix { payload: ::actix_web::web::Payload, ) -> Result<::actix_web::web::Json, ::actix_prost::Error> { let mut payload = payload.into_inner(); - let json = <::actix_web::web::Json as ::actix_web::FromRequest>::from_request( - &http_request, - &mut payload, - ) - .await - .map_err(|err| ::actix_prost::Error::from_actix(err, ::tonic::Code::InvalidArgument))? - .into_inner(); + let json = <::actix_web::web::Json< + MapsRPCJson, + > as ::actix_web::FromRequest>::from_request(&http_request, &mut payload) + .await + .map_err(|err| ::actix_prost::Error::from_actix( + err, + ::tonic::Code::InvalidArgument, + ))? + .into_inner(); let request = Maps { foo: json.foo }; let request = ::actix_prost::new_request(request, &http_request); let response = service.maps_rpc(request).await?; @@ -311,17 +315,16 @@ pub mod types_rpc_actix { payload: ::actix_web::web::Payload, ) -> Result<::actix_web::web::Json, ::actix_prost::Error> { let mut payload = payload.into_inner(); - let json = - <::actix_web::web::Json as ::actix_web::FromRequest>::from_request( - &http_request, - &mut payload, - ) + let json = <::actix_web::web::Json< + OneOfsRPCJson, + > as ::actix_web::FromRequest>::from_request(&http_request, &mut payload) .await - .map_err(|err| ::actix_prost::Error::from_actix(err, ::tonic::Code::InvalidArgument))? + .map_err(|err| ::actix_prost::Error::from_actix( + err, + ::tonic::Code::InvalidArgument, + ))? .into_inner(); - let request = OneOfs { - values: json.values, - }; + let request = OneOfs { values: json.values }; let request = ::actix_prost::new_request(request, &http_request); let response = service.one_ofs_rpc(request).await?; let response = response.into_inner(); @@ -333,13 +336,14 @@ pub mod types_rpc_actix { payload: ::actix_web::web::Payload, ) -> Result<::actix_web::web::Json, ::actix_prost::Error> { let mut payload = payload.into_inner(); - let json = - <::actix_web::web::Json as ::actix_web::FromRequest>::from_request( - &http_request, - &mut payload, - ) + let json = <::actix_web::web::Json< + ComplexRPCJson, + > as ::actix_web::FromRequest>::from_request(&http_request, &mut payload) .await - .map_err(|err| ::actix_prost::Error::from_actix(err, ::tonic::Code::InvalidArgument))? + .map_err(|err| ::actix_prost::Error::from_actix( + err, + ::tonic::Code::InvalidArgument, + ))? 
.into_inner(); let request = Complex { scalars: json.scalars, @@ -358,34 +362,172 @@ pub mod types_rpc_actix { service: Arc, ) { config.app_data(::actix_web::web::Data::from(service)); - config.route( - "/types/scalars", - ::actix_web::web::post().to(call_scalars_rpc), - ); - config.route( - "/types/optional_scalars", - ::actix_web::web::post().to(call_optional_scalars_rpc), - ); + config.route("/types/scalars", ::actix_web::web::post().to(call_scalars_rpc)); + config + .route( + "/types/optional_scalars", + ::actix_web::web::post().to(call_optional_scalars_rpc), + ); config.route("/types/enums", ::actix_web::web::post().to(call_enums_rpc)); - config.route( - "/types/repeated", - ::actix_web::web::post().to(call_repeated_rpc), - ); + config.route("/types/repeated", ::actix_web::web::post().to(call_repeated_rpc)); config.route("/types/maps", ::actix_web::web::post().to(call_maps_rpc)); - config.route( - "/types/oneofs", - ::actix_web::web::post().to(call_one_ofs_rpc), - ); - config.route( - "/types/complex", - ::actix_web::web::post().to(call_complex_rpc), - ); + config.route("/types/oneofs", ::actix_web::web::post().to(call_one_ofs_rpc)); + config.route("/types/complex", ::actix_web::web::post().to(call_complex_rpc)); + } +} +#[derive(Debug)] +pub struct ScalarsInternal { + pub a: f64, + pub b: i64, + pub c: ::prost::alloc::string::String, + pub d: ::prost::bytes::Bytes, + pub e: bool, +} +impl convert_trait::TryConvert for ScalarsInternal { + fn try_convert(from: Scalars) -> Result { + Ok(Self { + a: from.a, + b: from.b, + c: from.c, + d: from.d, + e: from.e, + }) + } +} +impl convert_trait::TryConvert for Scalars { + fn try_convert(from: ScalarsInternal) -> Result { + Ok(Self { + a: from.a, + b: from.b, + c: from.c, + d: from.d, + e: from.e, + }) + } +} +#[derive(Debug)] +pub struct OptionalScalarsInternal { + pub a: ::core::option::Option, + pub b: ::core::option::Option, + pub c: ::core::option::Option<::prost::alloc::string::String>, + pub d: ::core::option::Option<::prost::bytes::Bytes>, + pub e: ::core::option::Option, +} +impl convert_trait::TryConvert for OptionalScalarsInternal { + fn try_convert(from: OptionalScalars) -> Result { + Ok(Self { + a: from.a, + b: from.b, + c: from.c, + d: from.d, + e: from.e, + }) + } +} +impl convert_trait::TryConvert for OptionalScalars { + fn try_convert(from: OptionalScalarsInternal) -> Result { + Ok(Self { + a: from.a, + b: from.b, + c: from.c, + d: from.d, + e: from.e, + }) + } +} +#[derive(Debug)] +pub struct EnumsInternal { + pub values: Values, +} +impl convert_trait::TryConvert for EnumsInternal { + fn try_convert(from: Enums) -> Result { + Ok(Self { + values: Values::try_from(from.values)?, + }) + } +} +impl convert_trait::TryConvert for Enums { + fn try_convert(from: EnumsInternal) -> Result { + Ok(Self { values: from.values.into() }) + } +} +#[derive(Debug)] +pub struct RepeatedInternal { + pub foo: ::prost::alloc::vec::Vec<::prost::alloc::string::String>, +} +impl convert_trait::TryConvert for RepeatedInternal { + fn try_convert(from: Repeated) -> Result { + Ok(Self { foo: from.foo }) + } +} +impl convert_trait::TryConvert for Repeated { + fn try_convert(from: RepeatedInternal) -> Result { + Ok(Self { foo: from.foo }) + } +} +#[derive(Debug)] +pub struct MapsInternal { + pub foo: ::std::collections::HashMap<::prost::alloc::string::String, i32>, +} +impl convert_trait::TryConvert for MapsInternal { + fn try_convert(from: Maps) -> Result { + Ok(Self { foo: from.foo }) + } +} +impl convert_trait::TryConvert for Maps { + fn 
try_convert(from: MapsInternal) -> Result { + Ok(Self { foo: from.foo }) + } +} +#[derive(Debug)] +pub struct OneOfsInternal { + pub values: ::core::option::Option, +} +impl convert_trait::TryConvert for OneOfsInternal { + fn try_convert(from: OneOfs) -> Result { + Ok(Self { values: from.values }) + } +} +impl convert_trait::TryConvert for OneOfs { + fn try_convert(from: OneOfsInternal) -> Result { + Ok(Self { values: from.values }) + } +} +#[derive(Debug)] +pub struct ComplexInternal { + pub scalars: ::core::option::Option, + pub enums: ::core::option::Option, + pub repeated: ::core::option::Option, + pub maps: ::core::option::Option, + pub oneofs: ::core::option::Option, +} +impl convert_trait::TryConvert for ComplexInternal { + fn try_convert(from: Complex) -> Result { + Ok(Self { + scalars: convert_trait::TryConvert::try_convert(from.scalars)?, + enums: convert_trait::TryConvert::try_convert(from.enums)?, + repeated: convert_trait::TryConvert::try_convert(from.repeated)?, + maps: convert_trait::TryConvert::try_convert(from.maps)?, + oneofs: convert_trait::TryConvert::try_convert(from.oneofs)?, + }) + } +} +impl convert_trait::TryConvert for Complex { + fn try_convert(from: ComplexInternal) -> Result { + Ok(Self { + scalars: convert_trait::TryConvert::try_convert(from.scalars)?, + enums: convert_trait::TryConvert::try_convert(from.enums)?, + repeated: convert_trait::TryConvert::try_convert(from.repeated)?, + maps: convert_trait::TryConvert::try_convert(from.maps)?, + oneofs: convert_trait::TryConvert::try_convert(from.oneofs)?, + }) } } /// Generated client implementations. pub mod types_rpc_client { #![allow(unused_variables, dead_code, missing_docs, clippy::let_unit_value)] - use tonic::codegen::{http::Uri, *}; + use tonic::codegen::*; + use tonic::codegen::http::Uri; #[derive(Debug, Clone)] pub struct TypesRpcClient { inner: tonic::client::Grpc, @@ -429,8 +571,9 @@ pub mod types_rpc_client { >::ResponseBody, >, >, - >>::Error: - Into + Send + Sync, + , + >>::Error: Into + Send + Sync, { TypesRpcClient::new(InterceptedService::new(inner, interceptor)) } @@ -453,40 +596,53 @@ pub mod types_rpc_client { &mut self, request: impl tonic::IntoRequest, ) -> Result, tonic::Status> { - self.inner.ready().await.map_err(|e| { - tonic::Status::new( - tonic::Code::Unknown, - format!("Service was not ready: {}", e.into()), - ) - })?; + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::new( + tonic::Code::Unknown, + format!("Service was not ready: {}", e.into()), + ) + })?; let codec = tonic::codec::ProstCodec::default(); - let path = http::uri::PathAndQuery::from_static("/types.TypesRPC/ScalarsRPC"); + let path = http::uri::PathAndQuery::from_static( + "/types.TypesRPC/ScalarsRPC", + ); self.inner.unary(request.into_request(), path, codec).await } pub async fn optional_scalars_rpc( &mut self, request: impl tonic::IntoRequest, ) -> Result, tonic::Status> { - self.inner.ready().await.map_err(|e| { - tonic::Status::new( - tonic::Code::Unknown, - format!("Service was not ready: {}", e.into()), - ) - })?; + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::new( + tonic::Code::Unknown, + format!("Service was not ready: {}", e.into()), + ) + })?; let codec = tonic::codec::ProstCodec::default(); - let path = http::uri::PathAndQuery::from_static("/types.TypesRPC/OptionalScalarsRPC"); + let path = http::uri::PathAndQuery::from_static( + "/types.TypesRPC/OptionalScalarsRPC", + ); self.inner.unary(request.into_request(), path, codec).await } pub async fn enums_rpc( &mut self, 
request: impl tonic::IntoRequest, ) -> Result, tonic::Status> { - self.inner.ready().await.map_err(|e| { - tonic::Status::new( - tonic::Code::Unknown, - format!("Service was not ready: {}", e.into()), - ) - })?; + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::new( + tonic::Code::Unknown, + format!("Service was not ready: {}", e.into()), + ) + })?; let codec = tonic::codec::ProstCodec::default(); let path = http::uri::PathAndQuery::from_static("/types.TypesRPC/EnumsRPC"); self.inner.unary(request.into_request(), path, codec).await @@ -495,26 +651,34 @@ pub mod types_rpc_client { &mut self, request: impl tonic::IntoRequest, ) -> Result, tonic::Status> { - self.inner.ready().await.map_err(|e| { - tonic::Status::new( - tonic::Code::Unknown, - format!("Service was not ready: {}", e.into()), - ) - })?; + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::new( + tonic::Code::Unknown, + format!("Service was not ready: {}", e.into()), + ) + })?; let codec = tonic::codec::ProstCodec::default(); - let path = http::uri::PathAndQuery::from_static("/types.TypesRPC/RepeatedRPC"); + let path = http::uri::PathAndQuery::from_static( + "/types.TypesRPC/RepeatedRPC", + ); self.inner.unary(request.into_request(), path, codec).await } pub async fn maps_rpc( &mut self, request: impl tonic::IntoRequest, ) -> Result, tonic::Status> { - self.inner.ready().await.map_err(|e| { - tonic::Status::new( - tonic::Code::Unknown, - format!("Service was not ready: {}", e.into()), - ) - })?; + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::new( + tonic::Code::Unknown, + format!("Service was not ready: {}", e.into()), + ) + })?; let codec = tonic::codec::ProstCodec::default(); let path = http::uri::PathAndQuery::from_static("/types.TypesRPC/MapsRPC"); self.inner.unary(request.into_request(), path, codec).await @@ -523,12 +687,15 @@ pub mod types_rpc_client { &mut self, request: impl tonic::IntoRequest, ) -> Result, tonic::Status> { - self.inner.ready().await.map_err(|e| { - tonic::Status::new( - tonic::Code::Unknown, - format!("Service was not ready: {}", e.into()), - ) - })?; + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::new( + tonic::Code::Unknown, + format!("Service was not ready: {}", e.into()), + ) + })?; let codec = tonic::codec::ProstCodec::default(); let path = http::uri::PathAndQuery::from_static("/types.TypesRPC/OneOfsRPC"); self.inner.unary(request.into_request(), path, codec).await @@ -538,14 +705,19 @@ pub mod types_rpc_client { &mut self, request: impl tonic::IntoRequest, ) -> Result, tonic::Status> { - self.inner.ready().await.map_err(|e| { - tonic::Status::new( - tonic::Code::Unknown, - format!("Service was not ready: {}", e.into()), - ) - })?; + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::new( + tonic::Code::Unknown, + format!("Service was not ready: {}", e.into()), + ) + })?; let codec = tonic::codec::ProstCodec::default(); - let path = http::uri::PathAndQuery::from_static("/types.TypesRPC/ComplexRPC"); + let path = http::uri::PathAndQuery::from_static( + "/types.TypesRPC/ComplexRPC", + ); self.inner.unary(request.into_request(), path, codec).await } } @@ -606,7 +778,10 @@ pub mod types_rpc_server { send_compression_encodings: Default::default(), } } - pub fn with_interceptor(inner: T, interceptor: F) -> InterceptedService + pub fn with_interceptor( + inner: T, + interceptor: F, + ) -> InterceptedService where F: tonic::service::Interceptor, { @@ -634,7 +809,10 @@ pub mod types_rpc_server { type Response = 
http::Response; type Error = std::convert::Infallible; type Future = BoxFuture; - fn poll_ready(&mut self, _cx: &mut Context<'_>) -> Poll> { + fn poll_ready( + &mut self, + _cx: &mut Context<'_>, + ) -> Poll> { Poll::Ready(Ok(())) } fn call(&mut self, req: http::Request) -> Self::Future { @@ -643,9 +821,13 @@ pub mod types_rpc_server { "/types.TypesRPC/ScalarsRPC" => { #[allow(non_camel_case_types)] struct ScalarsRPCSvc(pub Arc); - impl tonic::server::UnaryService for ScalarsRPCSvc { + impl tonic::server::UnaryService + for ScalarsRPCSvc { type Response = super::Scalars; - type Future = BoxFuture, tonic::Status>; + type Future = BoxFuture< + tonic::Response, + tonic::Status, + >; fn call( &mut self, request: tonic::Request, @@ -662,10 +844,11 @@ pub mod types_rpc_server { let inner = inner.0; let method = ScalarsRPCSvc(inner); let codec = tonic::codec::ProstCodec::default(); - let mut grpc = tonic::server::Grpc::new(codec).apply_compression_config( - accept_compression_encodings, - send_compression_encodings, - ); + let mut grpc = tonic::server::Grpc::new(codec) + .apply_compression_config( + accept_compression_encodings, + send_compression_encodings, + ); let res = grpc.unary(method, req).await; Ok(res) }; @@ -674,15 +857,21 @@ pub mod types_rpc_server { "/types.TypesRPC/OptionalScalarsRPC" => { #[allow(non_camel_case_types)] struct OptionalScalarsRPCSvc(pub Arc); - impl tonic::server::UnaryService for OptionalScalarsRPCSvc { + impl tonic::server::UnaryService + for OptionalScalarsRPCSvc { type Response = super::OptionalScalars; - type Future = BoxFuture, tonic::Status>; + type Future = BoxFuture< + tonic::Response, + tonic::Status, + >; fn call( &mut self, request: tonic::Request, ) -> Self::Future { let inner = self.0.clone(); - let fut = async move { (*inner).optional_scalars_rpc(request).await }; + let fut = async move { + (*inner).optional_scalars_rpc(request).await + }; Box::pin(fut) } } @@ -693,10 +882,11 @@ pub mod types_rpc_server { let inner = inner.0; let method = OptionalScalarsRPCSvc(inner); let codec = tonic::codec::ProstCodec::default(); - let mut grpc = tonic::server::Grpc::new(codec).apply_compression_config( - accept_compression_encodings, - send_compression_encodings, - ); + let mut grpc = tonic::server::Grpc::new(codec) + .apply_compression_config( + accept_compression_encodings, + send_compression_encodings, + ); let res = grpc.unary(method, req).await; Ok(res) }; @@ -705,10 +895,17 @@ pub mod types_rpc_server { "/types.TypesRPC/EnumsRPC" => { #[allow(non_camel_case_types)] struct EnumsRPCSvc(pub Arc); - impl tonic::server::UnaryService for EnumsRPCSvc { + impl tonic::server::UnaryService + for EnumsRPCSvc { type Response = super::Enums; - type Future = BoxFuture, tonic::Status>; - fn call(&mut self, request: tonic::Request) -> Self::Future { + type Future = BoxFuture< + tonic::Response, + tonic::Status, + >; + fn call( + &mut self, + request: tonic::Request, + ) -> Self::Future { let inner = self.0.clone(); let fut = async move { (*inner).enums_rpc(request).await }; Box::pin(fut) @@ -721,10 +918,11 @@ pub mod types_rpc_server { let inner = inner.0; let method = EnumsRPCSvc(inner); let codec = tonic::codec::ProstCodec::default(); - let mut grpc = tonic::server::Grpc::new(codec).apply_compression_config( - accept_compression_encodings, - send_compression_encodings, - ); + let mut grpc = tonic::server::Grpc::new(codec) + .apply_compression_config( + accept_compression_encodings, + send_compression_encodings, + ); let res = grpc.unary(method, req).await; Ok(res) }; @@ 
-733,15 +931,21 @@ pub mod types_rpc_server { "/types.TypesRPC/RepeatedRPC" => { #[allow(non_camel_case_types)] struct RepeatedRPCSvc(pub Arc); - impl tonic::server::UnaryService for RepeatedRPCSvc { + impl tonic::server::UnaryService + for RepeatedRPCSvc { type Response = super::Repeated; - type Future = BoxFuture, tonic::Status>; + type Future = BoxFuture< + tonic::Response, + tonic::Status, + >; fn call( &mut self, request: tonic::Request, ) -> Self::Future { let inner = self.0.clone(); - let fut = async move { (*inner).repeated_rpc(request).await }; + let fut = async move { + (*inner).repeated_rpc(request).await + }; Box::pin(fut) } } @@ -752,10 +956,11 @@ pub mod types_rpc_server { let inner = inner.0; let method = RepeatedRPCSvc(inner); let codec = tonic::codec::ProstCodec::default(); - let mut grpc = tonic::server::Grpc::new(codec).apply_compression_config( - accept_compression_encodings, - send_compression_encodings, - ); + let mut grpc = tonic::server::Grpc::new(codec) + .apply_compression_config( + accept_compression_encodings, + send_compression_encodings, + ); let res = grpc.unary(method, req).await; Ok(res) }; @@ -764,10 +969,17 @@ pub mod types_rpc_server { "/types.TypesRPC/MapsRPC" => { #[allow(non_camel_case_types)] struct MapsRPCSvc(pub Arc); - impl tonic::server::UnaryService for MapsRPCSvc { + impl tonic::server::UnaryService + for MapsRPCSvc { type Response = super::Maps; - type Future = BoxFuture, tonic::Status>; - fn call(&mut self, request: tonic::Request) -> Self::Future { + type Future = BoxFuture< + tonic::Response, + tonic::Status, + >; + fn call( + &mut self, + request: tonic::Request, + ) -> Self::Future { let inner = self.0.clone(); let fut = async move { (*inner).maps_rpc(request).await }; Box::pin(fut) @@ -780,10 +992,11 @@ pub mod types_rpc_server { let inner = inner.0; let method = MapsRPCSvc(inner); let codec = tonic::codec::ProstCodec::default(); - let mut grpc = tonic::server::Grpc::new(codec).apply_compression_config( - accept_compression_encodings, - send_compression_encodings, - ); + let mut grpc = tonic::server::Grpc::new(codec) + .apply_compression_config( + accept_compression_encodings, + send_compression_encodings, + ); let res = grpc.unary(method, req).await; Ok(res) }; @@ -792,10 +1005,17 @@ pub mod types_rpc_server { "/types.TypesRPC/OneOfsRPC" => { #[allow(non_camel_case_types)] struct OneOfsRPCSvc(pub Arc); - impl tonic::server::UnaryService for OneOfsRPCSvc { + impl tonic::server::UnaryService + for OneOfsRPCSvc { type Response = super::OneOfs; - type Future = BoxFuture, tonic::Status>; - fn call(&mut self, request: tonic::Request) -> Self::Future { + type Future = BoxFuture< + tonic::Response, + tonic::Status, + >; + fn call( + &mut self, + request: tonic::Request, + ) -> Self::Future { let inner = self.0.clone(); let fut = async move { (*inner).one_ofs_rpc(request).await }; Box::pin(fut) @@ -808,10 +1028,11 @@ pub mod types_rpc_server { let inner = inner.0; let method = OneOfsRPCSvc(inner); let codec = tonic::codec::ProstCodec::default(); - let mut grpc = tonic::server::Grpc::new(codec).apply_compression_config( - accept_compression_encodings, - send_compression_encodings, - ); + let mut grpc = tonic::server::Grpc::new(codec) + .apply_compression_config( + accept_compression_encodings, + send_compression_encodings, + ); let res = grpc.unary(method, req).await; Ok(res) }; @@ -820,9 +1041,13 @@ pub mod types_rpc_server { "/types.TypesRPC/ComplexRPC" => { #[allow(non_camel_case_types)] struct ComplexRPCSvc(pub Arc); - impl 
<T: TypesRpc> tonic::server::UnaryService<super::Complex> for ComplexRPCSvc<T> {
+                    impl<T: TypesRpc> tonic::server::UnaryService<super::Complex>
+                    for ComplexRPCSvc<T> {
                         type Response = super::Complex;
-                        type Future = BoxFuture<tonic::Response<super::Complex>, tonic::Status>;
+                        type Future = BoxFuture<
+                            tonic::Response<super::Complex>,
+                            tonic::Status,
+                        >;
                         fn call(
                             &mut self,
                             request: tonic::Request<super::Complex>,
@@ -839,23 +1064,28 @@ pub mod types_rpc_server {
                         let inner = inner.0;
                         let method = ComplexRPCSvc(inner);
                         let codec = tonic::codec::ProstCodec::default();
-                        let mut grpc = tonic::server::Grpc::new(codec).apply_compression_config(
-                            accept_compression_encodings,
-                            send_compression_encodings,
-                        );
+                        let mut grpc = tonic::server::Grpc::new(codec)
+                            .apply_compression_config(
+                                accept_compression_encodings,
+                                send_compression_encodings,
+                            );
                         let res = grpc.unary(method, req).await;
                         Ok(res)
                     };
                     Box::pin(fut)
                 }
-                _ => Box::pin(async move {
-                    Ok(http::Response::builder()
-                        .status(200)
-                        .header("grpc-status", "12")
-                        .header("content-type", "application/grpc")
-                        .body(empty_body())
-                        .unwrap())
-                }),
+                _ => {
+                    Box::pin(async move {
+                        Ok(
+                            http::Response::builder()
+                                .status(200)
+                                .header("grpc-status", "12")
+                                .header("content-type", "application/grpc")
+                                .body(empty_body())
+                                .unwrap(),
+                        )
+                    })
+                }
             }
         }
     }
diff --git a/tests/src/rest.rs b/tests/src/rest.rs
index 7983c1e..b4ba187 100644
--- a/tests/src/rest.rs
+++ b/tests/src/rest.rs
@@ -1,7 +1,9 @@
 use crate::{
-    proto::rest::{
-        rest_rpc_actix::route_rest_rpc, rest_rpc_server::RestRpc,
-        simple_rpc_actix::route_simple_rpc, simple_rpc_server::SimpleRpc, Get, Post,
+    proto::{
+        rest::{rest_rpc_actix::route_rest_rpc, rest_rpc_server::RestRpc, Get, Post},
+        simple::{
+            simple_rpc_actix::route_simple_rpc, simple_rpc_server::SimpleRpc, Post as SimplePost,
+        },
     },
     test,
 };
@@ -218,7 +220,7 @@ struct HeaderServer {}
 #[async_trait::async_trait]
 impl SimpleRpc for HeaderServer {
-    async fn post_rpc(&self, request: Request<Post>) -> Result<Response<Post>, Status> {
+    async fn post_rpc(&self, request: Request<SimplePost>) -> Result<Response<SimplePost>, Status> {
         let mut meta = request
             .metadata()
             .iter()
@@ -226,7 +228,7 @@ impl SimpleRpc for HeaderServer {
             .collect::<Vec<_>>();
         meta.sort();
         let meta = meta.join(",");
-        Ok(Response::new(Post {
+        Ok(Response::new(SimplePost {
             foo: meta,
             bar: request.get_ref().bar,
             long_name: request.get_ref().long_name,