From 77b2e9c06d523c872059e3375ac7cc8c024f011d Mon Sep 17 00:00:00 2001
From: zhuyasen
Date: Sun, 22 Sep 2024 18:30:59 +0800
Subject: [PATCH] add examples

---
 6_micro-cluster/README.md | 14 +- .../comment/.gitignore | 0 .../comment/.golangci.yml | 0 .../comment/Jenkinsfile | 0 .../comment/Makefile | 0 .../comment/README.md | 0 .../comment/api/comment/v1/comment.pb.go | 0 .../api/comment/v1/comment.pb.validate.go | 0 .../comment/api/comment/v1/comment.proto | 0 .../comment/api/comment/v1/comment_grpc.pb.go | 0 .../comment/cmd/comment/initial/initApp.go | 0 .../cmd/comment/initial/registerClose.go | 0 .../cmd/comment/initial/registerServer.go | 0 .../comment/cmd/comment/main.go | 0 .../comment/configs/comment.yml | 0 .../comment/configs/comment_cc.yml | 0 .../comment/configs/location.go | 0 .../comment/deployments/binary/README.md | 0 .../comment/deployments/binary/deploy.sh | 0 .../comment/deployments/binary/run.sh | 0 .../deployments/docker-compose/README.md | 0 .../docker-compose/docker-compose.yml | 0 .../comment/deployments/kubernetes/README.md | 0 .../kubernetes/comment-configmap.yml | 0 .../kubernetes/comment-deployment.yml | 0 .../deployments/kubernetes/comment-svc.yml | 0 .../projectNameExample-namespace.yml | 0 .../comment/docs/gen.info | 0 .../{ => example-1-multi-repo}/comment/go.mod | 0 .../{ => example-1-multi-repo}/comment/go.sum | 0 .../comment/internal/config/comment.go | 0 .../comment/internal/config/comment_cc.go | 0 .../comment/internal/config/comment_test.go | 0 .../comment/internal/ecode/comment_rpc.go | 0 .../comment/internal/ecode/systemCode_rpc.go | 0 .../comment/internal/server/grpc.go | 0 .../comment/internal/server/grpc_option.go | 0 .../comment/internal/server/grpc_test.go | 0 .../comment/internal/service/comment.go | 0 .../internal/service/comment_client_test.go | 0 .../comment/internal/service/service.go | 0 .../comment/internal/service/service_test.go | 0 .../comment/scripts/binary-package.sh | 0 .../comment/scripts/build/Dockerfile | 0 .../comment/scripts/build/Dockerfile_build | 0 .../comment/scripts/build/Dockerfile_test | 0 .../comment/scripts/build/README.md | 0 .../comment/scripts/deploy-binary.sh | 0 .../comment/scripts/deploy-docker.sh | 0 .../comment/scripts/deploy-k8s.sh | 0 .../comment/scripts/image-build-local.sh | 0 .../comment/scripts/image-build.sh | 0 .../comment/scripts/image-build2.sh | 0 .../comment/scripts/image-push.sh | 0 .../comment/scripts/image-rpc-test.sh | 0 .../comment/scripts/patch.sh | 0 .../comment/scripts/proto-doc.sh | 0 .../comment/scripts/protoc.sh | 0 .../comment/scripts/run-nohup.sh | 0 .../comment/scripts/run.sh | 0 .../comment/scripts/swag-docs.sh | 0 .../gogo/protobuf/gogoproto/gogo.proto | 0 .../third_party/google/api/BUILD.bazel | 0 .../comment/third_party/google/api/README.md | 0 .../third_party/google/api/annotations.proto | 0 .../comment/third_party/google/api/auth.proto | 0 .../third_party/google/api/backend.proto | 0 .../third_party/google/api/billing.proto | 0 .../third_party/google/api/client.proto | 0 .../google/api/config_change.proto | 0 .../third_party/google/api/consumer.proto | 0 .../third_party/google/api/context.proto | 0 .../third_party/google/api/control.proto | 0 .../third_party/google/api/distribution.proto | 0 .../google/api/documentation.proto | 0 .../third_party/google/api/endpoint.proto | 0 .../third_party/google/api/expr/BUILD.bazel | 0 .../third_party/google/api/expr/cel.yaml | 0 .../google/api/expr/v1alpha1/BUILD.bazel | 0 .../google/api/expr/v1alpha1/checked.proto | 0 
.../expr/v1alpha1/conformance_service.proto | 0 .../google/api/expr/v1alpha1/eval.proto | 0 .../google/api/expr/v1alpha1/explain.proto | 0 .../google/api/expr/v1alpha1/syntax.proto | 0 .../google/api/expr/v1alpha1/value.proto | 0 .../google/api/expr/v1beta1/BUILD.bazel | 0 .../google/api/expr/v1beta1/decl.proto | 0 .../google/api/expr/v1beta1/eval.proto | 0 .../google/api/expr/v1beta1/expr.proto | 0 .../google/api/expr/v1beta1/source.proto | 0 .../google/api/expr/v1beta1/value.proto | 0 .../google/api/field_behavior.proto | 0 .../comment/third_party/google/api/http.proto | 0 .../third_party/google/api/httpbody.proto | 0 .../third_party/google/api/label.proto | 0 .../third_party/google/api/launch_stage.proto | 0 .../comment/third_party/google/api/log.proto | 0 .../third_party/google/api/logging.proto | 0 .../third_party/google/api/metric.proto | 0 .../google/api/monitored_resource.proto | 0 .../third_party/google/api/monitoring.proto | 0 .../third_party/google/api/quota.proto | 0 .../third_party/google/api/resource.proto | 0 .../third_party/google/api/service.proto | 0 .../third_party/google/api/serviceconfig.yaml | 0 .../google/api/servicecontrol/BUILD.bazel | 0 .../google/api/servicecontrol/README.md | 0 .../google/api/servicecontrol/v1/BUILD.bazel | 0 .../api/servicecontrol/v1/check_error.proto | 0 .../api/servicecontrol/v1/distribution.proto | 0 .../api/servicecontrol/v1/http_request.proto | 0 .../api/servicecontrol/v1/log_entry.proto | 0 .../api/servicecontrol/v1/metric_value.proto | 0 .../api/servicecontrol/v1/operation.proto | 0 .../servicecontrol/v1/quota_controller.proto | 0 .../v1/service_controller.proto | 0 .../api/servicecontrol/v1/servicecontrol.yaml | 0 .../google/api/servicemanagement/BUILD.bazel | 0 .../google/api/servicemanagement/README.md | 0 .../api/servicemanagement/v1/BUILD.bazel | 0 .../api/servicemanagement/v1/resources.proto | 0 .../v1/servicemanagement_gapic.legacy.yaml | 0 .../v1/servicemanagement_gapic.yaml | 0 ...servicemanagement_grpc_service_config.json | 0 .../v1/servicemanagement_v1.yaml | 0 .../servicemanagement/v1/servicemanager.proto | 0 .../third_party/google/api/source_info.proto | 0 .../google/api/system_parameter.proto | 0 .../third_party/google/api/usage.proto | 0 .../google/protobuf/annotations.proto | 0 .../third_party/google/protobuf/any.proto | 0 .../third_party/google/protobuf/api.proto | 0 .../google/protobuf/compiler/plugin.proto | 0 .../google/protobuf/descriptor.proto | 0 .../google/protobuf/duration.proto | 0 .../third_party/google/protobuf/empty.proto | 0 .../google/protobuf/field_mask.proto | 0 .../google/protobuf/source_context.proto | 0 .../third_party/google/protobuf/struct.proto | 0 .../google/protobuf/timestamp.proto | 0 .../third_party/google/protobuf/type.proto | 0 .../google/protobuf/wrappers.proto | 0 .../options/annotations.proto | 0 .../options/openapiv2.proto | 0 .../comment/third_party/tagger/tagger.proto | 0 .../third_party/validate/validate.proto | 0 .../inventory/.gitignore | 0 .../inventory/.golangci.yml | 0 .../inventory/Jenkinsfile | 0 .../inventory/Makefile | 0 .../inventory/README.md | 0 .../api/inventory/v1/inventory.pb.go | 0 .../api/inventory/v1/inventory.pb.validate.go | 0 .../api/inventory/v1/inventory.proto | 0 .../api/inventory/v1/inventory_grpc.pb.go | 0 .../cmd/inventory/initial/initApp.go | 0 .../cmd/inventory/initial/registerClose.go | 0 .../cmd/inventory/initial/registerServer.go | 0 .../inventory/cmd/inventory/main.go | 0 .../inventory/configs/inventory.yml | 0 .../inventory/configs/inventory_cc.yml | 0 
.../inventory/configs/location.go | 0 .../inventory/deployments/binary/README.md | 0 .../inventory/deployments/binary/deploy.sh | 0 .../inventory/deployments/binary/run.sh | 0 .../deployments/docker-compose/README.md | 0 .../docker-compose/docker-compose.yml | 0 .../deployments/kubernetes/README.md | 0 .../kubernetes/inventory-configmap.yml | 0 .../kubernetes/inventory-deployment.yml | 0 .../deployments/kubernetes/inventory-svc.yml | 0 .../projectNameExample-namespace.yml | 0 .../inventory/docs/gen.info | 0 .../inventory/go.mod | 0 .../inventory/go.sum | 0 .../inventory/internal/config/inventory.go | 0 .../inventory/internal/config/inventory_cc.go | 0 .../internal/config/inventory_test.go | 0 .../inventory/internal/ecode/inventory_rpc.go | 0 .../internal/ecode/systemCode_rpc.go | 0 .../inventory/internal/server/grpc.go | 0 .../inventory/internal/server/grpc_option.go | 0 .../inventory/internal/server/grpc_test.go | 0 .../inventory/internal/service/inventory.go | 0 .../internal/service/inventory_client_test.go | 0 .../inventory/internal/service/service.go | 0 .../internal/service/service_test.go | 0 .../inventory/scripts/binary-package.sh | 0 .../inventory/scripts/build/Dockerfile | 0 .../inventory/scripts/build/Dockerfile_build | 0 .../inventory/scripts/build/Dockerfile_test | 0 .../inventory/scripts/build/README.md | 0 .../inventory/scripts/deploy-binary.sh | 0 .../inventory/scripts/deploy-docker.sh | 0 .../inventory/scripts/deploy-k8s.sh | 0 .../inventory/scripts/image-build-local.sh | 0 .../inventory/scripts/image-build.sh | 0 .../inventory/scripts/image-build2.sh | 0 .../inventory/scripts/image-push.sh | 0 .../inventory/scripts/image-rpc-test.sh | 0 .../inventory/scripts/patch.sh | 0 .../inventory/scripts/proto-doc.sh | 0 .../inventory/scripts/protoc.sh | 0 .../inventory/scripts/run-nohup.sh | 0 .../inventory/scripts/run.sh | 0 .../inventory/scripts/swag-docs.sh | 0 .../gogo/protobuf/gogoproto/gogo.proto | 0 .../third_party/google/api/BUILD.bazel | 0 .../third_party/google/api/README.md | 0 .../third_party/google/api/annotations.proto | 0 .../third_party/google/api/auth.proto | 0 .../third_party/google/api/backend.proto | 0 .../third_party/google/api/billing.proto | 0 .../third_party/google/api/client.proto | 0 .../google/api/config_change.proto | 0 .../third_party/google/api/consumer.proto | 0 .../third_party/google/api/context.proto | 0 .../third_party/google/api/control.proto | 0 .../third_party/google/api/distribution.proto | 0 .../google/api/documentation.proto | 0 .../third_party/google/api/endpoint.proto | 0 .../third_party/google/api/expr/BUILD.bazel | 0 .../third_party/google/api/expr/cel.yaml | 0 .../google/api/expr/v1alpha1/BUILD.bazel | 0 .../google/api/expr/v1alpha1/checked.proto | 0 .../expr/v1alpha1/conformance_service.proto | 0 .../google/api/expr/v1alpha1/eval.proto | 0 .../google/api/expr/v1alpha1/explain.proto | 0 .../google/api/expr/v1alpha1/syntax.proto | 0 .../google/api/expr/v1alpha1/value.proto | 0 .../google/api/expr/v1beta1/BUILD.bazel | 0 .../google/api/expr/v1beta1/decl.proto | 0 .../google/api/expr/v1beta1/eval.proto | 0 .../google/api/expr/v1beta1/expr.proto | 0 .../google/api/expr/v1beta1/source.proto | 0 .../google/api/expr/v1beta1/value.proto | 0 .../google/api/field_behavior.proto | 0 .../third_party/google/api/http.proto | 0 .../third_party/google/api/httpbody.proto | 0 .../third_party/google/api/label.proto | 0 .../third_party/google/api/launch_stage.proto | 0 .../third_party/google/api/log.proto | 0 .../third_party/google/api/logging.proto | 0 
.../third_party/google/api/metric.proto | 0 .../google/api/monitored_resource.proto | 0 .../third_party/google/api/monitoring.proto | 0 .../third_party/google/api/quota.proto | 0 .../third_party/google/api/resource.proto | 0 .../third_party/google/api/service.proto | 0 .../third_party/google/api/serviceconfig.yaml | 0 .../google/api/servicecontrol/BUILD.bazel | 0 .../google/api/servicecontrol/README.md | 0 .../google/api/servicecontrol/v1/BUILD.bazel | 0 .../api/servicecontrol/v1/check_error.proto | 0 .../api/servicecontrol/v1/distribution.proto | 0 .../api/servicecontrol/v1/http_request.proto | 0 .../api/servicecontrol/v1/log_entry.proto | 0 .../api/servicecontrol/v1/metric_value.proto | 0 .../api/servicecontrol/v1/operation.proto | 0 .../servicecontrol/v1/quota_controller.proto | 0 .../v1/service_controller.proto | 0 .../api/servicecontrol/v1/servicecontrol.yaml | 0 .../google/api/servicemanagement/BUILD.bazel | 0 .../google/api/servicemanagement/README.md | 0 .../api/servicemanagement/v1/BUILD.bazel | 0 .../api/servicemanagement/v1/resources.proto | 0 .../v1/servicemanagement_gapic.legacy.yaml | 0 .../v1/servicemanagement_gapic.yaml | 0 ...servicemanagement_grpc_service_config.json | 0 .../v1/servicemanagement_v1.yaml | 0 .../servicemanagement/v1/servicemanager.proto | 0 .../third_party/google/api/source_info.proto | 0 .../google/api/system_parameter.proto | 0 .../third_party/google/api/usage.proto | 0 .../google/protobuf/annotations.proto | 0 .../third_party/google/protobuf/any.proto | 0 .../third_party/google/protobuf/api.proto | 0 .../google/protobuf/compiler/plugin.proto | 0 .../google/protobuf/descriptor.proto | 0 .../google/protobuf/duration.proto | 0 .../third_party/google/protobuf/empty.proto | 0 .../google/protobuf/field_mask.proto | 0 .../google/protobuf/source_context.proto | 0 .../third_party/google/protobuf/struct.proto | 0 .../google/protobuf/timestamp.proto | 0 .../third_party/google/protobuf/type.proto | 0 .../google/protobuf/wrappers.proto | 0 .../options/annotations.proto | 0 .../options/openapiv2.proto | 0 .../inventory/third_party/tagger/tagger.proto | 0 .../third_party/validate/validate.proto | 0 .../product/.gitignore | 0 .../product/.golangci.yml | 0 .../product/Jenkinsfile | 0 .../product/Makefile | 0 .../product/README.md | 0 .../product/api/product/v1/product.pb.go | 0 .../api/product/v1/product.pb.validate.go | 0 .../product/api/product/v1/product.proto | 0 .../product/api/product/v1/product_grpc.pb.go | 0 .../product/cmd/product/initial/initApp.go | 0 .../cmd/product/initial/registerClose.go | 0 .../cmd/product/initial/registerServer.go | 0 .../product/cmd/product/main.go | 0 .../product/configs/location.go | 0 .../product/configs/product.yml | 0 .../product/configs/product_cc.yml | 0 .../product/deployments/binary/README.md | 0 .../product/deployments/binary/deploy.sh | 0 .../product/deployments/binary/run.sh | 0 .../deployments/docker-compose/README.md | 0 .../docker-compose/docker-compose.yml | 0 .../product/deployments/kubernetes/README.md | 0 .../kubernetes/product-configmap.yml | 0 .../kubernetes/product-deployment.yml | 0 .../deployments/kubernetes/product-svc.yml | 0 .../projectNameExample-namespace.yml | 0 .../product/docs/gen.info | 0 .../{ => example-1-multi-repo}/product/go.mod | 0 .../{ => example-1-multi-repo}/product/go.sum | 0 .../product/internal/config/product.go | 0 .../product/internal/config/product_cc.go | 0 .../product/internal/config/product_test.go | 0 .../product/internal/ecode/product_rpc.go | 0 
.../product/internal/ecode/systemCode_rpc.go | 0 .../product/internal/server/grpc.go | 0 .../product/internal/server/grpc_option.go | 0 .../product/internal/server/grpc_test.go | 0 .../product/internal/service/product.go | 0 .../internal/service/product_client_test.go | 0 .../product/internal/service/service.go | 0 .../product/internal/service/service_test.go | 0 .../product/scripts/binary-package.sh | 0 .../product/scripts/build/Dockerfile | 0 .../product/scripts/build/Dockerfile_build | 0 .../product/scripts/build/Dockerfile_test | 0 .../product/scripts/build/README.md | 0 .../product/scripts/deploy-binary.sh | 0 .../product/scripts/deploy-docker.sh | 0 .../product/scripts/deploy-k8s.sh | 0 .../product/scripts/image-build-local.sh | 0 .../product/scripts/image-build.sh | 0 .../product/scripts/image-build2.sh | 0 .../product/scripts/image-push.sh | 0 .../product/scripts/image-rpc-test.sh | 0 .../product/scripts/patch.sh | 0 .../product/scripts/proto-doc.sh | 0 .../product/scripts/protoc.sh | 0 .../product/scripts/run-nohup.sh | 0 .../product/scripts/run.sh | 0 .../product/scripts/swag-docs.sh | 0 .../gogo/protobuf/gogoproto/gogo.proto | 0 .../third_party/google/api/BUILD.bazel | 0 .../product/third_party/google/api/README.md | 0 .../third_party/google/api/annotations.proto | 0 .../product/third_party/google/api/auth.proto | 0 .../third_party/google/api/backend.proto | 0 .../third_party/google/api/billing.proto | 0 .../third_party/google/api/client.proto | 0 .../google/api/config_change.proto | 0 .../third_party/google/api/consumer.proto | 0 .../third_party/google/api/context.proto | 0 .../third_party/google/api/control.proto | 0 .../third_party/google/api/distribution.proto | 0 .../google/api/documentation.proto | 0 .../third_party/google/api/endpoint.proto | 0 .../third_party/google/api/expr/BUILD.bazel | 0 .../third_party/google/api/expr/cel.yaml | 0 .../google/api/expr/v1alpha1/BUILD.bazel | 0 .../google/api/expr/v1alpha1/checked.proto | 0 .../expr/v1alpha1/conformance_service.proto | 0 .../google/api/expr/v1alpha1/eval.proto | 0 .../google/api/expr/v1alpha1/explain.proto | 0 .../google/api/expr/v1alpha1/syntax.proto | 0 .../google/api/expr/v1alpha1/value.proto | 0 .../google/api/expr/v1beta1/BUILD.bazel | 0 .../google/api/expr/v1beta1/decl.proto | 0 .../google/api/expr/v1beta1/eval.proto | 0 .../google/api/expr/v1beta1/expr.proto | 0 .../google/api/expr/v1beta1/source.proto | 0 .../google/api/expr/v1beta1/value.proto | 0 .../google/api/field_behavior.proto | 0 .../product/third_party/google/api/http.proto | 0 .../third_party/google/api/httpbody.proto | 0 .../third_party/google/api/label.proto | 0 .../third_party/google/api/launch_stage.proto | 0 .../product/third_party/google/api/log.proto | 0 .../third_party/google/api/logging.proto | 0 .../third_party/google/api/metric.proto | 0 .../google/api/monitored_resource.proto | 0 .../third_party/google/api/monitoring.proto | 0 .../third_party/google/api/quota.proto | 0 .../third_party/google/api/resource.proto | 0 .../third_party/google/api/service.proto | 0 .../third_party/google/api/serviceconfig.yaml | 0 .../google/api/servicecontrol/BUILD.bazel | 0 .../google/api/servicecontrol/README.md | 0 .../google/api/servicecontrol/v1/BUILD.bazel | 0 .../api/servicecontrol/v1/check_error.proto | 0 .../api/servicecontrol/v1/distribution.proto | 0 .../api/servicecontrol/v1/http_request.proto | 0 .../api/servicecontrol/v1/log_entry.proto | 0 .../api/servicecontrol/v1/metric_value.proto | 0 .../api/servicecontrol/v1/operation.proto | 0 
.../servicecontrol/v1/quota_controller.proto | 0 .../v1/service_controller.proto | 0 .../api/servicecontrol/v1/servicecontrol.yaml | 0 .../google/api/servicemanagement/BUILD.bazel | 0 .../google/api/servicemanagement/README.md | 0 .../api/servicemanagement/v1/BUILD.bazel | 0 .../api/servicemanagement/v1/resources.proto | 0 .../v1/servicemanagement_gapic.legacy.yaml | 0 .../v1/servicemanagement_gapic.yaml | 0 ...servicemanagement_grpc_service_config.json | 0 .../v1/servicemanagement_v1.yaml | 0 .../servicemanagement/v1/servicemanager.proto | 0 .../third_party/google/api/source_info.proto | 0 .../google/api/system_parameter.proto | 0 .../third_party/google/api/usage.proto | 0 .../google/protobuf/annotations.proto | 0 .../third_party/google/protobuf/any.proto | 0 .../third_party/google/protobuf/api.proto | 0 .../google/protobuf/compiler/plugin.proto | 0 .../google/protobuf/descriptor.proto | 0 .../google/protobuf/duration.proto | 0 .../third_party/google/protobuf/empty.proto | 0 .../google/protobuf/field_mask.proto | 0 .../google/protobuf/source_context.proto | 0 .../third_party/google/protobuf/struct.proto | 0 .../google/protobuf/timestamp.proto | 0 .../third_party/google/protobuf/type.proto | 0 .../google/protobuf/wrappers.proto | 0 .../options/annotations.proto | 0 .../options/openapiv2.proto | 0 .../product/third_party/tagger/tagger.proto | 0 .../third_party/validate/validate.proto | 0 .../shop_gw/.gitignore | 0 .../shop_gw/.golangci.yml | 0 .../shop_gw/Jenkinsfile | 0 .../shop_gw/Makefile | 0 .../shop_gw/README.md | 0 .../shop_gw/api/comment/v1/comment.pb.go | 0 .../api/comment/v1/comment.pb.validate.go | 0 .../shop_gw/api/comment/v1/comment.proto | 0 .../shop_gw/api/comment/v1/comment_grpc.pb.go | 0 .../shop_gw/api/inventory/v1/inventory.pb.go | 0 .../api/inventory/v1/inventory.pb.validate.go | 0 .../shop_gw/api/inventory/v1/inventory.proto | 0 .../api/inventory/v1/inventory_grpc.pb.go | 0 .../shop_gw/api/product/v1/product.pb.go | 0 .../api/product/v1/product.pb.validate.go | 0 .../shop_gw/api/product/v1/product.proto | 0 .../shop_gw/api/product/v1/product_grpc.pb.go | 0 .../shop_gw/api/shop_gw/v1/shop_gw.pb.go | 0 .../api/shop_gw/v1/shop_gw.pb.validate.go | 0 .../shop_gw/api/shop_gw/v1/shop_gw.proto | 0 .../shop_gw/api/shop_gw/v1/shop_gw_grpc.pb.go | 0 .../api/shop_gw/v1/shop_gw_router.pb.go | 0 .../shop_gw/cmd/shop_gw/initial/initApp.go | 0 .../cmd/shop_gw/initial/registerClose.go | 0 .../cmd/shop_gw/initial/registerServer.go | 0 .../shop_gw/cmd/shop_gw/main.go | 0 .../shop_gw/configs/location.go | 0 .../shop_gw/configs/shop_gw.yml | 0 .../shop_gw/configs/shop_gw_cc.yml | 0 .../shop_gw/deployments/binary/README.md | 0 .../shop_gw/deployments/binary/deploy.sh | 0 .../shop_gw/deployments/binary/run.sh | 0 .../deployments/docker-compose/README.md | 0 .../docker-compose/docker-compose.yml | 0 .../shop_gw/deployments/kubernetes/README.md | 0 .../projectNameExample-namespace.yml | 0 .../kubernetes/shop_gw-configmap.yml | 0 .../kubernetes/shop_gw-deployment.yml | 0 .../deployments/kubernetes/shop_gw-svc.yml | 0 .../shop_gw/docs/apis.go | 0 .../shop_gw/docs/apis.swagger.json | 0 .../shop_gw/docs/gen.info | 0 .../{ => example-1-multi-repo}/shop_gw/go.mod | 0 .../{ => example-1-multi-repo}/shop_gw/go.sum | 0 .../shop_gw/internal/config/shop_gw.go | 0 .../shop_gw/internal/config/shop_gw_cc.go | 0 .../shop_gw/internal/config/shop_gw_test.go | 0 .../shop_gw/internal/ecode/shop_gw_rpc.go | 0 .../shop_gw/internal/ecode/systemCode_rpc.go | 0 .../shop_gw/internal/routers/routers.go | 0 
.../internal/routers/shop_gw_router.go | 0 .../shop_gw/internal/rpcclient/comment.go | 0 .../shop_gw/internal/rpcclient/inventory.go | 0 .../shop_gw/internal/rpcclient/product.go | 0 .../shop_gw/internal/server/http.go | 0 .../shop_gw/internal/server/http_option.go | 0 .../shop_gw/internal/server/http_test.go | 0 .../shop_gw/internal/service/shop_gw.go | 0 .../shop_gw/scripts/binary-package.sh | 0 .../shop_gw/scripts/build/Dockerfile | 0 .../shop_gw/scripts/build/Dockerfile_build | 0 .../shop_gw/scripts/build/Dockerfile_test | 0 .../shop_gw/scripts/build/README.md | 0 .../shop_gw/scripts/deploy-binary.sh | 0 .../shop_gw/scripts/deploy-docker.sh | 0 .../shop_gw/scripts/deploy-k8s.sh | 0 .../shop_gw/scripts/image-build-local.sh | 0 .../shop_gw/scripts/image-build.sh | 0 .../shop_gw/scripts/image-build2.sh | 0 .../shop_gw/scripts/image-push.sh | 0 .../shop_gw/scripts/image-rpc-test.sh | 0 .../shop_gw/scripts/patch.sh | 0 .../shop_gw/scripts/proto-doc.sh | 0 .../shop_gw/scripts/protoc.sh | 0 .../shop_gw/scripts/run-nohup.sh | 0 .../shop_gw/scripts/run.sh | 0 .../shop_gw/scripts/swag-docs.sh | 0 .../gogo/protobuf/gogoproto/gogo.proto | 0 .../third_party/google/api/BUILD.bazel | 0 .../shop_gw/third_party/google/api/README.md | 0 .../third_party/google/api/annotations.proto | 0 .../shop_gw/third_party/google/api/auth.proto | 0 .../third_party/google/api/backend.proto | 0 .../third_party/google/api/billing.proto | 0 .../third_party/google/api/client.proto | 0 .../google/api/config_change.proto | 0 .../third_party/google/api/consumer.proto | 0 .../third_party/google/api/context.proto | 0 .../third_party/google/api/control.proto | 0 .../third_party/google/api/distribution.proto | 0 .../google/api/documentation.proto | 0 .../third_party/google/api/endpoint.proto | 0 .../third_party/google/api/expr/BUILD.bazel | 0 .../third_party/google/api/expr/cel.yaml | 0 .../google/api/expr/v1alpha1/BUILD.bazel | 0 .../google/api/expr/v1alpha1/checked.proto | 0 .../expr/v1alpha1/conformance_service.proto | 0 .../google/api/expr/v1alpha1/eval.proto | 0 .../google/api/expr/v1alpha1/explain.proto | 0 .../google/api/expr/v1alpha1/syntax.proto | 0 .../google/api/expr/v1alpha1/value.proto | 0 .../google/api/expr/v1beta1/BUILD.bazel | 0 .../google/api/expr/v1beta1/decl.proto | 0 .../google/api/expr/v1beta1/eval.proto | 0 .../google/api/expr/v1beta1/expr.proto | 0 .../google/api/expr/v1beta1/source.proto | 0 .../google/api/expr/v1beta1/value.proto | 0 .../google/api/field_behavior.proto | 0 .../shop_gw/third_party/google/api/http.proto | 0 .../third_party/google/api/httpbody.proto | 0 .../third_party/google/api/label.proto | 0 .../third_party/google/api/launch_stage.proto | 0 .../shop_gw/third_party/google/api/log.proto | 0 .../third_party/google/api/logging.proto | 0 .../third_party/google/api/metric.proto | 0 .../google/api/monitored_resource.proto | 0 .../third_party/google/api/monitoring.proto | 0 .../third_party/google/api/quota.proto | 0 .../third_party/google/api/resource.proto | 0 .../third_party/google/api/service.proto | 0 .../third_party/google/api/serviceconfig.yaml | 0 .../google/api/servicecontrol/BUILD.bazel | 0 .../google/api/servicecontrol/README.md | 0 .../google/api/servicecontrol/v1/BUILD.bazel | 0 .../api/servicecontrol/v1/check_error.proto | 0 .../api/servicecontrol/v1/distribution.proto | 0 .../api/servicecontrol/v1/http_request.proto | 0 .../api/servicecontrol/v1/log_entry.proto | 0 .../api/servicecontrol/v1/metric_value.proto | 0 .../api/servicecontrol/v1/operation.proto | 0 
.../servicecontrol/v1/quota_controller.proto | 0 .../v1/service_controller.proto | 0 .../api/servicecontrol/v1/servicecontrol.yaml | 0 .../google/api/servicemanagement/BUILD.bazel | 0 .../google/api/servicemanagement/README.md | 0 .../api/servicemanagement/v1/BUILD.bazel | 0 .../api/servicemanagement/v1/resources.proto | 0 .../v1/servicemanagement_gapic.legacy.yaml | 0 .../v1/servicemanagement_gapic.yaml | 0 ...servicemanagement_grpc_service_config.json | 0 .../v1/servicemanagement_v1.yaml | 0 .../servicemanagement/v1/servicemanager.proto | 0 .../third_party/google/api/source_info.proto | 0 .../google/api/system_parameter.proto | 0 .../third_party/google/api/usage.proto | 0 .../google/protobuf/annotations.proto | 0 .../third_party/google/protobuf/any.proto | 0 .../third_party/google/protobuf/api.proto | 0 .../google/protobuf/compiler/plugin.proto | 0 .../google/protobuf/descriptor.proto | 0 .../google/protobuf/duration.proto | 0 .../third_party/google/protobuf/empty.proto | 0 .../google/protobuf/field_mask.proto | 0 .../google/protobuf/source_context.proto | 0 .../third_party/google/protobuf/struct.proto | 0 .../google/protobuf/timestamp.proto | 0 .../third_party/google/protobuf/type.proto | 0 .../google/protobuf/wrappers.proto | 0 .../options/annotations.proto | 0 .../options/openapiv2.proto | 0 .../shop_gw/third_party/tagger/tagger.proto | 0 .../third_party/validate/validate.proto | 0 .../api/comment/v1/comment.pb.go | 325 +++++ .../api/comment/v1/comment.pb.validate.go | 395 +++++ .../api/comment/v1/comment.proto | 28 + .../api/comment/v1/comment_grpc.pb.go | 111 ++ .../api/eshop_gw/v1/eshop_gw.pb.go | 291 ++++ .../api/eshop_gw/v1/eshop_gw.pb.validate.go | 344 +++++ .../api/eshop_gw/v1/eshop_gw.proto | 67 + .../api/eshop_gw/v1/eshop_gw_grpc.pb.go | 111 ++ .../api/eshop_gw/v1/eshop_gw_router.pb.go | 188 +++ .../api/inventory/v1/inventory.pb.go | 303 ++++ .../api/inventory/v1/inventory.pb.validate.go | 381 +++++ .../api/inventory/v1/inventory.proto | 26 + .../api/inventory/v1/inventory_grpc.pb.go | 111 ++ .../api/product/v1/product.pb.go | 321 ++++ .../api/product/v1/product.pb.validate.go | 385 +++++ .../api/product/v1/product.proto | 28 + .../api/product/v1/product_grpc.pb.go | 111 ++ .../example-2-mono-repo/comment}/.gitignore | 2 +- .../comment}/.golangci.yml | 10 +- .../example-2-mono-repo/comment}/Jenkinsfile | 0 .../example-2-mono-repo/comment/Makefile | 183 +++ .../example-2-mono-repo/comment/README.md | 9 + .../comment/cmd/comment}/initial/close.go | 8 +- .../cmd/comment}/initial/createService.go | 13 +- .../comment/cmd/comment/initial/initApp.go | 132 ++ .../comment/cmd/comment}/main.go | 8 +- .../comment/configs/comment.yml | 14 +- .../comment/configs/comment_cc.yml | 13 + .../comment}/configs/location.go | 0 .../comment/deployments/binary/README.md | 26 + .../comment/deployments/binary/deploy.sh | 33 + .../comment/deployments/binary/run.sh | 46 + .../deployments/docker-compose/README.md | 12 + .../docker-compose/docker-compose.yml | 21 + .../comment/deployments/kubernetes/README.md | 32 + .../kubernetes/comment-configmap.yml | 124 ++ .../kubernetes/comment-deployment.yml | 63 + .../deployments/kubernetes/comment-svc.yml | 17 + .../kubernetes/eshop-namespace.yml | 2 +- .../example-2-mono-repo/comment/docs/gen.info | 1 + .../comment/internal/config/comment.go | 2 +- .../comment/internal/config/comment_cc.go | 2 +- .../comment/internal/config/comment_test.go | 45 + .../comment/internal/ecode/comment_rpc.go | 19 + .../comment}/internal/ecode/systemCode_rpc.go | 7 + 
.../comment}/internal/server/grpc.go | 31 +- .../comment}/internal/server/grpc_option.go | 0 .../comment}/internal/server/grpc_test.go | 12 +- .../comment/internal/service/comment.go | 74 + .../internal/service/comment_client_test.go | 112 ++ .../comment}/internal/service/service.go | 0 .../comment}/internal/service/service_test.go | 85 +- .../comment/scripts/binary-package.sh | 24 + .../comment/scripts/build/Dockerfile | 26 + .../comment/scripts/build/Dockerfile_build | 47 + .../comment/scripts/build/Dockerfile_test | 16 + .../comment}/scripts/build/README.md | 0 .../comment/scripts/deploy-binary.sh | 35 + .../comment/scripts/deploy-docker.sh | 32 + .../comment/scripts/deploy-k8s.sh | 31 + .../comment/scripts/image-build-local.sh | 51 + .../comment/scripts/image-build.sh | 71 + .../comment/scripts/image-build2.sh | 38 + .../comment/scripts/image-push.sh | 53 + .../comment/scripts/image-rpc-test.sh | 33 + .../comment/scripts/patch-mono.sh | 36 + .../comment/scripts/patch.sh | 81 ++ .../comment/scripts/proto-doc.sh | 46 + .../comment/scripts/protoc.sh | 211 +++ .../comment}/scripts/run-nohup.sh | 4 +- .../comment/scripts/run.sh | 29 + .../example-2-mono-repo/eshop_gw/.gitignore | 26 + .../eshop_gw/.golangci.yml | 342 +++++ .../example-2-mono-repo/eshop_gw/Jenkinsfile | 200 +++ .../example-2-mono-repo/eshop_gw/Makefile | 183 +++ .../example-2-mono-repo/eshop_gw/README.md | 9 + .../eshop_gw/cmd/eshop_gw/initial/close.go | 38 + .../cmd/eshop_gw/initial/createService.go | 98 ++ .../eshop_gw/cmd/eshop_gw/initial/initApp.go | 128 ++ .../eshop_gw/cmd/eshop_gw/main.go | 17 + .../eshop_gw/configs/eshop_gw.yml | 101 ++ .../eshop_gw/configs/eshop_gw_cc.yml | 13 + .../eshop_gw}/configs/location.go | 0 .../eshop_gw/deployments/binary/README.md | 26 + .../eshop_gw/deployments/binary/deploy.sh | 33 + .../eshop_gw/deployments/binary/run.sh | 46 + .../deployments/docker-compose/README.md | 12 + .../docker-compose/docker-compose.yml | 19 + .../eshop_gw/deployments/kubernetes/README.md | 32 + .../kubernetes/eshop-namespace.yml | 4 + .../kubernetes/eshop_gw-configmap.yml | 114 ++ .../kubernetes/eshop_gw-deployment.yml | 63 + .../deployments/kubernetes/eshop_gw-svc.yml | 14 + .../eshop_gw}/docs/apis.go | 0 .../eshop_gw/docs/apis.swagger.json | 168 +++ .../eshop_gw/docs/gen.info | 1 + .../eshop_gw/internal/config/eshop_gw.go | 173 +++ .../eshop_gw/internal/config/eshop_gw_cc.go | 28 + .../eshop_gw/internal/config/eshop_gw_test.go | 45 + .../eshop_gw/internal/ecode/eshop_gw_rpc.go | 19 + .../eshop_gw/internal/ecode/systemCode_rpc.go | 46 + .../internal/routers/eshop_gw_router.go | 70 + .../eshop_gw/internal/routers/routers.go | 167 +++ .../eshop_gw/internal/rpcclient/comment.go | 159 ++ .../eshop_gw/internal/rpcclient/inventory.go | 159 ++ .../eshop_gw/internal/rpcclient/product.go | 159 ++ .../eshop_gw/internal/server/http.go | 88 ++ .../eshop_gw}/internal/server/http_option.go | 0 .../eshop_gw/internal/server/http_test.go | 115 ++ .../eshop_gw/internal/service/eshop_gw.go | 75 + .../eshop_gw/scripts/binary-package.sh | 24 + .../eshop_gw/scripts/build/Dockerfile | 25 + .../eshop_gw/scripts/build/Dockerfile_build | 41 + .../eshop_gw/scripts/build/Dockerfile_test | 16 + .../eshop_gw/scripts/build/README.md | 4 + .../eshop_gw/scripts/deploy-binary.sh | 35 + .../eshop_gw/scripts/deploy-docker.sh | 32 + .../eshop_gw/scripts/deploy-k8s.sh | 31 + .../eshop_gw/scripts/image-build-local.sh | 38 + .../eshop_gw/scripts/image-build.sh | 58 + .../eshop_gw/scripts/image-build2.sh | 38 + .../eshop_gw/scripts/image-push.sh | 53 
+ .../eshop_gw/scripts/image-rpc-test.sh | 33 + .../eshop_gw/scripts/patch-mono.sh | 36 + .../eshop_gw/scripts/patch.sh | 81 ++ .../eshop_gw/scripts/proto-doc.sh | 46 + .../eshop_gw/scripts/protoc.sh | 221 +++ .../eshop_gw/scripts/run-nohup.sh | 61 + .../eshop_gw/scripts/run.sh | 29 + .../eshop_gw/scripts/swag-docs.sh | 44 + .../example-2-mono-repo}/go.mod | 37 +- .../example-2-mono-repo}/go.sum | 94 +- .../example-2-mono-repo/inventory/.gitignore | 26 + .../inventory/.golangci.yml | 342 +++++ .../example-2-mono-repo/inventory/Jenkinsfile | 200 +++ .../example-2-mono-repo/inventory/Makefile | 183 +++ .../example-2-mono-repo/inventory/README.md | 9 + .../inventory/cmd/inventory/initial/close.go | 44 + .../cmd/inventory/initial/createService.go | 97 ++ .../cmd/inventory/initial/initApp.go | 132 ++ .../inventory/cmd/inventory/main.go | 17 + .../inventory/configs/inventory.yml | 117 ++ .../inventory/configs/inventory_cc.yml | 13 + .../inventory/configs/location.go | 23 + .../inventory/deployments/binary/README.md | 26 + .../inventory/deployments/binary/deploy.sh | 33 + .../inventory/deployments/binary/run.sh | 46 + .../deployments/docker-compose/README.md | 12 + .../docker-compose/docker-compose.yml | 21 + .../deployments/kubernetes/README.md | 32 + .../kubernetes/eshop-namespace.yml | 4 + .../kubernetes/inventory-configmap.yml | 124 ++ .../kubernetes/inventory-deployment.yml | 63 + .../deployments/kubernetes/inventory-svc.yml | 17 + .../inventory/docs/gen.info | 1 + .../inventory/internal/config/inventory.go | 173 +++ .../inventory/internal/config/inventory_cc.go | 28 + .../internal/config/inventory_test.go | 45 + .../inventory/internal/ecode/inventory_rpc.go | 19 + .../internal/ecode/systemCode_rpc.go | 46 + .../inventory/internal/server/grpc.go | 334 +++++ .../inventory/internal/server/grpc_option.go | 34 + .../inventory/internal/server/grpc_test.go | 130 ++ .../inventory/internal/service/inventory.go | 60 + .../internal/service/inventory_client_test.go | 112 ++ .../inventory}/internal/service/service.go | 0 .../internal/service/service_test.go | 173 +++ .../inventory/scripts/binary-package.sh | 24 + .../inventory/scripts/build/Dockerfile | 26 + .../inventory/scripts/build/Dockerfile_build | 47 + .../inventory/scripts/build/Dockerfile_test | 16 + .../inventory/scripts/build/README.md | 4 + .../inventory/scripts/deploy-binary.sh | 35 + .../inventory/scripts/deploy-docker.sh | 32 + .../inventory/scripts/deploy-k8s.sh | 31 + .../inventory/scripts/image-build-local.sh | 51 + .../inventory/scripts/image-build.sh | 71 + .../inventory/scripts/image-build2.sh | 38 + .../inventory/scripts/image-push.sh | 53 + .../inventory/scripts/image-rpc-test.sh | 33 + .../inventory/scripts/patch-mono.sh | 36 + .../inventory/scripts/patch.sh | 81 ++ .../inventory/scripts/proto-doc.sh | 46 + .../inventory/scripts/protoc.sh | 211 +++ .../inventory/scripts/run-nohup.sh | 61 + .../inventory/scripts/run.sh | 29 + .../example-2-mono-repo/product.proto | 28 + .../example-2-mono-repo/product/.gitignore | 26 + .../example-2-mono-repo/product/.golangci.yml | 342 +++++ .../example-2-mono-repo/product/Jenkinsfile | 200 +++ .../example-2-mono-repo/product/Makefile | 183 +++ .../example-2-mono-repo/product/README.md | 9 + .../product/cmd/product/initial/close.go | 44 + .../cmd/product/initial/createService.go | 97 ++ .../product/cmd/product/initial/initApp.go | 132 ++ .../product/cmd/product/main.go | 17 + .../product/configs/location.go | 23 + .../product/configs/product.yml | 117 ++ .../product/configs/product_cc.yml | 13 + 
.../product/deployments/binary/README.md | 26 + .../product/deployments/binary/deploy.sh | 33 + .../product/deployments/binary/run.sh | 46 + .../deployments/docker-compose/README.md | 12 + .../docker-compose/docker-compose.yml | 21 + .../product/deployments/kubernetes/README.md | 32 + .../kubernetes/eshop-namespace.yml | 4 + .../kubernetes/product-configmap.yml | 124 ++ .../kubernetes/product-deployment.yml | 63 + .../deployments/kubernetes/product-svc.yml | 17 + .../example-2-mono-repo/product/docs/gen.info | 1 + .../product/internal/config/product.go | 173 +++ .../product/internal/config/product_cc.go | 28 + .../product/internal/config/product_test.go | 45 + .../product/internal/ecode/product_rpc.go | 19 + .../product/internal/ecode/systemCode_rpc.go | 46 + .../product/internal/server/grpc.go | 334 +++++ .../product/internal/server/grpc_option.go | 34 + .../product/internal/server/grpc_test.go | 130 ++ .../product/internal/service/product.go | 62 + .../internal/service/product_client_test.go | 112 ++ .../product/internal/service/service.go | 22 + .../product/internal/service/service_test.go | 173 +++ .../product/scripts/binary-package.sh | 24 + .../product/scripts/build/Dockerfile | 26 + .../product/scripts/build/Dockerfile_build | 47 + .../product/scripts/build/Dockerfile_test | 16 + .../product/scripts/build/README.md | 4 + .../product/scripts/deploy-binary.sh | 35 + .../product/scripts/deploy-docker.sh | 32 + .../product/scripts/deploy-k8s.sh | 31 + .../product/scripts/image-build-local.sh | 51 + .../product/scripts/image-build.sh | 71 + .../product/scripts/image-build2.sh | 38 + .../product/scripts/image-push.sh | 53 + .../product/scripts/image-rpc-test.sh | 33 + .../product/scripts/patch-mono.sh | 36 + .../product/scripts/patch.sh | 81 ++ .../product/scripts/proto-doc.sh | 46 + .../product/scripts/protoc.sh | 211 +++ .../product/scripts/run-nohup.sh | 61 + .../product/scripts/run.sh | 29 + .../gogo/protobuf/gogoproto/gogo.proto | 0 .../third_party/google/api/BUILD.bazel | 0 .../third_party/google/api/README.md | 0 .../third_party/google/api/annotations.proto | 0 .../third_party/google/api/auth.proto | 0 .../third_party/google/api/backend.proto | 0 .../third_party/google/api/billing.proto | 0 .../third_party/google/api/client.proto | 0 .../google/api/config_change.proto | 0 .../third_party/google/api/consumer.proto | 0 .../third_party/google/api/context.proto | 0 .../third_party/google/api/control.proto | 0 .../third_party/google/api/distribution.proto | 0 .../google/api/documentation.proto | 0 .../third_party/google/api/endpoint.proto | 0 .../third_party/google/api/expr/BUILD.bazel | 0 .../third_party/google/api/expr/cel.yaml | 0 .../google/api/expr/v1alpha1/BUILD.bazel | 0 .../google/api/expr/v1alpha1/checked.proto | 0 .../expr/v1alpha1/conformance_service.proto | 0 .../google/api/expr/v1alpha1/eval.proto | 0 .../google/api/expr/v1alpha1/explain.proto | 0 .../google/api/expr/v1alpha1/syntax.proto | 0 .../google/api/expr/v1alpha1/value.proto | 0 .../google/api/expr/v1beta1/BUILD.bazel | 0 .../google/api/expr/v1beta1/decl.proto | 0 .../google/api/expr/v1beta1/eval.proto | 0 .../google/api/expr/v1beta1/expr.proto | 0 .../google/api/expr/v1beta1/source.proto | 0 .../google/api/expr/v1beta1/value.proto | 0 .../google/api/field_behavior.proto | 0 .../third_party/google/api/http.proto | 0 .../third_party/google/api/httpbody.proto | 0 .../third_party/google/api/label.proto | 0 .../third_party/google/api/launch_stage.proto | 0 .../third_party/google/api/log.proto | 0 
.../third_party/google/api/logging.proto | 0 .../third_party/google/api/metric.proto | 0 .../google/api/monitored_resource.proto | 0 .../third_party/google/api/monitoring.proto | 0 .../third_party/google/api/quota.proto | 0 .../third_party/google/api/resource.proto | 0 .../third_party/google/api/service.proto | 0 .../third_party/google/api/serviceconfig.yaml | 0 .../google/api/servicecontrol/BUILD.bazel | 0 .../google/api/servicecontrol/README.md | 0 .../google/api/servicecontrol/v1/BUILD.bazel | 0 .../api/servicecontrol/v1/check_error.proto | 0 .../api/servicecontrol/v1/distribution.proto | 0 .../api/servicecontrol/v1/http_request.proto | 0 .../api/servicecontrol/v1/log_entry.proto | 0 .../api/servicecontrol/v1/metric_value.proto | 0 .../api/servicecontrol/v1/operation.proto | 0 .../servicecontrol/v1/quota_controller.proto | 0 .../v1/service_controller.proto | 0 .../api/servicecontrol/v1/servicecontrol.yaml | 0 .../google/api/servicemanagement/BUILD.bazel | 0 .../google/api/servicemanagement/README.md | 0 .../api/servicemanagement/v1/BUILD.bazel | 0 .../api/servicemanagement/v1/resources.proto | 0 .../v1/servicemanagement_gapic.legacy.yaml | 0 .../v1/servicemanagement_gapic.yaml | 0 ...servicemanagement_grpc_service_config.json | 0 .../v1/servicemanagement_v1.yaml | 0 .../servicemanagement/v1/servicemanager.proto | 0 .../third_party/google/api/source_info.proto | 0 .../google/api/system_parameter.proto | 0 .../third_party/google/api/usage.proto | 0 .../google/protobuf/annotations.proto | 0 .../third_party/google/protobuf/any.proto | 0 .../third_party/google/protobuf/api.proto | 0 .../google/protobuf/compiler/plugin.proto | 0 .../google/protobuf/descriptor.proto | 0 .../google/protobuf/duration.proto | 0 .../third_party/google/protobuf/empty.proto | 0 .../google/protobuf/field_mask.proto | 0 .../google/protobuf/source_context.proto | 0 .../third_party/google/protobuf/struct.proto | 0 .../google/protobuf/timestamp.proto | 0 .../third_party/google/protobuf/type.proto | 0 .../google/protobuf/wrappers.proto | 0 .../options/annotations.proto | 0 .../options/openapiv2.proto | 0 .../third_party/tagger/tagger.proto | 0 .../third_party/validate/validate.proto | 0 README.md | 13 +- _10_micro-grpc-http-protobuf/go.mod | 4 +- _10_micro-grpc-http-protobuf/go.sum | 14 +- .../go.mod | 4 +- .../go.sum | 17 +- _12_sponge-dtm-flashSale/README.md | 6 +- _12_sponge-dtm-flashSale/grpc+http/README.md | 2 +- _12_sponge-dtm-flashSale/grpc+http/go.mod | 2 +- _12_sponge-dtm-flashSale/grpc+http/go.sum | 6 +- .../grpc+http/internal/service/flashSale.go | 4 +- _12_sponge-dtm-flashSale/http/README.md | 2 +- _12_sponge-dtm-flashSale/http/go.mod | 4 +- _12_sponge-dtm-flashSale/http/go.sum | 16 +- .../http/internal/handler/flashSale.go | 4 +- _13_sponge-dtm-cache/README.md | 6 + .../grpc+http}/.gitignore | 2 +- _13_sponge-dtm-cache/grpc+http/.golangci.yml | 342 +++++ _13_sponge-dtm-cache/grpc+http/Jenkinsfile | 200 +++ .../grpc+http}/Makefile | 70 +- _13_sponge-dtm-cache/grpc+http/README.md | 76 + .../grpc+http/api/stock/v1/atomic.pb.go | 379 +++++ .../api/stock/v1/atomic.pb.validate.go | 477 ++++++ .../grpc+http/api/stock/v1/atomic.proto | 82 ++ .../grpc+http/api/stock/v1/atomic_grpc.pb.go | 150 ++ .../api/stock/v1/atomic_router.pb.go | 221 +++ .../grpc+http/api/stock/v1/callback.pb.go | 325 +++++ .../api/stock/v1/callback.pb.validate.go | 453 ++++++ .../grpc+http/api/stock/v1/callback.proto | 34 + .../api/stock/v1/callback_grpc.pb.go | 150 ++ .../api/stock/v1/callback_router.pb.go | 204 +++ 
.../grpc+http/api/stock/v1/downgrade.pb.go | 556 +++++++ .../api/stock/v1/downgrade.pb.validate.go | 730 ++++++++++ .../grpc+http/api/stock/v1/downgrade.proto | 108 ++ .../api/stock/v1/downgrade_grpc.pb.go | 189 +++ .../api/stock/v1/downgrade_router.pb.go | 247 ++++ .../grpc+http/api/stock/v1/final.pb.go | 379 +++++ .../api/stock/v1/final.pb.validate.go | 477 ++++++ .../grpc+http/api/stock/v1/final.proto | 81 ++ .../grpc+http/api/stock/v1/final_grpc.pb.go | 150 ++ .../grpc+http/api/stock/v1/final_router.pb.go | 221 +++ .../grpc+http/api/stock/v1/stock.pb.go | 918 ++++++++++++ .../api/stock/v1/stock.pb.validate.go | 1286 +++++++++++++++++ .../grpc+http/api/stock/v1/stock.proto | 198 +++ .../grpc+http/api/stock/v1/stock_grpc.pb.go | 267 ++++ .../grpc+http/api/stock/v1/stock_router.pb.go | 320 ++++ .../grpc+http/api/stock/v1/strong.pb.go | 379 +++++ .../api/stock/v1/strong.pb.validate.go | 477 ++++++ .../grpc+http/api/stock/v1/strong.proto | 81 ++ .../grpc+http/api/stock/v1/strong_grpc.pb.go | 150 ++ .../api/stock/v1/strong_router.pb.go | 221 +++ .../grpc+http/api/types/types.pb.go | 327 +++++ .../grpc+http/api/types/types.pb.validate.go | 413 ++++++ .../grpc+http/api/types/types.proto | 23 + .../grpc+http/cmd/stock/initial/close.go | 48 + .../cmd/stock}/initial/createService.go | 4 +- .../grpc+http/cmd/stock/initial/initApp.go | 139 ++ .../grpc+http/cmd/stock}/main.go | 2 +- .../grpc+http/configs/location.go | 23 + .../grpc+http/configs/stock.yml | 128 ++ .../grpc+http/configs/stock_cc.yml | 2 +- .../grpc+http}/deployments/binary/README.md | 4 +- .../grpc+http}/deployments/binary/deploy.sh | 2 +- .../grpc+http}/deployments/binary/run.sh | 2 +- .../deployments/docker-compose/README.md | 2 +- .../docker-compose/docker-compose.yml | 8 +- .../deployments/kubernetes/README.md | 4 +- .../kubernetes/eshop-namespace.yml | 4 + .../kubernetes/stock-configmap.yml | 8 +- .../kubernetes/stock-deployment.yml | 22 +- .../deployments/kubernetes/stock-svc.yml | 10 +- _13_sponge-dtm-cache/grpc+http/docs/apis.go | 21 + .../grpc+http/docs/apis.swagger.json | 849 +++++++++++ _13_sponge-dtm-cache/grpc+http/docs/gen.info | 1 + .../grpc+http}/go.mod | 80 +- .../grpc+http}/go.sum | 229 ++- .../grpc+http/internal/cache/stock.go | 149 ++ .../grpc+http/internal/cache/stock_test.go | 144 ++ .../grpc+http/internal/config/stock.go | 173 +++ .../grpc+http/internal/config/stock_cc.go | 28 + .../grpc+http/internal/config/stock_test.go | 7 +- .../grpc+http/internal/dao/stock.go | 273 ++++ .../grpc+http/internal/dao/stock_test.go | 232 +++ .../grpc+http/internal/ecode/atomic_rpc.go | 20 + .../grpc+http/internal/ecode/callback_rpc.go | 20 + .../grpc+http/internal/ecode/downgrade_rpc.go | 21 + .../grpc+http/internal/ecode/final_rpc.go | 20 + .../grpc+http/internal/ecode/stock_rpc.go | 23 + .../grpc+http/internal/ecode/strong_rpc.go | 20 + .../internal/ecode/systemCode_http.go | 5 +- .../internal/ecode/systemCode_rpc.go | 4 + .../grpc+http/internal/handler/atomic.go | 35 + .../grpc+http/internal/handler/callback.go | 36 + .../grpc+http/internal/handler/downgrade.go | 42 + .../grpc+http/internal/handler/final.go | 35 + .../grpc+http/internal/handler/stock.go | 46 + .../grpc+http/internal/handler/strong.go | 35 + .../grpc+http/internal/model/init.go | 234 +++ .../grpc+http/internal/model/stock.go | 19 + .../internal/routers/atomic_router.go | 68 + .../internal/routers/callback_router.go | 68 + .../internal/routers/downgrade_router.go | 69 + .../internal/routers/final_router.go | 34 +- .../grpc+http}/internal/routers/routers.go | 4 
+- .../internal/routers/stock_router.go | 71 + .../internal/routers/strong_router.go | 68 + .../internal/rpcclient/dtmservice.go | 6 +- .../internal/rpcclient/endpointForDtm.go | 74 + .../grpc+http}/internal/server/grpc.go | 6 +- .../grpc+http/internal/server/grpc_option.go | 34 + .../grpc+http}/internal/server/http.go | 2 +- .../grpc+http/internal/server/http_option.go | 43 + .../grpc+http/internal/service/atomic.go | 116 ++ .../internal/service/atomic_client_test.go | 145 ++ .../grpc+http/internal/service/callback.go | 101 ++ .../internal/service/callback_client_test.go | 139 ++ .../grpc+http/internal/service/downgrade.go | 157 ++ .../internal/service/downgrade_client_test.go | 123 +- .../grpc+http/internal/service/final.go | 116 ++ .../internal/service/final_client_test.go | 145 ++ .../grpc+http/internal/service/service.go | 22 + .../internal/service/service_test.go | 65 +- .../grpc+http/internal/service/stock.go | 200 +++ .../internal/service/stock_client_test.go | 242 ++++ .../grpc+http/internal/service/strong.go | 116 ++ .../internal/service/strong_client_test.go | 145 ++ .../grpc+http/pkg/goredis/goredis.go | 75 + .../grpc+http}/scripts/binary-package.sh | 2 +- .../grpc+http}/scripts/build/Dockerfile | 10 +- .../grpc+http}/scripts/build/Dockerfile_build | 22 +- .../grpc+http}/scripts/build/Dockerfile_test | 10 +- .../grpc+http/scripts/build/README.md | 4 + .../grpc+http}/scripts/deploy-binary.sh | 2 +- .../grpc+http}/scripts/deploy-docker.sh | 4 +- .../grpc+http}/scripts/deploy-k8s.sh | 2 +- .../grpc+http}/scripts/image-build-local.sh | 4 +- .../grpc+http}/scripts/image-build.sh | 4 +- .../grpc+http}/scripts/image-build2.sh | 4 +- .../grpc+http}/scripts/image-push.sh | 2 +- .../grpc+http}/scripts/image-rpc-test.sh | 4 +- .../grpc+http}/scripts/patch.sh | 0 .../grpc+http}/scripts/proto-doc.sh | 0 .../grpc+http}/scripts/protoc.sh | 7 +- .../grpc+http}/scripts/run-nohup.sh | 4 +- .../grpc+http}/scripts/run.sh | 2 +- .../grpc+http}/scripts/swag-docs.sh | 6 +- .../gogo/protobuf/gogoproto/gogo.proto | 0 .../third_party/google/api/BUILD.bazel | 0 .../third_party/google/api/README.md | 0 .../third_party/google/api/annotations.proto | 0 .../third_party/google/api/auth.proto | 0 .../third_party/google/api/backend.proto | 0 .../third_party/google/api/billing.proto | 0 .../third_party/google/api/client.proto | 0 .../google/api/config_change.proto | 0 .../third_party/google/api/consumer.proto | 0 .../third_party/google/api/context.proto | 0 .../third_party/google/api/control.proto | 0 .../third_party/google/api/distribution.proto | 0 .../google/api/documentation.proto | 0 .../third_party/google/api/endpoint.proto | 0 .../third_party/google/api/expr/BUILD.bazel | 0 .../third_party/google/api/expr/cel.yaml | 0 .../google/api/expr/v1alpha1/BUILD.bazel | 0 .../google/api/expr/v1alpha1/checked.proto | 0 .../expr/v1alpha1/conformance_service.proto | 0 .../google/api/expr/v1alpha1/eval.proto | 0 .../google/api/expr/v1alpha1/explain.proto | 0 .../google/api/expr/v1alpha1/syntax.proto | 0 .../google/api/expr/v1alpha1/value.proto | 0 .../google/api/expr/v1beta1/BUILD.bazel | 0 .../google/api/expr/v1beta1/decl.proto | 0 .../google/api/expr/v1beta1/eval.proto | 0 .../google/api/expr/v1beta1/expr.proto | 0 .../google/api/expr/v1beta1/source.proto | 0 .../google/api/expr/v1beta1/value.proto | 0 .../google/api/field_behavior.proto | 0 .../third_party/google/api/http.proto | 0 .../third_party/google/api/httpbody.proto | 0 .../third_party/google/api/label.proto | 0 .../third_party/google/api/launch_stage.proto | 
0 .../third_party/google/api/log.proto | 0 .../third_party/google/api/logging.proto | 0 .../third_party/google/api/metric.proto | 0 .../google/api/monitored_resource.proto | 0 .../third_party/google/api/monitoring.proto | 0 .../third_party/google/api/quota.proto | 0 .../third_party/google/api/resource.proto | 0 .../third_party/google/api/service.proto | 0 .../third_party/google/api/serviceconfig.yaml | 0 .../google/api/servicecontrol/BUILD.bazel | 0 .../google/api/servicecontrol/README.md | 0 .../google/api/servicecontrol/v1/BUILD.bazel | 0 .../api/servicecontrol/v1/check_error.proto | 0 .../api/servicecontrol/v1/distribution.proto | 0 .../api/servicecontrol/v1/http_request.proto | 0 .../api/servicecontrol/v1/log_entry.proto | 0 .../api/servicecontrol/v1/metric_value.proto | 0 .../api/servicecontrol/v1/operation.proto | 0 .../servicecontrol/v1/quota_controller.proto | 0 .../v1/service_controller.proto | 0 .../api/servicecontrol/v1/servicecontrol.yaml | 0 .../google/api/servicemanagement/BUILD.bazel | 0 .../google/api/servicemanagement/README.md | 0 .../api/servicemanagement/v1/BUILD.bazel | 0 .../api/servicemanagement/v1/resources.proto | 0 .../v1/servicemanagement_gapic.legacy.yaml | 0 .../v1/servicemanagement_gapic.yaml | 0 ...servicemanagement_grpc_service_config.json | 0 .../v1/servicemanagement_v1.yaml | 0 .../servicemanagement/v1/servicemanager.proto | 0 .../third_party/google/api/source_info.proto | 0 .../google/api/system_parameter.proto | 0 .../third_party/google/api/usage.proto | 0 .../google/protobuf/annotations.proto | 0 .../third_party/google/protobuf/any.proto | 0 .../third_party/google/protobuf/api.proto | 0 .../google/protobuf/compiler/plugin.proto | 0 .../google/protobuf/descriptor.proto | 0 .../google/protobuf/duration.proto | 0 .../third_party/google/protobuf/empty.proto | 0 .../google/protobuf/field_mask.proto | 0 .../google/protobuf/source_context.proto | 0 .../third_party/google/protobuf/struct.proto | 0 .../google/protobuf/timestamp.proto | 0 .../third_party/google/protobuf/type.proto | 0 .../google/protobuf/wrappers.proto | 0 .../options/annotations.proto | 0 .../options/openapiv2.proto | 93 +- .../third_party/tagger/tagger.proto | 0 .../third_party/validate/validate.proto | 0 _13_sponge-dtm-cache/http/.gitignore | 26 + _13_sponge-dtm-cache/http/.golangci.yml | 342 +++++ _13_sponge-dtm-cache/http/Jenkinsfile | 200 +++ .../http}/Makefile | 86 +- _13_sponge-dtm-cache/http/README.md | 69 + .../http/api/stock/v1/atomic.pb.go | 379 +++++ .../http/api/stock/v1/atomic.pb.validate.go | 477 ++++++ .../http/api/stock/v1/atomic.proto | 82 ++ .../http/api/stock/v1/atomic_router.pb.go | 221 +++ .../http/api/stock/v1/callback.pb.go | 325 +++++ .../http/api/stock/v1/callback.pb.validate.go | 453 ++++++ .../http/api/stock/v1/callback.proto | 34 + .../http/api/stock/v1/callback_router.pb.go | 204 +++ .../http/api/stock/v1/downgrade.pb.go | 556 +++++++ .../api/stock/v1/downgrade.pb.validate.go | 730 ++++++++++ .../http/api/stock/v1/downgrade.proto | 108 ++ .../http/api/stock/v1/downgrade_router.pb.go | 247 ++++ .../http/api/stock/v1/final.pb.go | 379 +++++ .../http/api/stock/v1/final.pb.validate.go | 477 ++++++ .../http/api/stock/v1/final.proto | 81 ++ .../http/api/stock/v1/final_router.pb.go | 221 +++ .../http/api/stock/v1/stock.pb.go | 918 ++++++++++++ .../http/api/stock/v1/stock.pb.validate.go | 1286 +++++++++++++++++ .../http/api/stock/v1/stock.proto | 198 +++ .../http/api/stock/v1/stock_router.pb.go | 320 ++++ .../http/api/stock/v1/strong.pb.go | 379 +++++ 
.../http/api/stock/v1/strong.pb.validate.go | 477 ++++++ .../http/api/stock/v1/strong.proto | 81 ++ .../http/api/stock/v1/strong_router.pb.go | 221 +++ .../http/api/types/types.pb.go | 327 +++++ .../http/api/types/types.pb.validate.go | 413 ++++++ .../http/api/types/types.proto | 23 + .../http/cmd/stock}/initial/close.go | 4 +- .../http/cmd/stock/initial/createService.go | 98 ++ .../http/cmd/stock}/initial/initApp.go | 29 +- _13_sponge-dtm-cache/http/cmd/stock/main.go | 17 + _13_sponge-dtm-cache/http/configs/location.go | 23 + _13_sponge-dtm-cache/http/configs/stock.yml | 92 ++ .../http/configs/stock_cc.yml | 13 + .../http/deployments/binary/README.md | 26 + .../http/deployments/binary/deploy.sh | 33 + .../http/deployments/binary/run.sh | 46 + .../http/deployments/docker-compose/README.md | 12 + .../docker-compose/docker-compose.yml | 19 + .../http/deployments/kubernetes/README.md | 32 + .../kubernetes/eshop-namespace.yml | 4 + .../kubernetes/stock-configmap.yml | 90 ++ .../kubernetes/stock-deployment.yml | 63 + .../http/deployments/kubernetes/stock-svc.yml | 14 + _13_sponge-dtm-cache/http/docs/apis.go | 21 + .../http/docs/apis.swagger.json | 849 +++++++++++ _13_sponge-dtm-cache/http/docs/gen.info | 1 + _13_sponge-dtm-cache/http/go.mod | 178 +++ _13_sponge-dtm-cache/http/go.sum | 1156 +++++++++++++++ .../http/internal/cache/stock.go | 149 ++ .../http/internal/cache/stock_test.go | 144 ++ .../http/internal/config/stock.go | 76 +- .../http/internal/config/stock_cc.go | 28 + .../http/internal/config/stock_test.go | 15 +- .../http/internal/dao/stock.go | 272 ++++ .../http/internal/dao/stock_test.go | 232 +++ .../http/internal/ecode/atomic_http.go | 20 + .../http/internal/ecode/callback_http.go | 20 + .../http/internal/ecode/downgrade_http.go | 21 + .../http/internal/ecode/final_http.go | 20 + .../http/internal/ecode/stock_http.go | 21 + .../http/internal/ecode/strong_http.go | 20 + .../http/internal/ecode/systemCode_http.go | 39 + .../http/internal/handler/atomic.go | 103 ++ .../http/internal/handler/callback.go | 93 ++ .../http/internal/handler/downgrade.go | 143 ++ .../http/internal/handler/final.go | 103 ++ .../http/internal/handler/stock.go | 188 +++ .../http/internal/handler/stock_test.go | 293 ++++ .../http/internal/handler/strong.go | 103 ++ .../http/internal/model/init.go | 234 +++ .../http/internal/model/stock.go | 19 + .../http/internal/routers/atomic_router.go | 58 + .../http/internal/routers/callback_router.go | 58 + .../http/internal/routers/downgrade_router.go | 59 + .../http/internal/routers/final_router.go | 58 + .../http/internal/routers/routers.go | 167 +++ .../http/internal/routers/stock_router.go | 61 + .../http/internal/routers/strong_router.go | 58 + .../http/internal/server/http.go | 88 ++ .../http/internal/server/http_option.go | 43 + .../http/internal/server/http_test.go | 115 ++ .../http/scripts/binary-package.sh | 24 + .../http/scripts/build/Dockerfile | 25 + .../http/scripts/build/Dockerfile_build | 41 + .../http/scripts/build/Dockerfile_test | 16 + .../http/scripts/build/README.md | 4 + .../http/scripts/deploy-binary.sh | 35 + .../http/scripts/deploy-docker.sh | 32 + .../http/scripts/deploy-k8s.sh | 31 + .../http/scripts/image-build-local.sh | 38 + .../http/scripts/image-build.sh | 58 + .../http/scripts/image-build2.sh | 38 + .../http/scripts/image-push.sh | 53 + .../http/scripts/image-rpc-test.sh | 33 + _13_sponge-dtm-cache/http/scripts/patch.sh | 75 + .../http/scripts/proto-doc.sh | 43 + .../http}/scripts/protoc.sh | 44 +- .../http/scripts/run-nohup.sh | 61 + 
.../http}/scripts/run.sh | 2 +- .../http/scripts/swag-docs.sh | 36 + _13_sponge-dtm-cache/http/test/stock.sql | 18 + .../gogo/protobuf/gogoproto/gogo.proto | 144 ++ .../http/third_party/google/api/BUILD.bazel | 671 +++++++++ .../http/third_party/google/api/README.md | 5 + .../third_party/google/api/annotations.proto | 31 + .../http/third_party/google/api/auth.proto | 228 +++ .../http/third_party/google/api/backend.proto | 182 +++ .../http/third_party/google/api/billing.proto | 77 + .../http/third_party/google/api/client.proto | 99 ++ .../google/api/config_change.proto | 84 ++ .../third_party/google/api/consumer.proto | 82 ++ .../http/third_party/google/api/context.proto | 89 ++ .../http/third_party/google/api/control.proto | 32 + .../third_party/google/api/distribution.proto | 211 +++ .../google/api/documentation.proto | 162 +++ .../third_party/google/api/endpoint.proto | 66 + .../third_party/google/api/expr/BUILD.bazel | 1 + .../http/third_party/google/api/expr/cel.yaml | 61 + .../google/api/expr/v1alpha1/BUILD.bazel | 314 ++++ .../google/api/expr/v1alpha1/checked.proto | 330 +++++ .../expr/v1alpha1/conformance_service.proto | 164 +++ .../google/api/expr/v1alpha1/eval.proto | 118 ++ .../google/api/expr/v1alpha1/explain.proto | 53 + .../google/api/expr/v1alpha1/syntax.proto | 327 +++++ .../google/api/expr/v1alpha1/value.proto | 115 ++ .../google/api/expr/v1beta1/BUILD.bazel | 91 ++ .../google/api/expr/v1beta1/decl.proto | 84 ++ .../google/api/expr/v1beta1/eval.proto | 125 ++ .../google/api/expr/v1beta1/expr.proto | 265 ++++ .../google/api/expr/v1beta1/source.proto | 62 + .../google/api/expr/v1beta1/value.proto | 114 ++ .../google/api/field_behavior.proto | 78 + .../http/third_party/google/api/http.proto | 375 +++++ .../third_party/google/api/httpbody.proto | 77 + .../http/third_party/google/api/label.proto | 48 + .../third_party/google/api/launch_stage.proto | 72 + .../http/third_party/google/api/log.proto | 54 + .../http/third_party/google/api/logging.proto | 80 + .../http/third_party/google/api/metric.proto | 264 ++++ .../google/api/monitored_resource.proto | 118 ++ .../third_party/google/api/monitoring.proto | 105 ++ .../http/third_party/google/api/quota.proto | 183 +++ .../third_party/google/api/resource.proto | 299 ++++ .../http/third_party/google/api/service.proto | 173 +++ .../third_party/google/api/serviceconfig.yaml | 24 + .../google/api/servicecontrol/BUILD.bazel | 0 .../google/api/servicecontrol/README.md | 126 ++ .../google/api/servicecontrol/v1/BUILD.bazel | 45 + .../api/servicecontrol/v1/check_error.proto | 124 ++ .../api/servicecontrol/v1/distribution.proto | 161 +++ .../api/servicecontrol/v1/http_request.proto | 93 ++ .../api/servicecontrol/v1/log_entry.proto | 126 ++ .../api/servicecontrol/v1/metric_value.proto | 79 + .../api/servicecontrol/v1/operation.proto | 121 ++ .../servicecontrol/v1/quota_controller.proto | 239 +++ .../v1/service_controller.proto | 249 ++++ .../api/servicecontrol/v1/servicecontrol.yaml | 178 +++ .../google/api/servicemanagement/BUILD.bazel | 1 + .../google/api/servicemanagement/README.md | 102 ++ .../api/servicemanagement/v1/BUILD.bazel | 386 +++++ .../api/servicemanagement/v1/resources.proto | 304 ++++ .../v1/servicemanagement_gapic.legacy.yaml | 300 ++++ .../v1/servicemanagement_gapic.yaml | 18 + ...servicemanagement_grpc_service_config.json | 12 + .../v1/servicemanagement_v1.yaml | 275 ++++ .../servicemanagement/v1/servicemanager.proto | 560 +++++++ .../third_party/google/api/source_info.proto | 31 + .../google/api/system_parameter.proto | 95 ++ 
.../http/third_party/google/api/usage.proto | 89 ++ .../google/protobuf/annotations.proto | 31 + .../third_party/google/protobuf/any.proto | 158 ++ .../third_party/google/protobuf/api.proto | 208 +++ .../google/protobuf/compiler/plugin.proto | 183 +++ .../google/protobuf/descriptor.proto | 909 ++++++++++++ .../google/protobuf/duration.proto | 116 ++ .../third_party/google/protobuf/empty.proto | 52 + .../google/protobuf/field_mask.proto | 245 ++++ .../google/protobuf/source_context.proto | 48 + .../third_party/google/protobuf/struct.proto | 95 ++ .../google/protobuf/timestamp.proto | 147 ++ .../third_party/google/protobuf/type.proto | 187 +++ .../google/protobuf/wrappers.proto | 123 ++ .../options/annotations.proto | 44 + .../options/openapiv2.proto | 720 +++++++++ .../http/third_party/tagger/tagger.proto | 18 + .../http/third_party/validate/validate.proto | 862 +++++++++++ .../api/user/v1/user.pb.go | 670 --------- .../api/user/v1/user.pb.validate.go | 1033 ------------- .../api/user/v1/user.proto | 141 -- .../api/user/v1/user_grpc.pb.go | 228 --- .../api/user/v1/user_router.pb.go | 260 ---- .../docs/apis.swagger.json | 281 ---- a_micro-grpc-http-protobuf/docs/gen.info | 1 - .../internal/ecode/user_rpc.go | 21 - .../internal/handler/user.go | 43 - .../internal/service/user.go | 102 -- assets/cache-grpc-http-pb-test.png | Bin 0 -> 71307 bytes assets/cache-http-pb-swagger.png | Bin 0 -> 227244 bytes b_sponge-dtm-msg/LICENSE | 21 - b_sponge-dtm-msg/README.md | 99 -- .../api/transfer/v1/transfer.pb.go | 493 ------- .../api/transfer/v1/transfer.pb.validate.go | 712 --------- .../api/transfer/v1/transfer.proto | 42 - .../api/transfer/v1/transfer_grpc.pb.go | 183 --- .../cmd/transfer/initial/initApp.go | 87 -- b_sponge-dtm-msg/configs/transfer.yml | 68 - b_sponge-dtm-msg/docs/gen.info | 1 - .../internal/ecode/transfer_rpc.go | 20 - .../internal/rpcclient/transfer.go | 36 - .../internal/server/grpc_option.go | 54 - b_sponge-dtm-msg/internal/service/transfer.go | 93 -- .../internal/service/transfer_client_test.go | 166 --- b_sponge-dtm-msg/readme-cn.md | 95 -- 1373 files changed, 64921 insertions(+), 5537 deletions(-) rename 6_micro-cluster/{ => example-1-multi-repo}/comment/.gitignore (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/comment/.golangci.yml (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/comment/Jenkinsfile (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/comment/Makefile (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/comment/README.md (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/comment/api/comment/v1/comment.pb.go (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/comment/api/comment/v1/comment.pb.validate.go (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/comment/api/comment/v1/comment.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/comment/api/comment/v1/comment_grpc.pb.go (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/comment/cmd/comment/initial/initApp.go (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/comment/cmd/comment/initial/registerClose.go (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/comment/cmd/comment/initial/registerServer.go (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/comment/cmd/comment/main.go (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/comment/configs/comment.yml (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/comment/configs/comment_cc.yml (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/comment/configs/location.go 
(100%) rename 6_micro-cluster/{ => example-1-multi-repo}/comment/deployments/binary/README.md (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/comment/deployments/binary/deploy.sh (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/comment/deployments/binary/run.sh (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/comment/deployments/docker-compose/README.md (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/comment/deployments/docker-compose/docker-compose.yml (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/comment/deployments/kubernetes/README.md (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/comment/deployments/kubernetes/comment-configmap.yml (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/comment/deployments/kubernetes/comment-deployment.yml (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/comment/deployments/kubernetes/comment-svc.yml (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/comment/deployments/kubernetes/projectNameExample-namespace.yml (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/comment/docs/gen.info (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/comment/go.mod (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/comment/go.sum (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/comment/internal/config/comment.go (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/comment/internal/config/comment_cc.go (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/comment/internal/config/comment_test.go (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/comment/internal/ecode/comment_rpc.go (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/comment/internal/ecode/systemCode_rpc.go (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/comment/internal/server/grpc.go (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/comment/internal/server/grpc_option.go (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/comment/internal/server/grpc_test.go (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/comment/internal/service/comment.go (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/comment/internal/service/comment_client_test.go (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/comment/internal/service/service.go (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/comment/internal/service/service_test.go (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/comment/scripts/binary-package.sh (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/comment/scripts/build/Dockerfile (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/comment/scripts/build/Dockerfile_build (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/comment/scripts/build/Dockerfile_test (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/comment/scripts/build/README.md (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/comment/scripts/deploy-binary.sh (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/comment/scripts/deploy-docker.sh (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/comment/scripts/deploy-k8s.sh (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/comment/scripts/image-build-local.sh (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/comment/scripts/image-build.sh (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/comment/scripts/image-build2.sh (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/comment/scripts/image-push.sh (100%) rename 6_micro-cluster/{ => 
example-1-multi-repo}/comment/scripts/image-rpc-test.sh (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/comment/scripts/patch.sh (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/comment/scripts/proto-doc.sh (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/comment/scripts/protoc.sh (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/comment/scripts/run-nohup.sh (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/comment/scripts/run.sh (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/comment/scripts/swag-docs.sh (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/comment/third_party/gogo/protobuf/gogoproto/gogo.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/comment/third_party/google/api/BUILD.bazel (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/comment/third_party/google/api/README.md (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/comment/third_party/google/api/annotations.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/comment/third_party/google/api/auth.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/comment/third_party/google/api/backend.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/comment/third_party/google/api/billing.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/comment/third_party/google/api/client.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/comment/third_party/google/api/config_change.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/comment/third_party/google/api/consumer.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/comment/third_party/google/api/context.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/comment/third_party/google/api/control.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/comment/third_party/google/api/distribution.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/comment/third_party/google/api/documentation.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/comment/third_party/google/api/endpoint.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/comment/third_party/google/api/expr/BUILD.bazel (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/comment/third_party/google/api/expr/cel.yaml (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/comment/third_party/google/api/expr/v1alpha1/BUILD.bazel (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/comment/third_party/google/api/expr/v1alpha1/checked.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/comment/third_party/google/api/expr/v1alpha1/conformance_service.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/comment/third_party/google/api/expr/v1alpha1/eval.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/comment/third_party/google/api/expr/v1alpha1/explain.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/comment/third_party/google/api/expr/v1alpha1/syntax.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/comment/third_party/google/api/expr/v1alpha1/value.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/comment/third_party/google/api/expr/v1beta1/BUILD.bazel (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/comment/third_party/google/api/expr/v1beta1/decl.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/comment/third_party/google/api/expr/v1beta1/eval.proto (100%) rename 6_micro-cluster/{ => 
example-1-multi-repo}/comment/third_party/google/api/expr/v1beta1/expr.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/comment/third_party/google/api/expr/v1beta1/source.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/comment/third_party/google/api/expr/v1beta1/value.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/comment/third_party/google/api/field_behavior.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/comment/third_party/google/api/http.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/comment/third_party/google/api/httpbody.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/comment/third_party/google/api/label.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/comment/third_party/google/api/launch_stage.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/comment/third_party/google/api/log.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/comment/third_party/google/api/logging.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/comment/third_party/google/api/metric.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/comment/third_party/google/api/monitored_resource.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/comment/third_party/google/api/monitoring.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/comment/third_party/google/api/quota.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/comment/third_party/google/api/resource.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/comment/third_party/google/api/service.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/comment/third_party/google/api/serviceconfig.yaml (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/comment/third_party/google/api/servicecontrol/BUILD.bazel (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/comment/third_party/google/api/servicecontrol/README.md (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/comment/third_party/google/api/servicecontrol/v1/BUILD.bazel (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/comment/third_party/google/api/servicecontrol/v1/check_error.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/comment/third_party/google/api/servicecontrol/v1/distribution.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/comment/third_party/google/api/servicecontrol/v1/http_request.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/comment/third_party/google/api/servicecontrol/v1/log_entry.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/comment/third_party/google/api/servicecontrol/v1/metric_value.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/comment/third_party/google/api/servicecontrol/v1/operation.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/comment/third_party/google/api/servicecontrol/v1/quota_controller.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/comment/third_party/google/api/servicecontrol/v1/service_controller.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/comment/third_party/google/api/servicecontrol/v1/servicecontrol.yaml (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/comment/third_party/google/api/servicemanagement/BUILD.bazel (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/comment/third_party/google/api/servicemanagement/README.md (100%) rename 6_micro-cluster/{ => 
example-1-multi-repo}/comment/third_party/google/api/servicemanagement/v1/BUILD.bazel (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/comment/third_party/google/api/servicemanagement/v1/resources.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/comment/third_party/google/api/servicemanagement/v1/servicemanagement_gapic.legacy.yaml (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/comment/third_party/google/api/servicemanagement/v1/servicemanagement_gapic.yaml (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/comment/third_party/google/api/servicemanagement/v1/servicemanagement_grpc_service_config.json (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/comment/third_party/google/api/servicemanagement/v1/servicemanagement_v1.yaml (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/comment/third_party/google/api/servicemanagement/v1/servicemanager.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/comment/third_party/google/api/source_info.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/comment/third_party/google/api/system_parameter.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/comment/third_party/google/api/usage.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/comment/third_party/google/protobuf/annotations.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/comment/third_party/google/protobuf/any.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/comment/third_party/google/protobuf/api.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/comment/third_party/google/protobuf/compiler/plugin.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/comment/third_party/google/protobuf/descriptor.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/comment/third_party/google/protobuf/duration.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/comment/third_party/google/protobuf/empty.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/comment/third_party/google/protobuf/field_mask.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/comment/third_party/google/protobuf/source_context.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/comment/third_party/google/protobuf/struct.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/comment/third_party/google/protobuf/timestamp.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/comment/third_party/google/protobuf/type.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/comment/third_party/google/protobuf/wrappers.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/comment/third_party/protoc-gen-openapiv2/options/annotations.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/comment/third_party/protoc-gen-openapiv2/options/openapiv2.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/comment/third_party/tagger/tagger.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/comment/third_party/validate/validate.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/inventory/.gitignore (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/inventory/.golangci.yml (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/inventory/Jenkinsfile (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/inventory/Makefile (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/inventory/README.md (100%) rename 6_micro-cluster/{ => 
example-1-multi-repo}/inventory/api/inventory/v1/inventory.pb.go (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/inventory/api/inventory/v1/inventory.pb.validate.go (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/inventory/api/inventory/v1/inventory.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/inventory/api/inventory/v1/inventory_grpc.pb.go (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/inventory/cmd/inventory/initial/initApp.go (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/inventory/cmd/inventory/initial/registerClose.go (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/inventory/cmd/inventory/initial/registerServer.go (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/inventory/cmd/inventory/main.go (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/inventory/configs/inventory.yml (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/inventory/configs/inventory_cc.yml (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/inventory/configs/location.go (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/inventory/deployments/binary/README.md (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/inventory/deployments/binary/deploy.sh (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/inventory/deployments/binary/run.sh (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/inventory/deployments/docker-compose/README.md (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/inventory/deployments/docker-compose/docker-compose.yml (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/inventory/deployments/kubernetes/README.md (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/inventory/deployments/kubernetes/inventory-configmap.yml (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/inventory/deployments/kubernetes/inventory-deployment.yml (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/inventory/deployments/kubernetes/inventory-svc.yml (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/inventory/deployments/kubernetes/projectNameExample-namespace.yml (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/inventory/docs/gen.info (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/inventory/go.mod (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/inventory/go.sum (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/inventory/internal/config/inventory.go (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/inventory/internal/config/inventory_cc.go (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/inventory/internal/config/inventory_test.go (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/inventory/internal/ecode/inventory_rpc.go (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/inventory/internal/ecode/systemCode_rpc.go (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/inventory/internal/server/grpc.go (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/inventory/internal/server/grpc_option.go (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/inventory/internal/server/grpc_test.go (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/inventory/internal/service/inventory.go (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/inventory/internal/service/inventory_client_test.go (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/inventory/internal/service/service.go (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/inventory/internal/service/service_test.go (100%) rename 6_micro-cluster/{ 
=> example-1-multi-repo}/inventory/scripts/binary-package.sh (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/inventory/scripts/build/Dockerfile (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/inventory/scripts/build/Dockerfile_build (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/inventory/scripts/build/Dockerfile_test (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/inventory/scripts/build/README.md (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/inventory/scripts/deploy-binary.sh (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/inventory/scripts/deploy-docker.sh (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/inventory/scripts/deploy-k8s.sh (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/inventory/scripts/image-build-local.sh (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/inventory/scripts/image-build.sh (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/inventory/scripts/image-build2.sh (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/inventory/scripts/image-push.sh (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/inventory/scripts/image-rpc-test.sh (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/inventory/scripts/patch.sh (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/inventory/scripts/proto-doc.sh (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/inventory/scripts/protoc.sh (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/inventory/scripts/run-nohup.sh (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/inventory/scripts/run.sh (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/inventory/scripts/swag-docs.sh (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/inventory/third_party/gogo/protobuf/gogoproto/gogo.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/inventory/third_party/google/api/BUILD.bazel (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/inventory/third_party/google/api/README.md (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/inventory/third_party/google/api/annotations.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/inventory/third_party/google/api/auth.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/inventory/third_party/google/api/backend.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/inventory/third_party/google/api/billing.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/inventory/third_party/google/api/client.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/inventory/third_party/google/api/config_change.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/inventory/third_party/google/api/consumer.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/inventory/third_party/google/api/context.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/inventory/third_party/google/api/control.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/inventory/third_party/google/api/distribution.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/inventory/third_party/google/api/documentation.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/inventory/third_party/google/api/endpoint.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/inventory/third_party/google/api/expr/BUILD.bazel (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/inventory/third_party/google/api/expr/cel.yaml (100%) rename 6_micro-cluster/{ => 
example-1-multi-repo}/inventory/third_party/google/api/expr/v1alpha1/BUILD.bazel (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/inventory/third_party/google/api/expr/v1alpha1/checked.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/inventory/third_party/google/api/expr/v1alpha1/conformance_service.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/inventory/third_party/google/api/expr/v1alpha1/eval.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/inventory/third_party/google/api/expr/v1alpha1/explain.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/inventory/third_party/google/api/expr/v1alpha1/syntax.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/inventory/third_party/google/api/expr/v1alpha1/value.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/inventory/third_party/google/api/expr/v1beta1/BUILD.bazel (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/inventory/third_party/google/api/expr/v1beta1/decl.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/inventory/third_party/google/api/expr/v1beta1/eval.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/inventory/third_party/google/api/expr/v1beta1/expr.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/inventory/third_party/google/api/expr/v1beta1/source.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/inventory/third_party/google/api/expr/v1beta1/value.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/inventory/third_party/google/api/field_behavior.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/inventory/third_party/google/api/http.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/inventory/third_party/google/api/httpbody.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/inventory/third_party/google/api/label.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/inventory/third_party/google/api/launch_stage.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/inventory/third_party/google/api/log.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/inventory/third_party/google/api/logging.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/inventory/third_party/google/api/metric.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/inventory/third_party/google/api/monitored_resource.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/inventory/third_party/google/api/monitoring.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/inventory/third_party/google/api/quota.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/inventory/third_party/google/api/resource.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/inventory/third_party/google/api/service.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/inventory/third_party/google/api/serviceconfig.yaml (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/inventory/third_party/google/api/servicecontrol/BUILD.bazel (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/inventory/third_party/google/api/servicecontrol/README.md (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/inventory/third_party/google/api/servicecontrol/v1/BUILD.bazel (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/inventory/third_party/google/api/servicecontrol/v1/check_error.proto (100%) rename 6_micro-cluster/{ => 
example-1-multi-repo}/inventory/third_party/google/api/servicecontrol/v1/distribution.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/inventory/third_party/google/api/servicecontrol/v1/http_request.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/inventory/third_party/google/api/servicecontrol/v1/log_entry.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/inventory/third_party/google/api/servicecontrol/v1/metric_value.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/inventory/third_party/google/api/servicecontrol/v1/operation.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/inventory/third_party/google/api/servicecontrol/v1/quota_controller.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/inventory/third_party/google/api/servicecontrol/v1/service_controller.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/inventory/third_party/google/api/servicecontrol/v1/servicecontrol.yaml (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/inventory/third_party/google/api/servicemanagement/BUILD.bazel (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/inventory/third_party/google/api/servicemanagement/README.md (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/inventory/third_party/google/api/servicemanagement/v1/BUILD.bazel (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/inventory/third_party/google/api/servicemanagement/v1/resources.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/inventory/third_party/google/api/servicemanagement/v1/servicemanagement_gapic.legacy.yaml (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/inventory/third_party/google/api/servicemanagement/v1/servicemanagement_gapic.yaml (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/inventory/third_party/google/api/servicemanagement/v1/servicemanagement_grpc_service_config.json (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/inventory/third_party/google/api/servicemanagement/v1/servicemanagement_v1.yaml (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/inventory/third_party/google/api/servicemanagement/v1/servicemanager.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/inventory/third_party/google/api/source_info.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/inventory/third_party/google/api/system_parameter.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/inventory/third_party/google/api/usage.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/inventory/third_party/google/protobuf/annotations.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/inventory/third_party/google/protobuf/any.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/inventory/third_party/google/protobuf/api.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/inventory/third_party/google/protobuf/compiler/plugin.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/inventory/third_party/google/protobuf/descriptor.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/inventory/third_party/google/protobuf/duration.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/inventory/third_party/google/protobuf/empty.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/inventory/third_party/google/protobuf/field_mask.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/inventory/third_party/google/protobuf/source_context.proto (100%) rename 6_micro-cluster/{ => 
example-1-multi-repo}/inventory/third_party/google/protobuf/struct.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/inventory/third_party/google/protobuf/timestamp.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/inventory/third_party/google/protobuf/type.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/inventory/third_party/google/protobuf/wrappers.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/inventory/third_party/protoc-gen-openapiv2/options/annotations.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/inventory/third_party/protoc-gen-openapiv2/options/openapiv2.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/inventory/third_party/tagger/tagger.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/inventory/third_party/validate/validate.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/product/.gitignore (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/product/.golangci.yml (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/product/Jenkinsfile (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/product/Makefile (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/product/README.md (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/product/api/product/v1/product.pb.go (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/product/api/product/v1/product.pb.validate.go (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/product/api/product/v1/product.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/product/api/product/v1/product_grpc.pb.go (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/product/cmd/product/initial/initApp.go (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/product/cmd/product/initial/registerClose.go (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/product/cmd/product/initial/registerServer.go (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/product/cmd/product/main.go (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/product/configs/location.go (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/product/configs/product.yml (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/product/configs/product_cc.yml (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/product/deployments/binary/README.md (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/product/deployments/binary/deploy.sh (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/product/deployments/binary/run.sh (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/product/deployments/docker-compose/README.md (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/product/deployments/docker-compose/docker-compose.yml (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/product/deployments/kubernetes/README.md (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/product/deployments/kubernetes/product-configmap.yml (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/product/deployments/kubernetes/product-deployment.yml (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/product/deployments/kubernetes/product-svc.yml (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/product/deployments/kubernetes/projectNameExample-namespace.yml (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/product/docs/gen.info (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/product/go.mod (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/product/go.sum (100%) rename 
6_micro-cluster/{ => example-1-multi-repo}/product/internal/config/product.go (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/product/internal/config/product_cc.go (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/product/internal/config/product_test.go (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/product/internal/ecode/product_rpc.go (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/product/internal/ecode/systemCode_rpc.go (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/product/internal/server/grpc.go (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/product/internal/server/grpc_option.go (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/product/internal/server/grpc_test.go (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/product/internal/service/product.go (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/product/internal/service/product_client_test.go (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/product/internal/service/service.go (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/product/internal/service/service_test.go (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/product/scripts/binary-package.sh (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/product/scripts/build/Dockerfile (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/product/scripts/build/Dockerfile_build (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/product/scripts/build/Dockerfile_test (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/product/scripts/build/README.md (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/product/scripts/deploy-binary.sh (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/product/scripts/deploy-docker.sh (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/product/scripts/deploy-k8s.sh (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/product/scripts/image-build-local.sh (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/product/scripts/image-build.sh (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/product/scripts/image-build2.sh (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/product/scripts/image-push.sh (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/product/scripts/image-rpc-test.sh (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/product/scripts/patch.sh (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/product/scripts/proto-doc.sh (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/product/scripts/protoc.sh (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/product/scripts/run-nohup.sh (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/product/scripts/run.sh (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/product/scripts/swag-docs.sh (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/product/third_party/gogo/protobuf/gogoproto/gogo.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/product/third_party/google/api/BUILD.bazel (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/product/third_party/google/api/README.md (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/product/third_party/google/api/annotations.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/product/third_party/google/api/auth.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/product/third_party/google/api/backend.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/product/third_party/google/api/billing.proto (100%) rename 
6_micro-cluster/{ => example-1-multi-repo}/product/third_party/google/api/client.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/product/third_party/google/api/config_change.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/product/third_party/google/api/consumer.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/product/third_party/google/api/context.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/product/third_party/google/api/control.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/product/third_party/google/api/distribution.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/product/third_party/google/api/documentation.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/product/third_party/google/api/endpoint.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/product/third_party/google/api/expr/BUILD.bazel (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/product/third_party/google/api/expr/cel.yaml (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/product/third_party/google/api/expr/v1alpha1/BUILD.bazel (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/product/third_party/google/api/expr/v1alpha1/checked.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/product/third_party/google/api/expr/v1alpha1/conformance_service.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/product/third_party/google/api/expr/v1alpha1/eval.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/product/third_party/google/api/expr/v1alpha1/explain.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/product/third_party/google/api/expr/v1alpha1/syntax.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/product/third_party/google/api/expr/v1alpha1/value.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/product/third_party/google/api/expr/v1beta1/BUILD.bazel (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/product/third_party/google/api/expr/v1beta1/decl.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/product/third_party/google/api/expr/v1beta1/eval.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/product/third_party/google/api/expr/v1beta1/expr.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/product/third_party/google/api/expr/v1beta1/source.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/product/third_party/google/api/expr/v1beta1/value.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/product/third_party/google/api/field_behavior.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/product/third_party/google/api/http.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/product/third_party/google/api/httpbody.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/product/third_party/google/api/label.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/product/third_party/google/api/launch_stage.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/product/third_party/google/api/log.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/product/third_party/google/api/logging.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/product/third_party/google/api/metric.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/product/third_party/google/api/monitored_resource.proto (100%) rename 6_micro-cluster/{ => 
example-1-multi-repo}/product/third_party/google/api/monitoring.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/product/third_party/google/api/quota.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/product/third_party/google/api/resource.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/product/third_party/google/api/service.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/product/third_party/google/api/serviceconfig.yaml (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/product/third_party/google/api/servicecontrol/BUILD.bazel (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/product/third_party/google/api/servicecontrol/README.md (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/product/third_party/google/api/servicecontrol/v1/BUILD.bazel (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/product/third_party/google/api/servicecontrol/v1/check_error.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/product/third_party/google/api/servicecontrol/v1/distribution.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/product/third_party/google/api/servicecontrol/v1/http_request.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/product/third_party/google/api/servicecontrol/v1/log_entry.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/product/third_party/google/api/servicecontrol/v1/metric_value.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/product/third_party/google/api/servicecontrol/v1/operation.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/product/third_party/google/api/servicecontrol/v1/quota_controller.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/product/third_party/google/api/servicecontrol/v1/service_controller.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/product/third_party/google/api/servicecontrol/v1/servicecontrol.yaml (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/product/third_party/google/api/servicemanagement/BUILD.bazel (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/product/third_party/google/api/servicemanagement/README.md (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/product/third_party/google/api/servicemanagement/v1/BUILD.bazel (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/product/third_party/google/api/servicemanagement/v1/resources.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/product/third_party/google/api/servicemanagement/v1/servicemanagement_gapic.legacy.yaml (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/product/third_party/google/api/servicemanagement/v1/servicemanagement_gapic.yaml (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/product/third_party/google/api/servicemanagement/v1/servicemanagement_grpc_service_config.json (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/product/third_party/google/api/servicemanagement/v1/servicemanagement_v1.yaml (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/product/third_party/google/api/servicemanagement/v1/servicemanager.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/product/third_party/google/api/source_info.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/product/third_party/google/api/system_parameter.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/product/third_party/google/api/usage.proto (100%) rename 6_micro-cluster/{ => 
example-1-multi-repo}/product/third_party/google/protobuf/annotations.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/product/third_party/google/protobuf/any.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/product/third_party/google/protobuf/api.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/product/third_party/google/protobuf/compiler/plugin.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/product/third_party/google/protobuf/descriptor.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/product/third_party/google/protobuf/duration.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/product/third_party/google/protobuf/empty.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/product/third_party/google/protobuf/field_mask.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/product/third_party/google/protobuf/source_context.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/product/third_party/google/protobuf/struct.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/product/third_party/google/protobuf/timestamp.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/product/third_party/google/protobuf/type.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/product/third_party/google/protobuf/wrappers.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/product/third_party/protoc-gen-openapiv2/options/annotations.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/product/third_party/protoc-gen-openapiv2/options/openapiv2.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/product/third_party/tagger/tagger.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/product/third_party/validate/validate.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/.gitignore (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/.golangci.yml (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/Jenkinsfile (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/Makefile (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/README.md (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/api/comment/v1/comment.pb.go (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/api/comment/v1/comment.pb.validate.go (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/api/comment/v1/comment.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/api/comment/v1/comment_grpc.pb.go (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/api/inventory/v1/inventory.pb.go (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/api/inventory/v1/inventory.pb.validate.go (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/api/inventory/v1/inventory.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/api/inventory/v1/inventory_grpc.pb.go (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/api/product/v1/product.pb.go (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/api/product/v1/product.pb.validate.go (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/api/product/v1/product.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/api/product/v1/product_grpc.pb.go (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/api/shop_gw/v1/shop_gw.pb.go (100%) rename 6_micro-cluster/{ => 
example-1-multi-repo}/shop_gw/api/shop_gw/v1/shop_gw.pb.validate.go (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/api/shop_gw/v1/shop_gw.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/api/shop_gw/v1/shop_gw_grpc.pb.go (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/api/shop_gw/v1/shop_gw_router.pb.go (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/cmd/shop_gw/initial/initApp.go (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/cmd/shop_gw/initial/registerClose.go (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/cmd/shop_gw/initial/registerServer.go (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/cmd/shop_gw/main.go (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/configs/location.go (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/configs/shop_gw.yml (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/configs/shop_gw_cc.yml (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/deployments/binary/README.md (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/deployments/binary/deploy.sh (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/deployments/binary/run.sh (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/deployments/docker-compose/README.md (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/deployments/docker-compose/docker-compose.yml (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/deployments/kubernetes/README.md (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/deployments/kubernetes/projectNameExample-namespace.yml (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/deployments/kubernetes/shop_gw-configmap.yml (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/deployments/kubernetes/shop_gw-deployment.yml (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/deployments/kubernetes/shop_gw-svc.yml (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/docs/apis.go (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/docs/apis.swagger.json (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/docs/gen.info (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/go.mod (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/go.sum (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/internal/config/shop_gw.go (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/internal/config/shop_gw_cc.go (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/internal/config/shop_gw_test.go (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/internal/ecode/shop_gw_rpc.go (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/internal/ecode/systemCode_rpc.go (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/internal/routers/routers.go (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/internal/routers/shop_gw_router.go (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/internal/rpcclient/comment.go (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/internal/rpcclient/inventory.go (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/internal/rpcclient/product.go (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/internal/server/http.go (100%) rename 6_micro-cluster/{ => 
example-1-multi-repo}/shop_gw/internal/server/http_option.go (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/internal/server/http_test.go (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/internal/service/shop_gw.go (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/scripts/binary-package.sh (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/scripts/build/Dockerfile (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/scripts/build/Dockerfile_build (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/scripts/build/Dockerfile_test (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/scripts/build/README.md (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/scripts/deploy-binary.sh (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/scripts/deploy-docker.sh (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/scripts/deploy-k8s.sh (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/scripts/image-build-local.sh (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/scripts/image-build.sh (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/scripts/image-build2.sh (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/scripts/image-push.sh (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/scripts/image-rpc-test.sh (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/scripts/patch.sh (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/scripts/proto-doc.sh (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/scripts/protoc.sh (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/scripts/run-nohup.sh (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/scripts/run.sh (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/scripts/swag-docs.sh (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/third_party/gogo/protobuf/gogoproto/gogo.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/third_party/google/api/BUILD.bazel (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/third_party/google/api/README.md (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/third_party/google/api/annotations.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/third_party/google/api/auth.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/third_party/google/api/backend.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/third_party/google/api/billing.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/third_party/google/api/client.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/third_party/google/api/config_change.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/third_party/google/api/consumer.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/third_party/google/api/context.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/third_party/google/api/control.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/third_party/google/api/distribution.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/third_party/google/api/documentation.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/third_party/google/api/endpoint.proto (100%) rename 6_micro-cluster/{ => 
example-1-multi-repo}/shop_gw/third_party/google/api/expr/BUILD.bazel (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/third_party/google/api/expr/cel.yaml (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/third_party/google/api/expr/v1alpha1/BUILD.bazel (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/third_party/google/api/expr/v1alpha1/checked.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/third_party/google/api/expr/v1alpha1/conformance_service.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/third_party/google/api/expr/v1alpha1/eval.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/third_party/google/api/expr/v1alpha1/explain.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/third_party/google/api/expr/v1alpha1/syntax.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/third_party/google/api/expr/v1alpha1/value.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/third_party/google/api/expr/v1beta1/BUILD.bazel (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/third_party/google/api/expr/v1beta1/decl.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/third_party/google/api/expr/v1beta1/eval.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/third_party/google/api/expr/v1beta1/expr.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/third_party/google/api/expr/v1beta1/source.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/third_party/google/api/expr/v1beta1/value.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/third_party/google/api/field_behavior.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/third_party/google/api/http.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/third_party/google/api/httpbody.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/third_party/google/api/label.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/third_party/google/api/launch_stage.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/third_party/google/api/log.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/third_party/google/api/logging.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/third_party/google/api/metric.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/third_party/google/api/monitored_resource.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/third_party/google/api/monitoring.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/third_party/google/api/quota.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/third_party/google/api/resource.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/third_party/google/api/service.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/third_party/google/api/serviceconfig.yaml (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/third_party/google/api/servicecontrol/BUILD.bazel (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/third_party/google/api/servicecontrol/README.md (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/third_party/google/api/servicecontrol/v1/BUILD.bazel (100%) rename 6_micro-cluster/{ => 
example-1-multi-repo}/shop_gw/third_party/google/api/servicecontrol/v1/check_error.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/third_party/google/api/servicecontrol/v1/distribution.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/third_party/google/api/servicecontrol/v1/http_request.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/third_party/google/api/servicecontrol/v1/log_entry.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/third_party/google/api/servicecontrol/v1/metric_value.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/third_party/google/api/servicecontrol/v1/operation.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/third_party/google/api/servicecontrol/v1/quota_controller.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/third_party/google/api/servicecontrol/v1/service_controller.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/third_party/google/api/servicecontrol/v1/servicecontrol.yaml (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/third_party/google/api/servicemanagement/BUILD.bazel (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/third_party/google/api/servicemanagement/README.md (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/third_party/google/api/servicemanagement/v1/BUILD.bazel (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/third_party/google/api/servicemanagement/v1/resources.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/third_party/google/api/servicemanagement/v1/servicemanagement_gapic.legacy.yaml (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/third_party/google/api/servicemanagement/v1/servicemanagement_gapic.yaml (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/third_party/google/api/servicemanagement/v1/servicemanagement_grpc_service_config.json (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/third_party/google/api/servicemanagement/v1/servicemanagement_v1.yaml (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/third_party/google/api/servicemanagement/v1/servicemanager.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/third_party/google/api/source_info.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/third_party/google/api/system_parameter.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/third_party/google/api/usage.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/third_party/google/protobuf/annotations.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/third_party/google/protobuf/any.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/third_party/google/protobuf/api.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/third_party/google/protobuf/compiler/plugin.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/third_party/google/protobuf/descriptor.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/third_party/google/protobuf/duration.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/third_party/google/protobuf/empty.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/third_party/google/protobuf/field_mask.proto (100%) rename 6_micro-cluster/{ => 
example-1-multi-repo}/shop_gw/third_party/google/protobuf/source_context.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/third_party/google/protobuf/struct.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/third_party/google/protobuf/timestamp.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/third_party/google/protobuf/type.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/third_party/google/protobuf/wrappers.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/third_party/protoc-gen-openapiv2/options/annotations.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/third_party/protoc-gen-openapiv2/options/openapiv2.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/third_party/tagger/tagger.proto (100%) rename 6_micro-cluster/{ => example-1-multi-repo}/shop_gw/third_party/validate/validate.proto (100%) create mode 100644 6_micro-cluster/example-2-mono-repo/api/comment/v1/comment.pb.go create mode 100644 6_micro-cluster/example-2-mono-repo/api/comment/v1/comment.pb.validate.go create mode 100644 6_micro-cluster/example-2-mono-repo/api/comment/v1/comment.proto create mode 100644 6_micro-cluster/example-2-mono-repo/api/comment/v1/comment_grpc.pb.go create mode 100644 6_micro-cluster/example-2-mono-repo/api/eshop_gw/v1/eshop_gw.pb.go create mode 100644 6_micro-cluster/example-2-mono-repo/api/eshop_gw/v1/eshop_gw.pb.validate.go create mode 100644 6_micro-cluster/example-2-mono-repo/api/eshop_gw/v1/eshop_gw.proto create mode 100644 6_micro-cluster/example-2-mono-repo/api/eshop_gw/v1/eshop_gw_grpc.pb.go create mode 100644 6_micro-cluster/example-2-mono-repo/api/eshop_gw/v1/eshop_gw_router.pb.go create mode 100644 6_micro-cluster/example-2-mono-repo/api/inventory/v1/inventory.pb.go create mode 100644 6_micro-cluster/example-2-mono-repo/api/inventory/v1/inventory.pb.validate.go create mode 100644 6_micro-cluster/example-2-mono-repo/api/inventory/v1/inventory.proto create mode 100644 6_micro-cluster/example-2-mono-repo/api/inventory/v1/inventory_grpc.pb.go create mode 100644 6_micro-cluster/example-2-mono-repo/api/product/v1/product.pb.go create mode 100644 6_micro-cluster/example-2-mono-repo/api/product/v1/product.pb.validate.go create mode 100644 6_micro-cluster/example-2-mono-repo/api/product/v1/product.proto create mode 100644 6_micro-cluster/example-2-mono-repo/api/product/v1/product_grpc.pb.go rename {b_sponge-dtm-msg => 6_micro-cluster/example-2-mono-repo/comment}/.gitignore (93%) rename {a_micro-grpc-http-protobuf => 6_micro-cluster/example-2-mono-repo/comment}/.golangci.yml (99%) rename {a_micro-grpc-http-protobuf => 6_micro-cluster/example-2-mono-repo/comment}/Jenkinsfile (100%) create mode 100644 6_micro-cluster/example-2-mono-repo/comment/Makefile create mode 100644 6_micro-cluster/example-2-mono-repo/comment/README.md rename {b_sponge-dtm-msg/cmd/transfer => 6_micro-cluster/example-2-mono-repo/comment/cmd/comment}/initial/close.go (87%) rename {b_sponge-dtm-msg/cmd/transfer => 6_micro-cluster/example-2-mono-repo/comment/cmd/comment}/initial/createService.go (83%) create mode 100644 6_micro-cluster/example-2-mono-repo/comment/cmd/comment/initial/initApp.go rename {b_sponge-dtm-msg/cmd/transfer => 6_micro-cluster/example-2-mono-repo/comment/cmd/comment}/main.go (53%) rename a_micro-grpc-http-protobuf/configs/user.yml => 6_micro-cluster/example-2-mono-repo/comment/configs/comment.yml (92%) create mode 100644 
6_micro-cluster/example-2-mono-repo/comment/configs/comment_cc.yml rename {a_micro-grpc-http-protobuf => 6_micro-cluster/example-2-mono-repo/comment}/configs/location.go (100%) create mode 100644 6_micro-cluster/example-2-mono-repo/comment/deployments/binary/README.md create mode 100644 6_micro-cluster/example-2-mono-repo/comment/deployments/binary/deploy.sh create mode 100644 6_micro-cluster/example-2-mono-repo/comment/deployments/binary/run.sh create mode 100644 6_micro-cluster/example-2-mono-repo/comment/deployments/docker-compose/README.md create mode 100644 6_micro-cluster/example-2-mono-repo/comment/deployments/docker-compose/docker-compose.yml create mode 100644 6_micro-cluster/example-2-mono-repo/comment/deployments/kubernetes/README.md create mode 100644 6_micro-cluster/example-2-mono-repo/comment/deployments/kubernetes/comment-configmap.yml create mode 100644 6_micro-cluster/example-2-mono-repo/comment/deployments/kubernetes/comment-deployment.yml create mode 100644 6_micro-cluster/example-2-mono-repo/comment/deployments/kubernetes/comment-svc.yml rename a_micro-grpc-http-protobuf/deployments/kubernetes/edusys-namespace.yml => 6_micro-cluster/example-2-mono-repo/comment/deployments/kubernetes/eshop-namespace.yml (73%) create mode 100644 6_micro-cluster/example-2-mono-repo/comment/docs/gen.info rename a_micro-grpc-http-protobuf/internal/config/user.go => 6_micro-cluster/example-2-mono-repo/comment/internal/config/comment.go (99%) rename a_micro-grpc-http-protobuf/internal/config/user_cc.go => 6_micro-cluster/example-2-mono-repo/comment/internal/config/comment_cc.go (95%) create mode 100644 6_micro-cluster/example-2-mono-repo/comment/internal/config/comment_test.go create mode 100644 6_micro-cluster/example-2-mono-repo/comment/internal/ecode/comment_rpc.go rename {a_micro-grpc-http-protobuf => 6_micro-cluster/example-2-mono-repo/comment}/internal/ecode/systemCode_rpc.go (85%) rename {b_sponge-dtm-msg => 6_micro-cluster/example-2-mono-repo/comment}/internal/server/grpc.go (87%) rename {a_micro-grpc-http-protobuf => 6_micro-cluster/example-2-mono-repo/comment}/internal/server/grpc_option.go (100%) rename {b_sponge-dtm-msg => 6_micro-cluster/example-2-mono-repo/comment}/internal/server/grpc_test.go (91%) create mode 100644 6_micro-cluster/example-2-mono-repo/comment/internal/service/comment.go create mode 100644 6_micro-cluster/example-2-mono-repo/comment/internal/service/comment_client_test.go rename {a_micro-grpc-http-protobuf => 6_micro-cluster/example-2-mono-repo/comment}/internal/service/service.go (100%) rename {b_sponge-dtm-msg => 6_micro-cluster/example-2-mono-repo/comment}/internal/service/service_test.go (69%) create mode 100644 6_micro-cluster/example-2-mono-repo/comment/scripts/binary-package.sh create mode 100644 6_micro-cluster/example-2-mono-repo/comment/scripts/build/Dockerfile create mode 100644 6_micro-cluster/example-2-mono-repo/comment/scripts/build/Dockerfile_build create mode 100644 6_micro-cluster/example-2-mono-repo/comment/scripts/build/Dockerfile_test rename {a_micro-grpc-http-protobuf => 6_micro-cluster/example-2-mono-repo/comment}/scripts/build/README.md (100%) create mode 100644 6_micro-cluster/example-2-mono-repo/comment/scripts/deploy-binary.sh create mode 100644 6_micro-cluster/example-2-mono-repo/comment/scripts/deploy-docker.sh create mode 100644 6_micro-cluster/example-2-mono-repo/comment/scripts/deploy-k8s.sh create mode 100644 6_micro-cluster/example-2-mono-repo/comment/scripts/image-build-local.sh create mode 100644 
6_micro-cluster/example-2-mono-repo/comment/scripts/image-build.sh create mode 100644 6_micro-cluster/example-2-mono-repo/comment/scripts/image-build2.sh create mode 100644 6_micro-cluster/example-2-mono-repo/comment/scripts/image-push.sh create mode 100644 6_micro-cluster/example-2-mono-repo/comment/scripts/image-rpc-test.sh create mode 100644 6_micro-cluster/example-2-mono-repo/comment/scripts/patch-mono.sh create mode 100644 6_micro-cluster/example-2-mono-repo/comment/scripts/patch.sh create mode 100644 6_micro-cluster/example-2-mono-repo/comment/scripts/proto-doc.sh create mode 100644 6_micro-cluster/example-2-mono-repo/comment/scripts/protoc.sh rename {b_sponge-dtm-msg => 6_micro-cluster/example-2-mono-repo/comment}/scripts/run-nohup.sh (96%) create mode 100644 6_micro-cluster/example-2-mono-repo/comment/scripts/run.sh create mode 100644 6_micro-cluster/example-2-mono-repo/eshop_gw/.gitignore create mode 100644 6_micro-cluster/example-2-mono-repo/eshop_gw/.golangci.yml create mode 100644 6_micro-cluster/example-2-mono-repo/eshop_gw/Jenkinsfile create mode 100644 6_micro-cluster/example-2-mono-repo/eshop_gw/Makefile create mode 100644 6_micro-cluster/example-2-mono-repo/eshop_gw/README.md create mode 100644 6_micro-cluster/example-2-mono-repo/eshop_gw/cmd/eshop_gw/initial/close.go create mode 100644 6_micro-cluster/example-2-mono-repo/eshop_gw/cmd/eshop_gw/initial/createService.go create mode 100644 6_micro-cluster/example-2-mono-repo/eshop_gw/cmd/eshop_gw/initial/initApp.go create mode 100644 6_micro-cluster/example-2-mono-repo/eshop_gw/cmd/eshop_gw/main.go create mode 100644 6_micro-cluster/example-2-mono-repo/eshop_gw/configs/eshop_gw.yml create mode 100644 6_micro-cluster/example-2-mono-repo/eshop_gw/configs/eshop_gw_cc.yml rename {b_sponge-dtm-msg => 6_micro-cluster/example-2-mono-repo/eshop_gw}/configs/location.go (100%) create mode 100644 6_micro-cluster/example-2-mono-repo/eshop_gw/deployments/binary/README.md create mode 100644 6_micro-cluster/example-2-mono-repo/eshop_gw/deployments/binary/deploy.sh create mode 100644 6_micro-cluster/example-2-mono-repo/eshop_gw/deployments/binary/run.sh create mode 100644 6_micro-cluster/example-2-mono-repo/eshop_gw/deployments/docker-compose/README.md create mode 100644 6_micro-cluster/example-2-mono-repo/eshop_gw/deployments/docker-compose/docker-compose.yml create mode 100644 6_micro-cluster/example-2-mono-repo/eshop_gw/deployments/kubernetes/README.md create mode 100644 6_micro-cluster/example-2-mono-repo/eshop_gw/deployments/kubernetes/eshop-namespace.yml create mode 100644 6_micro-cluster/example-2-mono-repo/eshop_gw/deployments/kubernetes/eshop_gw-configmap.yml create mode 100644 6_micro-cluster/example-2-mono-repo/eshop_gw/deployments/kubernetes/eshop_gw-deployment.yml create mode 100644 6_micro-cluster/example-2-mono-repo/eshop_gw/deployments/kubernetes/eshop_gw-svc.yml rename {a_micro-grpc-http-protobuf => 6_micro-cluster/example-2-mono-repo/eshop_gw}/docs/apis.go (100%) create mode 100644 6_micro-cluster/example-2-mono-repo/eshop_gw/docs/apis.swagger.json create mode 100644 6_micro-cluster/example-2-mono-repo/eshop_gw/docs/gen.info create mode 100644 6_micro-cluster/example-2-mono-repo/eshop_gw/internal/config/eshop_gw.go create mode 100644 6_micro-cluster/example-2-mono-repo/eshop_gw/internal/config/eshop_gw_cc.go create mode 100644 6_micro-cluster/example-2-mono-repo/eshop_gw/internal/config/eshop_gw_test.go create mode 100644 6_micro-cluster/example-2-mono-repo/eshop_gw/internal/ecode/eshop_gw_rpc.go create mode 100644 
6_micro-cluster/example-2-mono-repo/eshop_gw/internal/ecode/systemCode_rpc.go create mode 100644 6_micro-cluster/example-2-mono-repo/eshop_gw/internal/routers/eshop_gw_router.go create mode 100644 6_micro-cluster/example-2-mono-repo/eshop_gw/internal/routers/routers.go create mode 100644 6_micro-cluster/example-2-mono-repo/eshop_gw/internal/rpcclient/comment.go create mode 100644 6_micro-cluster/example-2-mono-repo/eshop_gw/internal/rpcclient/inventory.go create mode 100644 6_micro-cluster/example-2-mono-repo/eshop_gw/internal/rpcclient/product.go create mode 100644 6_micro-cluster/example-2-mono-repo/eshop_gw/internal/server/http.go rename {a_micro-grpc-http-protobuf => 6_micro-cluster/example-2-mono-repo/eshop_gw}/internal/server/http_option.go (100%) create mode 100644 6_micro-cluster/example-2-mono-repo/eshop_gw/internal/server/http_test.go create mode 100644 6_micro-cluster/example-2-mono-repo/eshop_gw/internal/service/eshop_gw.go create mode 100644 6_micro-cluster/example-2-mono-repo/eshop_gw/scripts/binary-package.sh create mode 100644 6_micro-cluster/example-2-mono-repo/eshop_gw/scripts/build/Dockerfile create mode 100644 6_micro-cluster/example-2-mono-repo/eshop_gw/scripts/build/Dockerfile_build create mode 100644 6_micro-cluster/example-2-mono-repo/eshop_gw/scripts/build/Dockerfile_test create mode 100644 6_micro-cluster/example-2-mono-repo/eshop_gw/scripts/build/README.md create mode 100644 6_micro-cluster/example-2-mono-repo/eshop_gw/scripts/deploy-binary.sh create mode 100644 6_micro-cluster/example-2-mono-repo/eshop_gw/scripts/deploy-docker.sh create mode 100644 6_micro-cluster/example-2-mono-repo/eshop_gw/scripts/deploy-k8s.sh create mode 100644 6_micro-cluster/example-2-mono-repo/eshop_gw/scripts/image-build-local.sh create mode 100644 6_micro-cluster/example-2-mono-repo/eshop_gw/scripts/image-build.sh create mode 100644 6_micro-cluster/example-2-mono-repo/eshop_gw/scripts/image-build2.sh create mode 100644 6_micro-cluster/example-2-mono-repo/eshop_gw/scripts/image-push.sh create mode 100644 6_micro-cluster/example-2-mono-repo/eshop_gw/scripts/image-rpc-test.sh create mode 100644 6_micro-cluster/example-2-mono-repo/eshop_gw/scripts/patch-mono.sh create mode 100644 6_micro-cluster/example-2-mono-repo/eshop_gw/scripts/patch.sh create mode 100644 6_micro-cluster/example-2-mono-repo/eshop_gw/scripts/proto-doc.sh create mode 100644 6_micro-cluster/example-2-mono-repo/eshop_gw/scripts/protoc.sh create mode 100644 6_micro-cluster/example-2-mono-repo/eshop_gw/scripts/run-nohup.sh create mode 100644 6_micro-cluster/example-2-mono-repo/eshop_gw/scripts/run.sh create mode 100644 6_micro-cluster/example-2-mono-repo/eshop_gw/scripts/swag-docs.sh rename {a_micro-grpc-http-protobuf => 6_micro-cluster/example-2-mono-repo}/go.mod (87%) rename {a_micro-grpc-http-protobuf => 6_micro-cluster/example-2-mono-repo}/go.sum (95%) create mode 100644 6_micro-cluster/example-2-mono-repo/inventory/.gitignore create mode 100644 6_micro-cluster/example-2-mono-repo/inventory/.golangci.yml create mode 100644 6_micro-cluster/example-2-mono-repo/inventory/Jenkinsfile create mode 100644 6_micro-cluster/example-2-mono-repo/inventory/Makefile create mode 100644 6_micro-cluster/example-2-mono-repo/inventory/README.md create mode 100644 6_micro-cluster/example-2-mono-repo/inventory/cmd/inventory/initial/close.go create mode 100644 6_micro-cluster/example-2-mono-repo/inventory/cmd/inventory/initial/createService.go create mode 100644 6_micro-cluster/example-2-mono-repo/inventory/cmd/inventory/initial/initApp.go 
create mode 100644 6_micro-cluster/example-2-mono-repo/inventory/cmd/inventory/main.go create mode 100644 6_micro-cluster/example-2-mono-repo/inventory/configs/inventory.yml create mode 100644 6_micro-cluster/example-2-mono-repo/inventory/configs/inventory_cc.yml create mode 100644 6_micro-cluster/example-2-mono-repo/inventory/configs/location.go create mode 100644 6_micro-cluster/example-2-mono-repo/inventory/deployments/binary/README.md create mode 100644 6_micro-cluster/example-2-mono-repo/inventory/deployments/binary/deploy.sh create mode 100644 6_micro-cluster/example-2-mono-repo/inventory/deployments/binary/run.sh create mode 100644 6_micro-cluster/example-2-mono-repo/inventory/deployments/docker-compose/README.md create mode 100644 6_micro-cluster/example-2-mono-repo/inventory/deployments/docker-compose/docker-compose.yml create mode 100644 6_micro-cluster/example-2-mono-repo/inventory/deployments/kubernetes/README.md create mode 100644 6_micro-cluster/example-2-mono-repo/inventory/deployments/kubernetes/eshop-namespace.yml create mode 100644 6_micro-cluster/example-2-mono-repo/inventory/deployments/kubernetes/inventory-configmap.yml create mode 100644 6_micro-cluster/example-2-mono-repo/inventory/deployments/kubernetes/inventory-deployment.yml create mode 100644 6_micro-cluster/example-2-mono-repo/inventory/deployments/kubernetes/inventory-svc.yml create mode 100644 6_micro-cluster/example-2-mono-repo/inventory/docs/gen.info create mode 100644 6_micro-cluster/example-2-mono-repo/inventory/internal/config/inventory.go create mode 100644 6_micro-cluster/example-2-mono-repo/inventory/internal/config/inventory_cc.go create mode 100644 6_micro-cluster/example-2-mono-repo/inventory/internal/config/inventory_test.go create mode 100644 6_micro-cluster/example-2-mono-repo/inventory/internal/ecode/inventory_rpc.go create mode 100644 6_micro-cluster/example-2-mono-repo/inventory/internal/ecode/systemCode_rpc.go create mode 100644 6_micro-cluster/example-2-mono-repo/inventory/internal/server/grpc.go create mode 100644 6_micro-cluster/example-2-mono-repo/inventory/internal/server/grpc_option.go create mode 100644 6_micro-cluster/example-2-mono-repo/inventory/internal/server/grpc_test.go create mode 100644 6_micro-cluster/example-2-mono-repo/inventory/internal/service/inventory.go create mode 100644 6_micro-cluster/example-2-mono-repo/inventory/internal/service/inventory_client_test.go rename {b_sponge-dtm-msg => 6_micro-cluster/example-2-mono-repo/inventory}/internal/service/service.go (100%) create mode 100644 6_micro-cluster/example-2-mono-repo/inventory/internal/service/service_test.go create mode 100644 6_micro-cluster/example-2-mono-repo/inventory/scripts/binary-package.sh create mode 100644 6_micro-cluster/example-2-mono-repo/inventory/scripts/build/Dockerfile create mode 100644 6_micro-cluster/example-2-mono-repo/inventory/scripts/build/Dockerfile_build create mode 100644 6_micro-cluster/example-2-mono-repo/inventory/scripts/build/Dockerfile_test create mode 100644 6_micro-cluster/example-2-mono-repo/inventory/scripts/build/README.md create mode 100644 6_micro-cluster/example-2-mono-repo/inventory/scripts/deploy-binary.sh create mode 100644 6_micro-cluster/example-2-mono-repo/inventory/scripts/deploy-docker.sh create mode 100644 6_micro-cluster/example-2-mono-repo/inventory/scripts/deploy-k8s.sh create mode 100644 6_micro-cluster/example-2-mono-repo/inventory/scripts/image-build-local.sh create mode 100644 6_micro-cluster/example-2-mono-repo/inventory/scripts/image-build.sh create mode 
100644 6_micro-cluster/example-2-mono-repo/inventory/scripts/image-build2.sh create mode 100644 6_micro-cluster/example-2-mono-repo/inventory/scripts/image-push.sh create mode 100644 6_micro-cluster/example-2-mono-repo/inventory/scripts/image-rpc-test.sh create mode 100644 6_micro-cluster/example-2-mono-repo/inventory/scripts/patch-mono.sh create mode 100644 6_micro-cluster/example-2-mono-repo/inventory/scripts/patch.sh create mode 100644 6_micro-cluster/example-2-mono-repo/inventory/scripts/proto-doc.sh create mode 100644 6_micro-cluster/example-2-mono-repo/inventory/scripts/protoc.sh create mode 100644 6_micro-cluster/example-2-mono-repo/inventory/scripts/run-nohup.sh create mode 100644 6_micro-cluster/example-2-mono-repo/inventory/scripts/run.sh create mode 100644 6_micro-cluster/example-2-mono-repo/product.proto create mode 100644 6_micro-cluster/example-2-mono-repo/product/.gitignore create mode 100644 6_micro-cluster/example-2-mono-repo/product/.golangci.yml create mode 100644 6_micro-cluster/example-2-mono-repo/product/Jenkinsfile create mode 100644 6_micro-cluster/example-2-mono-repo/product/Makefile create mode 100644 6_micro-cluster/example-2-mono-repo/product/README.md create mode 100644 6_micro-cluster/example-2-mono-repo/product/cmd/product/initial/close.go create mode 100644 6_micro-cluster/example-2-mono-repo/product/cmd/product/initial/createService.go create mode 100644 6_micro-cluster/example-2-mono-repo/product/cmd/product/initial/initApp.go create mode 100644 6_micro-cluster/example-2-mono-repo/product/cmd/product/main.go create mode 100644 6_micro-cluster/example-2-mono-repo/product/configs/location.go create mode 100644 6_micro-cluster/example-2-mono-repo/product/configs/product.yml create mode 100644 6_micro-cluster/example-2-mono-repo/product/configs/product_cc.yml create mode 100644 6_micro-cluster/example-2-mono-repo/product/deployments/binary/README.md create mode 100644 6_micro-cluster/example-2-mono-repo/product/deployments/binary/deploy.sh create mode 100644 6_micro-cluster/example-2-mono-repo/product/deployments/binary/run.sh create mode 100644 6_micro-cluster/example-2-mono-repo/product/deployments/docker-compose/README.md create mode 100644 6_micro-cluster/example-2-mono-repo/product/deployments/docker-compose/docker-compose.yml create mode 100644 6_micro-cluster/example-2-mono-repo/product/deployments/kubernetes/README.md create mode 100644 6_micro-cluster/example-2-mono-repo/product/deployments/kubernetes/eshop-namespace.yml create mode 100644 6_micro-cluster/example-2-mono-repo/product/deployments/kubernetes/product-configmap.yml create mode 100644 6_micro-cluster/example-2-mono-repo/product/deployments/kubernetes/product-deployment.yml create mode 100644 6_micro-cluster/example-2-mono-repo/product/deployments/kubernetes/product-svc.yml create mode 100644 6_micro-cluster/example-2-mono-repo/product/docs/gen.info create mode 100644 6_micro-cluster/example-2-mono-repo/product/internal/config/product.go create mode 100644 6_micro-cluster/example-2-mono-repo/product/internal/config/product_cc.go create mode 100644 6_micro-cluster/example-2-mono-repo/product/internal/config/product_test.go create mode 100644 6_micro-cluster/example-2-mono-repo/product/internal/ecode/product_rpc.go create mode 100644 6_micro-cluster/example-2-mono-repo/product/internal/ecode/systemCode_rpc.go create mode 100644 6_micro-cluster/example-2-mono-repo/product/internal/server/grpc.go create mode 100644 6_micro-cluster/example-2-mono-repo/product/internal/server/grpc_option.go create 
mode 100644 6_micro-cluster/example-2-mono-repo/product/internal/server/grpc_test.go create mode 100644 6_micro-cluster/example-2-mono-repo/product/internal/service/product.go create mode 100644 6_micro-cluster/example-2-mono-repo/product/internal/service/product_client_test.go create mode 100644 6_micro-cluster/example-2-mono-repo/product/internal/service/service.go create mode 100644 6_micro-cluster/example-2-mono-repo/product/internal/service/service_test.go create mode 100644 6_micro-cluster/example-2-mono-repo/product/scripts/binary-package.sh create mode 100644 6_micro-cluster/example-2-mono-repo/product/scripts/build/Dockerfile create mode 100644 6_micro-cluster/example-2-mono-repo/product/scripts/build/Dockerfile_build create mode 100644 6_micro-cluster/example-2-mono-repo/product/scripts/build/Dockerfile_test create mode 100644 6_micro-cluster/example-2-mono-repo/product/scripts/build/README.md create mode 100644 6_micro-cluster/example-2-mono-repo/product/scripts/deploy-binary.sh create mode 100644 6_micro-cluster/example-2-mono-repo/product/scripts/deploy-docker.sh create mode 100644 6_micro-cluster/example-2-mono-repo/product/scripts/deploy-k8s.sh create mode 100644 6_micro-cluster/example-2-mono-repo/product/scripts/image-build-local.sh create mode 100644 6_micro-cluster/example-2-mono-repo/product/scripts/image-build.sh create mode 100644 6_micro-cluster/example-2-mono-repo/product/scripts/image-build2.sh create mode 100644 6_micro-cluster/example-2-mono-repo/product/scripts/image-push.sh create mode 100644 6_micro-cluster/example-2-mono-repo/product/scripts/image-rpc-test.sh create mode 100644 6_micro-cluster/example-2-mono-repo/product/scripts/patch-mono.sh create mode 100644 6_micro-cluster/example-2-mono-repo/product/scripts/patch.sh create mode 100644 6_micro-cluster/example-2-mono-repo/product/scripts/proto-doc.sh create mode 100644 6_micro-cluster/example-2-mono-repo/product/scripts/protoc.sh create mode 100644 6_micro-cluster/example-2-mono-repo/product/scripts/run-nohup.sh create mode 100644 6_micro-cluster/example-2-mono-repo/product/scripts/run.sh rename {a_micro-grpc-http-protobuf => 6_micro-cluster/example-2-mono-repo}/third_party/gogo/protobuf/gogoproto/gogo.proto (100%) rename {a_micro-grpc-http-protobuf => 6_micro-cluster/example-2-mono-repo}/third_party/google/api/BUILD.bazel (100%) rename {a_micro-grpc-http-protobuf => 6_micro-cluster/example-2-mono-repo}/third_party/google/api/README.md (100%) rename {a_micro-grpc-http-protobuf => 6_micro-cluster/example-2-mono-repo}/third_party/google/api/annotations.proto (100%) rename {a_micro-grpc-http-protobuf => 6_micro-cluster/example-2-mono-repo}/third_party/google/api/auth.proto (100%) rename {a_micro-grpc-http-protobuf => 6_micro-cluster/example-2-mono-repo}/third_party/google/api/backend.proto (100%) rename {a_micro-grpc-http-protobuf => 6_micro-cluster/example-2-mono-repo}/third_party/google/api/billing.proto (100%) rename {a_micro-grpc-http-protobuf => 6_micro-cluster/example-2-mono-repo}/third_party/google/api/client.proto (100%) rename {a_micro-grpc-http-protobuf => 6_micro-cluster/example-2-mono-repo}/third_party/google/api/config_change.proto (100%) rename {a_micro-grpc-http-protobuf => 6_micro-cluster/example-2-mono-repo}/third_party/google/api/consumer.proto (100%) rename {a_micro-grpc-http-protobuf => 6_micro-cluster/example-2-mono-repo}/third_party/google/api/context.proto (100%) rename {a_micro-grpc-http-protobuf => 6_micro-cluster/example-2-mono-repo}/third_party/google/api/control.proto (100%) rename 
{a_micro-grpc-http-protobuf => 6_micro-cluster/example-2-mono-repo}/third_party/google/api/distribution.proto (100%) rename {a_micro-grpc-http-protobuf => 6_micro-cluster/example-2-mono-repo}/third_party/google/api/documentation.proto (100%) rename {a_micro-grpc-http-protobuf => 6_micro-cluster/example-2-mono-repo}/third_party/google/api/endpoint.proto (100%) rename {a_micro-grpc-http-protobuf => 6_micro-cluster/example-2-mono-repo}/third_party/google/api/expr/BUILD.bazel (100%) rename {a_micro-grpc-http-protobuf => 6_micro-cluster/example-2-mono-repo}/third_party/google/api/expr/cel.yaml (100%) rename {a_micro-grpc-http-protobuf => 6_micro-cluster/example-2-mono-repo}/third_party/google/api/expr/v1alpha1/BUILD.bazel (100%) rename {a_micro-grpc-http-protobuf => 6_micro-cluster/example-2-mono-repo}/third_party/google/api/expr/v1alpha1/checked.proto (100%) rename {a_micro-grpc-http-protobuf => 6_micro-cluster/example-2-mono-repo}/third_party/google/api/expr/v1alpha1/conformance_service.proto (100%) rename {a_micro-grpc-http-protobuf => 6_micro-cluster/example-2-mono-repo}/third_party/google/api/expr/v1alpha1/eval.proto (100%) rename {a_micro-grpc-http-protobuf => 6_micro-cluster/example-2-mono-repo}/third_party/google/api/expr/v1alpha1/explain.proto (100%) rename {a_micro-grpc-http-protobuf => 6_micro-cluster/example-2-mono-repo}/third_party/google/api/expr/v1alpha1/syntax.proto (100%) rename {a_micro-grpc-http-protobuf => 6_micro-cluster/example-2-mono-repo}/third_party/google/api/expr/v1alpha1/value.proto (100%) rename {a_micro-grpc-http-protobuf => 6_micro-cluster/example-2-mono-repo}/third_party/google/api/expr/v1beta1/BUILD.bazel (100%) rename {a_micro-grpc-http-protobuf => 6_micro-cluster/example-2-mono-repo}/third_party/google/api/expr/v1beta1/decl.proto (100%) rename {a_micro-grpc-http-protobuf => 6_micro-cluster/example-2-mono-repo}/third_party/google/api/expr/v1beta1/eval.proto (100%) rename {a_micro-grpc-http-protobuf => 6_micro-cluster/example-2-mono-repo}/third_party/google/api/expr/v1beta1/expr.proto (100%) rename {a_micro-grpc-http-protobuf => 6_micro-cluster/example-2-mono-repo}/third_party/google/api/expr/v1beta1/source.proto (100%) rename {a_micro-grpc-http-protobuf => 6_micro-cluster/example-2-mono-repo}/third_party/google/api/expr/v1beta1/value.proto (100%) rename {a_micro-grpc-http-protobuf => 6_micro-cluster/example-2-mono-repo}/third_party/google/api/field_behavior.proto (100%) rename {a_micro-grpc-http-protobuf => 6_micro-cluster/example-2-mono-repo}/third_party/google/api/http.proto (100%) rename {a_micro-grpc-http-protobuf => 6_micro-cluster/example-2-mono-repo}/third_party/google/api/httpbody.proto (100%) rename {a_micro-grpc-http-protobuf => 6_micro-cluster/example-2-mono-repo}/third_party/google/api/label.proto (100%) rename {a_micro-grpc-http-protobuf => 6_micro-cluster/example-2-mono-repo}/third_party/google/api/launch_stage.proto (100%) rename {a_micro-grpc-http-protobuf => 6_micro-cluster/example-2-mono-repo}/third_party/google/api/log.proto (100%) rename {a_micro-grpc-http-protobuf => 6_micro-cluster/example-2-mono-repo}/third_party/google/api/logging.proto (100%) rename {a_micro-grpc-http-protobuf => 6_micro-cluster/example-2-mono-repo}/third_party/google/api/metric.proto (100%) rename {a_micro-grpc-http-protobuf => 6_micro-cluster/example-2-mono-repo}/third_party/google/api/monitored_resource.proto (100%) rename {a_micro-grpc-http-protobuf => 6_micro-cluster/example-2-mono-repo}/third_party/google/api/monitoring.proto (100%) rename 
{a_micro-grpc-http-protobuf => 6_micro-cluster/example-2-mono-repo}/third_party/google/api/quota.proto (100%) rename {a_micro-grpc-http-protobuf => 6_micro-cluster/example-2-mono-repo}/third_party/google/api/resource.proto (100%) rename {a_micro-grpc-http-protobuf => 6_micro-cluster/example-2-mono-repo}/third_party/google/api/service.proto (100%) rename {a_micro-grpc-http-protobuf => 6_micro-cluster/example-2-mono-repo}/third_party/google/api/serviceconfig.yaml (100%) rename {a_micro-grpc-http-protobuf => 6_micro-cluster/example-2-mono-repo}/third_party/google/api/servicecontrol/BUILD.bazel (100%) rename {a_micro-grpc-http-protobuf => 6_micro-cluster/example-2-mono-repo}/third_party/google/api/servicecontrol/README.md (100%) rename {a_micro-grpc-http-protobuf => 6_micro-cluster/example-2-mono-repo}/third_party/google/api/servicecontrol/v1/BUILD.bazel (100%) rename {a_micro-grpc-http-protobuf => 6_micro-cluster/example-2-mono-repo}/third_party/google/api/servicecontrol/v1/check_error.proto (100%) rename {a_micro-grpc-http-protobuf => 6_micro-cluster/example-2-mono-repo}/third_party/google/api/servicecontrol/v1/distribution.proto (100%) rename {a_micro-grpc-http-protobuf => 6_micro-cluster/example-2-mono-repo}/third_party/google/api/servicecontrol/v1/http_request.proto (100%) rename {a_micro-grpc-http-protobuf => 6_micro-cluster/example-2-mono-repo}/third_party/google/api/servicecontrol/v1/log_entry.proto (100%) rename {a_micro-grpc-http-protobuf => 6_micro-cluster/example-2-mono-repo}/third_party/google/api/servicecontrol/v1/metric_value.proto (100%) rename {a_micro-grpc-http-protobuf => 6_micro-cluster/example-2-mono-repo}/third_party/google/api/servicecontrol/v1/operation.proto (100%) rename {a_micro-grpc-http-protobuf => 6_micro-cluster/example-2-mono-repo}/third_party/google/api/servicecontrol/v1/quota_controller.proto (100%) rename {a_micro-grpc-http-protobuf => 6_micro-cluster/example-2-mono-repo}/third_party/google/api/servicecontrol/v1/service_controller.proto (100%) rename {a_micro-grpc-http-protobuf => 6_micro-cluster/example-2-mono-repo}/third_party/google/api/servicecontrol/v1/servicecontrol.yaml (100%) rename {a_micro-grpc-http-protobuf => 6_micro-cluster/example-2-mono-repo}/third_party/google/api/servicemanagement/BUILD.bazel (100%) rename {a_micro-grpc-http-protobuf => 6_micro-cluster/example-2-mono-repo}/third_party/google/api/servicemanagement/README.md (100%) rename {a_micro-grpc-http-protobuf => 6_micro-cluster/example-2-mono-repo}/third_party/google/api/servicemanagement/v1/BUILD.bazel (100%) rename {a_micro-grpc-http-protobuf => 6_micro-cluster/example-2-mono-repo}/third_party/google/api/servicemanagement/v1/resources.proto (100%) rename {a_micro-grpc-http-protobuf => 6_micro-cluster/example-2-mono-repo}/third_party/google/api/servicemanagement/v1/servicemanagement_gapic.legacy.yaml (100%) rename {a_micro-grpc-http-protobuf => 6_micro-cluster/example-2-mono-repo}/third_party/google/api/servicemanagement/v1/servicemanagement_gapic.yaml (100%) rename {a_micro-grpc-http-protobuf => 6_micro-cluster/example-2-mono-repo}/third_party/google/api/servicemanagement/v1/servicemanagement_grpc_service_config.json (100%) rename {a_micro-grpc-http-protobuf => 6_micro-cluster/example-2-mono-repo}/third_party/google/api/servicemanagement/v1/servicemanagement_v1.yaml (100%) rename {a_micro-grpc-http-protobuf => 6_micro-cluster/example-2-mono-repo}/third_party/google/api/servicemanagement/v1/servicemanager.proto (100%) rename {a_micro-grpc-http-protobuf => 
6_micro-cluster/example-2-mono-repo}/third_party/google/api/source_info.proto (100%) rename {a_micro-grpc-http-protobuf => 6_micro-cluster/example-2-mono-repo}/third_party/google/api/system_parameter.proto (100%) rename {a_micro-grpc-http-protobuf => 6_micro-cluster/example-2-mono-repo}/third_party/google/api/usage.proto (100%) rename {a_micro-grpc-http-protobuf => 6_micro-cluster/example-2-mono-repo}/third_party/google/protobuf/annotations.proto (100%) rename {a_micro-grpc-http-protobuf => 6_micro-cluster/example-2-mono-repo}/third_party/google/protobuf/any.proto (100%) rename {a_micro-grpc-http-protobuf => 6_micro-cluster/example-2-mono-repo}/third_party/google/protobuf/api.proto (100%) rename {a_micro-grpc-http-protobuf => 6_micro-cluster/example-2-mono-repo}/third_party/google/protobuf/compiler/plugin.proto (100%) rename {a_micro-grpc-http-protobuf => 6_micro-cluster/example-2-mono-repo}/third_party/google/protobuf/descriptor.proto (100%) rename {a_micro-grpc-http-protobuf => 6_micro-cluster/example-2-mono-repo}/third_party/google/protobuf/duration.proto (100%) rename {a_micro-grpc-http-protobuf => 6_micro-cluster/example-2-mono-repo}/third_party/google/protobuf/empty.proto (100%) rename {a_micro-grpc-http-protobuf => 6_micro-cluster/example-2-mono-repo}/third_party/google/protobuf/field_mask.proto (100%) rename {a_micro-grpc-http-protobuf => 6_micro-cluster/example-2-mono-repo}/third_party/google/protobuf/source_context.proto (100%) rename {a_micro-grpc-http-protobuf => 6_micro-cluster/example-2-mono-repo}/third_party/google/protobuf/struct.proto (100%) rename {a_micro-grpc-http-protobuf => 6_micro-cluster/example-2-mono-repo}/third_party/google/protobuf/timestamp.proto (100%) rename {a_micro-grpc-http-protobuf => 6_micro-cluster/example-2-mono-repo}/third_party/google/protobuf/type.proto (100%) rename {a_micro-grpc-http-protobuf => 6_micro-cluster/example-2-mono-repo}/third_party/google/protobuf/wrappers.proto (100%) rename {a_micro-grpc-http-protobuf => 6_micro-cluster/example-2-mono-repo}/third_party/protoc-gen-openapiv2/options/annotations.proto (100%) rename {a_micro-grpc-http-protobuf => 6_micro-cluster/example-2-mono-repo}/third_party/protoc-gen-openapiv2/options/openapiv2.proto (100%) rename {a_micro-grpc-http-protobuf => 6_micro-cluster/example-2-mono-repo}/third_party/tagger/tagger.proto (100%) rename {a_micro-grpc-http-protobuf => 6_micro-cluster/example-2-mono-repo}/third_party/validate/validate.proto (100%) create mode 100644 _13_sponge-dtm-cache/README.md rename {a_micro-grpc-http-protobuf => _13_sponge-dtm-cache/grpc+http}/.gitignore (95%) create mode 100644 _13_sponge-dtm-cache/grpc+http/.golangci.yml create mode 100644 _13_sponge-dtm-cache/grpc+http/Jenkinsfile rename {b_sponge-dtm-msg => _13_sponge-dtm-cache/grpc+http}/Makefile (62%) create mode 100644 _13_sponge-dtm-cache/grpc+http/README.md create mode 100644 _13_sponge-dtm-cache/grpc+http/api/stock/v1/atomic.pb.go create mode 100644 _13_sponge-dtm-cache/grpc+http/api/stock/v1/atomic.pb.validate.go create mode 100644 _13_sponge-dtm-cache/grpc+http/api/stock/v1/atomic.proto create mode 100644 _13_sponge-dtm-cache/grpc+http/api/stock/v1/atomic_grpc.pb.go create mode 100644 _13_sponge-dtm-cache/grpc+http/api/stock/v1/atomic_router.pb.go create mode 100644 _13_sponge-dtm-cache/grpc+http/api/stock/v1/callback.pb.go create mode 100644 _13_sponge-dtm-cache/grpc+http/api/stock/v1/callback.pb.validate.go create mode 100644 _13_sponge-dtm-cache/grpc+http/api/stock/v1/callback.proto create mode 100644 
_13_sponge-dtm-cache/grpc+http/api/stock/v1/callback_grpc.pb.go create mode 100644 _13_sponge-dtm-cache/grpc+http/api/stock/v1/callback_router.pb.go create mode 100644 _13_sponge-dtm-cache/grpc+http/api/stock/v1/downgrade.pb.go create mode 100644 _13_sponge-dtm-cache/grpc+http/api/stock/v1/downgrade.pb.validate.go create mode 100644 _13_sponge-dtm-cache/grpc+http/api/stock/v1/downgrade.proto create mode 100644 _13_sponge-dtm-cache/grpc+http/api/stock/v1/downgrade_grpc.pb.go create mode 100644 _13_sponge-dtm-cache/grpc+http/api/stock/v1/downgrade_router.pb.go create mode 100644 _13_sponge-dtm-cache/grpc+http/api/stock/v1/final.pb.go create mode 100644 _13_sponge-dtm-cache/grpc+http/api/stock/v1/final.pb.validate.go create mode 100644 _13_sponge-dtm-cache/grpc+http/api/stock/v1/final.proto create mode 100644 _13_sponge-dtm-cache/grpc+http/api/stock/v1/final_grpc.pb.go create mode 100644 _13_sponge-dtm-cache/grpc+http/api/stock/v1/final_router.pb.go create mode 100644 _13_sponge-dtm-cache/grpc+http/api/stock/v1/stock.pb.go create mode 100644 _13_sponge-dtm-cache/grpc+http/api/stock/v1/stock.pb.validate.go create mode 100644 _13_sponge-dtm-cache/grpc+http/api/stock/v1/stock.proto create mode 100644 _13_sponge-dtm-cache/grpc+http/api/stock/v1/stock_grpc.pb.go create mode 100644 _13_sponge-dtm-cache/grpc+http/api/stock/v1/stock_router.pb.go create mode 100644 _13_sponge-dtm-cache/grpc+http/api/stock/v1/strong.pb.go create mode 100644 _13_sponge-dtm-cache/grpc+http/api/stock/v1/strong.pb.validate.go create mode 100644 _13_sponge-dtm-cache/grpc+http/api/stock/v1/strong.proto create mode 100644 _13_sponge-dtm-cache/grpc+http/api/stock/v1/strong_grpc.pb.go create mode 100644 _13_sponge-dtm-cache/grpc+http/api/stock/v1/strong_router.pb.go create mode 100644 _13_sponge-dtm-cache/grpc+http/api/types/types.pb.go create mode 100644 _13_sponge-dtm-cache/grpc+http/api/types/types.pb.validate.go create mode 100644 _13_sponge-dtm-cache/grpc+http/api/types/types.proto create mode 100644 _13_sponge-dtm-cache/grpc+http/cmd/stock/initial/close.go rename {a_micro-grpc-http-protobuf/cmd/user => _13_sponge-dtm-cache/grpc+http/cmd/stock}/initial/createService.go (98%) create mode 100644 _13_sponge-dtm-cache/grpc+http/cmd/stock/initial/initApp.go rename {a_micro-grpc-http-protobuf/cmd/user => _13_sponge-dtm-cache/grpc+http/cmd/stock}/main.go (91%) create mode 100644 _13_sponge-dtm-cache/grpc+http/configs/location.go create mode 100644 _13_sponge-dtm-cache/grpc+http/configs/stock.yml rename a_micro-grpc-http-protobuf/configs/user_cc.yml => _13_sponge-dtm-cache/grpc+http/configs/stock_cc.yml (92%) rename {a_micro-grpc-http-protobuf => _13_sponge-dtm-cache/grpc+http}/deployments/binary/README.md (87%) rename {a_micro-grpc-http-protobuf => _13_sponge-dtm-cache/grpc+http}/deployments/binary/deploy.sh (97%) rename {a_micro-grpc-http-protobuf => _13_sponge-dtm-cache/grpc+http}/deployments/binary/run.sh (97%) rename {a_micro-grpc-http-protobuf => _13_sponge-dtm-cache/grpc+http}/deployments/docker-compose/README.md (84%) rename {a_micro-grpc-http-protobuf => _13_sponge-dtm-cache/grpc+http}/deployments/docker-compose/docker-compose.yml (82%) rename {a_micro-grpc-http-protobuf => _13_sponge-dtm-cache/grpc+http}/deployments/kubernetes/README.md (84%) create mode 100644 _13_sponge-dtm-cache/grpc+http/deployments/kubernetes/eshop-namespace.yml rename a_micro-grpc-http-protobuf/deployments/kubernetes/user-configmap.yml => _13_sponge-dtm-cache/grpc+http/deployments/kubernetes/stock-configmap.yml (98%) rename 
a_micro-grpc-http-protobuf/deployments/kubernetes/user-deployment.yml => _13_sponge-dtm-cache/grpc+http/deployments/kubernetes/stock-deployment.yml (82%) rename a_micro-grpc-http-protobuf/deployments/kubernetes/user-svc.yml => _13_sponge-dtm-cache/grpc+http/deployments/kubernetes/stock-svc.yml (57%) create mode 100644 _13_sponge-dtm-cache/grpc+http/docs/apis.go create mode 100644 _13_sponge-dtm-cache/grpc+http/docs/apis.swagger.json create mode 100644 _13_sponge-dtm-cache/grpc+http/docs/gen.info rename {b_sponge-dtm-msg => _13_sponge-dtm-cache/grpc+http}/go.mod (69%) rename {b_sponge-dtm-msg => _13_sponge-dtm-cache/grpc+http}/go.sum (83%) create mode 100644 _13_sponge-dtm-cache/grpc+http/internal/cache/stock.go create mode 100644 _13_sponge-dtm-cache/grpc+http/internal/cache/stock_test.go create mode 100644 _13_sponge-dtm-cache/grpc+http/internal/config/stock.go create mode 100644 _13_sponge-dtm-cache/grpc+http/internal/config/stock_cc.go rename a_micro-grpc-http-protobuf/internal/config/user_test.go => _13_sponge-dtm-cache/grpc+http/internal/config/stock_test.go (83%) create mode 100644 _13_sponge-dtm-cache/grpc+http/internal/dao/stock.go create mode 100644 _13_sponge-dtm-cache/grpc+http/internal/dao/stock_test.go create mode 100644 _13_sponge-dtm-cache/grpc+http/internal/ecode/atomic_rpc.go create mode 100644 _13_sponge-dtm-cache/grpc+http/internal/ecode/callback_rpc.go create mode 100644 _13_sponge-dtm-cache/grpc+http/internal/ecode/downgrade_rpc.go create mode 100644 _13_sponge-dtm-cache/grpc+http/internal/ecode/final_rpc.go create mode 100644 _13_sponge-dtm-cache/grpc+http/internal/ecode/stock_rpc.go create mode 100644 _13_sponge-dtm-cache/grpc+http/internal/ecode/strong_rpc.go rename {a_micro-grpc-http-protobuf => _13_sponge-dtm-cache/grpc+http}/internal/ecode/systemCode_http.go (90%) rename {b_sponge-dtm-msg => _13_sponge-dtm-cache/grpc+http}/internal/ecode/systemCode_rpc.go (91%) create mode 100644 _13_sponge-dtm-cache/grpc+http/internal/handler/atomic.go create mode 100644 _13_sponge-dtm-cache/grpc+http/internal/handler/callback.go create mode 100644 _13_sponge-dtm-cache/grpc+http/internal/handler/downgrade.go create mode 100644 _13_sponge-dtm-cache/grpc+http/internal/handler/final.go create mode 100644 _13_sponge-dtm-cache/grpc+http/internal/handler/stock.go create mode 100644 _13_sponge-dtm-cache/grpc+http/internal/handler/strong.go create mode 100644 _13_sponge-dtm-cache/grpc+http/internal/model/init.go create mode 100644 _13_sponge-dtm-cache/grpc+http/internal/model/stock.go create mode 100644 _13_sponge-dtm-cache/grpc+http/internal/routers/atomic_router.go create mode 100644 _13_sponge-dtm-cache/grpc+http/internal/routers/callback_router.go create mode 100644 _13_sponge-dtm-cache/grpc+http/internal/routers/downgrade_router.go rename a_micro-grpc-http-protobuf/internal/routers/user_router.go => _13_sponge-dtm-cache/grpc+http/internal/routers/final_router.go (66%) rename {a_micro-grpc-http-protobuf => _13_sponge-dtm-cache/grpc+http}/internal/routers/routers.go (99%) create mode 100644 _13_sponge-dtm-cache/grpc+http/internal/routers/stock_router.go create mode 100644 _13_sponge-dtm-cache/grpc+http/internal/routers/strong_router.go rename {b_sponge-dtm-msg => _13_sponge-dtm-cache/grpc+http}/internal/rpcclient/dtmservice.go (95%) create mode 100644 _13_sponge-dtm-cache/grpc+http/internal/rpcclient/endpointForDtm.go rename {a_micro-grpc-http-protobuf => _13_sponge-dtm-cache/grpc+http}/internal/server/grpc.go (99%) create mode 100644 
_13_sponge-dtm-cache/grpc+http/internal/server/grpc_option.go rename {a_micro-grpc-http-protobuf => _13_sponge-dtm-cache/grpc+http}/internal/server/http.go (98%) create mode 100644 _13_sponge-dtm-cache/grpc+http/internal/server/http_option.go create mode 100644 _13_sponge-dtm-cache/grpc+http/internal/service/atomic.go create mode 100644 _13_sponge-dtm-cache/grpc+http/internal/service/atomic_client_test.go create mode 100644 _13_sponge-dtm-cache/grpc+http/internal/service/callback.go create mode 100644 _13_sponge-dtm-cache/grpc+http/internal/service/callback_client_test.go create mode 100644 _13_sponge-dtm-cache/grpc+http/internal/service/downgrade.go rename a_micro-grpc-http-protobuf/internal/service/user_client_test.go => _13_sponge-dtm-cache/grpc+http/internal/service/downgrade_client_test.go (52%) create mode 100644 _13_sponge-dtm-cache/grpc+http/internal/service/final.go create mode 100644 _13_sponge-dtm-cache/grpc+http/internal/service/final_client_test.go create mode 100644 _13_sponge-dtm-cache/grpc+http/internal/service/service.go rename {a_micro-grpc-http-protobuf => _13_sponge-dtm-cache/grpc+http}/internal/service/service_test.go (76%) create mode 100644 _13_sponge-dtm-cache/grpc+http/internal/service/stock.go create mode 100644 _13_sponge-dtm-cache/grpc+http/internal/service/stock_client_test.go create mode 100644 _13_sponge-dtm-cache/grpc+http/internal/service/strong.go create mode 100644 _13_sponge-dtm-cache/grpc+http/internal/service/strong_client_test.go create mode 100644 _13_sponge-dtm-cache/grpc+http/pkg/goredis/goredis.go rename {a_micro-grpc-http-protobuf => _13_sponge-dtm-cache/grpc+http}/scripts/binary-package.sh (97%) rename {a_micro-grpc-http-protobuf => _13_sponge-dtm-cache/grpc+http}/scripts/build/Dockerfile (65%) rename {a_micro-grpc-http-protobuf => _13_sponge-dtm-cache/grpc+http}/scripts/build/Dockerfile_build (63%) rename {a_micro-grpc-http-protobuf => _13_sponge-dtm-cache/grpc+http}/scripts/build/Dockerfile_test (58%) create mode 100644 _13_sponge-dtm-cache/grpc+http/scripts/build/README.md rename {a_micro-grpc-http-protobuf => _13_sponge-dtm-cache/grpc+http}/scripts/deploy-binary.sh (96%) rename {a_micro-grpc-http-protobuf => _13_sponge-dtm-cache/grpc+http}/scripts/deploy-docker.sh (84%) rename {a_micro-grpc-http-protobuf => _13_sponge-dtm-cache/grpc+http}/scripts/deploy-k8s.sh (96%) rename {a_micro-grpc-http-protobuf => _13_sponge-dtm-cache/grpc+http}/scripts/image-build-local.sh (97%) rename {a_micro-grpc-http-protobuf => _13_sponge-dtm-cache/grpc+http}/scripts/image-build.sh (98%) rename {a_micro-grpc-http-protobuf => _13_sponge-dtm-cache/grpc+http}/scripts/image-build2.sh (96%) rename {a_micro-grpc-http-protobuf => _13_sponge-dtm-cache/grpc+http}/scripts/image-push.sh (98%) rename {a_micro-grpc-http-protobuf => _13_sponge-dtm-cache/grpc+http}/scripts/image-rpc-test.sh (95%) rename {a_micro-grpc-http-protobuf => _13_sponge-dtm-cache/grpc+http}/scripts/patch.sh (100%) rename {a_micro-grpc-http-protobuf => _13_sponge-dtm-cache/grpc+http}/scripts/proto-doc.sh (100%) rename {a_micro-grpc-http-protobuf => _13_sponge-dtm-cache/grpc+http}/scripts/protoc.sh (97%) rename {a_micro-grpc-http-protobuf => _13_sponge-dtm-cache/grpc+http}/scripts/run-nohup.sh (96%) rename {a_micro-grpc-http-protobuf => _13_sponge-dtm-cache/grpc+http}/scripts/run.sh (94%) rename {a_micro-grpc-http-protobuf => _13_sponge-dtm-cache/grpc+http}/scripts/swag-docs.sh (80%) rename {b_sponge-dtm-msg => _13_sponge-dtm-cache/grpc+http}/third_party/gogo/protobuf/gogoproto/gogo.proto (100%) rename 
{b_sponge-dtm-msg => _13_sponge-dtm-cache/grpc+http}/third_party/google/api/BUILD.bazel (100%) rename {b_sponge-dtm-msg => _13_sponge-dtm-cache/grpc+http}/third_party/google/api/README.md (100%) rename {b_sponge-dtm-msg => _13_sponge-dtm-cache/grpc+http}/third_party/google/api/annotations.proto (100%) rename {b_sponge-dtm-msg => _13_sponge-dtm-cache/grpc+http}/third_party/google/api/auth.proto (100%) rename {b_sponge-dtm-msg => _13_sponge-dtm-cache/grpc+http}/third_party/google/api/backend.proto (100%) rename {b_sponge-dtm-msg => _13_sponge-dtm-cache/grpc+http}/third_party/google/api/billing.proto (100%) rename {b_sponge-dtm-msg => _13_sponge-dtm-cache/grpc+http}/third_party/google/api/client.proto (100%) rename {b_sponge-dtm-msg => _13_sponge-dtm-cache/grpc+http}/third_party/google/api/config_change.proto (100%) rename {b_sponge-dtm-msg => _13_sponge-dtm-cache/grpc+http}/third_party/google/api/consumer.proto (100%) rename {b_sponge-dtm-msg => _13_sponge-dtm-cache/grpc+http}/third_party/google/api/context.proto (100%) rename {b_sponge-dtm-msg => _13_sponge-dtm-cache/grpc+http}/third_party/google/api/control.proto (100%) rename {b_sponge-dtm-msg => _13_sponge-dtm-cache/grpc+http}/third_party/google/api/distribution.proto (100%) rename {b_sponge-dtm-msg => _13_sponge-dtm-cache/grpc+http}/third_party/google/api/documentation.proto (100%) rename {b_sponge-dtm-msg => _13_sponge-dtm-cache/grpc+http}/third_party/google/api/endpoint.proto (100%) rename {b_sponge-dtm-msg => _13_sponge-dtm-cache/grpc+http}/third_party/google/api/expr/BUILD.bazel (100%) rename {b_sponge-dtm-msg => _13_sponge-dtm-cache/grpc+http}/third_party/google/api/expr/cel.yaml (100%) rename {b_sponge-dtm-msg => _13_sponge-dtm-cache/grpc+http}/third_party/google/api/expr/v1alpha1/BUILD.bazel (100%) rename {b_sponge-dtm-msg => _13_sponge-dtm-cache/grpc+http}/third_party/google/api/expr/v1alpha1/checked.proto (100%) rename {b_sponge-dtm-msg => _13_sponge-dtm-cache/grpc+http}/third_party/google/api/expr/v1alpha1/conformance_service.proto (100%) rename {b_sponge-dtm-msg => _13_sponge-dtm-cache/grpc+http}/third_party/google/api/expr/v1alpha1/eval.proto (100%) rename {b_sponge-dtm-msg => _13_sponge-dtm-cache/grpc+http}/third_party/google/api/expr/v1alpha1/explain.proto (100%) rename {b_sponge-dtm-msg => _13_sponge-dtm-cache/grpc+http}/third_party/google/api/expr/v1alpha1/syntax.proto (100%) rename {b_sponge-dtm-msg => _13_sponge-dtm-cache/grpc+http}/third_party/google/api/expr/v1alpha1/value.proto (100%) rename {b_sponge-dtm-msg => _13_sponge-dtm-cache/grpc+http}/third_party/google/api/expr/v1beta1/BUILD.bazel (100%) rename {b_sponge-dtm-msg => _13_sponge-dtm-cache/grpc+http}/third_party/google/api/expr/v1beta1/decl.proto (100%) rename {b_sponge-dtm-msg => _13_sponge-dtm-cache/grpc+http}/third_party/google/api/expr/v1beta1/eval.proto (100%) rename {b_sponge-dtm-msg => _13_sponge-dtm-cache/grpc+http}/third_party/google/api/expr/v1beta1/expr.proto (100%) rename {b_sponge-dtm-msg => _13_sponge-dtm-cache/grpc+http}/third_party/google/api/expr/v1beta1/source.proto (100%) rename {b_sponge-dtm-msg => _13_sponge-dtm-cache/grpc+http}/third_party/google/api/expr/v1beta1/value.proto (100%) rename {b_sponge-dtm-msg => _13_sponge-dtm-cache/grpc+http}/third_party/google/api/field_behavior.proto (100%) rename {b_sponge-dtm-msg => _13_sponge-dtm-cache/grpc+http}/third_party/google/api/http.proto (100%) rename {b_sponge-dtm-msg => _13_sponge-dtm-cache/grpc+http}/third_party/google/api/httpbody.proto (100%) rename {b_sponge-dtm-msg => 
_13_sponge-dtm-cache/grpc+http}/third_party/google/api/label.proto (100%) rename {b_sponge-dtm-msg => _13_sponge-dtm-cache/grpc+http}/third_party/google/api/launch_stage.proto (100%) rename {b_sponge-dtm-msg => _13_sponge-dtm-cache/grpc+http}/third_party/google/api/log.proto (100%) rename {b_sponge-dtm-msg => _13_sponge-dtm-cache/grpc+http}/third_party/google/api/logging.proto (100%) rename {b_sponge-dtm-msg => _13_sponge-dtm-cache/grpc+http}/third_party/google/api/metric.proto (100%) rename {b_sponge-dtm-msg => _13_sponge-dtm-cache/grpc+http}/third_party/google/api/monitored_resource.proto (100%) rename {b_sponge-dtm-msg => _13_sponge-dtm-cache/grpc+http}/third_party/google/api/monitoring.proto (100%) rename {b_sponge-dtm-msg => _13_sponge-dtm-cache/grpc+http}/third_party/google/api/quota.proto (100%) rename {b_sponge-dtm-msg => _13_sponge-dtm-cache/grpc+http}/third_party/google/api/resource.proto (100%) rename {b_sponge-dtm-msg => _13_sponge-dtm-cache/grpc+http}/third_party/google/api/service.proto (100%) rename {b_sponge-dtm-msg => _13_sponge-dtm-cache/grpc+http}/third_party/google/api/serviceconfig.yaml (100%) rename {b_sponge-dtm-msg => _13_sponge-dtm-cache/grpc+http}/third_party/google/api/servicecontrol/BUILD.bazel (100%) rename {b_sponge-dtm-msg => _13_sponge-dtm-cache/grpc+http}/third_party/google/api/servicecontrol/README.md (100%) rename {b_sponge-dtm-msg => _13_sponge-dtm-cache/grpc+http}/third_party/google/api/servicecontrol/v1/BUILD.bazel (100%) rename {b_sponge-dtm-msg => _13_sponge-dtm-cache/grpc+http}/third_party/google/api/servicecontrol/v1/check_error.proto (100%) rename {b_sponge-dtm-msg => _13_sponge-dtm-cache/grpc+http}/third_party/google/api/servicecontrol/v1/distribution.proto (100%) rename {b_sponge-dtm-msg => _13_sponge-dtm-cache/grpc+http}/third_party/google/api/servicecontrol/v1/http_request.proto (100%) rename {b_sponge-dtm-msg => _13_sponge-dtm-cache/grpc+http}/third_party/google/api/servicecontrol/v1/log_entry.proto (100%) rename {b_sponge-dtm-msg => _13_sponge-dtm-cache/grpc+http}/third_party/google/api/servicecontrol/v1/metric_value.proto (100%) rename {b_sponge-dtm-msg => _13_sponge-dtm-cache/grpc+http}/third_party/google/api/servicecontrol/v1/operation.proto (100%) rename {b_sponge-dtm-msg => _13_sponge-dtm-cache/grpc+http}/third_party/google/api/servicecontrol/v1/quota_controller.proto (100%) rename {b_sponge-dtm-msg => _13_sponge-dtm-cache/grpc+http}/third_party/google/api/servicecontrol/v1/service_controller.proto (100%) rename {b_sponge-dtm-msg => _13_sponge-dtm-cache/grpc+http}/third_party/google/api/servicecontrol/v1/servicecontrol.yaml (100%) rename {b_sponge-dtm-msg => _13_sponge-dtm-cache/grpc+http}/third_party/google/api/servicemanagement/BUILD.bazel (100%) rename {b_sponge-dtm-msg => _13_sponge-dtm-cache/grpc+http}/third_party/google/api/servicemanagement/README.md (100%) rename {b_sponge-dtm-msg => _13_sponge-dtm-cache/grpc+http}/third_party/google/api/servicemanagement/v1/BUILD.bazel (100%) rename {b_sponge-dtm-msg => _13_sponge-dtm-cache/grpc+http}/third_party/google/api/servicemanagement/v1/resources.proto (100%) rename {b_sponge-dtm-msg => _13_sponge-dtm-cache/grpc+http}/third_party/google/api/servicemanagement/v1/servicemanagement_gapic.legacy.yaml (100%) rename {b_sponge-dtm-msg => _13_sponge-dtm-cache/grpc+http}/third_party/google/api/servicemanagement/v1/servicemanagement_gapic.yaml (100%) rename {b_sponge-dtm-msg => _13_sponge-dtm-cache/grpc+http}/third_party/google/api/servicemanagement/v1/servicemanagement_grpc_service_config.json 
(100%) rename {b_sponge-dtm-msg => _13_sponge-dtm-cache/grpc+http}/third_party/google/api/servicemanagement/v1/servicemanagement_v1.yaml (100%) rename {b_sponge-dtm-msg => _13_sponge-dtm-cache/grpc+http}/third_party/google/api/servicemanagement/v1/servicemanager.proto (100%) rename {b_sponge-dtm-msg => _13_sponge-dtm-cache/grpc+http}/third_party/google/api/source_info.proto (100%) rename {b_sponge-dtm-msg => _13_sponge-dtm-cache/grpc+http}/third_party/google/api/system_parameter.proto (100%) rename {b_sponge-dtm-msg => _13_sponge-dtm-cache/grpc+http}/third_party/google/api/usage.proto (100%) rename {b_sponge-dtm-msg => _13_sponge-dtm-cache/grpc+http}/third_party/google/protobuf/annotations.proto (100%) rename {b_sponge-dtm-msg => _13_sponge-dtm-cache/grpc+http}/third_party/google/protobuf/any.proto (100%) rename {b_sponge-dtm-msg => _13_sponge-dtm-cache/grpc+http}/third_party/google/protobuf/api.proto (100%) rename {b_sponge-dtm-msg => _13_sponge-dtm-cache/grpc+http}/third_party/google/protobuf/compiler/plugin.proto (100%) rename {b_sponge-dtm-msg => _13_sponge-dtm-cache/grpc+http}/third_party/google/protobuf/descriptor.proto (100%) rename {b_sponge-dtm-msg => _13_sponge-dtm-cache/grpc+http}/third_party/google/protobuf/duration.proto (100%) rename {b_sponge-dtm-msg => _13_sponge-dtm-cache/grpc+http}/third_party/google/protobuf/empty.proto (100%) rename {b_sponge-dtm-msg => _13_sponge-dtm-cache/grpc+http}/third_party/google/protobuf/field_mask.proto (100%) rename {b_sponge-dtm-msg => _13_sponge-dtm-cache/grpc+http}/third_party/google/protobuf/source_context.proto (100%) rename {b_sponge-dtm-msg => _13_sponge-dtm-cache/grpc+http}/third_party/google/protobuf/struct.proto (100%) rename {b_sponge-dtm-msg => _13_sponge-dtm-cache/grpc+http}/third_party/google/protobuf/timestamp.proto (100%) rename {b_sponge-dtm-msg => _13_sponge-dtm-cache/grpc+http}/third_party/google/protobuf/type.proto (100%) rename {b_sponge-dtm-msg => _13_sponge-dtm-cache/grpc+http}/third_party/google/protobuf/wrappers.proto (100%) rename {b_sponge-dtm-msg => _13_sponge-dtm-cache/grpc+http}/third_party/protoc-gen-openapiv2/options/annotations.proto (100%) rename {b_sponge-dtm-msg => _13_sponge-dtm-cache/grpc+http}/third_party/protoc-gen-openapiv2/options/openapiv2.proto (86%) rename {b_sponge-dtm-msg => _13_sponge-dtm-cache/grpc+http}/third_party/tagger/tagger.proto (100%) rename {b_sponge-dtm-msg => _13_sponge-dtm-cache/grpc+http}/third_party/validate/validate.proto (100%) create mode 100644 _13_sponge-dtm-cache/http/.gitignore create mode 100644 _13_sponge-dtm-cache/http/.golangci.yml create mode 100644 _13_sponge-dtm-cache/http/Jenkinsfile rename {a_micro-grpc-http-protobuf => _13_sponge-dtm-cache/http}/Makefile (51%) create mode 100644 _13_sponge-dtm-cache/http/README.md create mode 100644 _13_sponge-dtm-cache/http/api/stock/v1/atomic.pb.go create mode 100644 _13_sponge-dtm-cache/http/api/stock/v1/atomic.pb.validate.go create mode 100644 _13_sponge-dtm-cache/http/api/stock/v1/atomic.proto create mode 100644 _13_sponge-dtm-cache/http/api/stock/v1/atomic_router.pb.go create mode 100644 _13_sponge-dtm-cache/http/api/stock/v1/callback.pb.go create mode 100644 _13_sponge-dtm-cache/http/api/stock/v1/callback.pb.validate.go create mode 100644 _13_sponge-dtm-cache/http/api/stock/v1/callback.proto create mode 100644 _13_sponge-dtm-cache/http/api/stock/v1/callback_router.pb.go create mode 100644 _13_sponge-dtm-cache/http/api/stock/v1/downgrade.pb.go create mode 100644 _13_sponge-dtm-cache/http/api/stock/v1/downgrade.pb.validate.go 
create mode 100644 _13_sponge-dtm-cache/http/api/stock/v1/downgrade.proto create mode 100644 _13_sponge-dtm-cache/http/api/stock/v1/downgrade_router.pb.go create mode 100644 _13_sponge-dtm-cache/http/api/stock/v1/final.pb.go create mode 100644 _13_sponge-dtm-cache/http/api/stock/v1/final.pb.validate.go create mode 100644 _13_sponge-dtm-cache/http/api/stock/v1/final.proto create mode 100644 _13_sponge-dtm-cache/http/api/stock/v1/final_router.pb.go create mode 100644 _13_sponge-dtm-cache/http/api/stock/v1/stock.pb.go create mode 100644 _13_sponge-dtm-cache/http/api/stock/v1/stock.pb.validate.go create mode 100644 _13_sponge-dtm-cache/http/api/stock/v1/stock.proto create mode 100644 _13_sponge-dtm-cache/http/api/stock/v1/stock_router.pb.go create mode 100644 _13_sponge-dtm-cache/http/api/stock/v1/strong.pb.go create mode 100644 _13_sponge-dtm-cache/http/api/stock/v1/strong.pb.validate.go create mode 100644 _13_sponge-dtm-cache/http/api/stock/v1/strong.proto create mode 100644 _13_sponge-dtm-cache/http/api/stock/v1/strong_router.pb.go create mode 100644 _13_sponge-dtm-cache/http/api/types/types.pb.go create mode 100644 _13_sponge-dtm-cache/http/api/types/types.pb.validate.go create mode 100644 _13_sponge-dtm-cache/http/api/types/types.proto rename {a_micro-grpc-http-protobuf/cmd/user => _13_sponge-dtm-cache/http/cmd/stock}/initial/close.go (94%) create mode 100644 _13_sponge-dtm-cache/http/cmd/stock/initial/createService.go rename {a_micro-grpc-http-protobuf/cmd/user => _13_sponge-dtm-cache/http/cmd/stock}/initial/initApp.go (87%) create mode 100644 _13_sponge-dtm-cache/http/cmd/stock/main.go create mode 100644 _13_sponge-dtm-cache/http/configs/location.go create mode 100644 _13_sponge-dtm-cache/http/configs/stock.yml create mode 100644 _13_sponge-dtm-cache/http/configs/stock_cc.yml create mode 100644 _13_sponge-dtm-cache/http/deployments/binary/README.md create mode 100644 _13_sponge-dtm-cache/http/deployments/binary/deploy.sh create mode 100644 _13_sponge-dtm-cache/http/deployments/binary/run.sh create mode 100644 _13_sponge-dtm-cache/http/deployments/docker-compose/README.md create mode 100644 _13_sponge-dtm-cache/http/deployments/docker-compose/docker-compose.yml create mode 100644 _13_sponge-dtm-cache/http/deployments/kubernetes/README.md create mode 100644 _13_sponge-dtm-cache/http/deployments/kubernetes/eshop-namespace.yml create mode 100644 _13_sponge-dtm-cache/http/deployments/kubernetes/stock-configmap.yml create mode 100644 _13_sponge-dtm-cache/http/deployments/kubernetes/stock-deployment.yml create mode 100644 _13_sponge-dtm-cache/http/deployments/kubernetes/stock-svc.yml create mode 100644 _13_sponge-dtm-cache/http/docs/apis.go create mode 100644 _13_sponge-dtm-cache/http/docs/apis.swagger.json create mode 100644 _13_sponge-dtm-cache/http/docs/gen.info create mode 100644 _13_sponge-dtm-cache/http/go.mod create mode 100644 _13_sponge-dtm-cache/http/go.sum create mode 100644 _13_sponge-dtm-cache/http/internal/cache/stock.go create mode 100644 _13_sponge-dtm-cache/http/internal/cache/stock_test.go rename b_sponge-dtm-msg/internal/config/transfer.go => _13_sponge-dtm-cache/http/internal/config/stock.go (54%) create mode 100644 _13_sponge-dtm-cache/http/internal/config/stock_cc.go rename b_sponge-dtm-msg/internal/config/transfer_test.go => _13_sponge-dtm-cache/http/internal/config/stock_test.go (59%) create mode 100644 _13_sponge-dtm-cache/http/internal/dao/stock.go create mode 100644 _13_sponge-dtm-cache/http/internal/dao/stock_test.go create mode 100644 
_13_sponge-dtm-cache/http/internal/ecode/atomic_http.go create mode 100644 _13_sponge-dtm-cache/http/internal/ecode/callback_http.go create mode 100644 _13_sponge-dtm-cache/http/internal/ecode/downgrade_http.go create mode 100644 _13_sponge-dtm-cache/http/internal/ecode/final_http.go create mode 100644 _13_sponge-dtm-cache/http/internal/ecode/stock_http.go create mode 100644 _13_sponge-dtm-cache/http/internal/ecode/strong_http.go create mode 100644 _13_sponge-dtm-cache/http/internal/ecode/systemCode_http.go create mode 100644 _13_sponge-dtm-cache/http/internal/handler/atomic.go create mode 100644 _13_sponge-dtm-cache/http/internal/handler/callback.go create mode 100644 _13_sponge-dtm-cache/http/internal/handler/downgrade.go create mode 100644 _13_sponge-dtm-cache/http/internal/handler/final.go create mode 100644 _13_sponge-dtm-cache/http/internal/handler/stock.go create mode 100644 _13_sponge-dtm-cache/http/internal/handler/stock_test.go create mode 100644 _13_sponge-dtm-cache/http/internal/handler/strong.go create mode 100644 _13_sponge-dtm-cache/http/internal/model/init.go create mode 100644 _13_sponge-dtm-cache/http/internal/model/stock.go create mode 100644 _13_sponge-dtm-cache/http/internal/routers/atomic_router.go create mode 100644 _13_sponge-dtm-cache/http/internal/routers/callback_router.go create mode 100644 _13_sponge-dtm-cache/http/internal/routers/downgrade_router.go create mode 100644 _13_sponge-dtm-cache/http/internal/routers/final_router.go create mode 100644 _13_sponge-dtm-cache/http/internal/routers/routers.go create mode 100644 _13_sponge-dtm-cache/http/internal/routers/stock_router.go create mode 100644 _13_sponge-dtm-cache/http/internal/routers/strong_router.go create mode 100644 _13_sponge-dtm-cache/http/internal/server/http.go create mode 100644 _13_sponge-dtm-cache/http/internal/server/http_option.go create mode 100644 _13_sponge-dtm-cache/http/internal/server/http_test.go create mode 100644 _13_sponge-dtm-cache/http/scripts/binary-package.sh create mode 100644 _13_sponge-dtm-cache/http/scripts/build/Dockerfile create mode 100644 _13_sponge-dtm-cache/http/scripts/build/Dockerfile_build create mode 100644 _13_sponge-dtm-cache/http/scripts/build/Dockerfile_test create mode 100644 _13_sponge-dtm-cache/http/scripts/build/README.md create mode 100644 _13_sponge-dtm-cache/http/scripts/deploy-binary.sh create mode 100644 _13_sponge-dtm-cache/http/scripts/deploy-docker.sh create mode 100644 _13_sponge-dtm-cache/http/scripts/deploy-k8s.sh create mode 100644 _13_sponge-dtm-cache/http/scripts/image-build-local.sh create mode 100644 _13_sponge-dtm-cache/http/scripts/image-build.sh create mode 100644 _13_sponge-dtm-cache/http/scripts/image-build2.sh create mode 100644 _13_sponge-dtm-cache/http/scripts/image-push.sh create mode 100644 _13_sponge-dtm-cache/http/scripts/image-rpc-test.sh create mode 100644 _13_sponge-dtm-cache/http/scripts/patch.sh create mode 100644 _13_sponge-dtm-cache/http/scripts/proto-doc.sh rename {b_sponge-dtm-msg => _13_sponge-dtm-cache/http}/scripts/protoc.sh (82%) create mode 100644 _13_sponge-dtm-cache/http/scripts/run-nohup.sh rename {b_sponge-dtm-msg => _13_sponge-dtm-cache/http}/scripts/run.sh (93%) create mode 100644 _13_sponge-dtm-cache/http/scripts/swag-docs.sh create mode 100644 _13_sponge-dtm-cache/http/test/stock.sql create mode 100644 _13_sponge-dtm-cache/http/third_party/gogo/protobuf/gogoproto/gogo.proto create mode 100644 _13_sponge-dtm-cache/http/third_party/google/api/BUILD.bazel create mode 100644 
_13_sponge-dtm-cache/http/third_party/google/api/README.md create mode 100644 _13_sponge-dtm-cache/http/third_party/google/api/annotations.proto create mode 100644 _13_sponge-dtm-cache/http/third_party/google/api/auth.proto create mode 100644 _13_sponge-dtm-cache/http/third_party/google/api/backend.proto create mode 100644 _13_sponge-dtm-cache/http/third_party/google/api/billing.proto create mode 100644 _13_sponge-dtm-cache/http/third_party/google/api/client.proto create mode 100644 _13_sponge-dtm-cache/http/third_party/google/api/config_change.proto create mode 100644 _13_sponge-dtm-cache/http/third_party/google/api/consumer.proto create mode 100644 _13_sponge-dtm-cache/http/third_party/google/api/context.proto create mode 100644 _13_sponge-dtm-cache/http/third_party/google/api/control.proto create mode 100644 _13_sponge-dtm-cache/http/third_party/google/api/distribution.proto create mode 100644 _13_sponge-dtm-cache/http/third_party/google/api/documentation.proto create mode 100644 _13_sponge-dtm-cache/http/third_party/google/api/endpoint.proto create mode 100644 _13_sponge-dtm-cache/http/third_party/google/api/expr/BUILD.bazel create mode 100644 _13_sponge-dtm-cache/http/third_party/google/api/expr/cel.yaml create mode 100644 _13_sponge-dtm-cache/http/third_party/google/api/expr/v1alpha1/BUILD.bazel create mode 100644 _13_sponge-dtm-cache/http/third_party/google/api/expr/v1alpha1/checked.proto create mode 100644 _13_sponge-dtm-cache/http/third_party/google/api/expr/v1alpha1/conformance_service.proto create mode 100644 _13_sponge-dtm-cache/http/third_party/google/api/expr/v1alpha1/eval.proto create mode 100644 _13_sponge-dtm-cache/http/third_party/google/api/expr/v1alpha1/explain.proto create mode 100644 _13_sponge-dtm-cache/http/third_party/google/api/expr/v1alpha1/syntax.proto create mode 100644 _13_sponge-dtm-cache/http/third_party/google/api/expr/v1alpha1/value.proto create mode 100644 _13_sponge-dtm-cache/http/third_party/google/api/expr/v1beta1/BUILD.bazel create mode 100644 _13_sponge-dtm-cache/http/third_party/google/api/expr/v1beta1/decl.proto create mode 100644 _13_sponge-dtm-cache/http/third_party/google/api/expr/v1beta1/eval.proto create mode 100644 _13_sponge-dtm-cache/http/third_party/google/api/expr/v1beta1/expr.proto create mode 100644 _13_sponge-dtm-cache/http/third_party/google/api/expr/v1beta1/source.proto create mode 100644 _13_sponge-dtm-cache/http/third_party/google/api/expr/v1beta1/value.proto create mode 100644 _13_sponge-dtm-cache/http/third_party/google/api/field_behavior.proto create mode 100644 _13_sponge-dtm-cache/http/third_party/google/api/http.proto create mode 100644 _13_sponge-dtm-cache/http/third_party/google/api/httpbody.proto create mode 100644 _13_sponge-dtm-cache/http/third_party/google/api/label.proto create mode 100644 _13_sponge-dtm-cache/http/third_party/google/api/launch_stage.proto create mode 100644 _13_sponge-dtm-cache/http/third_party/google/api/log.proto create mode 100644 _13_sponge-dtm-cache/http/third_party/google/api/logging.proto create mode 100644 _13_sponge-dtm-cache/http/third_party/google/api/metric.proto create mode 100644 _13_sponge-dtm-cache/http/third_party/google/api/monitored_resource.proto create mode 100644 _13_sponge-dtm-cache/http/third_party/google/api/monitoring.proto create mode 100644 _13_sponge-dtm-cache/http/third_party/google/api/quota.proto create mode 100644 _13_sponge-dtm-cache/http/third_party/google/api/resource.proto create mode 100644 _13_sponge-dtm-cache/http/third_party/google/api/service.proto create mode 
100644 _13_sponge-dtm-cache/http/third_party/google/api/serviceconfig.yaml create mode 100644 _13_sponge-dtm-cache/http/third_party/google/api/servicecontrol/BUILD.bazel create mode 100644 _13_sponge-dtm-cache/http/third_party/google/api/servicecontrol/README.md create mode 100644 _13_sponge-dtm-cache/http/third_party/google/api/servicecontrol/v1/BUILD.bazel create mode 100644 _13_sponge-dtm-cache/http/third_party/google/api/servicecontrol/v1/check_error.proto create mode 100644 _13_sponge-dtm-cache/http/third_party/google/api/servicecontrol/v1/distribution.proto create mode 100644 _13_sponge-dtm-cache/http/third_party/google/api/servicecontrol/v1/http_request.proto create mode 100644 _13_sponge-dtm-cache/http/third_party/google/api/servicecontrol/v1/log_entry.proto create mode 100644 _13_sponge-dtm-cache/http/third_party/google/api/servicecontrol/v1/metric_value.proto create mode 100644 _13_sponge-dtm-cache/http/third_party/google/api/servicecontrol/v1/operation.proto create mode 100644 _13_sponge-dtm-cache/http/third_party/google/api/servicecontrol/v1/quota_controller.proto create mode 100644 _13_sponge-dtm-cache/http/third_party/google/api/servicecontrol/v1/service_controller.proto create mode 100644 _13_sponge-dtm-cache/http/third_party/google/api/servicecontrol/v1/servicecontrol.yaml create mode 100644 _13_sponge-dtm-cache/http/third_party/google/api/servicemanagement/BUILD.bazel create mode 100644 _13_sponge-dtm-cache/http/third_party/google/api/servicemanagement/README.md create mode 100644 _13_sponge-dtm-cache/http/third_party/google/api/servicemanagement/v1/BUILD.bazel create mode 100644 _13_sponge-dtm-cache/http/third_party/google/api/servicemanagement/v1/resources.proto create mode 100644 _13_sponge-dtm-cache/http/third_party/google/api/servicemanagement/v1/servicemanagement_gapic.legacy.yaml create mode 100644 _13_sponge-dtm-cache/http/third_party/google/api/servicemanagement/v1/servicemanagement_gapic.yaml create mode 100644 _13_sponge-dtm-cache/http/third_party/google/api/servicemanagement/v1/servicemanagement_grpc_service_config.json create mode 100644 _13_sponge-dtm-cache/http/third_party/google/api/servicemanagement/v1/servicemanagement_v1.yaml create mode 100644 _13_sponge-dtm-cache/http/third_party/google/api/servicemanagement/v1/servicemanager.proto create mode 100644 _13_sponge-dtm-cache/http/third_party/google/api/source_info.proto create mode 100644 _13_sponge-dtm-cache/http/third_party/google/api/system_parameter.proto create mode 100644 _13_sponge-dtm-cache/http/third_party/google/api/usage.proto create mode 100644 _13_sponge-dtm-cache/http/third_party/google/protobuf/annotations.proto create mode 100644 _13_sponge-dtm-cache/http/third_party/google/protobuf/any.proto create mode 100644 _13_sponge-dtm-cache/http/third_party/google/protobuf/api.proto create mode 100644 _13_sponge-dtm-cache/http/third_party/google/protobuf/compiler/plugin.proto create mode 100644 _13_sponge-dtm-cache/http/third_party/google/protobuf/descriptor.proto create mode 100644 _13_sponge-dtm-cache/http/third_party/google/protobuf/duration.proto create mode 100644 _13_sponge-dtm-cache/http/third_party/google/protobuf/empty.proto create mode 100644 _13_sponge-dtm-cache/http/third_party/google/protobuf/field_mask.proto create mode 100644 _13_sponge-dtm-cache/http/third_party/google/protobuf/source_context.proto create mode 100644 _13_sponge-dtm-cache/http/third_party/google/protobuf/struct.proto create mode 100644 _13_sponge-dtm-cache/http/third_party/google/protobuf/timestamp.proto create mode 
100644 _13_sponge-dtm-cache/http/third_party/google/protobuf/type.proto create mode 100644 _13_sponge-dtm-cache/http/third_party/google/protobuf/wrappers.proto create mode 100644 _13_sponge-dtm-cache/http/third_party/protoc-gen-openapiv2/options/annotations.proto create mode 100644 _13_sponge-dtm-cache/http/third_party/protoc-gen-openapiv2/options/openapiv2.proto create mode 100644 _13_sponge-dtm-cache/http/third_party/tagger/tagger.proto create mode 100644 _13_sponge-dtm-cache/http/third_party/validate/validate.proto delete mode 100644 a_micro-grpc-http-protobuf/api/user/v1/user.pb.go delete mode 100644 a_micro-grpc-http-protobuf/api/user/v1/user.pb.validate.go delete mode 100644 a_micro-grpc-http-protobuf/api/user/v1/user.proto delete mode 100644 a_micro-grpc-http-protobuf/api/user/v1/user_grpc.pb.go delete mode 100644 a_micro-grpc-http-protobuf/api/user/v1/user_router.pb.go delete mode 100644 a_micro-grpc-http-protobuf/docs/apis.swagger.json delete mode 100644 a_micro-grpc-http-protobuf/docs/gen.info delete mode 100644 a_micro-grpc-http-protobuf/internal/ecode/user_rpc.go delete mode 100644 a_micro-grpc-http-protobuf/internal/handler/user.go delete mode 100644 a_micro-grpc-http-protobuf/internal/service/user.go create mode 100644 assets/cache-grpc-http-pb-test.png create mode 100644 assets/cache-http-pb-swagger.png delete mode 100644 b_sponge-dtm-msg/LICENSE delete mode 100644 b_sponge-dtm-msg/README.md delete mode 100644 b_sponge-dtm-msg/api/transfer/v1/transfer.pb.go delete mode 100644 b_sponge-dtm-msg/api/transfer/v1/transfer.pb.validate.go delete mode 100644 b_sponge-dtm-msg/api/transfer/v1/transfer.proto delete mode 100644 b_sponge-dtm-msg/api/transfer/v1/transfer_grpc.pb.go delete mode 100644 b_sponge-dtm-msg/cmd/transfer/initial/initApp.go delete mode 100644 b_sponge-dtm-msg/configs/transfer.yml delete mode 100644 b_sponge-dtm-msg/docs/gen.info delete mode 100644 b_sponge-dtm-msg/internal/ecode/transfer_rpc.go delete mode 100644 b_sponge-dtm-msg/internal/rpcclient/transfer.go delete mode 100644 b_sponge-dtm-msg/internal/server/grpc_option.go delete mode 100644 b_sponge-dtm-msg/internal/service/transfer.go delete mode 100644 b_sponge-dtm-msg/internal/service/transfer_client_test.go delete mode 100644 b_sponge-dtm-msg/readme-cn.md diff --git a/6_micro-cluster/README.md b/6_micro-cluster/README.md index e2deaa1..2c26f0e 100644 --- a/6_micro-cluster/README.md +++ b/6_micro-cluster/README.md @@ -1,11 +1,19 @@ +## Microservices Cluster Example -[**micro-cluster 中文说明**](https://juejin.cn/post/7230366377705685052) +Here is an example of quickly creating a microservices cluster using sponge based on protobuf files. It supports two directory structures: multi-repo and mono-repo for microservices. The difference between multi-repo and mono-repo can be seen in [Repository Type Introduction Document](https://go-sponge.com/repository-type). +- [**example-1-multi-repo**](https://github.com/zhufuyi/sponge_examples/tree/main/6_micro-cluster/example-1-multi-repo): microservice multi repository (multi-repo) example. In a multi-repo structure, multiple repositories are used, with each service completely decoupled and independent. Each service has its own `go.mod` file, as well as separate `api` and `third_party` directories. +- [**example-2-mono-repo**](https://github.com/zhufuyi/sponge_examples/tree/main/6_micro-cluster/example-2-mono-repo): microservice monolithic repository (mono-repo) example. 
In a mono-repo structure, there is only one repository with a single `go.mod` file, and the `api` and `third_party` directories are shared by all services as common directories. + +The development and code generation processes are the same for both structures; the difference lies only in how the code is organized. The mono-repo structure is generally recommended. + +

-The microservice cluster currently created by using sponge is a `multi-repo` type of microservice. Although multiple service codes are placed under a directory `6_micro-cluster`, the codes between different services cannot be reused, so the codes between services are completely independent. +Here is an example of quickly setting up a microservices cluster from scratch based on protobuf files. The setup steps are essentially the same for both the `multi-repo` and `mono-repo` repository types; only the following two steps differ: -> Tip: Sponge also supports the creation of a microservice `mono-repo` type where code can be reused between different services, which is simpler. +- When generating code, you must select the monolithic repository option for the `mono-repo` repository type; no such option needs to be selected for the `multi-repo` repository type. +- For the `multi-repo` repository type, if the protobuf files of the current service depend on protobuf files from other services, you need to copy the dependent protobuf files into the `api` directory of the current service; a `make copy-proto` command is provided to make this copying convenient. The `mono-repo` repository type does not require copying dependent protobuf files.
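To make the layout difference described in the README hunk above concrete, here is a rough sketch of the two directory structures. It is illustrative only: the multi-repo side follows the `comment` and `inventory` services moved in this patch, while the mono-repo side reflects only the description above (single `go.mod`, shared `api` and `third_party`), so the exact per-service layout may differ.

```
6_micro-cluster/example-1-multi-repo/     # multi-repo: every service is a self-contained module
├── comment/
│   ├── go.mod                            # per-service module file
│   ├── api/comment/v1/                   # per-service proto files and generated code
│   └── third_party/                      # per-service copy of common proto dependencies
└── inventory/
    ├── go.mod
    ├── api/inventory/v1/
    └── third_party/

6_micro-cluster/example-2-mono-repo/      # mono-repo: one module shared by all services
├── go.mod                                # single module file for the whole cluster
├── api/                                  # shared api directory
│   ├── comment/v1/
│   └── inventory/v1/
├── third_party/                          # shared third_party directory
├── comment/                              # per-service code (layout indicative only)
└── inventory/
```

Because each multi-repo service keeps its own `api` copy, a service whose protobuf files depend on another service's protobuf files has to copy those dependencies in (the `make copy-proto` helper mentioned above), whereas the shared `api` directory in the mono-repo layout makes that copy step unnecessary.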
diff --git a/6_micro-cluster/comment/.gitignore b/6_micro-cluster/example-1-multi-repo/comment/.gitignore similarity index 100% rename from 6_micro-cluster/comment/.gitignore rename to 6_micro-cluster/example-1-multi-repo/comment/.gitignore diff --git a/6_micro-cluster/comment/.golangci.yml b/6_micro-cluster/example-1-multi-repo/comment/.golangci.yml similarity index 100% rename from 6_micro-cluster/comment/.golangci.yml rename to 6_micro-cluster/example-1-multi-repo/comment/.golangci.yml diff --git a/6_micro-cluster/comment/Jenkinsfile b/6_micro-cluster/example-1-multi-repo/comment/Jenkinsfile similarity index 100% rename from 6_micro-cluster/comment/Jenkinsfile rename to 6_micro-cluster/example-1-multi-repo/comment/Jenkinsfile diff --git a/6_micro-cluster/comment/Makefile b/6_micro-cluster/example-1-multi-repo/comment/Makefile similarity index 100% rename from 6_micro-cluster/comment/Makefile rename to 6_micro-cluster/example-1-multi-repo/comment/Makefile diff --git a/6_micro-cluster/comment/README.md b/6_micro-cluster/example-1-multi-repo/comment/README.md similarity index 100% rename from 6_micro-cluster/comment/README.md rename to 6_micro-cluster/example-1-multi-repo/comment/README.md diff --git a/6_micro-cluster/comment/api/comment/v1/comment.pb.go b/6_micro-cluster/example-1-multi-repo/comment/api/comment/v1/comment.pb.go similarity index 100% rename from 6_micro-cluster/comment/api/comment/v1/comment.pb.go rename to 6_micro-cluster/example-1-multi-repo/comment/api/comment/v1/comment.pb.go diff --git a/6_micro-cluster/comment/api/comment/v1/comment.pb.validate.go b/6_micro-cluster/example-1-multi-repo/comment/api/comment/v1/comment.pb.validate.go similarity index 100% rename from 6_micro-cluster/comment/api/comment/v1/comment.pb.validate.go rename to 6_micro-cluster/example-1-multi-repo/comment/api/comment/v1/comment.pb.validate.go diff --git a/6_micro-cluster/comment/api/comment/v1/comment.proto b/6_micro-cluster/example-1-multi-repo/comment/api/comment/v1/comment.proto similarity index 100% rename from 6_micro-cluster/comment/api/comment/v1/comment.proto rename to 6_micro-cluster/example-1-multi-repo/comment/api/comment/v1/comment.proto diff --git a/6_micro-cluster/comment/api/comment/v1/comment_grpc.pb.go b/6_micro-cluster/example-1-multi-repo/comment/api/comment/v1/comment_grpc.pb.go similarity index 100% rename from 6_micro-cluster/comment/api/comment/v1/comment_grpc.pb.go rename to 6_micro-cluster/example-1-multi-repo/comment/api/comment/v1/comment_grpc.pb.go diff --git a/6_micro-cluster/comment/cmd/comment/initial/initApp.go b/6_micro-cluster/example-1-multi-repo/comment/cmd/comment/initial/initApp.go similarity index 100% rename from 6_micro-cluster/comment/cmd/comment/initial/initApp.go rename to 6_micro-cluster/example-1-multi-repo/comment/cmd/comment/initial/initApp.go diff --git a/6_micro-cluster/comment/cmd/comment/initial/registerClose.go b/6_micro-cluster/example-1-multi-repo/comment/cmd/comment/initial/registerClose.go similarity index 100% rename from 6_micro-cluster/comment/cmd/comment/initial/registerClose.go rename to 6_micro-cluster/example-1-multi-repo/comment/cmd/comment/initial/registerClose.go diff --git a/6_micro-cluster/comment/cmd/comment/initial/registerServer.go b/6_micro-cluster/example-1-multi-repo/comment/cmd/comment/initial/registerServer.go similarity index 100% rename from 6_micro-cluster/comment/cmd/comment/initial/registerServer.go rename to 6_micro-cluster/example-1-multi-repo/comment/cmd/comment/initial/registerServer.go diff --git 
a/6_micro-cluster/comment/cmd/comment/main.go b/6_micro-cluster/example-1-multi-repo/comment/cmd/comment/main.go similarity index 100% rename from 6_micro-cluster/comment/cmd/comment/main.go rename to 6_micro-cluster/example-1-multi-repo/comment/cmd/comment/main.go diff --git a/6_micro-cluster/comment/configs/comment.yml b/6_micro-cluster/example-1-multi-repo/comment/configs/comment.yml similarity index 100% rename from 6_micro-cluster/comment/configs/comment.yml rename to 6_micro-cluster/example-1-multi-repo/comment/configs/comment.yml diff --git a/6_micro-cluster/comment/configs/comment_cc.yml b/6_micro-cluster/example-1-multi-repo/comment/configs/comment_cc.yml similarity index 100% rename from 6_micro-cluster/comment/configs/comment_cc.yml rename to 6_micro-cluster/example-1-multi-repo/comment/configs/comment_cc.yml diff --git a/6_micro-cluster/comment/configs/location.go b/6_micro-cluster/example-1-multi-repo/comment/configs/location.go similarity index 100% rename from 6_micro-cluster/comment/configs/location.go rename to 6_micro-cluster/example-1-multi-repo/comment/configs/location.go diff --git a/6_micro-cluster/comment/deployments/binary/README.md b/6_micro-cluster/example-1-multi-repo/comment/deployments/binary/README.md similarity index 100% rename from 6_micro-cluster/comment/deployments/binary/README.md rename to 6_micro-cluster/example-1-multi-repo/comment/deployments/binary/README.md diff --git a/6_micro-cluster/comment/deployments/binary/deploy.sh b/6_micro-cluster/example-1-multi-repo/comment/deployments/binary/deploy.sh similarity index 100% rename from 6_micro-cluster/comment/deployments/binary/deploy.sh rename to 6_micro-cluster/example-1-multi-repo/comment/deployments/binary/deploy.sh diff --git a/6_micro-cluster/comment/deployments/binary/run.sh b/6_micro-cluster/example-1-multi-repo/comment/deployments/binary/run.sh similarity index 100% rename from 6_micro-cluster/comment/deployments/binary/run.sh rename to 6_micro-cluster/example-1-multi-repo/comment/deployments/binary/run.sh diff --git a/6_micro-cluster/comment/deployments/docker-compose/README.md b/6_micro-cluster/example-1-multi-repo/comment/deployments/docker-compose/README.md similarity index 100% rename from 6_micro-cluster/comment/deployments/docker-compose/README.md rename to 6_micro-cluster/example-1-multi-repo/comment/deployments/docker-compose/README.md diff --git a/6_micro-cluster/comment/deployments/docker-compose/docker-compose.yml b/6_micro-cluster/example-1-multi-repo/comment/deployments/docker-compose/docker-compose.yml similarity index 100% rename from 6_micro-cluster/comment/deployments/docker-compose/docker-compose.yml rename to 6_micro-cluster/example-1-multi-repo/comment/deployments/docker-compose/docker-compose.yml diff --git a/6_micro-cluster/comment/deployments/kubernetes/README.md b/6_micro-cluster/example-1-multi-repo/comment/deployments/kubernetes/README.md similarity index 100% rename from 6_micro-cluster/comment/deployments/kubernetes/README.md rename to 6_micro-cluster/example-1-multi-repo/comment/deployments/kubernetes/README.md diff --git a/6_micro-cluster/comment/deployments/kubernetes/comment-configmap.yml b/6_micro-cluster/example-1-multi-repo/comment/deployments/kubernetes/comment-configmap.yml similarity index 100% rename from 6_micro-cluster/comment/deployments/kubernetes/comment-configmap.yml rename to 6_micro-cluster/example-1-multi-repo/comment/deployments/kubernetes/comment-configmap.yml diff --git a/6_micro-cluster/comment/deployments/kubernetes/comment-deployment.yml 
b/6_micro-cluster/example-1-multi-repo/comment/deployments/kubernetes/comment-deployment.yml similarity index 100% rename from 6_micro-cluster/comment/deployments/kubernetes/comment-deployment.yml rename to 6_micro-cluster/example-1-multi-repo/comment/deployments/kubernetes/comment-deployment.yml diff --git a/6_micro-cluster/comment/deployments/kubernetes/comment-svc.yml b/6_micro-cluster/example-1-multi-repo/comment/deployments/kubernetes/comment-svc.yml similarity index 100% rename from 6_micro-cluster/comment/deployments/kubernetes/comment-svc.yml rename to 6_micro-cluster/example-1-multi-repo/comment/deployments/kubernetes/comment-svc.yml diff --git a/6_micro-cluster/comment/deployments/kubernetes/projectNameExample-namespace.yml b/6_micro-cluster/example-1-multi-repo/comment/deployments/kubernetes/projectNameExample-namespace.yml similarity index 100% rename from 6_micro-cluster/comment/deployments/kubernetes/projectNameExample-namespace.yml rename to 6_micro-cluster/example-1-multi-repo/comment/deployments/kubernetes/projectNameExample-namespace.yml diff --git a/6_micro-cluster/comment/docs/gen.info b/6_micro-cluster/example-1-multi-repo/comment/docs/gen.info similarity index 100% rename from 6_micro-cluster/comment/docs/gen.info rename to 6_micro-cluster/example-1-multi-repo/comment/docs/gen.info diff --git a/6_micro-cluster/comment/go.mod b/6_micro-cluster/example-1-multi-repo/comment/go.mod similarity index 100% rename from 6_micro-cluster/comment/go.mod rename to 6_micro-cluster/example-1-multi-repo/comment/go.mod diff --git a/6_micro-cluster/comment/go.sum b/6_micro-cluster/example-1-multi-repo/comment/go.sum similarity index 100% rename from 6_micro-cluster/comment/go.sum rename to 6_micro-cluster/example-1-multi-repo/comment/go.sum diff --git a/6_micro-cluster/comment/internal/config/comment.go b/6_micro-cluster/example-1-multi-repo/comment/internal/config/comment.go similarity index 100% rename from 6_micro-cluster/comment/internal/config/comment.go rename to 6_micro-cluster/example-1-multi-repo/comment/internal/config/comment.go diff --git a/6_micro-cluster/comment/internal/config/comment_cc.go b/6_micro-cluster/example-1-multi-repo/comment/internal/config/comment_cc.go similarity index 100% rename from 6_micro-cluster/comment/internal/config/comment_cc.go rename to 6_micro-cluster/example-1-multi-repo/comment/internal/config/comment_cc.go diff --git a/6_micro-cluster/comment/internal/config/comment_test.go b/6_micro-cluster/example-1-multi-repo/comment/internal/config/comment_test.go similarity index 100% rename from 6_micro-cluster/comment/internal/config/comment_test.go rename to 6_micro-cluster/example-1-multi-repo/comment/internal/config/comment_test.go diff --git a/6_micro-cluster/comment/internal/ecode/comment_rpc.go b/6_micro-cluster/example-1-multi-repo/comment/internal/ecode/comment_rpc.go similarity index 100% rename from 6_micro-cluster/comment/internal/ecode/comment_rpc.go rename to 6_micro-cluster/example-1-multi-repo/comment/internal/ecode/comment_rpc.go diff --git a/6_micro-cluster/comment/internal/ecode/systemCode_rpc.go b/6_micro-cluster/example-1-multi-repo/comment/internal/ecode/systemCode_rpc.go similarity index 100% rename from 6_micro-cluster/comment/internal/ecode/systemCode_rpc.go rename to 6_micro-cluster/example-1-multi-repo/comment/internal/ecode/systemCode_rpc.go diff --git a/6_micro-cluster/comment/internal/server/grpc.go b/6_micro-cluster/example-1-multi-repo/comment/internal/server/grpc.go similarity index 100% rename from 
6_micro-cluster/comment/internal/server/grpc.go rename to 6_micro-cluster/example-1-multi-repo/comment/internal/server/grpc.go diff --git a/6_micro-cluster/comment/internal/server/grpc_option.go b/6_micro-cluster/example-1-multi-repo/comment/internal/server/grpc_option.go similarity index 100% rename from 6_micro-cluster/comment/internal/server/grpc_option.go rename to 6_micro-cluster/example-1-multi-repo/comment/internal/server/grpc_option.go diff --git a/6_micro-cluster/comment/internal/server/grpc_test.go b/6_micro-cluster/example-1-multi-repo/comment/internal/server/grpc_test.go similarity index 100% rename from 6_micro-cluster/comment/internal/server/grpc_test.go rename to 6_micro-cluster/example-1-multi-repo/comment/internal/server/grpc_test.go diff --git a/6_micro-cluster/comment/internal/service/comment.go b/6_micro-cluster/example-1-multi-repo/comment/internal/service/comment.go similarity index 100% rename from 6_micro-cluster/comment/internal/service/comment.go rename to 6_micro-cluster/example-1-multi-repo/comment/internal/service/comment.go diff --git a/6_micro-cluster/comment/internal/service/comment_client_test.go b/6_micro-cluster/example-1-multi-repo/comment/internal/service/comment_client_test.go similarity index 100% rename from 6_micro-cluster/comment/internal/service/comment_client_test.go rename to 6_micro-cluster/example-1-multi-repo/comment/internal/service/comment_client_test.go diff --git a/6_micro-cluster/comment/internal/service/service.go b/6_micro-cluster/example-1-multi-repo/comment/internal/service/service.go similarity index 100% rename from 6_micro-cluster/comment/internal/service/service.go rename to 6_micro-cluster/example-1-multi-repo/comment/internal/service/service.go diff --git a/6_micro-cluster/comment/internal/service/service_test.go b/6_micro-cluster/example-1-multi-repo/comment/internal/service/service_test.go similarity index 100% rename from 6_micro-cluster/comment/internal/service/service_test.go rename to 6_micro-cluster/example-1-multi-repo/comment/internal/service/service_test.go diff --git a/6_micro-cluster/comment/scripts/binary-package.sh b/6_micro-cluster/example-1-multi-repo/comment/scripts/binary-package.sh similarity index 100% rename from 6_micro-cluster/comment/scripts/binary-package.sh rename to 6_micro-cluster/example-1-multi-repo/comment/scripts/binary-package.sh diff --git a/6_micro-cluster/comment/scripts/build/Dockerfile b/6_micro-cluster/example-1-multi-repo/comment/scripts/build/Dockerfile similarity index 100% rename from 6_micro-cluster/comment/scripts/build/Dockerfile rename to 6_micro-cluster/example-1-multi-repo/comment/scripts/build/Dockerfile diff --git a/6_micro-cluster/comment/scripts/build/Dockerfile_build b/6_micro-cluster/example-1-multi-repo/comment/scripts/build/Dockerfile_build similarity index 100% rename from 6_micro-cluster/comment/scripts/build/Dockerfile_build rename to 6_micro-cluster/example-1-multi-repo/comment/scripts/build/Dockerfile_build diff --git a/6_micro-cluster/comment/scripts/build/Dockerfile_test b/6_micro-cluster/example-1-multi-repo/comment/scripts/build/Dockerfile_test similarity index 100% rename from 6_micro-cluster/comment/scripts/build/Dockerfile_test rename to 6_micro-cluster/example-1-multi-repo/comment/scripts/build/Dockerfile_test diff --git a/6_micro-cluster/comment/scripts/build/README.md b/6_micro-cluster/example-1-multi-repo/comment/scripts/build/README.md similarity index 100% rename from 6_micro-cluster/comment/scripts/build/README.md rename to 
6_micro-cluster/example-1-multi-repo/comment/scripts/build/README.md diff --git a/6_micro-cluster/comment/scripts/deploy-binary.sh b/6_micro-cluster/example-1-multi-repo/comment/scripts/deploy-binary.sh similarity index 100% rename from 6_micro-cluster/comment/scripts/deploy-binary.sh rename to 6_micro-cluster/example-1-multi-repo/comment/scripts/deploy-binary.sh diff --git a/6_micro-cluster/comment/scripts/deploy-docker.sh b/6_micro-cluster/example-1-multi-repo/comment/scripts/deploy-docker.sh similarity index 100% rename from 6_micro-cluster/comment/scripts/deploy-docker.sh rename to 6_micro-cluster/example-1-multi-repo/comment/scripts/deploy-docker.sh diff --git a/6_micro-cluster/comment/scripts/deploy-k8s.sh b/6_micro-cluster/example-1-multi-repo/comment/scripts/deploy-k8s.sh similarity index 100% rename from 6_micro-cluster/comment/scripts/deploy-k8s.sh rename to 6_micro-cluster/example-1-multi-repo/comment/scripts/deploy-k8s.sh diff --git a/6_micro-cluster/comment/scripts/image-build-local.sh b/6_micro-cluster/example-1-multi-repo/comment/scripts/image-build-local.sh similarity index 100% rename from 6_micro-cluster/comment/scripts/image-build-local.sh rename to 6_micro-cluster/example-1-multi-repo/comment/scripts/image-build-local.sh diff --git a/6_micro-cluster/comment/scripts/image-build.sh b/6_micro-cluster/example-1-multi-repo/comment/scripts/image-build.sh similarity index 100% rename from 6_micro-cluster/comment/scripts/image-build.sh rename to 6_micro-cluster/example-1-multi-repo/comment/scripts/image-build.sh diff --git a/6_micro-cluster/comment/scripts/image-build2.sh b/6_micro-cluster/example-1-multi-repo/comment/scripts/image-build2.sh similarity index 100% rename from 6_micro-cluster/comment/scripts/image-build2.sh rename to 6_micro-cluster/example-1-multi-repo/comment/scripts/image-build2.sh diff --git a/6_micro-cluster/comment/scripts/image-push.sh b/6_micro-cluster/example-1-multi-repo/comment/scripts/image-push.sh similarity index 100% rename from 6_micro-cluster/comment/scripts/image-push.sh rename to 6_micro-cluster/example-1-multi-repo/comment/scripts/image-push.sh diff --git a/6_micro-cluster/comment/scripts/image-rpc-test.sh b/6_micro-cluster/example-1-multi-repo/comment/scripts/image-rpc-test.sh similarity index 100% rename from 6_micro-cluster/comment/scripts/image-rpc-test.sh rename to 6_micro-cluster/example-1-multi-repo/comment/scripts/image-rpc-test.sh diff --git a/6_micro-cluster/comment/scripts/patch.sh b/6_micro-cluster/example-1-multi-repo/comment/scripts/patch.sh similarity index 100% rename from 6_micro-cluster/comment/scripts/patch.sh rename to 6_micro-cluster/example-1-multi-repo/comment/scripts/patch.sh diff --git a/6_micro-cluster/comment/scripts/proto-doc.sh b/6_micro-cluster/example-1-multi-repo/comment/scripts/proto-doc.sh similarity index 100% rename from 6_micro-cluster/comment/scripts/proto-doc.sh rename to 6_micro-cluster/example-1-multi-repo/comment/scripts/proto-doc.sh diff --git a/6_micro-cluster/comment/scripts/protoc.sh b/6_micro-cluster/example-1-multi-repo/comment/scripts/protoc.sh similarity index 100% rename from 6_micro-cluster/comment/scripts/protoc.sh rename to 6_micro-cluster/example-1-multi-repo/comment/scripts/protoc.sh diff --git a/6_micro-cluster/comment/scripts/run-nohup.sh b/6_micro-cluster/example-1-multi-repo/comment/scripts/run-nohup.sh similarity index 100% rename from 6_micro-cluster/comment/scripts/run-nohup.sh rename to 6_micro-cluster/example-1-multi-repo/comment/scripts/run-nohup.sh diff --git 
a/6_micro-cluster/comment/scripts/run.sh b/6_micro-cluster/example-1-multi-repo/comment/scripts/run.sh similarity index 100% rename from 6_micro-cluster/comment/scripts/run.sh rename to 6_micro-cluster/example-1-multi-repo/comment/scripts/run.sh diff --git a/6_micro-cluster/comment/scripts/swag-docs.sh b/6_micro-cluster/example-1-multi-repo/comment/scripts/swag-docs.sh similarity index 100% rename from 6_micro-cluster/comment/scripts/swag-docs.sh rename to 6_micro-cluster/example-1-multi-repo/comment/scripts/swag-docs.sh diff --git a/6_micro-cluster/comment/third_party/gogo/protobuf/gogoproto/gogo.proto b/6_micro-cluster/example-1-multi-repo/comment/third_party/gogo/protobuf/gogoproto/gogo.proto similarity index 100% rename from 6_micro-cluster/comment/third_party/gogo/protobuf/gogoproto/gogo.proto rename to 6_micro-cluster/example-1-multi-repo/comment/third_party/gogo/protobuf/gogoproto/gogo.proto diff --git a/6_micro-cluster/comment/third_party/google/api/BUILD.bazel b/6_micro-cluster/example-1-multi-repo/comment/third_party/google/api/BUILD.bazel similarity index 100% rename from 6_micro-cluster/comment/third_party/google/api/BUILD.bazel rename to 6_micro-cluster/example-1-multi-repo/comment/third_party/google/api/BUILD.bazel diff --git a/6_micro-cluster/comment/third_party/google/api/README.md b/6_micro-cluster/example-1-multi-repo/comment/third_party/google/api/README.md similarity index 100% rename from 6_micro-cluster/comment/third_party/google/api/README.md rename to 6_micro-cluster/example-1-multi-repo/comment/third_party/google/api/README.md diff --git a/6_micro-cluster/comment/third_party/google/api/annotations.proto b/6_micro-cluster/example-1-multi-repo/comment/third_party/google/api/annotations.proto similarity index 100% rename from 6_micro-cluster/comment/third_party/google/api/annotations.proto rename to 6_micro-cluster/example-1-multi-repo/comment/third_party/google/api/annotations.proto diff --git a/6_micro-cluster/comment/third_party/google/api/auth.proto b/6_micro-cluster/example-1-multi-repo/comment/third_party/google/api/auth.proto similarity index 100% rename from 6_micro-cluster/comment/third_party/google/api/auth.proto rename to 6_micro-cluster/example-1-multi-repo/comment/third_party/google/api/auth.proto diff --git a/6_micro-cluster/comment/third_party/google/api/backend.proto b/6_micro-cluster/example-1-multi-repo/comment/third_party/google/api/backend.proto similarity index 100% rename from 6_micro-cluster/comment/third_party/google/api/backend.proto rename to 6_micro-cluster/example-1-multi-repo/comment/third_party/google/api/backend.proto diff --git a/6_micro-cluster/comment/third_party/google/api/billing.proto b/6_micro-cluster/example-1-multi-repo/comment/third_party/google/api/billing.proto similarity index 100% rename from 6_micro-cluster/comment/third_party/google/api/billing.proto rename to 6_micro-cluster/example-1-multi-repo/comment/third_party/google/api/billing.proto diff --git a/6_micro-cluster/comment/third_party/google/api/client.proto b/6_micro-cluster/example-1-multi-repo/comment/third_party/google/api/client.proto similarity index 100% rename from 6_micro-cluster/comment/third_party/google/api/client.proto rename to 6_micro-cluster/example-1-multi-repo/comment/third_party/google/api/client.proto diff --git a/6_micro-cluster/comment/third_party/google/api/config_change.proto b/6_micro-cluster/example-1-multi-repo/comment/third_party/google/api/config_change.proto similarity index 100% rename from 
6_micro-cluster/comment/third_party/google/api/config_change.proto rename to 6_micro-cluster/example-1-multi-repo/comment/third_party/google/api/config_change.proto diff --git a/6_micro-cluster/comment/third_party/google/api/consumer.proto b/6_micro-cluster/example-1-multi-repo/comment/third_party/google/api/consumer.proto similarity index 100% rename from 6_micro-cluster/comment/third_party/google/api/consumer.proto rename to 6_micro-cluster/example-1-multi-repo/comment/third_party/google/api/consumer.proto diff --git a/6_micro-cluster/comment/third_party/google/api/context.proto b/6_micro-cluster/example-1-multi-repo/comment/third_party/google/api/context.proto similarity index 100% rename from 6_micro-cluster/comment/third_party/google/api/context.proto rename to 6_micro-cluster/example-1-multi-repo/comment/third_party/google/api/context.proto diff --git a/6_micro-cluster/comment/third_party/google/api/control.proto b/6_micro-cluster/example-1-multi-repo/comment/third_party/google/api/control.proto similarity index 100% rename from 6_micro-cluster/comment/third_party/google/api/control.proto rename to 6_micro-cluster/example-1-multi-repo/comment/third_party/google/api/control.proto diff --git a/6_micro-cluster/comment/third_party/google/api/distribution.proto b/6_micro-cluster/example-1-multi-repo/comment/third_party/google/api/distribution.proto similarity index 100% rename from 6_micro-cluster/comment/third_party/google/api/distribution.proto rename to 6_micro-cluster/example-1-multi-repo/comment/third_party/google/api/distribution.proto diff --git a/6_micro-cluster/comment/third_party/google/api/documentation.proto b/6_micro-cluster/example-1-multi-repo/comment/third_party/google/api/documentation.proto similarity index 100% rename from 6_micro-cluster/comment/third_party/google/api/documentation.proto rename to 6_micro-cluster/example-1-multi-repo/comment/third_party/google/api/documentation.proto diff --git a/6_micro-cluster/comment/third_party/google/api/endpoint.proto b/6_micro-cluster/example-1-multi-repo/comment/third_party/google/api/endpoint.proto similarity index 100% rename from 6_micro-cluster/comment/third_party/google/api/endpoint.proto rename to 6_micro-cluster/example-1-multi-repo/comment/third_party/google/api/endpoint.proto diff --git a/6_micro-cluster/comment/third_party/google/api/expr/BUILD.bazel b/6_micro-cluster/example-1-multi-repo/comment/third_party/google/api/expr/BUILD.bazel similarity index 100% rename from 6_micro-cluster/comment/third_party/google/api/expr/BUILD.bazel rename to 6_micro-cluster/example-1-multi-repo/comment/third_party/google/api/expr/BUILD.bazel diff --git a/6_micro-cluster/comment/third_party/google/api/expr/cel.yaml b/6_micro-cluster/example-1-multi-repo/comment/third_party/google/api/expr/cel.yaml similarity index 100% rename from 6_micro-cluster/comment/third_party/google/api/expr/cel.yaml rename to 6_micro-cluster/example-1-multi-repo/comment/third_party/google/api/expr/cel.yaml diff --git a/6_micro-cluster/comment/third_party/google/api/expr/v1alpha1/BUILD.bazel b/6_micro-cluster/example-1-multi-repo/comment/third_party/google/api/expr/v1alpha1/BUILD.bazel similarity index 100% rename from 6_micro-cluster/comment/third_party/google/api/expr/v1alpha1/BUILD.bazel rename to 6_micro-cluster/example-1-multi-repo/comment/third_party/google/api/expr/v1alpha1/BUILD.bazel diff --git a/6_micro-cluster/comment/third_party/google/api/expr/v1alpha1/checked.proto 
b/6_micro-cluster/example-1-multi-repo/comment/third_party/google/api/expr/v1alpha1/checked.proto similarity index 100% rename from 6_micro-cluster/comment/third_party/google/api/expr/v1alpha1/checked.proto rename to 6_micro-cluster/example-1-multi-repo/comment/third_party/google/api/expr/v1alpha1/checked.proto diff --git a/6_micro-cluster/comment/third_party/google/api/expr/v1alpha1/conformance_service.proto b/6_micro-cluster/example-1-multi-repo/comment/third_party/google/api/expr/v1alpha1/conformance_service.proto similarity index 100% rename from 6_micro-cluster/comment/third_party/google/api/expr/v1alpha1/conformance_service.proto rename to 6_micro-cluster/example-1-multi-repo/comment/third_party/google/api/expr/v1alpha1/conformance_service.proto diff --git a/6_micro-cluster/comment/third_party/google/api/expr/v1alpha1/eval.proto b/6_micro-cluster/example-1-multi-repo/comment/third_party/google/api/expr/v1alpha1/eval.proto similarity index 100% rename from 6_micro-cluster/comment/third_party/google/api/expr/v1alpha1/eval.proto rename to 6_micro-cluster/example-1-multi-repo/comment/third_party/google/api/expr/v1alpha1/eval.proto diff --git a/6_micro-cluster/comment/third_party/google/api/expr/v1alpha1/explain.proto b/6_micro-cluster/example-1-multi-repo/comment/third_party/google/api/expr/v1alpha1/explain.proto similarity index 100% rename from 6_micro-cluster/comment/third_party/google/api/expr/v1alpha1/explain.proto rename to 6_micro-cluster/example-1-multi-repo/comment/third_party/google/api/expr/v1alpha1/explain.proto diff --git a/6_micro-cluster/comment/third_party/google/api/expr/v1alpha1/syntax.proto b/6_micro-cluster/example-1-multi-repo/comment/third_party/google/api/expr/v1alpha1/syntax.proto similarity index 100% rename from 6_micro-cluster/comment/third_party/google/api/expr/v1alpha1/syntax.proto rename to 6_micro-cluster/example-1-multi-repo/comment/third_party/google/api/expr/v1alpha1/syntax.proto diff --git a/6_micro-cluster/comment/third_party/google/api/expr/v1alpha1/value.proto b/6_micro-cluster/example-1-multi-repo/comment/third_party/google/api/expr/v1alpha1/value.proto similarity index 100% rename from 6_micro-cluster/comment/third_party/google/api/expr/v1alpha1/value.proto rename to 6_micro-cluster/example-1-multi-repo/comment/third_party/google/api/expr/v1alpha1/value.proto diff --git a/6_micro-cluster/comment/third_party/google/api/expr/v1beta1/BUILD.bazel b/6_micro-cluster/example-1-multi-repo/comment/third_party/google/api/expr/v1beta1/BUILD.bazel similarity index 100% rename from 6_micro-cluster/comment/third_party/google/api/expr/v1beta1/BUILD.bazel rename to 6_micro-cluster/example-1-multi-repo/comment/third_party/google/api/expr/v1beta1/BUILD.bazel diff --git a/6_micro-cluster/comment/third_party/google/api/expr/v1beta1/decl.proto b/6_micro-cluster/example-1-multi-repo/comment/third_party/google/api/expr/v1beta1/decl.proto similarity index 100% rename from 6_micro-cluster/comment/third_party/google/api/expr/v1beta1/decl.proto rename to 6_micro-cluster/example-1-multi-repo/comment/third_party/google/api/expr/v1beta1/decl.proto diff --git a/6_micro-cluster/comment/third_party/google/api/expr/v1beta1/eval.proto b/6_micro-cluster/example-1-multi-repo/comment/third_party/google/api/expr/v1beta1/eval.proto similarity index 100% rename from 6_micro-cluster/comment/third_party/google/api/expr/v1beta1/eval.proto rename to 6_micro-cluster/example-1-multi-repo/comment/third_party/google/api/expr/v1beta1/eval.proto diff --git 
a/6_micro-cluster/comment/third_party/google/api/expr/v1beta1/expr.proto b/6_micro-cluster/example-1-multi-repo/comment/third_party/google/api/expr/v1beta1/expr.proto similarity index 100% rename from 6_micro-cluster/comment/third_party/google/api/expr/v1beta1/expr.proto rename to 6_micro-cluster/example-1-multi-repo/comment/third_party/google/api/expr/v1beta1/expr.proto diff --git a/6_micro-cluster/comment/third_party/google/api/expr/v1beta1/source.proto b/6_micro-cluster/example-1-multi-repo/comment/third_party/google/api/expr/v1beta1/source.proto similarity index 100% rename from 6_micro-cluster/comment/third_party/google/api/expr/v1beta1/source.proto rename to 6_micro-cluster/example-1-multi-repo/comment/third_party/google/api/expr/v1beta1/source.proto diff --git a/6_micro-cluster/comment/third_party/google/api/expr/v1beta1/value.proto b/6_micro-cluster/example-1-multi-repo/comment/third_party/google/api/expr/v1beta1/value.proto similarity index 100% rename from 6_micro-cluster/comment/third_party/google/api/expr/v1beta1/value.proto rename to 6_micro-cluster/example-1-multi-repo/comment/third_party/google/api/expr/v1beta1/value.proto diff --git a/6_micro-cluster/comment/third_party/google/api/field_behavior.proto b/6_micro-cluster/example-1-multi-repo/comment/third_party/google/api/field_behavior.proto similarity index 100% rename from 6_micro-cluster/comment/third_party/google/api/field_behavior.proto rename to 6_micro-cluster/example-1-multi-repo/comment/third_party/google/api/field_behavior.proto diff --git a/6_micro-cluster/comment/third_party/google/api/http.proto b/6_micro-cluster/example-1-multi-repo/comment/third_party/google/api/http.proto similarity index 100% rename from 6_micro-cluster/comment/third_party/google/api/http.proto rename to 6_micro-cluster/example-1-multi-repo/comment/third_party/google/api/http.proto diff --git a/6_micro-cluster/comment/third_party/google/api/httpbody.proto b/6_micro-cluster/example-1-multi-repo/comment/third_party/google/api/httpbody.proto similarity index 100% rename from 6_micro-cluster/comment/third_party/google/api/httpbody.proto rename to 6_micro-cluster/example-1-multi-repo/comment/third_party/google/api/httpbody.proto diff --git a/6_micro-cluster/comment/third_party/google/api/label.proto b/6_micro-cluster/example-1-multi-repo/comment/third_party/google/api/label.proto similarity index 100% rename from 6_micro-cluster/comment/third_party/google/api/label.proto rename to 6_micro-cluster/example-1-multi-repo/comment/third_party/google/api/label.proto diff --git a/6_micro-cluster/comment/third_party/google/api/launch_stage.proto b/6_micro-cluster/example-1-multi-repo/comment/third_party/google/api/launch_stage.proto similarity index 100% rename from 6_micro-cluster/comment/third_party/google/api/launch_stage.proto rename to 6_micro-cluster/example-1-multi-repo/comment/third_party/google/api/launch_stage.proto diff --git a/6_micro-cluster/comment/third_party/google/api/log.proto b/6_micro-cluster/example-1-multi-repo/comment/third_party/google/api/log.proto similarity index 100% rename from 6_micro-cluster/comment/third_party/google/api/log.proto rename to 6_micro-cluster/example-1-multi-repo/comment/third_party/google/api/log.proto diff --git a/6_micro-cluster/comment/third_party/google/api/logging.proto b/6_micro-cluster/example-1-multi-repo/comment/third_party/google/api/logging.proto similarity index 100% rename from 6_micro-cluster/comment/third_party/google/api/logging.proto rename to 
6_micro-cluster/example-1-multi-repo/comment/third_party/google/api/logging.proto diff --git a/6_micro-cluster/comment/third_party/google/api/metric.proto b/6_micro-cluster/example-1-multi-repo/comment/third_party/google/api/metric.proto similarity index 100% rename from 6_micro-cluster/comment/third_party/google/api/metric.proto rename to 6_micro-cluster/example-1-multi-repo/comment/third_party/google/api/metric.proto diff --git a/6_micro-cluster/comment/third_party/google/api/monitored_resource.proto b/6_micro-cluster/example-1-multi-repo/comment/third_party/google/api/monitored_resource.proto similarity index 100% rename from 6_micro-cluster/comment/third_party/google/api/monitored_resource.proto rename to 6_micro-cluster/example-1-multi-repo/comment/third_party/google/api/monitored_resource.proto diff --git a/6_micro-cluster/comment/third_party/google/api/monitoring.proto b/6_micro-cluster/example-1-multi-repo/comment/third_party/google/api/monitoring.proto similarity index 100% rename from 6_micro-cluster/comment/third_party/google/api/monitoring.proto rename to 6_micro-cluster/example-1-multi-repo/comment/third_party/google/api/monitoring.proto diff --git a/6_micro-cluster/comment/third_party/google/api/quota.proto b/6_micro-cluster/example-1-multi-repo/comment/third_party/google/api/quota.proto similarity index 100% rename from 6_micro-cluster/comment/third_party/google/api/quota.proto rename to 6_micro-cluster/example-1-multi-repo/comment/third_party/google/api/quota.proto diff --git a/6_micro-cluster/comment/third_party/google/api/resource.proto b/6_micro-cluster/example-1-multi-repo/comment/third_party/google/api/resource.proto similarity index 100% rename from 6_micro-cluster/comment/third_party/google/api/resource.proto rename to 6_micro-cluster/example-1-multi-repo/comment/third_party/google/api/resource.proto diff --git a/6_micro-cluster/comment/third_party/google/api/service.proto b/6_micro-cluster/example-1-multi-repo/comment/third_party/google/api/service.proto similarity index 100% rename from 6_micro-cluster/comment/third_party/google/api/service.proto rename to 6_micro-cluster/example-1-multi-repo/comment/third_party/google/api/service.proto diff --git a/6_micro-cluster/comment/third_party/google/api/serviceconfig.yaml b/6_micro-cluster/example-1-multi-repo/comment/third_party/google/api/serviceconfig.yaml similarity index 100% rename from 6_micro-cluster/comment/third_party/google/api/serviceconfig.yaml rename to 6_micro-cluster/example-1-multi-repo/comment/third_party/google/api/serviceconfig.yaml diff --git a/6_micro-cluster/comment/third_party/google/api/servicecontrol/BUILD.bazel b/6_micro-cluster/example-1-multi-repo/comment/third_party/google/api/servicecontrol/BUILD.bazel similarity index 100% rename from 6_micro-cluster/comment/third_party/google/api/servicecontrol/BUILD.bazel rename to 6_micro-cluster/example-1-multi-repo/comment/third_party/google/api/servicecontrol/BUILD.bazel diff --git a/6_micro-cluster/comment/third_party/google/api/servicecontrol/README.md b/6_micro-cluster/example-1-multi-repo/comment/third_party/google/api/servicecontrol/README.md similarity index 100% rename from 6_micro-cluster/comment/third_party/google/api/servicecontrol/README.md rename to 6_micro-cluster/example-1-multi-repo/comment/third_party/google/api/servicecontrol/README.md diff --git a/6_micro-cluster/comment/third_party/google/api/servicecontrol/v1/BUILD.bazel b/6_micro-cluster/example-1-multi-repo/comment/third_party/google/api/servicecontrol/v1/BUILD.bazel similarity 
index 100% rename from 6_micro-cluster/comment/third_party/google/api/servicecontrol/v1/BUILD.bazel rename to 6_micro-cluster/example-1-multi-repo/comment/third_party/google/api/servicecontrol/v1/BUILD.bazel diff --git a/6_micro-cluster/comment/third_party/google/api/servicecontrol/v1/check_error.proto b/6_micro-cluster/example-1-multi-repo/comment/third_party/google/api/servicecontrol/v1/check_error.proto similarity index 100% rename from 6_micro-cluster/comment/third_party/google/api/servicecontrol/v1/check_error.proto rename to 6_micro-cluster/example-1-multi-repo/comment/third_party/google/api/servicecontrol/v1/check_error.proto diff --git a/6_micro-cluster/comment/third_party/google/api/servicecontrol/v1/distribution.proto b/6_micro-cluster/example-1-multi-repo/comment/third_party/google/api/servicecontrol/v1/distribution.proto similarity index 100% rename from 6_micro-cluster/comment/third_party/google/api/servicecontrol/v1/distribution.proto rename to 6_micro-cluster/example-1-multi-repo/comment/third_party/google/api/servicecontrol/v1/distribution.proto diff --git a/6_micro-cluster/comment/third_party/google/api/servicecontrol/v1/http_request.proto b/6_micro-cluster/example-1-multi-repo/comment/third_party/google/api/servicecontrol/v1/http_request.proto similarity index 100% rename from 6_micro-cluster/comment/third_party/google/api/servicecontrol/v1/http_request.proto rename to 6_micro-cluster/example-1-multi-repo/comment/third_party/google/api/servicecontrol/v1/http_request.proto diff --git a/6_micro-cluster/comment/third_party/google/api/servicecontrol/v1/log_entry.proto b/6_micro-cluster/example-1-multi-repo/comment/third_party/google/api/servicecontrol/v1/log_entry.proto similarity index 100% rename from 6_micro-cluster/comment/third_party/google/api/servicecontrol/v1/log_entry.proto rename to 6_micro-cluster/example-1-multi-repo/comment/third_party/google/api/servicecontrol/v1/log_entry.proto diff --git a/6_micro-cluster/comment/third_party/google/api/servicecontrol/v1/metric_value.proto b/6_micro-cluster/example-1-multi-repo/comment/third_party/google/api/servicecontrol/v1/metric_value.proto similarity index 100% rename from 6_micro-cluster/comment/third_party/google/api/servicecontrol/v1/metric_value.proto rename to 6_micro-cluster/example-1-multi-repo/comment/third_party/google/api/servicecontrol/v1/metric_value.proto diff --git a/6_micro-cluster/comment/third_party/google/api/servicecontrol/v1/operation.proto b/6_micro-cluster/example-1-multi-repo/comment/third_party/google/api/servicecontrol/v1/operation.proto similarity index 100% rename from 6_micro-cluster/comment/third_party/google/api/servicecontrol/v1/operation.proto rename to 6_micro-cluster/example-1-multi-repo/comment/third_party/google/api/servicecontrol/v1/operation.proto diff --git a/6_micro-cluster/comment/third_party/google/api/servicecontrol/v1/quota_controller.proto b/6_micro-cluster/example-1-multi-repo/comment/third_party/google/api/servicecontrol/v1/quota_controller.proto similarity index 100% rename from 6_micro-cluster/comment/third_party/google/api/servicecontrol/v1/quota_controller.proto rename to 6_micro-cluster/example-1-multi-repo/comment/third_party/google/api/servicecontrol/v1/quota_controller.proto diff --git a/6_micro-cluster/comment/third_party/google/api/servicecontrol/v1/service_controller.proto b/6_micro-cluster/example-1-multi-repo/comment/third_party/google/api/servicecontrol/v1/service_controller.proto similarity index 100% rename from 
6_micro-cluster/comment/third_party/google/api/servicecontrol/v1/service_controller.proto rename to 6_micro-cluster/example-1-multi-repo/comment/third_party/google/api/servicecontrol/v1/service_controller.proto diff --git a/6_micro-cluster/comment/third_party/google/api/servicecontrol/v1/servicecontrol.yaml b/6_micro-cluster/example-1-multi-repo/comment/third_party/google/api/servicecontrol/v1/servicecontrol.yaml similarity index 100% rename from 6_micro-cluster/comment/third_party/google/api/servicecontrol/v1/servicecontrol.yaml rename to 6_micro-cluster/example-1-multi-repo/comment/third_party/google/api/servicecontrol/v1/servicecontrol.yaml diff --git a/6_micro-cluster/comment/third_party/google/api/servicemanagement/BUILD.bazel b/6_micro-cluster/example-1-multi-repo/comment/third_party/google/api/servicemanagement/BUILD.bazel similarity index 100% rename from 6_micro-cluster/comment/third_party/google/api/servicemanagement/BUILD.bazel rename to 6_micro-cluster/example-1-multi-repo/comment/third_party/google/api/servicemanagement/BUILD.bazel diff --git a/6_micro-cluster/comment/third_party/google/api/servicemanagement/README.md b/6_micro-cluster/example-1-multi-repo/comment/third_party/google/api/servicemanagement/README.md similarity index 100% rename from 6_micro-cluster/comment/third_party/google/api/servicemanagement/README.md rename to 6_micro-cluster/example-1-multi-repo/comment/third_party/google/api/servicemanagement/README.md diff --git a/6_micro-cluster/comment/third_party/google/api/servicemanagement/v1/BUILD.bazel b/6_micro-cluster/example-1-multi-repo/comment/third_party/google/api/servicemanagement/v1/BUILD.bazel similarity index 100% rename from 6_micro-cluster/comment/third_party/google/api/servicemanagement/v1/BUILD.bazel rename to 6_micro-cluster/example-1-multi-repo/comment/third_party/google/api/servicemanagement/v1/BUILD.bazel diff --git a/6_micro-cluster/comment/third_party/google/api/servicemanagement/v1/resources.proto b/6_micro-cluster/example-1-multi-repo/comment/third_party/google/api/servicemanagement/v1/resources.proto similarity index 100% rename from 6_micro-cluster/comment/third_party/google/api/servicemanagement/v1/resources.proto rename to 6_micro-cluster/example-1-multi-repo/comment/third_party/google/api/servicemanagement/v1/resources.proto diff --git a/6_micro-cluster/comment/third_party/google/api/servicemanagement/v1/servicemanagement_gapic.legacy.yaml b/6_micro-cluster/example-1-multi-repo/comment/third_party/google/api/servicemanagement/v1/servicemanagement_gapic.legacy.yaml similarity index 100% rename from 6_micro-cluster/comment/third_party/google/api/servicemanagement/v1/servicemanagement_gapic.legacy.yaml rename to 6_micro-cluster/example-1-multi-repo/comment/third_party/google/api/servicemanagement/v1/servicemanagement_gapic.legacy.yaml diff --git a/6_micro-cluster/comment/third_party/google/api/servicemanagement/v1/servicemanagement_gapic.yaml b/6_micro-cluster/example-1-multi-repo/comment/third_party/google/api/servicemanagement/v1/servicemanagement_gapic.yaml similarity index 100% rename from 6_micro-cluster/comment/third_party/google/api/servicemanagement/v1/servicemanagement_gapic.yaml rename to 6_micro-cluster/example-1-multi-repo/comment/third_party/google/api/servicemanagement/v1/servicemanagement_gapic.yaml diff --git a/6_micro-cluster/comment/third_party/google/api/servicemanagement/v1/servicemanagement_grpc_service_config.json 
b/6_micro-cluster/example-1-multi-repo/comment/third_party/google/api/servicemanagement/v1/servicemanagement_grpc_service_config.json similarity index 100% rename from 6_micro-cluster/comment/third_party/google/api/servicemanagement/v1/servicemanagement_grpc_service_config.json rename to 6_micro-cluster/example-1-multi-repo/comment/third_party/google/api/servicemanagement/v1/servicemanagement_grpc_service_config.json diff --git a/6_micro-cluster/comment/third_party/google/api/servicemanagement/v1/servicemanagement_v1.yaml b/6_micro-cluster/example-1-multi-repo/comment/third_party/google/api/servicemanagement/v1/servicemanagement_v1.yaml similarity index 100% rename from 6_micro-cluster/comment/third_party/google/api/servicemanagement/v1/servicemanagement_v1.yaml rename to 6_micro-cluster/example-1-multi-repo/comment/third_party/google/api/servicemanagement/v1/servicemanagement_v1.yaml diff --git a/6_micro-cluster/comment/third_party/google/api/servicemanagement/v1/servicemanager.proto b/6_micro-cluster/example-1-multi-repo/comment/third_party/google/api/servicemanagement/v1/servicemanager.proto similarity index 100% rename from 6_micro-cluster/comment/third_party/google/api/servicemanagement/v1/servicemanager.proto rename to 6_micro-cluster/example-1-multi-repo/comment/third_party/google/api/servicemanagement/v1/servicemanager.proto diff --git a/6_micro-cluster/comment/third_party/google/api/source_info.proto b/6_micro-cluster/example-1-multi-repo/comment/third_party/google/api/source_info.proto similarity index 100% rename from 6_micro-cluster/comment/third_party/google/api/source_info.proto rename to 6_micro-cluster/example-1-multi-repo/comment/third_party/google/api/source_info.proto diff --git a/6_micro-cluster/comment/third_party/google/api/system_parameter.proto b/6_micro-cluster/example-1-multi-repo/comment/third_party/google/api/system_parameter.proto similarity index 100% rename from 6_micro-cluster/comment/third_party/google/api/system_parameter.proto rename to 6_micro-cluster/example-1-multi-repo/comment/third_party/google/api/system_parameter.proto diff --git a/6_micro-cluster/comment/third_party/google/api/usage.proto b/6_micro-cluster/example-1-multi-repo/comment/third_party/google/api/usage.proto similarity index 100% rename from 6_micro-cluster/comment/third_party/google/api/usage.proto rename to 6_micro-cluster/example-1-multi-repo/comment/third_party/google/api/usage.proto diff --git a/6_micro-cluster/comment/third_party/google/protobuf/annotations.proto b/6_micro-cluster/example-1-multi-repo/comment/third_party/google/protobuf/annotations.proto similarity index 100% rename from 6_micro-cluster/comment/third_party/google/protobuf/annotations.proto rename to 6_micro-cluster/example-1-multi-repo/comment/third_party/google/protobuf/annotations.proto diff --git a/6_micro-cluster/comment/third_party/google/protobuf/any.proto b/6_micro-cluster/example-1-multi-repo/comment/third_party/google/protobuf/any.proto similarity index 100% rename from 6_micro-cluster/comment/third_party/google/protobuf/any.proto rename to 6_micro-cluster/example-1-multi-repo/comment/third_party/google/protobuf/any.proto diff --git a/6_micro-cluster/comment/third_party/google/protobuf/api.proto b/6_micro-cluster/example-1-multi-repo/comment/third_party/google/protobuf/api.proto similarity index 100% rename from 6_micro-cluster/comment/third_party/google/protobuf/api.proto rename to 6_micro-cluster/example-1-multi-repo/comment/third_party/google/protobuf/api.proto diff --git 
a/6_micro-cluster/comment/third_party/google/protobuf/compiler/plugin.proto b/6_micro-cluster/example-1-multi-repo/comment/third_party/google/protobuf/compiler/plugin.proto similarity index 100% rename from 6_micro-cluster/comment/third_party/google/protobuf/compiler/plugin.proto rename to 6_micro-cluster/example-1-multi-repo/comment/third_party/google/protobuf/compiler/plugin.proto diff --git a/6_micro-cluster/comment/third_party/google/protobuf/descriptor.proto b/6_micro-cluster/example-1-multi-repo/comment/third_party/google/protobuf/descriptor.proto similarity index 100% rename from 6_micro-cluster/comment/third_party/google/protobuf/descriptor.proto rename to 6_micro-cluster/example-1-multi-repo/comment/third_party/google/protobuf/descriptor.proto diff --git a/6_micro-cluster/comment/third_party/google/protobuf/duration.proto b/6_micro-cluster/example-1-multi-repo/comment/third_party/google/protobuf/duration.proto similarity index 100% rename from 6_micro-cluster/comment/third_party/google/protobuf/duration.proto rename to 6_micro-cluster/example-1-multi-repo/comment/third_party/google/protobuf/duration.proto diff --git a/6_micro-cluster/comment/third_party/google/protobuf/empty.proto b/6_micro-cluster/example-1-multi-repo/comment/third_party/google/protobuf/empty.proto similarity index 100% rename from 6_micro-cluster/comment/third_party/google/protobuf/empty.proto rename to 6_micro-cluster/example-1-multi-repo/comment/third_party/google/protobuf/empty.proto diff --git a/6_micro-cluster/comment/third_party/google/protobuf/field_mask.proto b/6_micro-cluster/example-1-multi-repo/comment/third_party/google/protobuf/field_mask.proto similarity index 100% rename from 6_micro-cluster/comment/third_party/google/protobuf/field_mask.proto rename to 6_micro-cluster/example-1-multi-repo/comment/third_party/google/protobuf/field_mask.proto diff --git a/6_micro-cluster/comment/third_party/google/protobuf/source_context.proto b/6_micro-cluster/example-1-multi-repo/comment/third_party/google/protobuf/source_context.proto similarity index 100% rename from 6_micro-cluster/comment/third_party/google/protobuf/source_context.proto rename to 6_micro-cluster/example-1-multi-repo/comment/third_party/google/protobuf/source_context.proto diff --git a/6_micro-cluster/comment/third_party/google/protobuf/struct.proto b/6_micro-cluster/example-1-multi-repo/comment/third_party/google/protobuf/struct.proto similarity index 100% rename from 6_micro-cluster/comment/third_party/google/protobuf/struct.proto rename to 6_micro-cluster/example-1-multi-repo/comment/third_party/google/protobuf/struct.proto diff --git a/6_micro-cluster/comment/third_party/google/protobuf/timestamp.proto b/6_micro-cluster/example-1-multi-repo/comment/third_party/google/protobuf/timestamp.proto similarity index 100% rename from 6_micro-cluster/comment/third_party/google/protobuf/timestamp.proto rename to 6_micro-cluster/example-1-multi-repo/comment/third_party/google/protobuf/timestamp.proto diff --git a/6_micro-cluster/comment/third_party/google/protobuf/type.proto b/6_micro-cluster/example-1-multi-repo/comment/third_party/google/protobuf/type.proto similarity index 100% rename from 6_micro-cluster/comment/third_party/google/protobuf/type.proto rename to 6_micro-cluster/example-1-multi-repo/comment/third_party/google/protobuf/type.proto diff --git a/6_micro-cluster/comment/third_party/google/protobuf/wrappers.proto b/6_micro-cluster/example-1-multi-repo/comment/third_party/google/protobuf/wrappers.proto similarity index 100% rename from 
6_micro-cluster/comment/third_party/google/protobuf/wrappers.proto rename to 6_micro-cluster/example-1-multi-repo/comment/third_party/google/protobuf/wrappers.proto diff --git a/6_micro-cluster/comment/third_party/protoc-gen-openapiv2/options/annotations.proto b/6_micro-cluster/example-1-multi-repo/comment/third_party/protoc-gen-openapiv2/options/annotations.proto similarity index 100% rename from 6_micro-cluster/comment/third_party/protoc-gen-openapiv2/options/annotations.proto rename to 6_micro-cluster/example-1-multi-repo/comment/third_party/protoc-gen-openapiv2/options/annotations.proto diff --git a/6_micro-cluster/comment/third_party/protoc-gen-openapiv2/options/openapiv2.proto b/6_micro-cluster/example-1-multi-repo/comment/third_party/protoc-gen-openapiv2/options/openapiv2.proto similarity index 100% rename from 6_micro-cluster/comment/third_party/protoc-gen-openapiv2/options/openapiv2.proto rename to 6_micro-cluster/example-1-multi-repo/comment/third_party/protoc-gen-openapiv2/options/openapiv2.proto diff --git a/6_micro-cluster/comment/third_party/tagger/tagger.proto b/6_micro-cluster/example-1-multi-repo/comment/third_party/tagger/tagger.proto similarity index 100% rename from 6_micro-cluster/comment/third_party/tagger/tagger.proto rename to 6_micro-cluster/example-1-multi-repo/comment/third_party/tagger/tagger.proto diff --git a/6_micro-cluster/comment/third_party/validate/validate.proto b/6_micro-cluster/example-1-multi-repo/comment/third_party/validate/validate.proto similarity index 100% rename from 6_micro-cluster/comment/third_party/validate/validate.proto rename to 6_micro-cluster/example-1-multi-repo/comment/third_party/validate/validate.proto diff --git a/6_micro-cluster/inventory/.gitignore b/6_micro-cluster/example-1-multi-repo/inventory/.gitignore similarity index 100% rename from 6_micro-cluster/inventory/.gitignore rename to 6_micro-cluster/example-1-multi-repo/inventory/.gitignore diff --git a/6_micro-cluster/inventory/.golangci.yml b/6_micro-cluster/example-1-multi-repo/inventory/.golangci.yml similarity index 100% rename from 6_micro-cluster/inventory/.golangci.yml rename to 6_micro-cluster/example-1-multi-repo/inventory/.golangci.yml diff --git a/6_micro-cluster/inventory/Jenkinsfile b/6_micro-cluster/example-1-multi-repo/inventory/Jenkinsfile similarity index 100% rename from 6_micro-cluster/inventory/Jenkinsfile rename to 6_micro-cluster/example-1-multi-repo/inventory/Jenkinsfile diff --git a/6_micro-cluster/inventory/Makefile b/6_micro-cluster/example-1-multi-repo/inventory/Makefile similarity index 100% rename from 6_micro-cluster/inventory/Makefile rename to 6_micro-cluster/example-1-multi-repo/inventory/Makefile diff --git a/6_micro-cluster/inventory/README.md b/6_micro-cluster/example-1-multi-repo/inventory/README.md similarity index 100% rename from 6_micro-cluster/inventory/README.md rename to 6_micro-cluster/example-1-multi-repo/inventory/README.md diff --git a/6_micro-cluster/inventory/api/inventory/v1/inventory.pb.go b/6_micro-cluster/example-1-multi-repo/inventory/api/inventory/v1/inventory.pb.go similarity index 100% rename from 6_micro-cluster/inventory/api/inventory/v1/inventory.pb.go rename to 6_micro-cluster/example-1-multi-repo/inventory/api/inventory/v1/inventory.pb.go diff --git a/6_micro-cluster/inventory/api/inventory/v1/inventory.pb.validate.go b/6_micro-cluster/example-1-multi-repo/inventory/api/inventory/v1/inventory.pb.validate.go similarity index 100% rename from 6_micro-cluster/inventory/api/inventory/v1/inventory.pb.validate.go 
rename to 6_micro-cluster/example-1-multi-repo/inventory/api/inventory/v1/inventory.pb.validate.go diff --git a/6_micro-cluster/inventory/api/inventory/v1/inventory.proto b/6_micro-cluster/example-1-multi-repo/inventory/api/inventory/v1/inventory.proto similarity index 100% rename from 6_micro-cluster/inventory/api/inventory/v1/inventory.proto rename to 6_micro-cluster/example-1-multi-repo/inventory/api/inventory/v1/inventory.proto diff --git a/6_micro-cluster/inventory/api/inventory/v1/inventory_grpc.pb.go b/6_micro-cluster/example-1-multi-repo/inventory/api/inventory/v1/inventory_grpc.pb.go similarity index 100% rename from 6_micro-cluster/inventory/api/inventory/v1/inventory_grpc.pb.go rename to 6_micro-cluster/example-1-multi-repo/inventory/api/inventory/v1/inventory_grpc.pb.go diff --git a/6_micro-cluster/inventory/cmd/inventory/initial/initApp.go b/6_micro-cluster/example-1-multi-repo/inventory/cmd/inventory/initial/initApp.go similarity index 100% rename from 6_micro-cluster/inventory/cmd/inventory/initial/initApp.go rename to 6_micro-cluster/example-1-multi-repo/inventory/cmd/inventory/initial/initApp.go diff --git a/6_micro-cluster/inventory/cmd/inventory/initial/registerClose.go b/6_micro-cluster/example-1-multi-repo/inventory/cmd/inventory/initial/registerClose.go similarity index 100% rename from 6_micro-cluster/inventory/cmd/inventory/initial/registerClose.go rename to 6_micro-cluster/example-1-multi-repo/inventory/cmd/inventory/initial/registerClose.go diff --git a/6_micro-cluster/inventory/cmd/inventory/initial/registerServer.go b/6_micro-cluster/example-1-multi-repo/inventory/cmd/inventory/initial/registerServer.go similarity index 100% rename from 6_micro-cluster/inventory/cmd/inventory/initial/registerServer.go rename to 6_micro-cluster/example-1-multi-repo/inventory/cmd/inventory/initial/registerServer.go diff --git a/6_micro-cluster/inventory/cmd/inventory/main.go b/6_micro-cluster/example-1-multi-repo/inventory/cmd/inventory/main.go similarity index 100% rename from 6_micro-cluster/inventory/cmd/inventory/main.go rename to 6_micro-cluster/example-1-multi-repo/inventory/cmd/inventory/main.go diff --git a/6_micro-cluster/inventory/configs/inventory.yml b/6_micro-cluster/example-1-multi-repo/inventory/configs/inventory.yml similarity index 100% rename from 6_micro-cluster/inventory/configs/inventory.yml rename to 6_micro-cluster/example-1-multi-repo/inventory/configs/inventory.yml diff --git a/6_micro-cluster/inventory/configs/inventory_cc.yml b/6_micro-cluster/example-1-multi-repo/inventory/configs/inventory_cc.yml similarity index 100% rename from 6_micro-cluster/inventory/configs/inventory_cc.yml rename to 6_micro-cluster/example-1-multi-repo/inventory/configs/inventory_cc.yml diff --git a/6_micro-cluster/inventory/configs/location.go b/6_micro-cluster/example-1-multi-repo/inventory/configs/location.go similarity index 100% rename from 6_micro-cluster/inventory/configs/location.go rename to 6_micro-cluster/example-1-multi-repo/inventory/configs/location.go diff --git a/6_micro-cluster/inventory/deployments/binary/README.md b/6_micro-cluster/example-1-multi-repo/inventory/deployments/binary/README.md similarity index 100% rename from 6_micro-cluster/inventory/deployments/binary/README.md rename to 6_micro-cluster/example-1-multi-repo/inventory/deployments/binary/README.md diff --git a/6_micro-cluster/inventory/deployments/binary/deploy.sh b/6_micro-cluster/example-1-multi-repo/inventory/deployments/binary/deploy.sh similarity index 100% rename from 
6_micro-cluster/inventory/deployments/binary/deploy.sh rename to 6_micro-cluster/example-1-multi-repo/inventory/deployments/binary/deploy.sh diff --git a/6_micro-cluster/inventory/deployments/binary/run.sh b/6_micro-cluster/example-1-multi-repo/inventory/deployments/binary/run.sh similarity index 100% rename from 6_micro-cluster/inventory/deployments/binary/run.sh rename to 6_micro-cluster/example-1-multi-repo/inventory/deployments/binary/run.sh diff --git a/6_micro-cluster/inventory/deployments/docker-compose/README.md b/6_micro-cluster/example-1-multi-repo/inventory/deployments/docker-compose/README.md similarity index 100% rename from 6_micro-cluster/inventory/deployments/docker-compose/README.md rename to 6_micro-cluster/example-1-multi-repo/inventory/deployments/docker-compose/README.md diff --git a/6_micro-cluster/inventory/deployments/docker-compose/docker-compose.yml b/6_micro-cluster/example-1-multi-repo/inventory/deployments/docker-compose/docker-compose.yml similarity index 100% rename from 6_micro-cluster/inventory/deployments/docker-compose/docker-compose.yml rename to 6_micro-cluster/example-1-multi-repo/inventory/deployments/docker-compose/docker-compose.yml diff --git a/6_micro-cluster/inventory/deployments/kubernetes/README.md b/6_micro-cluster/example-1-multi-repo/inventory/deployments/kubernetes/README.md similarity index 100% rename from 6_micro-cluster/inventory/deployments/kubernetes/README.md rename to 6_micro-cluster/example-1-multi-repo/inventory/deployments/kubernetes/README.md diff --git a/6_micro-cluster/inventory/deployments/kubernetes/inventory-configmap.yml b/6_micro-cluster/example-1-multi-repo/inventory/deployments/kubernetes/inventory-configmap.yml similarity index 100% rename from 6_micro-cluster/inventory/deployments/kubernetes/inventory-configmap.yml rename to 6_micro-cluster/example-1-multi-repo/inventory/deployments/kubernetes/inventory-configmap.yml diff --git a/6_micro-cluster/inventory/deployments/kubernetes/inventory-deployment.yml b/6_micro-cluster/example-1-multi-repo/inventory/deployments/kubernetes/inventory-deployment.yml similarity index 100% rename from 6_micro-cluster/inventory/deployments/kubernetes/inventory-deployment.yml rename to 6_micro-cluster/example-1-multi-repo/inventory/deployments/kubernetes/inventory-deployment.yml diff --git a/6_micro-cluster/inventory/deployments/kubernetes/inventory-svc.yml b/6_micro-cluster/example-1-multi-repo/inventory/deployments/kubernetes/inventory-svc.yml similarity index 100% rename from 6_micro-cluster/inventory/deployments/kubernetes/inventory-svc.yml rename to 6_micro-cluster/example-1-multi-repo/inventory/deployments/kubernetes/inventory-svc.yml diff --git a/6_micro-cluster/inventory/deployments/kubernetes/projectNameExample-namespace.yml b/6_micro-cluster/example-1-multi-repo/inventory/deployments/kubernetes/projectNameExample-namespace.yml similarity index 100% rename from 6_micro-cluster/inventory/deployments/kubernetes/projectNameExample-namespace.yml rename to 6_micro-cluster/example-1-multi-repo/inventory/deployments/kubernetes/projectNameExample-namespace.yml diff --git a/6_micro-cluster/inventory/docs/gen.info b/6_micro-cluster/example-1-multi-repo/inventory/docs/gen.info similarity index 100% rename from 6_micro-cluster/inventory/docs/gen.info rename to 6_micro-cluster/example-1-multi-repo/inventory/docs/gen.info diff --git a/6_micro-cluster/inventory/go.mod b/6_micro-cluster/example-1-multi-repo/inventory/go.mod similarity index 100% rename from 6_micro-cluster/inventory/go.mod rename 
to 6_micro-cluster/example-1-multi-repo/inventory/go.mod diff --git a/6_micro-cluster/inventory/go.sum b/6_micro-cluster/example-1-multi-repo/inventory/go.sum similarity index 100% rename from 6_micro-cluster/inventory/go.sum rename to 6_micro-cluster/example-1-multi-repo/inventory/go.sum diff --git a/6_micro-cluster/inventory/internal/config/inventory.go b/6_micro-cluster/example-1-multi-repo/inventory/internal/config/inventory.go similarity index 100% rename from 6_micro-cluster/inventory/internal/config/inventory.go rename to 6_micro-cluster/example-1-multi-repo/inventory/internal/config/inventory.go diff --git a/6_micro-cluster/inventory/internal/config/inventory_cc.go b/6_micro-cluster/example-1-multi-repo/inventory/internal/config/inventory_cc.go similarity index 100% rename from 6_micro-cluster/inventory/internal/config/inventory_cc.go rename to 6_micro-cluster/example-1-multi-repo/inventory/internal/config/inventory_cc.go diff --git a/6_micro-cluster/inventory/internal/config/inventory_test.go b/6_micro-cluster/example-1-multi-repo/inventory/internal/config/inventory_test.go similarity index 100% rename from 6_micro-cluster/inventory/internal/config/inventory_test.go rename to 6_micro-cluster/example-1-multi-repo/inventory/internal/config/inventory_test.go diff --git a/6_micro-cluster/inventory/internal/ecode/inventory_rpc.go b/6_micro-cluster/example-1-multi-repo/inventory/internal/ecode/inventory_rpc.go similarity index 100% rename from 6_micro-cluster/inventory/internal/ecode/inventory_rpc.go rename to 6_micro-cluster/example-1-multi-repo/inventory/internal/ecode/inventory_rpc.go diff --git a/6_micro-cluster/inventory/internal/ecode/systemCode_rpc.go b/6_micro-cluster/example-1-multi-repo/inventory/internal/ecode/systemCode_rpc.go similarity index 100% rename from 6_micro-cluster/inventory/internal/ecode/systemCode_rpc.go rename to 6_micro-cluster/example-1-multi-repo/inventory/internal/ecode/systemCode_rpc.go diff --git a/6_micro-cluster/inventory/internal/server/grpc.go b/6_micro-cluster/example-1-multi-repo/inventory/internal/server/grpc.go similarity index 100% rename from 6_micro-cluster/inventory/internal/server/grpc.go rename to 6_micro-cluster/example-1-multi-repo/inventory/internal/server/grpc.go diff --git a/6_micro-cluster/inventory/internal/server/grpc_option.go b/6_micro-cluster/example-1-multi-repo/inventory/internal/server/grpc_option.go similarity index 100% rename from 6_micro-cluster/inventory/internal/server/grpc_option.go rename to 6_micro-cluster/example-1-multi-repo/inventory/internal/server/grpc_option.go diff --git a/6_micro-cluster/inventory/internal/server/grpc_test.go b/6_micro-cluster/example-1-multi-repo/inventory/internal/server/grpc_test.go similarity index 100% rename from 6_micro-cluster/inventory/internal/server/grpc_test.go rename to 6_micro-cluster/example-1-multi-repo/inventory/internal/server/grpc_test.go diff --git a/6_micro-cluster/inventory/internal/service/inventory.go b/6_micro-cluster/example-1-multi-repo/inventory/internal/service/inventory.go similarity index 100% rename from 6_micro-cluster/inventory/internal/service/inventory.go rename to 6_micro-cluster/example-1-multi-repo/inventory/internal/service/inventory.go diff --git a/6_micro-cluster/inventory/internal/service/inventory_client_test.go b/6_micro-cluster/example-1-multi-repo/inventory/internal/service/inventory_client_test.go similarity index 100% rename from 6_micro-cluster/inventory/internal/service/inventory_client_test.go rename to 
6_micro-cluster/example-1-multi-repo/inventory/internal/service/inventory_client_test.go diff --git a/6_micro-cluster/inventory/internal/service/service.go b/6_micro-cluster/example-1-multi-repo/inventory/internal/service/service.go similarity index 100% rename from 6_micro-cluster/inventory/internal/service/service.go rename to 6_micro-cluster/example-1-multi-repo/inventory/internal/service/service.go diff --git a/6_micro-cluster/inventory/internal/service/service_test.go b/6_micro-cluster/example-1-multi-repo/inventory/internal/service/service_test.go similarity index 100% rename from 6_micro-cluster/inventory/internal/service/service_test.go rename to 6_micro-cluster/example-1-multi-repo/inventory/internal/service/service_test.go diff --git a/6_micro-cluster/inventory/scripts/binary-package.sh b/6_micro-cluster/example-1-multi-repo/inventory/scripts/binary-package.sh similarity index 100% rename from 6_micro-cluster/inventory/scripts/binary-package.sh rename to 6_micro-cluster/example-1-multi-repo/inventory/scripts/binary-package.sh diff --git a/6_micro-cluster/inventory/scripts/build/Dockerfile b/6_micro-cluster/example-1-multi-repo/inventory/scripts/build/Dockerfile similarity index 100% rename from 6_micro-cluster/inventory/scripts/build/Dockerfile rename to 6_micro-cluster/example-1-multi-repo/inventory/scripts/build/Dockerfile diff --git a/6_micro-cluster/inventory/scripts/build/Dockerfile_build b/6_micro-cluster/example-1-multi-repo/inventory/scripts/build/Dockerfile_build similarity index 100% rename from 6_micro-cluster/inventory/scripts/build/Dockerfile_build rename to 6_micro-cluster/example-1-multi-repo/inventory/scripts/build/Dockerfile_build diff --git a/6_micro-cluster/inventory/scripts/build/Dockerfile_test b/6_micro-cluster/example-1-multi-repo/inventory/scripts/build/Dockerfile_test similarity index 100% rename from 6_micro-cluster/inventory/scripts/build/Dockerfile_test rename to 6_micro-cluster/example-1-multi-repo/inventory/scripts/build/Dockerfile_test diff --git a/6_micro-cluster/inventory/scripts/build/README.md b/6_micro-cluster/example-1-multi-repo/inventory/scripts/build/README.md similarity index 100% rename from 6_micro-cluster/inventory/scripts/build/README.md rename to 6_micro-cluster/example-1-multi-repo/inventory/scripts/build/README.md diff --git a/6_micro-cluster/inventory/scripts/deploy-binary.sh b/6_micro-cluster/example-1-multi-repo/inventory/scripts/deploy-binary.sh similarity index 100% rename from 6_micro-cluster/inventory/scripts/deploy-binary.sh rename to 6_micro-cluster/example-1-multi-repo/inventory/scripts/deploy-binary.sh diff --git a/6_micro-cluster/inventory/scripts/deploy-docker.sh b/6_micro-cluster/example-1-multi-repo/inventory/scripts/deploy-docker.sh similarity index 100% rename from 6_micro-cluster/inventory/scripts/deploy-docker.sh rename to 6_micro-cluster/example-1-multi-repo/inventory/scripts/deploy-docker.sh diff --git a/6_micro-cluster/inventory/scripts/deploy-k8s.sh b/6_micro-cluster/example-1-multi-repo/inventory/scripts/deploy-k8s.sh similarity index 100% rename from 6_micro-cluster/inventory/scripts/deploy-k8s.sh rename to 6_micro-cluster/example-1-multi-repo/inventory/scripts/deploy-k8s.sh diff --git a/6_micro-cluster/inventory/scripts/image-build-local.sh b/6_micro-cluster/example-1-multi-repo/inventory/scripts/image-build-local.sh similarity index 100% rename from 6_micro-cluster/inventory/scripts/image-build-local.sh rename to 6_micro-cluster/example-1-multi-repo/inventory/scripts/image-build-local.sh diff --git 
a/6_micro-cluster/inventory/scripts/image-build.sh b/6_micro-cluster/example-1-multi-repo/inventory/scripts/image-build.sh similarity index 100% rename from 6_micro-cluster/inventory/scripts/image-build.sh rename to 6_micro-cluster/example-1-multi-repo/inventory/scripts/image-build.sh diff --git a/6_micro-cluster/inventory/scripts/image-build2.sh b/6_micro-cluster/example-1-multi-repo/inventory/scripts/image-build2.sh similarity index 100% rename from 6_micro-cluster/inventory/scripts/image-build2.sh rename to 6_micro-cluster/example-1-multi-repo/inventory/scripts/image-build2.sh diff --git a/6_micro-cluster/inventory/scripts/image-push.sh b/6_micro-cluster/example-1-multi-repo/inventory/scripts/image-push.sh similarity index 100% rename from 6_micro-cluster/inventory/scripts/image-push.sh rename to 6_micro-cluster/example-1-multi-repo/inventory/scripts/image-push.sh diff --git a/6_micro-cluster/inventory/scripts/image-rpc-test.sh b/6_micro-cluster/example-1-multi-repo/inventory/scripts/image-rpc-test.sh similarity index 100% rename from 6_micro-cluster/inventory/scripts/image-rpc-test.sh rename to 6_micro-cluster/example-1-multi-repo/inventory/scripts/image-rpc-test.sh diff --git a/6_micro-cluster/inventory/scripts/patch.sh b/6_micro-cluster/example-1-multi-repo/inventory/scripts/patch.sh similarity index 100% rename from 6_micro-cluster/inventory/scripts/patch.sh rename to 6_micro-cluster/example-1-multi-repo/inventory/scripts/patch.sh diff --git a/6_micro-cluster/inventory/scripts/proto-doc.sh b/6_micro-cluster/example-1-multi-repo/inventory/scripts/proto-doc.sh similarity index 100% rename from 6_micro-cluster/inventory/scripts/proto-doc.sh rename to 6_micro-cluster/example-1-multi-repo/inventory/scripts/proto-doc.sh diff --git a/6_micro-cluster/inventory/scripts/protoc.sh b/6_micro-cluster/example-1-multi-repo/inventory/scripts/protoc.sh similarity index 100% rename from 6_micro-cluster/inventory/scripts/protoc.sh rename to 6_micro-cluster/example-1-multi-repo/inventory/scripts/protoc.sh diff --git a/6_micro-cluster/inventory/scripts/run-nohup.sh b/6_micro-cluster/example-1-multi-repo/inventory/scripts/run-nohup.sh similarity index 100% rename from 6_micro-cluster/inventory/scripts/run-nohup.sh rename to 6_micro-cluster/example-1-multi-repo/inventory/scripts/run-nohup.sh diff --git a/6_micro-cluster/inventory/scripts/run.sh b/6_micro-cluster/example-1-multi-repo/inventory/scripts/run.sh similarity index 100% rename from 6_micro-cluster/inventory/scripts/run.sh rename to 6_micro-cluster/example-1-multi-repo/inventory/scripts/run.sh diff --git a/6_micro-cluster/inventory/scripts/swag-docs.sh b/6_micro-cluster/example-1-multi-repo/inventory/scripts/swag-docs.sh similarity index 100% rename from 6_micro-cluster/inventory/scripts/swag-docs.sh rename to 6_micro-cluster/example-1-multi-repo/inventory/scripts/swag-docs.sh diff --git a/6_micro-cluster/inventory/third_party/gogo/protobuf/gogoproto/gogo.proto b/6_micro-cluster/example-1-multi-repo/inventory/third_party/gogo/protobuf/gogoproto/gogo.proto similarity index 100% rename from 6_micro-cluster/inventory/third_party/gogo/protobuf/gogoproto/gogo.proto rename to 6_micro-cluster/example-1-multi-repo/inventory/third_party/gogo/protobuf/gogoproto/gogo.proto diff --git a/6_micro-cluster/inventory/third_party/google/api/BUILD.bazel b/6_micro-cluster/example-1-multi-repo/inventory/third_party/google/api/BUILD.bazel similarity index 100% rename from 6_micro-cluster/inventory/third_party/google/api/BUILD.bazel rename to 
6_micro-cluster/example-1-multi-repo/inventory/third_party/google/api/BUILD.bazel diff --git a/6_micro-cluster/inventory/third_party/google/api/README.md b/6_micro-cluster/example-1-multi-repo/inventory/third_party/google/api/README.md similarity index 100% rename from 6_micro-cluster/inventory/third_party/google/api/README.md rename to 6_micro-cluster/example-1-multi-repo/inventory/third_party/google/api/README.md diff --git a/6_micro-cluster/inventory/third_party/google/api/annotations.proto b/6_micro-cluster/example-1-multi-repo/inventory/third_party/google/api/annotations.proto similarity index 100% rename from 6_micro-cluster/inventory/third_party/google/api/annotations.proto rename to 6_micro-cluster/example-1-multi-repo/inventory/third_party/google/api/annotations.proto diff --git a/6_micro-cluster/inventory/third_party/google/api/auth.proto b/6_micro-cluster/example-1-multi-repo/inventory/third_party/google/api/auth.proto similarity index 100% rename from 6_micro-cluster/inventory/third_party/google/api/auth.proto rename to 6_micro-cluster/example-1-multi-repo/inventory/third_party/google/api/auth.proto diff --git a/6_micro-cluster/inventory/third_party/google/api/backend.proto b/6_micro-cluster/example-1-multi-repo/inventory/third_party/google/api/backend.proto similarity index 100% rename from 6_micro-cluster/inventory/third_party/google/api/backend.proto rename to 6_micro-cluster/example-1-multi-repo/inventory/third_party/google/api/backend.proto diff --git a/6_micro-cluster/inventory/third_party/google/api/billing.proto b/6_micro-cluster/example-1-multi-repo/inventory/third_party/google/api/billing.proto similarity index 100% rename from 6_micro-cluster/inventory/third_party/google/api/billing.proto rename to 6_micro-cluster/example-1-multi-repo/inventory/third_party/google/api/billing.proto diff --git a/6_micro-cluster/inventory/third_party/google/api/client.proto b/6_micro-cluster/example-1-multi-repo/inventory/third_party/google/api/client.proto similarity index 100% rename from 6_micro-cluster/inventory/third_party/google/api/client.proto rename to 6_micro-cluster/example-1-multi-repo/inventory/third_party/google/api/client.proto diff --git a/6_micro-cluster/inventory/third_party/google/api/config_change.proto b/6_micro-cluster/example-1-multi-repo/inventory/third_party/google/api/config_change.proto similarity index 100% rename from 6_micro-cluster/inventory/third_party/google/api/config_change.proto rename to 6_micro-cluster/example-1-multi-repo/inventory/third_party/google/api/config_change.proto diff --git a/6_micro-cluster/inventory/third_party/google/api/consumer.proto b/6_micro-cluster/example-1-multi-repo/inventory/third_party/google/api/consumer.proto similarity index 100% rename from 6_micro-cluster/inventory/third_party/google/api/consumer.proto rename to 6_micro-cluster/example-1-multi-repo/inventory/third_party/google/api/consumer.proto diff --git a/6_micro-cluster/inventory/third_party/google/api/context.proto b/6_micro-cluster/example-1-multi-repo/inventory/third_party/google/api/context.proto similarity index 100% rename from 6_micro-cluster/inventory/third_party/google/api/context.proto rename to 6_micro-cluster/example-1-multi-repo/inventory/third_party/google/api/context.proto diff --git a/6_micro-cluster/inventory/third_party/google/api/control.proto b/6_micro-cluster/example-1-multi-repo/inventory/third_party/google/api/control.proto similarity index 100% rename from 6_micro-cluster/inventory/third_party/google/api/control.proto rename to 
6_micro-cluster/example-1-multi-repo/inventory/third_party/google/api/control.proto diff --git a/6_micro-cluster/inventory/third_party/google/api/distribution.proto b/6_micro-cluster/example-1-multi-repo/inventory/third_party/google/api/distribution.proto similarity index 100% rename from 6_micro-cluster/inventory/third_party/google/api/distribution.proto rename to 6_micro-cluster/example-1-multi-repo/inventory/third_party/google/api/distribution.proto diff --git a/6_micro-cluster/inventory/third_party/google/api/documentation.proto b/6_micro-cluster/example-1-multi-repo/inventory/third_party/google/api/documentation.proto similarity index 100% rename from 6_micro-cluster/inventory/third_party/google/api/documentation.proto rename to 6_micro-cluster/example-1-multi-repo/inventory/third_party/google/api/documentation.proto diff --git a/6_micro-cluster/inventory/third_party/google/api/endpoint.proto b/6_micro-cluster/example-1-multi-repo/inventory/third_party/google/api/endpoint.proto similarity index 100% rename from 6_micro-cluster/inventory/third_party/google/api/endpoint.proto rename to 6_micro-cluster/example-1-multi-repo/inventory/third_party/google/api/endpoint.proto diff --git a/6_micro-cluster/inventory/third_party/google/api/expr/BUILD.bazel b/6_micro-cluster/example-1-multi-repo/inventory/third_party/google/api/expr/BUILD.bazel similarity index 100% rename from 6_micro-cluster/inventory/third_party/google/api/expr/BUILD.bazel rename to 6_micro-cluster/example-1-multi-repo/inventory/third_party/google/api/expr/BUILD.bazel diff --git a/6_micro-cluster/inventory/third_party/google/api/expr/cel.yaml b/6_micro-cluster/example-1-multi-repo/inventory/third_party/google/api/expr/cel.yaml similarity index 100% rename from 6_micro-cluster/inventory/third_party/google/api/expr/cel.yaml rename to 6_micro-cluster/example-1-multi-repo/inventory/third_party/google/api/expr/cel.yaml diff --git a/6_micro-cluster/inventory/third_party/google/api/expr/v1alpha1/BUILD.bazel b/6_micro-cluster/example-1-multi-repo/inventory/third_party/google/api/expr/v1alpha1/BUILD.bazel similarity index 100% rename from 6_micro-cluster/inventory/third_party/google/api/expr/v1alpha1/BUILD.bazel rename to 6_micro-cluster/example-1-multi-repo/inventory/third_party/google/api/expr/v1alpha1/BUILD.bazel diff --git a/6_micro-cluster/inventory/third_party/google/api/expr/v1alpha1/checked.proto b/6_micro-cluster/example-1-multi-repo/inventory/third_party/google/api/expr/v1alpha1/checked.proto similarity index 100% rename from 6_micro-cluster/inventory/third_party/google/api/expr/v1alpha1/checked.proto rename to 6_micro-cluster/example-1-multi-repo/inventory/third_party/google/api/expr/v1alpha1/checked.proto diff --git a/6_micro-cluster/inventory/third_party/google/api/expr/v1alpha1/conformance_service.proto b/6_micro-cluster/example-1-multi-repo/inventory/third_party/google/api/expr/v1alpha1/conformance_service.proto similarity index 100% rename from 6_micro-cluster/inventory/third_party/google/api/expr/v1alpha1/conformance_service.proto rename to 6_micro-cluster/example-1-multi-repo/inventory/third_party/google/api/expr/v1alpha1/conformance_service.proto diff --git a/6_micro-cluster/inventory/third_party/google/api/expr/v1alpha1/eval.proto b/6_micro-cluster/example-1-multi-repo/inventory/third_party/google/api/expr/v1alpha1/eval.proto similarity index 100% rename from 6_micro-cluster/inventory/third_party/google/api/expr/v1alpha1/eval.proto rename to 
6_micro-cluster/example-1-multi-repo/inventory/third_party/google/api/expr/v1alpha1/eval.proto diff --git a/6_micro-cluster/inventory/third_party/google/api/expr/v1alpha1/explain.proto b/6_micro-cluster/example-1-multi-repo/inventory/third_party/google/api/expr/v1alpha1/explain.proto similarity index 100% rename from 6_micro-cluster/inventory/third_party/google/api/expr/v1alpha1/explain.proto rename to 6_micro-cluster/example-1-multi-repo/inventory/third_party/google/api/expr/v1alpha1/explain.proto diff --git a/6_micro-cluster/inventory/third_party/google/api/expr/v1alpha1/syntax.proto b/6_micro-cluster/example-1-multi-repo/inventory/third_party/google/api/expr/v1alpha1/syntax.proto similarity index 100% rename from 6_micro-cluster/inventory/third_party/google/api/expr/v1alpha1/syntax.proto rename to 6_micro-cluster/example-1-multi-repo/inventory/third_party/google/api/expr/v1alpha1/syntax.proto diff --git a/6_micro-cluster/inventory/third_party/google/api/expr/v1alpha1/value.proto b/6_micro-cluster/example-1-multi-repo/inventory/third_party/google/api/expr/v1alpha1/value.proto similarity index 100% rename from 6_micro-cluster/inventory/third_party/google/api/expr/v1alpha1/value.proto rename to 6_micro-cluster/example-1-multi-repo/inventory/third_party/google/api/expr/v1alpha1/value.proto diff --git a/6_micro-cluster/inventory/third_party/google/api/expr/v1beta1/BUILD.bazel b/6_micro-cluster/example-1-multi-repo/inventory/third_party/google/api/expr/v1beta1/BUILD.bazel similarity index 100% rename from 6_micro-cluster/inventory/third_party/google/api/expr/v1beta1/BUILD.bazel rename to 6_micro-cluster/example-1-multi-repo/inventory/third_party/google/api/expr/v1beta1/BUILD.bazel diff --git a/6_micro-cluster/inventory/third_party/google/api/expr/v1beta1/decl.proto b/6_micro-cluster/example-1-multi-repo/inventory/third_party/google/api/expr/v1beta1/decl.proto similarity index 100% rename from 6_micro-cluster/inventory/third_party/google/api/expr/v1beta1/decl.proto rename to 6_micro-cluster/example-1-multi-repo/inventory/third_party/google/api/expr/v1beta1/decl.proto diff --git a/6_micro-cluster/inventory/third_party/google/api/expr/v1beta1/eval.proto b/6_micro-cluster/example-1-multi-repo/inventory/third_party/google/api/expr/v1beta1/eval.proto similarity index 100% rename from 6_micro-cluster/inventory/third_party/google/api/expr/v1beta1/eval.proto rename to 6_micro-cluster/example-1-multi-repo/inventory/third_party/google/api/expr/v1beta1/eval.proto diff --git a/6_micro-cluster/inventory/third_party/google/api/expr/v1beta1/expr.proto b/6_micro-cluster/example-1-multi-repo/inventory/third_party/google/api/expr/v1beta1/expr.proto similarity index 100% rename from 6_micro-cluster/inventory/third_party/google/api/expr/v1beta1/expr.proto rename to 6_micro-cluster/example-1-multi-repo/inventory/third_party/google/api/expr/v1beta1/expr.proto diff --git a/6_micro-cluster/inventory/third_party/google/api/expr/v1beta1/source.proto b/6_micro-cluster/example-1-multi-repo/inventory/third_party/google/api/expr/v1beta1/source.proto similarity index 100% rename from 6_micro-cluster/inventory/third_party/google/api/expr/v1beta1/source.proto rename to 6_micro-cluster/example-1-multi-repo/inventory/third_party/google/api/expr/v1beta1/source.proto diff --git a/6_micro-cluster/inventory/third_party/google/api/expr/v1beta1/value.proto b/6_micro-cluster/example-1-multi-repo/inventory/third_party/google/api/expr/v1beta1/value.proto similarity index 100% rename from 
6_micro-cluster/inventory/third_party/google/api/expr/v1beta1/value.proto rename to 6_micro-cluster/example-1-multi-repo/inventory/third_party/google/api/expr/v1beta1/value.proto diff --git a/6_micro-cluster/inventory/third_party/google/api/field_behavior.proto b/6_micro-cluster/example-1-multi-repo/inventory/third_party/google/api/field_behavior.proto similarity index 100% rename from 6_micro-cluster/inventory/third_party/google/api/field_behavior.proto rename to 6_micro-cluster/example-1-multi-repo/inventory/third_party/google/api/field_behavior.proto diff --git a/6_micro-cluster/inventory/third_party/google/api/http.proto b/6_micro-cluster/example-1-multi-repo/inventory/third_party/google/api/http.proto similarity index 100% rename from 6_micro-cluster/inventory/third_party/google/api/http.proto rename to 6_micro-cluster/example-1-multi-repo/inventory/third_party/google/api/http.proto diff --git a/6_micro-cluster/inventory/third_party/google/api/httpbody.proto b/6_micro-cluster/example-1-multi-repo/inventory/third_party/google/api/httpbody.proto similarity index 100% rename from 6_micro-cluster/inventory/third_party/google/api/httpbody.proto rename to 6_micro-cluster/example-1-multi-repo/inventory/third_party/google/api/httpbody.proto diff --git a/6_micro-cluster/inventory/third_party/google/api/label.proto b/6_micro-cluster/example-1-multi-repo/inventory/third_party/google/api/label.proto similarity index 100% rename from 6_micro-cluster/inventory/third_party/google/api/label.proto rename to 6_micro-cluster/example-1-multi-repo/inventory/third_party/google/api/label.proto diff --git a/6_micro-cluster/inventory/third_party/google/api/launch_stage.proto b/6_micro-cluster/example-1-multi-repo/inventory/third_party/google/api/launch_stage.proto similarity index 100% rename from 6_micro-cluster/inventory/third_party/google/api/launch_stage.proto rename to 6_micro-cluster/example-1-multi-repo/inventory/third_party/google/api/launch_stage.proto diff --git a/6_micro-cluster/inventory/third_party/google/api/log.proto b/6_micro-cluster/example-1-multi-repo/inventory/third_party/google/api/log.proto similarity index 100% rename from 6_micro-cluster/inventory/third_party/google/api/log.proto rename to 6_micro-cluster/example-1-multi-repo/inventory/third_party/google/api/log.proto diff --git a/6_micro-cluster/inventory/third_party/google/api/logging.proto b/6_micro-cluster/example-1-multi-repo/inventory/third_party/google/api/logging.proto similarity index 100% rename from 6_micro-cluster/inventory/third_party/google/api/logging.proto rename to 6_micro-cluster/example-1-multi-repo/inventory/third_party/google/api/logging.proto diff --git a/6_micro-cluster/inventory/third_party/google/api/metric.proto b/6_micro-cluster/example-1-multi-repo/inventory/third_party/google/api/metric.proto similarity index 100% rename from 6_micro-cluster/inventory/third_party/google/api/metric.proto rename to 6_micro-cluster/example-1-multi-repo/inventory/third_party/google/api/metric.proto diff --git a/6_micro-cluster/inventory/third_party/google/api/monitored_resource.proto b/6_micro-cluster/example-1-multi-repo/inventory/third_party/google/api/monitored_resource.proto similarity index 100% rename from 6_micro-cluster/inventory/third_party/google/api/monitored_resource.proto rename to 6_micro-cluster/example-1-multi-repo/inventory/third_party/google/api/monitored_resource.proto diff --git a/6_micro-cluster/inventory/third_party/google/api/monitoring.proto 
b/6_micro-cluster/example-1-multi-repo/inventory/third_party/google/api/monitoring.proto similarity index 100% rename from 6_micro-cluster/inventory/third_party/google/api/monitoring.proto rename to 6_micro-cluster/example-1-multi-repo/inventory/third_party/google/api/monitoring.proto diff --git a/6_micro-cluster/inventory/third_party/google/api/quota.proto b/6_micro-cluster/example-1-multi-repo/inventory/third_party/google/api/quota.proto similarity index 100% rename from 6_micro-cluster/inventory/third_party/google/api/quota.proto rename to 6_micro-cluster/example-1-multi-repo/inventory/third_party/google/api/quota.proto diff --git a/6_micro-cluster/inventory/third_party/google/api/resource.proto b/6_micro-cluster/example-1-multi-repo/inventory/third_party/google/api/resource.proto similarity index 100% rename from 6_micro-cluster/inventory/third_party/google/api/resource.proto rename to 6_micro-cluster/example-1-multi-repo/inventory/third_party/google/api/resource.proto diff --git a/6_micro-cluster/inventory/third_party/google/api/service.proto b/6_micro-cluster/example-1-multi-repo/inventory/third_party/google/api/service.proto similarity index 100% rename from 6_micro-cluster/inventory/third_party/google/api/service.proto rename to 6_micro-cluster/example-1-multi-repo/inventory/third_party/google/api/service.proto diff --git a/6_micro-cluster/inventory/third_party/google/api/serviceconfig.yaml b/6_micro-cluster/example-1-multi-repo/inventory/third_party/google/api/serviceconfig.yaml similarity index 100% rename from 6_micro-cluster/inventory/third_party/google/api/serviceconfig.yaml rename to 6_micro-cluster/example-1-multi-repo/inventory/third_party/google/api/serviceconfig.yaml diff --git a/6_micro-cluster/inventory/third_party/google/api/servicecontrol/BUILD.bazel b/6_micro-cluster/example-1-multi-repo/inventory/third_party/google/api/servicecontrol/BUILD.bazel similarity index 100% rename from 6_micro-cluster/inventory/third_party/google/api/servicecontrol/BUILD.bazel rename to 6_micro-cluster/example-1-multi-repo/inventory/third_party/google/api/servicecontrol/BUILD.bazel diff --git a/6_micro-cluster/inventory/third_party/google/api/servicecontrol/README.md b/6_micro-cluster/example-1-multi-repo/inventory/third_party/google/api/servicecontrol/README.md similarity index 100% rename from 6_micro-cluster/inventory/third_party/google/api/servicecontrol/README.md rename to 6_micro-cluster/example-1-multi-repo/inventory/third_party/google/api/servicecontrol/README.md diff --git a/6_micro-cluster/inventory/third_party/google/api/servicecontrol/v1/BUILD.bazel b/6_micro-cluster/example-1-multi-repo/inventory/third_party/google/api/servicecontrol/v1/BUILD.bazel similarity index 100% rename from 6_micro-cluster/inventory/third_party/google/api/servicecontrol/v1/BUILD.bazel rename to 6_micro-cluster/example-1-multi-repo/inventory/third_party/google/api/servicecontrol/v1/BUILD.bazel diff --git a/6_micro-cluster/inventory/third_party/google/api/servicecontrol/v1/check_error.proto b/6_micro-cluster/example-1-multi-repo/inventory/third_party/google/api/servicecontrol/v1/check_error.proto similarity index 100% rename from 6_micro-cluster/inventory/third_party/google/api/servicecontrol/v1/check_error.proto rename to 6_micro-cluster/example-1-multi-repo/inventory/third_party/google/api/servicecontrol/v1/check_error.proto diff --git a/6_micro-cluster/inventory/third_party/google/api/servicecontrol/v1/distribution.proto 
b/6_micro-cluster/example-1-multi-repo/inventory/third_party/google/api/servicecontrol/v1/distribution.proto similarity index 100% rename from 6_micro-cluster/inventory/third_party/google/api/servicecontrol/v1/distribution.proto rename to 6_micro-cluster/example-1-multi-repo/inventory/third_party/google/api/servicecontrol/v1/distribution.proto diff --git a/6_micro-cluster/inventory/third_party/google/api/servicecontrol/v1/http_request.proto b/6_micro-cluster/example-1-multi-repo/inventory/third_party/google/api/servicecontrol/v1/http_request.proto similarity index 100% rename from 6_micro-cluster/inventory/third_party/google/api/servicecontrol/v1/http_request.proto rename to 6_micro-cluster/example-1-multi-repo/inventory/third_party/google/api/servicecontrol/v1/http_request.proto diff --git a/6_micro-cluster/inventory/third_party/google/api/servicecontrol/v1/log_entry.proto b/6_micro-cluster/example-1-multi-repo/inventory/third_party/google/api/servicecontrol/v1/log_entry.proto similarity index 100% rename from 6_micro-cluster/inventory/third_party/google/api/servicecontrol/v1/log_entry.proto rename to 6_micro-cluster/example-1-multi-repo/inventory/third_party/google/api/servicecontrol/v1/log_entry.proto diff --git a/6_micro-cluster/inventory/third_party/google/api/servicecontrol/v1/metric_value.proto b/6_micro-cluster/example-1-multi-repo/inventory/third_party/google/api/servicecontrol/v1/metric_value.proto similarity index 100% rename from 6_micro-cluster/inventory/third_party/google/api/servicecontrol/v1/metric_value.proto rename to 6_micro-cluster/example-1-multi-repo/inventory/third_party/google/api/servicecontrol/v1/metric_value.proto diff --git a/6_micro-cluster/inventory/third_party/google/api/servicecontrol/v1/operation.proto b/6_micro-cluster/example-1-multi-repo/inventory/third_party/google/api/servicecontrol/v1/operation.proto similarity index 100% rename from 6_micro-cluster/inventory/third_party/google/api/servicecontrol/v1/operation.proto rename to 6_micro-cluster/example-1-multi-repo/inventory/third_party/google/api/servicecontrol/v1/operation.proto diff --git a/6_micro-cluster/inventory/third_party/google/api/servicecontrol/v1/quota_controller.proto b/6_micro-cluster/example-1-multi-repo/inventory/third_party/google/api/servicecontrol/v1/quota_controller.proto similarity index 100% rename from 6_micro-cluster/inventory/third_party/google/api/servicecontrol/v1/quota_controller.proto rename to 6_micro-cluster/example-1-multi-repo/inventory/third_party/google/api/servicecontrol/v1/quota_controller.proto diff --git a/6_micro-cluster/inventory/third_party/google/api/servicecontrol/v1/service_controller.proto b/6_micro-cluster/example-1-multi-repo/inventory/third_party/google/api/servicecontrol/v1/service_controller.proto similarity index 100% rename from 6_micro-cluster/inventory/third_party/google/api/servicecontrol/v1/service_controller.proto rename to 6_micro-cluster/example-1-multi-repo/inventory/third_party/google/api/servicecontrol/v1/service_controller.proto diff --git a/6_micro-cluster/inventory/third_party/google/api/servicecontrol/v1/servicecontrol.yaml b/6_micro-cluster/example-1-multi-repo/inventory/third_party/google/api/servicecontrol/v1/servicecontrol.yaml similarity index 100% rename from 6_micro-cluster/inventory/third_party/google/api/servicecontrol/v1/servicecontrol.yaml rename to 6_micro-cluster/example-1-multi-repo/inventory/third_party/google/api/servicecontrol/v1/servicecontrol.yaml diff --git 
a/6_micro-cluster/inventory/third_party/google/api/servicemanagement/BUILD.bazel b/6_micro-cluster/example-1-multi-repo/inventory/third_party/google/api/servicemanagement/BUILD.bazel similarity index 100% rename from 6_micro-cluster/inventory/third_party/google/api/servicemanagement/BUILD.bazel rename to 6_micro-cluster/example-1-multi-repo/inventory/third_party/google/api/servicemanagement/BUILD.bazel diff --git a/6_micro-cluster/inventory/third_party/google/api/servicemanagement/README.md b/6_micro-cluster/example-1-multi-repo/inventory/third_party/google/api/servicemanagement/README.md similarity index 100% rename from 6_micro-cluster/inventory/third_party/google/api/servicemanagement/README.md rename to 6_micro-cluster/example-1-multi-repo/inventory/third_party/google/api/servicemanagement/README.md diff --git a/6_micro-cluster/inventory/third_party/google/api/servicemanagement/v1/BUILD.bazel b/6_micro-cluster/example-1-multi-repo/inventory/third_party/google/api/servicemanagement/v1/BUILD.bazel similarity index 100% rename from 6_micro-cluster/inventory/third_party/google/api/servicemanagement/v1/BUILD.bazel rename to 6_micro-cluster/example-1-multi-repo/inventory/third_party/google/api/servicemanagement/v1/BUILD.bazel diff --git a/6_micro-cluster/inventory/third_party/google/api/servicemanagement/v1/resources.proto b/6_micro-cluster/example-1-multi-repo/inventory/third_party/google/api/servicemanagement/v1/resources.proto similarity index 100% rename from 6_micro-cluster/inventory/third_party/google/api/servicemanagement/v1/resources.proto rename to 6_micro-cluster/example-1-multi-repo/inventory/third_party/google/api/servicemanagement/v1/resources.proto diff --git a/6_micro-cluster/inventory/third_party/google/api/servicemanagement/v1/servicemanagement_gapic.legacy.yaml b/6_micro-cluster/example-1-multi-repo/inventory/third_party/google/api/servicemanagement/v1/servicemanagement_gapic.legacy.yaml similarity index 100% rename from 6_micro-cluster/inventory/third_party/google/api/servicemanagement/v1/servicemanagement_gapic.legacy.yaml rename to 6_micro-cluster/example-1-multi-repo/inventory/third_party/google/api/servicemanagement/v1/servicemanagement_gapic.legacy.yaml diff --git a/6_micro-cluster/inventory/third_party/google/api/servicemanagement/v1/servicemanagement_gapic.yaml b/6_micro-cluster/example-1-multi-repo/inventory/third_party/google/api/servicemanagement/v1/servicemanagement_gapic.yaml similarity index 100% rename from 6_micro-cluster/inventory/third_party/google/api/servicemanagement/v1/servicemanagement_gapic.yaml rename to 6_micro-cluster/example-1-multi-repo/inventory/third_party/google/api/servicemanagement/v1/servicemanagement_gapic.yaml diff --git a/6_micro-cluster/inventory/third_party/google/api/servicemanagement/v1/servicemanagement_grpc_service_config.json b/6_micro-cluster/example-1-multi-repo/inventory/third_party/google/api/servicemanagement/v1/servicemanagement_grpc_service_config.json similarity index 100% rename from 6_micro-cluster/inventory/third_party/google/api/servicemanagement/v1/servicemanagement_grpc_service_config.json rename to 6_micro-cluster/example-1-multi-repo/inventory/third_party/google/api/servicemanagement/v1/servicemanagement_grpc_service_config.json diff --git a/6_micro-cluster/inventory/third_party/google/api/servicemanagement/v1/servicemanagement_v1.yaml b/6_micro-cluster/example-1-multi-repo/inventory/third_party/google/api/servicemanagement/v1/servicemanagement_v1.yaml similarity index 100% rename from 
6_micro-cluster/inventory/third_party/google/api/servicemanagement/v1/servicemanagement_v1.yaml rename to 6_micro-cluster/example-1-multi-repo/inventory/third_party/google/api/servicemanagement/v1/servicemanagement_v1.yaml diff --git a/6_micro-cluster/inventory/third_party/google/api/servicemanagement/v1/servicemanager.proto b/6_micro-cluster/example-1-multi-repo/inventory/third_party/google/api/servicemanagement/v1/servicemanager.proto similarity index 100% rename from 6_micro-cluster/inventory/third_party/google/api/servicemanagement/v1/servicemanager.proto rename to 6_micro-cluster/example-1-multi-repo/inventory/third_party/google/api/servicemanagement/v1/servicemanager.proto diff --git a/6_micro-cluster/inventory/third_party/google/api/source_info.proto b/6_micro-cluster/example-1-multi-repo/inventory/third_party/google/api/source_info.proto similarity index 100% rename from 6_micro-cluster/inventory/third_party/google/api/source_info.proto rename to 6_micro-cluster/example-1-multi-repo/inventory/third_party/google/api/source_info.proto diff --git a/6_micro-cluster/inventory/third_party/google/api/system_parameter.proto b/6_micro-cluster/example-1-multi-repo/inventory/third_party/google/api/system_parameter.proto similarity index 100% rename from 6_micro-cluster/inventory/third_party/google/api/system_parameter.proto rename to 6_micro-cluster/example-1-multi-repo/inventory/third_party/google/api/system_parameter.proto diff --git a/6_micro-cluster/inventory/third_party/google/api/usage.proto b/6_micro-cluster/example-1-multi-repo/inventory/third_party/google/api/usage.proto similarity index 100% rename from 6_micro-cluster/inventory/third_party/google/api/usage.proto rename to 6_micro-cluster/example-1-multi-repo/inventory/third_party/google/api/usage.proto diff --git a/6_micro-cluster/inventory/third_party/google/protobuf/annotations.proto b/6_micro-cluster/example-1-multi-repo/inventory/third_party/google/protobuf/annotations.proto similarity index 100% rename from 6_micro-cluster/inventory/third_party/google/protobuf/annotations.proto rename to 6_micro-cluster/example-1-multi-repo/inventory/third_party/google/protobuf/annotations.proto diff --git a/6_micro-cluster/inventory/third_party/google/protobuf/any.proto b/6_micro-cluster/example-1-multi-repo/inventory/third_party/google/protobuf/any.proto similarity index 100% rename from 6_micro-cluster/inventory/third_party/google/protobuf/any.proto rename to 6_micro-cluster/example-1-multi-repo/inventory/third_party/google/protobuf/any.proto diff --git a/6_micro-cluster/inventory/third_party/google/protobuf/api.proto b/6_micro-cluster/example-1-multi-repo/inventory/third_party/google/protobuf/api.proto similarity index 100% rename from 6_micro-cluster/inventory/third_party/google/protobuf/api.proto rename to 6_micro-cluster/example-1-multi-repo/inventory/third_party/google/protobuf/api.proto diff --git a/6_micro-cluster/inventory/third_party/google/protobuf/compiler/plugin.proto b/6_micro-cluster/example-1-multi-repo/inventory/third_party/google/protobuf/compiler/plugin.proto similarity index 100% rename from 6_micro-cluster/inventory/third_party/google/protobuf/compiler/plugin.proto rename to 6_micro-cluster/example-1-multi-repo/inventory/third_party/google/protobuf/compiler/plugin.proto diff --git a/6_micro-cluster/inventory/third_party/google/protobuf/descriptor.proto b/6_micro-cluster/example-1-multi-repo/inventory/third_party/google/protobuf/descriptor.proto similarity index 100% rename from 
6_micro-cluster/inventory/third_party/google/protobuf/descriptor.proto rename to 6_micro-cluster/example-1-multi-repo/inventory/third_party/google/protobuf/descriptor.proto diff --git a/6_micro-cluster/inventory/third_party/google/protobuf/duration.proto b/6_micro-cluster/example-1-multi-repo/inventory/third_party/google/protobuf/duration.proto similarity index 100% rename from 6_micro-cluster/inventory/third_party/google/protobuf/duration.proto rename to 6_micro-cluster/example-1-multi-repo/inventory/third_party/google/protobuf/duration.proto diff --git a/6_micro-cluster/inventory/third_party/google/protobuf/empty.proto b/6_micro-cluster/example-1-multi-repo/inventory/third_party/google/protobuf/empty.proto similarity index 100% rename from 6_micro-cluster/inventory/third_party/google/protobuf/empty.proto rename to 6_micro-cluster/example-1-multi-repo/inventory/third_party/google/protobuf/empty.proto diff --git a/6_micro-cluster/inventory/third_party/google/protobuf/field_mask.proto b/6_micro-cluster/example-1-multi-repo/inventory/third_party/google/protobuf/field_mask.proto similarity index 100% rename from 6_micro-cluster/inventory/third_party/google/protobuf/field_mask.proto rename to 6_micro-cluster/example-1-multi-repo/inventory/third_party/google/protobuf/field_mask.proto diff --git a/6_micro-cluster/inventory/third_party/google/protobuf/source_context.proto b/6_micro-cluster/example-1-multi-repo/inventory/third_party/google/protobuf/source_context.proto similarity index 100% rename from 6_micro-cluster/inventory/third_party/google/protobuf/source_context.proto rename to 6_micro-cluster/example-1-multi-repo/inventory/third_party/google/protobuf/source_context.proto diff --git a/6_micro-cluster/inventory/third_party/google/protobuf/struct.proto b/6_micro-cluster/example-1-multi-repo/inventory/third_party/google/protobuf/struct.proto similarity index 100% rename from 6_micro-cluster/inventory/third_party/google/protobuf/struct.proto rename to 6_micro-cluster/example-1-multi-repo/inventory/third_party/google/protobuf/struct.proto diff --git a/6_micro-cluster/inventory/third_party/google/protobuf/timestamp.proto b/6_micro-cluster/example-1-multi-repo/inventory/third_party/google/protobuf/timestamp.proto similarity index 100% rename from 6_micro-cluster/inventory/third_party/google/protobuf/timestamp.proto rename to 6_micro-cluster/example-1-multi-repo/inventory/third_party/google/protobuf/timestamp.proto diff --git a/6_micro-cluster/inventory/third_party/google/protobuf/type.proto b/6_micro-cluster/example-1-multi-repo/inventory/third_party/google/protobuf/type.proto similarity index 100% rename from 6_micro-cluster/inventory/third_party/google/protobuf/type.proto rename to 6_micro-cluster/example-1-multi-repo/inventory/third_party/google/protobuf/type.proto diff --git a/6_micro-cluster/inventory/third_party/google/protobuf/wrappers.proto b/6_micro-cluster/example-1-multi-repo/inventory/third_party/google/protobuf/wrappers.proto similarity index 100% rename from 6_micro-cluster/inventory/third_party/google/protobuf/wrappers.proto rename to 6_micro-cluster/example-1-multi-repo/inventory/third_party/google/protobuf/wrappers.proto diff --git a/6_micro-cluster/inventory/third_party/protoc-gen-openapiv2/options/annotations.proto b/6_micro-cluster/example-1-multi-repo/inventory/third_party/protoc-gen-openapiv2/options/annotations.proto similarity index 100% rename from 6_micro-cluster/inventory/third_party/protoc-gen-openapiv2/options/annotations.proto rename to 
6_micro-cluster/example-1-multi-repo/inventory/third_party/protoc-gen-openapiv2/options/annotations.proto diff --git a/6_micro-cluster/inventory/third_party/protoc-gen-openapiv2/options/openapiv2.proto b/6_micro-cluster/example-1-multi-repo/inventory/third_party/protoc-gen-openapiv2/options/openapiv2.proto similarity index 100% rename from 6_micro-cluster/inventory/third_party/protoc-gen-openapiv2/options/openapiv2.proto rename to 6_micro-cluster/example-1-multi-repo/inventory/third_party/protoc-gen-openapiv2/options/openapiv2.proto diff --git a/6_micro-cluster/inventory/third_party/tagger/tagger.proto b/6_micro-cluster/example-1-multi-repo/inventory/third_party/tagger/tagger.proto similarity index 100% rename from 6_micro-cluster/inventory/third_party/tagger/tagger.proto rename to 6_micro-cluster/example-1-multi-repo/inventory/third_party/tagger/tagger.proto diff --git a/6_micro-cluster/inventory/third_party/validate/validate.proto b/6_micro-cluster/example-1-multi-repo/inventory/third_party/validate/validate.proto similarity index 100% rename from 6_micro-cluster/inventory/third_party/validate/validate.proto rename to 6_micro-cluster/example-1-multi-repo/inventory/third_party/validate/validate.proto diff --git a/6_micro-cluster/product/.gitignore b/6_micro-cluster/example-1-multi-repo/product/.gitignore similarity index 100% rename from 6_micro-cluster/product/.gitignore rename to 6_micro-cluster/example-1-multi-repo/product/.gitignore diff --git a/6_micro-cluster/product/.golangci.yml b/6_micro-cluster/example-1-multi-repo/product/.golangci.yml similarity index 100% rename from 6_micro-cluster/product/.golangci.yml rename to 6_micro-cluster/example-1-multi-repo/product/.golangci.yml diff --git a/6_micro-cluster/product/Jenkinsfile b/6_micro-cluster/example-1-multi-repo/product/Jenkinsfile similarity index 100% rename from 6_micro-cluster/product/Jenkinsfile rename to 6_micro-cluster/example-1-multi-repo/product/Jenkinsfile diff --git a/6_micro-cluster/product/Makefile b/6_micro-cluster/example-1-multi-repo/product/Makefile similarity index 100% rename from 6_micro-cluster/product/Makefile rename to 6_micro-cluster/example-1-multi-repo/product/Makefile diff --git a/6_micro-cluster/product/README.md b/6_micro-cluster/example-1-multi-repo/product/README.md similarity index 100% rename from 6_micro-cluster/product/README.md rename to 6_micro-cluster/example-1-multi-repo/product/README.md diff --git a/6_micro-cluster/product/api/product/v1/product.pb.go b/6_micro-cluster/example-1-multi-repo/product/api/product/v1/product.pb.go similarity index 100% rename from 6_micro-cluster/product/api/product/v1/product.pb.go rename to 6_micro-cluster/example-1-multi-repo/product/api/product/v1/product.pb.go diff --git a/6_micro-cluster/product/api/product/v1/product.pb.validate.go b/6_micro-cluster/example-1-multi-repo/product/api/product/v1/product.pb.validate.go similarity index 100% rename from 6_micro-cluster/product/api/product/v1/product.pb.validate.go rename to 6_micro-cluster/example-1-multi-repo/product/api/product/v1/product.pb.validate.go diff --git a/6_micro-cluster/product/api/product/v1/product.proto b/6_micro-cluster/example-1-multi-repo/product/api/product/v1/product.proto similarity index 100% rename from 6_micro-cluster/product/api/product/v1/product.proto rename to 6_micro-cluster/example-1-multi-repo/product/api/product/v1/product.proto diff --git a/6_micro-cluster/product/api/product/v1/product_grpc.pb.go 
b/6_micro-cluster/example-1-multi-repo/product/api/product/v1/product_grpc.pb.go similarity index 100% rename from 6_micro-cluster/product/api/product/v1/product_grpc.pb.go rename to 6_micro-cluster/example-1-multi-repo/product/api/product/v1/product_grpc.pb.go diff --git a/6_micro-cluster/product/cmd/product/initial/initApp.go b/6_micro-cluster/example-1-multi-repo/product/cmd/product/initial/initApp.go similarity index 100% rename from 6_micro-cluster/product/cmd/product/initial/initApp.go rename to 6_micro-cluster/example-1-multi-repo/product/cmd/product/initial/initApp.go diff --git a/6_micro-cluster/product/cmd/product/initial/registerClose.go b/6_micro-cluster/example-1-multi-repo/product/cmd/product/initial/registerClose.go similarity index 100% rename from 6_micro-cluster/product/cmd/product/initial/registerClose.go rename to 6_micro-cluster/example-1-multi-repo/product/cmd/product/initial/registerClose.go diff --git a/6_micro-cluster/product/cmd/product/initial/registerServer.go b/6_micro-cluster/example-1-multi-repo/product/cmd/product/initial/registerServer.go similarity index 100% rename from 6_micro-cluster/product/cmd/product/initial/registerServer.go rename to 6_micro-cluster/example-1-multi-repo/product/cmd/product/initial/registerServer.go diff --git a/6_micro-cluster/product/cmd/product/main.go b/6_micro-cluster/example-1-multi-repo/product/cmd/product/main.go similarity index 100% rename from 6_micro-cluster/product/cmd/product/main.go rename to 6_micro-cluster/example-1-multi-repo/product/cmd/product/main.go diff --git a/6_micro-cluster/product/configs/location.go b/6_micro-cluster/example-1-multi-repo/product/configs/location.go similarity index 100% rename from 6_micro-cluster/product/configs/location.go rename to 6_micro-cluster/example-1-multi-repo/product/configs/location.go diff --git a/6_micro-cluster/product/configs/product.yml b/6_micro-cluster/example-1-multi-repo/product/configs/product.yml similarity index 100% rename from 6_micro-cluster/product/configs/product.yml rename to 6_micro-cluster/example-1-multi-repo/product/configs/product.yml diff --git a/6_micro-cluster/product/configs/product_cc.yml b/6_micro-cluster/example-1-multi-repo/product/configs/product_cc.yml similarity index 100% rename from 6_micro-cluster/product/configs/product_cc.yml rename to 6_micro-cluster/example-1-multi-repo/product/configs/product_cc.yml diff --git a/6_micro-cluster/product/deployments/binary/README.md b/6_micro-cluster/example-1-multi-repo/product/deployments/binary/README.md similarity index 100% rename from 6_micro-cluster/product/deployments/binary/README.md rename to 6_micro-cluster/example-1-multi-repo/product/deployments/binary/README.md diff --git a/6_micro-cluster/product/deployments/binary/deploy.sh b/6_micro-cluster/example-1-multi-repo/product/deployments/binary/deploy.sh similarity index 100% rename from 6_micro-cluster/product/deployments/binary/deploy.sh rename to 6_micro-cluster/example-1-multi-repo/product/deployments/binary/deploy.sh diff --git a/6_micro-cluster/product/deployments/binary/run.sh b/6_micro-cluster/example-1-multi-repo/product/deployments/binary/run.sh similarity index 100% rename from 6_micro-cluster/product/deployments/binary/run.sh rename to 6_micro-cluster/example-1-multi-repo/product/deployments/binary/run.sh diff --git a/6_micro-cluster/product/deployments/docker-compose/README.md b/6_micro-cluster/example-1-multi-repo/product/deployments/docker-compose/README.md similarity index 100% rename from 
6_micro-cluster/product/deployments/docker-compose/README.md rename to 6_micro-cluster/example-1-multi-repo/product/deployments/docker-compose/README.md diff --git a/6_micro-cluster/product/deployments/docker-compose/docker-compose.yml b/6_micro-cluster/example-1-multi-repo/product/deployments/docker-compose/docker-compose.yml similarity index 100% rename from 6_micro-cluster/product/deployments/docker-compose/docker-compose.yml rename to 6_micro-cluster/example-1-multi-repo/product/deployments/docker-compose/docker-compose.yml diff --git a/6_micro-cluster/product/deployments/kubernetes/README.md b/6_micro-cluster/example-1-multi-repo/product/deployments/kubernetes/README.md similarity index 100% rename from 6_micro-cluster/product/deployments/kubernetes/README.md rename to 6_micro-cluster/example-1-multi-repo/product/deployments/kubernetes/README.md diff --git a/6_micro-cluster/product/deployments/kubernetes/product-configmap.yml b/6_micro-cluster/example-1-multi-repo/product/deployments/kubernetes/product-configmap.yml similarity index 100% rename from 6_micro-cluster/product/deployments/kubernetes/product-configmap.yml rename to 6_micro-cluster/example-1-multi-repo/product/deployments/kubernetes/product-configmap.yml diff --git a/6_micro-cluster/product/deployments/kubernetes/product-deployment.yml b/6_micro-cluster/example-1-multi-repo/product/deployments/kubernetes/product-deployment.yml similarity index 100% rename from 6_micro-cluster/product/deployments/kubernetes/product-deployment.yml rename to 6_micro-cluster/example-1-multi-repo/product/deployments/kubernetes/product-deployment.yml diff --git a/6_micro-cluster/product/deployments/kubernetes/product-svc.yml b/6_micro-cluster/example-1-multi-repo/product/deployments/kubernetes/product-svc.yml similarity index 100% rename from 6_micro-cluster/product/deployments/kubernetes/product-svc.yml rename to 6_micro-cluster/example-1-multi-repo/product/deployments/kubernetes/product-svc.yml diff --git a/6_micro-cluster/product/deployments/kubernetes/projectNameExample-namespace.yml b/6_micro-cluster/example-1-multi-repo/product/deployments/kubernetes/projectNameExample-namespace.yml similarity index 100% rename from 6_micro-cluster/product/deployments/kubernetes/projectNameExample-namespace.yml rename to 6_micro-cluster/example-1-multi-repo/product/deployments/kubernetes/projectNameExample-namespace.yml diff --git a/6_micro-cluster/product/docs/gen.info b/6_micro-cluster/example-1-multi-repo/product/docs/gen.info similarity index 100% rename from 6_micro-cluster/product/docs/gen.info rename to 6_micro-cluster/example-1-multi-repo/product/docs/gen.info diff --git a/6_micro-cluster/product/go.mod b/6_micro-cluster/example-1-multi-repo/product/go.mod similarity index 100% rename from 6_micro-cluster/product/go.mod rename to 6_micro-cluster/example-1-multi-repo/product/go.mod diff --git a/6_micro-cluster/product/go.sum b/6_micro-cluster/example-1-multi-repo/product/go.sum similarity index 100% rename from 6_micro-cluster/product/go.sum rename to 6_micro-cluster/example-1-multi-repo/product/go.sum diff --git a/6_micro-cluster/product/internal/config/product.go b/6_micro-cluster/example-1-multi-repo/product/internal/config/product.go similarity index 100% rename from 6_micro-cluster/product/internal/config/product.go rename to 6_micro-cluster/example-1-multi-repo/product/internal/config/product.go diff --git a/6_micro-cluster/product/internal/config/product_cc.go b/6_micro-cluster/example-1-multi-repo/product/internal/config/product_cc.go 
similarity index 100% rename from 6_micro-cluster/product/internal/config/product_cc.go rename to 6_micro-cluster/example-1-multi-repo/product/internal/config/product_cc.go diff --git a/6_micro-cluster/product/internal/config/product_test.go b/6_micro-cluster/example-1-multi-repo/product/internal/config/product_test.go similarity index 100% rename from 6_micro-cluster/product/internal/config/product_test.go rename to 6_micro-cluster/example-1-multi-repo/product/internal/config/product_test.go diff --git a/6_micro-cluster/product/internal/ecode/product_rpc.go b/6_micro-cluster/example-1-multi-repo/product/internal/ecode/product_rpc.go similarity index 100% rename from 6_micro-cluster/product/internal/ecode/product_rpc.go rename to 6_micro-cluster/example-1-multi-repo/product/internal/ecode/product_rpc.go diff --git a/6_micro-cluster/product/internal/ecode/systemCode_rpc.go b/6_micro-cluster/example-1-multi-repo/product/internal/ecode/systemCode_rpc.go similarity index 100% rename from 6_micro-cluster/product/internal/ecode/systemCode_rpc.go rename to 6_micro-cluster/example-1-multi-repo/product/internal/ecode/systemCode_rpc.go diff --git a/6_micro-cluster/product/internal/server/grpc.go b/6_micro-cluster/example-1-multi-repo/product/internal/server/grpc.go similarity index 100% rename from 6_micro-cluster/product/internal/server/grpc.go rename to 6_micro-cluster/example-1-multi-repo/product/internal/server/grpc.go diff --git a/6_micro-cluster/product/internal/server/grpc_option.go b/6_micro-cluster/example-1-multi-repo/product/internal/server/grpc_option.go similarity index 100% rename from 6_micro-cluster/product/internal/server/grpc_option.go rename to 6_micro-cluster/example-1-multi-repo/product/internal/server/grpc_option.go diff --git a/6_micro-cluster/product/internal/server/grpc_test.go b/6_micro-cluster/example-1-multi-repo/product/internal/server/grpc_test.go similarity index 100% rename from 6_micro-cluster/product/internal/server/grpc_test.go rename to 6_micro-cluster/example-1-multi-repo/product/internal/server/grpc_test.go diff --git a/6_micro-cluster/product/internal/service/product.go b/6_micro-cluster/example-1-multi-repo/product/internal/service/product.go similarity index 100% rename from 6_micro-cluster/product/internal/service/product.go rename to 6_micro-cluster/example-1-multi-repo/product/internal/service/product.go diff --git a/6_micro-cluster/product/internal/service/product_client_test.go b/6_micro-cluster/example-1-multi-repo/product/internal/service/product_client_test.go similarity index 100% rename from 6_micro-cluster/product/internal/service/product_client_test.go rename to 6_micro-cluster/example-1-multi-repo/product/internal/service/product_client_test.go diff --git a/6_micro-cluster/product/internal/service/service.go b/6_micro-cluster/example-1-multi-repo/product/internal/service/service.go similarity index 100% rename from 6_micro-cluster/product/internal/service/service.go rename to 6_micro-cluster/example-1-multi-repo/product/internal/service/service.go diff --git a/6_micro-cluster/product/internal/service/service_test.go b/6_micro-cluster/example-1-multi-repo/product/internal/service/service_test.go similarity index 100% rename from 6_micro-cluster/product/internal/service/service_test.go rename to 6_micro-cluster/example-1-multi-repo/product/internal/service/service_test.go diff --git a/6_micro-cluster/product/scripts/binary-package.sh b/6_micro-cluster/example-1-multi-repo/product/scripts/binary-package.sh similarity index 100% rename from 
6_micro-cluster/product/scripts/binary-package.sh rename to 6_micro-cluster/example-1-multi-repo/product/scripts/binary-package.sh diff --git a/6_micro-cluster/product/scripts/build/Dockerfile b/6_micro-cluster/example-1-multi-repo/product/scripts/build/Dockerfile similarity index 100% rename from 6_micro-cluster/product/scripts/build/Dockerfile rename to 6_micro-cluster/example-1-multi-repo/product/scripts/build/Dockerfile diff --git a/6_micro-cluster/product/scripts/build/Dockerfile_build b/6_micro-cluster/example-1-multi-repo/product/scripts/build/Dockerfile_build similarity index 100% rename from 6_micro-cluster/product/scripts/build/Dockerfile_build rename to 6_micro-cluster/example-1-multi-repo/product/scripts/build/Dockerfile_build diff --git a/6_micro-cluster/product/scripts/build/Dockerfile_test b/6_micro-cluster/example-1-multi-repo/product/scripts/build/Dockerfile_test similarity index 100% rename from 6_micro-cluster/product/scripts/build/Dockerfile_test rename to 6_micro-cluster/example-1-multi-repo/product/scripts/build/Dockerfile_test diff --git a/6_micro-cluster/product/scripts/build/README.md b/6_micro-cluster/example-1-multi-repo/product/scripts/build/README.md similarity index 100% rename from 6_micro-cluster/product/scripts/build/README.md rename to 6_micro-cluster/example-1-multi-repo/product/scripts/build/README.md diff --git a/6_micro-cluster/product/scripts/deploy-binary.sh b/6_micro-cluster/example-1-multi-repo/product/scripts/deploy-binary.sh similarity index 100% rename from 6_micro-cluster/product/scripts/deploy-binary.sh rename to 6_micro-cluster/example-1-multi-repo/product/scripts/deploy-binary.sh diff --git a/6_micro-cluster/product/scripts/deploy-docker.sh b/6_micro-cluster/example-1-multi-repo/product/scripts/deploy-docker.sh similarity index 100% rename from 6_micro-cluster/product/scripts/deploy-docker.sh rename to 6_micro-cluster/example-1-multi-repo/product/scripts/deploy-docker.sh diff --git a/6_micro-cluster/product/scripts/deploy-k8s.sh b/6_micro-cluster/example-1-multi-repo/product/scripts/deploy-k8s.sh similarity index 100% rename from 6_micro-cluster/product/scripts/deploy-k8s.sh rename to 6_micro-cluster/example-1-multi-repo/product/scripts/deploy-k8s.sh diff --git a/6_micro-cluster/product/scripts/image-build-local.sh b/6_micro-cluster/example-1-multi-repo/product/scripts/image-build-local.sh similarity index 100% rename from 6_micro-cluster/product/scripts/image-build-local.sh rename to 6_micro-cluster/example-1-multi-repo/product/scripts/image-build-local.sh diff --git a/6_micro-cluster/product/scripts/image-build.sh b/6_micro-cluster/example-1-multi-repo/product/scripts/image-build.sh similarity index 100% rename from 6_micro-cluster/product/scripts/image-build.sh rename to 6_micro-cluster/example-1-multi-repo/product/scripts/image-build.sh diff --git a/6_micro-cluster/product/scripts/image-build2.sh b/6_micro-cluster/example-1-multi-repo/product/scripts/image-build2.sh similarity index 100% rename from 6_micro-cluster/product/scripts/image-build2.sh rename to 6_micro-cluster/example-1-multi-repo/product/scripts/image-build2.sh diff --git a/6_micro-cluster/product/scripts/image-push.sh b/6_micro-cluster/example-1-multi-repo/product/scripts/image-push.sh similarity index 100% rename from 6_micro-cluster/product/scripts/image-push.sh rename to 6_micro-cluster/example-1-multi-repo/product/scripts/image-push.sh diff --git a/6_micro-cluster/product/scripts/image-rpc-test.sh b/6_micro-cluster/example-1-multi-repo/product/scripts/image-rpc-test.sh 
similarity index 100% rename from 6_micro-cluster/product/scripts/image-rpc-test.sh rename to 6_micro-cluster/example-1-multi-repo/product/scripts/image-rpc-test.sh diff --git a/6_micro-cluster/product/scripts/patch.sh b/6_micro-cluster/example-1-multi-repo/product/scripts/patch.sh similarity index 100% rename from 6_micro-cluster/product/scripts/patch.sh rename to 6_micro-cluster/example-1-multi-repo/product/scripts/patch.sh diff --git a/6_micro-cluster/product/scripts/proto-doc.sh b/6_micro-cluster/example-1-multi-repo/product/scripts/proto-doc.sh similarity index 100% rename from 6_micro-cluster/product/scripts/proto-doc.sh rename to 6_micro-cluster/example-1-multi-repo/product/scripts/proto-doc.sh diff --git a/6_micro-cluster/product/scripts/protoc.sh b/6_micro-cluster/example-1-multi-repo/product/scripts/protoc.sh similarity index 100% rename from 6_micro-cluster/product/scripts/protoc.sh rename to 6_micro-cluster/example-1-multi-repo/product/scripts/protoc.sh diff --git a/6_micro-cluster/product/scripts/run-nohup.sh b/6_micro-cluster/example-1-multi-repo/product/scripts/run-nohup.sh similarity index 100% rename from 6_micro-cluster/product/scripts/run-nohup.sh rename to 6_micro-cluster/example-1-multi-repo/product/scripts/run-nohup.sh diff --git a/6_micro-cluster/product/scripts/run.sh b/6_micro-cluster/example-1-multi-repo/product/scripts/run.sh similarity index 100% rename from 6_micro-cluster/product/scripts/run.sh rename to 6_micro-cluster/example-1-multi-repo/product/scripts/run.sh diff --git a/6_micro-cluster/product/scripts/swag-docs.sh b/6_micro-cluster/example-1-multi-repo/product/scripts/swag-docs.sh similarity index 100% rename from 6_micro-cluster/product/scripts/swag-docs.sh rename to 6_micro-cluster/example-1-multi-repo/product/scripts/swag-docs.sh diff --git a/6_micro-cluster/product/third_party/gogo/protobuf/gogoproto/gogo.proto b/6_micro-cluster/example-1-multi-repo/product/third_party/gogo/protobuf/gogoproto/gogo.proto similarity index 100% rename from 6_micro-cluster/product/third_party/gogo/protobuf/gogoproto/gogo.proto rename to 6_micro-cluster/example-1-multi-repo/product/third_party/gogo/protobuf/gogoproto/gogo.proto diff --git a/6_micro-cluster/product/third_party/google/api/BUILD.bazel b/6_micro-cluster/example-1-multi-repo/product/third_party/google/api/BUILD.bazel similarity index 100% rename from 6_micro-cluster/product/third_party/google/api/BUILD.bazel rename to 6_micro-cluster/example-1-multi-repo/product/third_party/google/api/BUILD.bazel diff --git a/6_micro-cluster/product/third_party/google/api/README.md b/6_micro-cluster/example-1-multi-repo/product/third_party/google/api/README.md similarity index 100% rename from 6_micro-cluster/product/third_party/google/api/README.md rename to 6_micro-cluster/example-1-multi-repo/product/third_party/google/api/README.md diff --git a/6_micro-cluster/product/third_party/google/api/annotations.proto b/6_micro-cluster/example-1-multi-repo/product/third_party/google/api/annotations.proto similarity index 100% rename from 6_micro-cluster/product/third_party/google/api/annotations.proto rename to 6_micro-cluster/example-1-multi-repo/product/third_party/google/api/annotations.proto diff --git a/6_micro-cluster/product/third_party/google/api/auth.proto b/6_micro-cluster/example-1-multi-repo/product/third_party/google/api/auth.proto similarity index 100% rename from 6_micro-cluster/product/third_party/google/api/auth.proto rename to 6_micro-cluster/example-1-multi-repo/product/third_party/google/api/auth.proto diff --git 
a/6_micro-cluster/product/third_party/google/api/backend.proto b/6_micro-cluster/example-1-multi-repo/product/third_party/google/api/backend.proto similarity index 100% rename from 6_micro-cluster/product/third_party/google/api/backend.proto rename to 6_micro-cluster/example-1-multi-repo/product/third_party/google/api/backend.proto diff --git a/6_micro-cluster/product/third_party/google/api/billing.proto b/6_micro-cluster/example-1-multi-repo/product/third_party/google/api/billing.proto similarity index 100% rename from 6_micro-cluster/product/third_party/google/api/billing.proto rename to 6_micro-cluster/example-1-multi-repo/product/third_party/google/api/billing.proto diff --git a/6_micro-cluster/product/third_party/google/api/client.proto b/6_micro-cluster/example-1-multi-repo/product/third_party/google/api/client.proto similarity index 100% rename from 6_micro-cluster/product/third_party/google/api/client.proto rename to 6_micro-cluster/example-1-multi-repo/product/third_party/google/api/client.proto diff --git a/6_micro-cluster/product/third_party/google/api/config_change.proto b/6_micro-cluster/example-1-multi-repo/product/third_party/google/api/config_change.proto similarity index 100% rename from 6_micro-cluster/product/third_party/google/api/config_change.proto rename to 6_micro-cluster/example-1-multi-repo/product/third_party/google/api/config_change.proto diff --git a/6_micro-cluster/product/third_party/google/api/consumer.proto b/6_micro-cluster/example-1-multi-repo/product/third_party/google/api/consumer.proto similarity index 100% rename from 6_micro-cluster/product/third_party/google/api/consumer.proto rename to 6_micro-cluster/example-1-multi-repo/product/third_party/google/api/consumer.proto diff --git a/6_micro-cluster/product/third_party/google/api/context.proto b/6_micro-cluster/example-1-multi-repo/product/third_party/google/api/context.proto similarity index 100% rename from 6_micro-cluster/product/third_party/google/api/context.proto rename to 6_micro-cluster/example-1-multi-repo/product/third_party/google/api/context.proto diff --git a/6_micro-cluster/product/third_party/google/api/control.proto b/6_micro-cluster/example-1-multi-repo/product/third_party/google/api/control.proto similarity index 100% rename from 6_micro-cluster/product/third_party/google/api/control.proto rename to 6_micro-cluster/example-1-multi-repo/product/third_party/google/api/control.proto diff --git a/6_micro-cluster/product/third_party/google/api/distribution.proto b/6_micro-cluster/example-1-multi-repo/product/third_party/google/api/distribution.proto similarity index 100% rename from 6_micro-cluster/product/third_party/google/api/distribution.proto rename to 6_micro-cluster/example-1-multi-repo/product/third_party/google/api/distribution.proto diff --git a/6_micro-cluster/product/third_party/google/api/documentation.proto b/6_micro-cluster/example-1-multi-repo/product/third_party/google/api/documentation.proto similarity index 100% rename from 6_micro-cluster/product/third_party/google/api/documentation.proto rename to 6_micro-cluster/example-1-multi-repo/product/third_party/google/api/documentation.proto diff --git a/6_micro-cluster/product/third_party/google/api/endpoint.proto b/6_micro-cluster/example-1-multi-repo/product/third_party/google/api/endpoint.proto similarity index 100% rename from 6_micro-cluster/product/third_party/google/api/endpoint.proto rename to 6_micro-cluster/example-1-multi-repo/product/third_party/google/api/endpoint.proto diff --git 
a/6_micro-cluster/product/third_party/google/api/expr/BUILD.bazel b/6_micro-cluster/example-1-multi-repo/product/third_party/google/api/expr/BUILD.bazel similarity index 100% rename from 6_micro-cluster/product/third_party/google/api/expr/BUILD.bazel rename to 6_micro-cluster/example-1-multi-repo/product/third_party/google/api/expr/BUILD.bazel diff --git a/6_micro-cluster/product/third_party/google/api/expr/cel.yaml b/6_micro-cluster/example-1-multi-repo/product/third_party/google/api/expr/cel.yaml similarity index 100% rename from 6_micro-cluster/product/third_party/google/api/expr/cel.yaml rename to 6_micro-cluster/example-1-multi-repo/product/third_party/google/api/expr/cel.yaml diff --git a/6_micro-cluster/product/third_party/google/api/expr/v1alpha1/BUILD.bazel b/6_micro-cluster/example-1-multi-repo/product/third_party/google/api/expr/v1alpha1/BUILD.bazel similarity index 100% rename from 6_micro-cluster/product/third_party/google/api/expr/v1alpha1/BUILD.bazel rename to 6_micro-cluster/example-1-multi-repo/product/third_party/google/api/expr/v1alpha1/BUILD.bazel diff --git a/6_micro-cluster/product/third_party/google/api/expr/v1alpha1/checked.proto b/6_micro-cluster/example-1-multi-repo/product/third_party/google/api/expr/v1alpha1/checked.proto similarity index 100% rename from 6_micro-cluster/product/third_party/google/api/expr/v1alpha1/checked.proto rename to 6_micro-cluster/example-1-multi-repo/product/third_party/google/api/expr/v1alpha1/checked.proto diff --git a/6_micro-cluster/product/third_party/google/api/expr/v1alpha1/conformance_service.proto b/6_micro-cluster/example-1-multi-repo/product/third_party/google/api/expr/v1alpha1/conformance_service.proto similarity index 100% rename from 6_micro-cluster/product/third_party/google/api/expr/v1alpha1/conformance_service.proto rename to 6_micro-cluster/example-1-multi-repo/product/third_party/google/api/expr/v1alpha1/conformance_service.proto diff --git a/6_micro-cluster/product/third_party/google/api/expr/v1alpha1/eval.proto b/6_micro-cluster/example-1-multi-repo/product/third_party/google/api/expr/v1alpha1/eval.proto similarity index 100% rename from 6_micro-cluster/product/third_party/google/api/expr/v1alpha1/eval.proto rename to 6_micro-cluster/example-1-multi-repo/product/third_party/google/api/expr/v1alpha1/eval.proto diff --git a/6_micro-cluster/product/third_party/google/api/expr/v1alpha1/explain.proto b/6_micro-cluster/example-1-multi-repo/product/third_party/google/api/expr/v1alpha1/explain.proto similarity index 100% rename from 6_micro-cluster/product/third_party/google/api/expr/v1alpha1/explain.proto rename to 6_micro-cluster/example-1-multi-repo/product/third_party/google/api/expr/v1alpha1/explain.proto diff --git a/6_micro-cluster/product/third_party/google/api/expr/v1alpha1/syntax.proto b/6_micro-cluster/example-1-multi-repo/product/third_party/google/api/expr/v1alpha1/syntax.proto similarity index 100% rename from 6_micro-cluster/product/third_party/google/api/expr/v1alpha1/syntax.proto rename to 6_micro-cluster/example-1-multi-repo/product/third_party/google/api/expr/v1alpha1/syntax.proto diff --git a/6_micro-cluster/product/third_party/google/api/expr/v1alpha1/value.proto b/6_micro-cluster/example-1-multi-repo/product/third_party/google/api/expr/v1alpha1/value.proto similarity index 100% rename from 6_micro-cluster/product/third_party/google/api/expr/v1alpha1/value.proto rename to 6_micro-cluster/example-1-multi-repo/product/third_party/google/api/expr/v1alpha1/value.proto diff --git 
a/6_micro-cluster/product/third_party/google/api/expr/v1beta1/BUILD.bazel b/6_micro-cluster/example-1-multi-repo/product/third_party/google/api/expr/v1beta1/BUILD.bazel similarity index 100% rename from 6_micro-cluster/product/third_party/google/api/expr/v1beta1/BUILD.bazel rename to 6_micro-cluster/example-1-multi-repo/product/third_party/google/api/expr/v1beta1/BUILD.bazel diff --git a/6_micro-cluster/product/third_party/google/api/expr/v1beta1/decl.proto b/6_micro-cluster/example-1-multi-repo/product/third_party/google/api/expr/v1beta1/decl.proto similarity index 100% rename from 6_micro-cluster/product/third_party/google/api/expr/v1beta1/decl.proto rename to 6_micro-cluster/example-1-multi-repo/product/third_party/google/api/expr/v1beta1/decl.proto diff --git a/6_micro-cluster/product/third_party/google/api/expr/v1beta1/eval.proto b/6_micro-cluster/example-1-multi-repo/product/third_party/google/api/expr/v1beta1/eval.proto similarity index 100% rename from 6_micro-cluster/product/third_party/google/api/expr/v1beta1/eval.proto rename to 6_micro-cluster/example-1-multi-repo/product/third_party/google/api/expr/v1beta1/eval.proto diff --git a/6_micro-cluster/product/third_party/google/api/expr/v1beta1/expr.proto b/6_micro-cluster/example-1-multi-repo/product/third_party/google/api/expr/v1beta1/expr.proto similarity index 100% rename from 6_micro-cluster/product/third_party/google/api/expr/v1beta1/expr.proto rename to 6_micro-cluster/example-1-multi-repo/product/third_party/google/api/expr/v1beta1/expr.proto diff --git a/6_micro-cluster/product/third_party/google/api/expr/v1beta1/source.proto b/6_micro-cluster/example-1-multi-repo/product/third_party/google/api/expr/v1beta1/source.proto similarity index 100% rename from 6_micro-cluster/product/third_party/google/api/expr/v1beta1/source.proto rename to 6_micro-cluster/example-1-multi-repo/product/third_party/google/api/expr/v1beta1/source.proto diff --git a/6_micro-cluster/product/third_party/google/api/expr/v1beta1/value.proto b/6_micro-cluster/example-1-multi-repo/product/third_party/google/api/expr/v1beta1/value.proto similarity index 100% rename from 6_micro-cluster/product/third_party/google/api/expr/v1beta1/value.proto rename to 6_micro-cluster/example-1-multi-repo/product/third_party/google/api/expr/v1beta1/value.proto diff --git a/6_micro-cluster/product/third_party/google/api/field_behavior.proto b/6_micro-cluster/example-1-multi-repo/product/third_party/google/api/field_behavior.proto similarity index 100% rename from 6_micro-cluster/product/third_party/google/api/field_behavior.proto rename to 6_micro-cluster/example-1-multi-repo/product/third_party/google/api/field_behavior.proto diff --git a/6_micro-cluster/product/third_party/google/api/http.proto b/6_micro-cluster/example-1-multi-repo/product/third_party/google/api/http.proto similarity index 100% rename from 6_micro-cluster/product/third_party/google/api/http.proto rename to 6_micro-cluster/example-1-multi-repo/product/third_party/google/api/http.proto diff --git a/6_micro-cluster/product/third_party/google/api/httpbody.proto b/6_micro-cluster/example-1-multi-repo/product/third_party/google/api/httpbody.proto similarity index 100% rename from 6_micro-cluster/product/third_party/google/api/httpbody.proto rename to 6_micro-cluster/example-1-multi-repo/product/third_party/google/api/httpbody.proto diff --git a/6_micro-cluster/product/third_party/google/api/label.proto b/6_micro-cluster/example-1-multi-repo/product/third_party/google/api/label.proto similarity index 100% rename 
from 6_micro-cluster/product/third_party/google/api/label.proto rename to 6_micro-cluster/example-1-multi-repo/product/third_party/google/api/label.proto diff --git a/6_micro-cluster/product/third_party/google/api/launch_stage.proto b/6_micro-cluster/example-1-multi-repo/product/third_party/google/api/launch_stage.proto similarity index 100% rename from 6_micro-cluster/product/third_party/google/api/launch_stage.proto rename to 6_micro-cluster/example-1-multi-repo/product/third_party/google/api/launch_stage.proto diff --git a/6_micro-cluster/product/third_party/google/api/log.proto b/6_micro-cluster/example-1-multi-repo/product/third_party/google/api/log.proto similarity index 100% rename from 6_micro-cluster/product/third_party/google/api/log.proto rename to 6_micro-cluster/example-1-multi-repo/product/third_party/google/api/log.proto diff --git a/6_micro-cluster/product/third_party/google/api/logging.proto b/6_micro-cluster/example-1-multi-repo/product/third_party/google/api/logging.proto similarity index 100% rename from 6_micro-cluster/product/third_party/google/api/logging.proto rename to 6_micro-cluster/example-1-multi-repo/product/third_party/google/api/logging.proto diff --git a/6_micro-cluster/product/third_party/google/api/metric.proto b/6_micro-cluster/example-1-multi-repo/product/third_party/google/api/metric.proto similarity index 100% rename from 6_micro-cluster/product/third_party/google/api/metric.proto rename to 6_micro-cluster/example-1-multi-repo/product/third_party/google/api/metric.proto diff --git a/6_micro-cluster/product/third_party/google/api/monitored_resource.proto b/6_micro-cluster/example-1-multi-repo/product/third_party/google/api/monitored_resource.proto similarity index 100% rename from 6_micro-cluster/product/third_party/google/api/monitored_resource.proto rename to 6_micro-cluster/example-1-multi-repo/product/third_party/google/api/monitored_resource.proto diff --git a/6_micro-cluster/product/third_party/google/api/monitoring.proto b/6_micro-cluster/example-1-multi-repo/product/third_party/google/api/monitoring.proto similarity index 100% rename from 6_micro-cluster/product/third_party/google/api/monitoring.proto rename to 6_micro-cluster/example-1-multi-repo/product/third_party/google/api/monitoring.proto diff --git a/6_micro-cluster/product/third_party/google/api/quota.proto b/6_micro-cluster/example-1-multi-repo/product/third_party/google/api/quota.proto similarity index 100% rename from 6_micro-cluster/product/third_party/google/api/quota.proto rename to 6_micro-cluster/example-1-multi-repo/product/third_party/google/api/quota.proto diff --git a/6_micro-cluster/product/third_party/google/api/resource.proto b/6_micro-cluster/example-1-multi-repo/product/third_party/google/api/resource.proto similarity index 100% rename from 6_micro-cluster/product/third_party/google/api/resource.proto rename to 6_micro-cluster/example-1-multi-repo/product/third_party/google/api/resource.proto diff --git a/6_micro-cluster/product/third_party/google/api/service.proto b/6_micro-cluster/example-1-multi-repo/product/third_party/google/api/service.proto similarity index 100% rename from 6_micro-cluster/product/third_party/google/api/service.proto rename to 6_micro-cluster/example-1-multi-repo/product/third_party/google/api/service.proto diff --git a/6_micro-cluster/product/third_party/google/api/serviceconfig.yaml b/6_micro-cluster/example-1-multi-repo/product/third_party/google/api/serviceconfig.yaml similarity index 100% rename from 
6_micro-cluster/product/third_party/google/api/serviceconfig.yaml rename to 6_micro-cluster/example-1-multi-repo/product/third_party/google/api/serviceconfig.yaml diff --git a/6_micro-cluster/product/third_party/google/api/servicecontrol/BUILD.bazel b/6_micro-cluster/example-1-multi-repo/product/third_party/google/api/servicecontrol/BUILD.bazel similarity index 100% rename from 6_micro-cluster/product/third_party/google/api/servicecontrol/BUILD.bazel rename to 6_micro-cluster/example-1-multi-repo/product/third_party/google/api/servicecontrol/BUILD.bazel diff --git a/6_micro-cluster/product/third_party/google/api/servicecontrol/README.md b/6_micro-cluster/example-1-multi-repo/product/third_party/google/api/servicecontrol/README.md similarity index 100% rename from 6_micro-cluster/product/third_party/google/api/servicecontrol/README.md rename to 6_micro-cluster/example-1-multi-repo/product/third_party/google/api/servicecontrol/README.md diff --git a/6_micro-cluster/product/third_party/google/api/servicecontrol/v1/BUILD.bazel b/6_micro-cluster/example-1-multi-repo/product/third_party/google/api/servicecontrol/v1/BUILD.bazel similarity index 100% rename from 6_micro-cluster/product/third_party/google/api/servicecontrol/v1/BUILD.bazel rename to 6_micro-cluster/example-1-multi-repo/product/third_party/google/api/servicecontrol/v1/BUILD.bazel diff --git a/6_micro-cluster/product/third_party/google/api/servicecontrol/v1/check_error.proto b/6_micro-cluster/example-1-multi-repo/product/third_party/google/api/servicecontrol/v1/check_error.proto similarity index 100% rename from 6_micro-cluster/product/third_party/google/api/servicecontrol/v1/check_error.proto rename to 6_micro-cluster/example-1-multi-repo/product/third_party/google/api/servicecontrol/v1/check_error.proto diff --git a/6_micro-cluster/product/third_party/google/api/servicecontrol/v1/distribution.proto b/6_micro-cluster/example-1-multi-repo/product/third_party/google/api/servicecontrol/v1/distribution.proto similarity index 100% rename from 6_micro-cluster/product/third_party/google/api/servicecontrol/v1/distribution.proto rename to 6_micro-cluster/example-1-multi-repo/product/third_party/google/api/servicecontrol/v1/distribution.proto diff --git a/6_micro-cluster/product/third_party/google/api/servicecontrol/v1/http_request.proto b/6_micro-cluster/example-1-multi-repo/product/third_party/google/api/servicecontrol/v1/http_request.proto similarity index 100% rename from 6_micro-cluster/product/third_party/google/api/servicecontrol/v1/http_request.proto rename to 6_micro-cluster/example-1-multi-repo/product/third_party/google/api/servicecontrol/v1/http_request.proto diff --git a/6_micro-cluster/product/third_party/google/api/servicecontrol/v1/log_entry.proto b/6_micro-cluster/example-1-multi-repo/product/third_party/google/api/servicecontrol/v1/log_entry.proto similarity index 100% rename from 6_micro-cluster/product/third_party/google/api/servicecontrol/v1/log_entry.proto rename to 6_micro-cluster/example-1-multi-repo/product/third_party/google/api/servicecontrol/v1/log_entry.proto diff --git a/6_micro-cluster/product/third_party/google/api/servicecontrol/v1/metric_value.proto b/6_micro-cluster/example-1-multi-repo/product/third_party/google/api/servicecontrol/v1/metric_value.proto similarity index 100% rename from 6_micro-cluster/product/third_party/google/api/servicecontrol/v1/metric_value.proto rename to 6_micro-cluster/example-1-multi-repo/product/third_party/google/api/servicecontrol/v1/metric_value.proto diff --git 
a/6_micro-cluster/product/third_party/google/api/servicecontrol/v1/operation.proto b/6_micro-cluster/example-1-multi-repo/product/third_party/google/api/servicecontrol/v1/operation.proto similarity index 100% rename from 6_micro-cluster/product/third_party/google/api/servicecontrol/v1/operation.proto rename to 6_micro-cluster/example-1-multi-repo/product/third_party/google/api/servicecontrol/v1/operation.proto diff --git a/6_micro-cluster/product/third_party/google/api/servicecontrol/v1/quota_controller.proto b/6_micro-cluster/example-1-multi-repo/product/third_party/google/api/servicecontrol/v1/quota_controller.proto similarity index 100% rename from 6_micro-cluster/product/third_party/google/api/servicecontrol/v1/quota_controller.proto rename to 6_micro-cluster/example-1-multi-repo/product/third_party/google/api/servicecontrol/v1/quota_controller.proto diff --git a/6_micro-cluster/product/third_party/google/api/servicecontrol/v1/service_controller.proto b/6_micro-cluster/example-1-multi-repo/product/third_party/google/api/servicecontrol/v1/service_controller.proto similarity index 100% rename from 6_micro-cluster/product/third_party/google/api/servicecontrol/v1/service_controller.proto rename to 6_micro-cluster/example-1-multi-repo/product/third_party/google/api/servicecontrol/v1/service_controller.proto diff --git a/6_micro-cluster/product/third_party/google/api/servicecontrol/v1/servicecontrol.yaml b/6_micro-cluster/example-1-multi-repo/product/third_party/google/api/servicecontrol/v1/servicecontrol.yaml similarity index 100% rename from 6_micro-cluster/product/third_party/google/api/servicecontrol/v1/servicecontrol.yaml rename to 6_micro-cluster/example-1-multi-repo/product/third_party/google/api/servicecontrol/v1/servicecontrol.yaml diff --git a/6_micro-cluster/product/third_party/google/api/servicemanagement/BUILD.bazel b/6_micro-cluster/example-1-multi-repo/product/third_party/google/api/servicemanagement/BUILD.bazel similarity index 100% rename from 6_micro-cluster/product/third_party/google/api/servicemanagement/BUILD.bazel rename to 6_micro-cluster/example-1-multi-repo/product/third_party/google/api/servicemanagement/BUILD.bazel diff --git a/6_micro-cluster/product/third_party/google/api/servicemanagement/README.md b/6_micro-cluster/example-1-multi-repo/product/third_party/google/api/servicemanagement/README.md similarity index 100% rename from 6_micro-cluster/product/third_party/google/api/servicemanagement/README.md rename to 6_micro-cluster/example-1-multi-repo/product/third_party/google/api/servicemanagement/README.md diff --git a/6_micro-cluster/product/third_party/google/api/servicemanagement/v1/BUILD.bazel b/6_micro-cluster/example-1-multi-repo/product/third_party/google/api/servicemanagement/v1/BUILD.bazel similarity index 100% rename from 6_micro-cluster/product/third_party/google/api/servicemanagement/v1/BUILD.bazel rename to 6_micro-cluster/example-1-multi-repo/product/third_party/google/api/servicemanagement/v1/BUILD.bazel diff --git a/6_micro-cluster/product/third_party/google/api/servicemanagement/v1/resources.proto b/6_micro-cluster/example-1-multi-repo/product/third_party/google/api/servicemanagement/v1/resources.proto similarity index 100% rename from 6_micro-cluster/product/third_party/google/api/servicemanagement/v1/resources.proto rename to 6_micro-cluster/example-1-multi-repo/product/third_party/google/api/servicemanagement/v1/resources.proto diff --git a/6_micro-cluster/product/third_party/google/api/servicemanagement/v1/servicemanagement_gapic.legacy.yaml 
b/6_micro-cluster/example-1-multi-repo/product/third_party/google/api/servicemanagement/v1/servicemanagement_gapic.legacy.yaml similarity index 100% rename from 6_micro-cluster/product/third_party/google/api/servicemanagement/v1/servicemanagement_gapic.legacy.yaml rename to 6_micro-cluster/example-1-multi-repo/product/third_party/google/api/servicemanagement/v1/servicemanagement_gapic.legacy.yaml diff --git a/6_micro-cluster/product/third_party/google/api/servicemanagement/v1/servicemanagement_gapic.yaml b/6_micro-cluster/example-1-multi-repo/product/third_party/google/api/servicemanagement/v1/servicemanagement_gapic.yaml similarity index 100% rename from 6_micro-cluster/product/third_party/google/api/servicemanagement/v1/servicemanagement_gapic.yaml rename to 6_micro-cluster/example-1-multi-repo/product/third_party/google/api/servicemanagement/v1/servicemanagement_gapic.yaml diff --git a/6_micro-cluster/product/third_party/google/api/servicemanagement/v1/servicemanagement_grpc_service_config.json b/6_micro-cluster/example-1-multi-repo/product/third_party/google/api/servicemanagement/v1/servicemanagement_grpc_service_config.json similarity index 100% rename from 6_micro-cluster/product/third_party/google/api/servicemanagement/v1/servicemanagement_grpc_service_config.json rename to 6_micro-cluster/example-1-multi-repo/product/third_party/google/api/servicemanagement/v1/servicemanagement_grpc_service_config.json diff --git a/6_micro-cluster/product/third_party/google/api/servicemanagement/v1/servicemanagement_v1.yaml b/6_micro-cluster/example-1-multi-repo/product/third_party/google/api/servicemanagement/v1/servicemanagement_v1.yaml similarity index 100% rename from 6_micro-cluster/product/third_party/google/api/servicemanagement/v1/servicemanagement_v1.yaml rename to 6_micro-cluster/example-1-multi-repo/product/third_party/google/api/servicemanagement/v1/servicemanagement_v1.yaml diff --git a/6_micro-cluster/product/third_party/google/api/servicemanagement/v1/servicemanager.proto b/6_micro-cluster/example-1-multi-repo/product/third_party/google/api/servicemanagement/v1/servicemanager.proto similarity index 100% rename from 6_micro-cluster/product/third_party/google/api/servicemanagement/v1/servicemanager.proto rename to 6_micro-cluster/example-1-multi-repo/product/third_party/google/api/servicemanagement/v1/servicemanager.proto diff --git a/6_micro-cluster/product/third_party/google/api/source_info.proto b/6_micro-cluster/example-1-multi-repo/product/third_party/google/api/source_info.proto similarity index 100% rename from 6_micro-cluster/product/third_party/google/api/source_info.proto rename to 6_micro-cluster/example-1-multi-repo/product/third_party/google/api/source_info.proto diff --git a/6_micro-cluster/product/third_party/google/api/system_parameter.proto b/6_micro-cluster/example-1-multi-repo/product/third_party/google/api/system_parameter.proto similarity index 100% rename from 6_micro-cluster/product/third_party/google/api/system_parameter.proto rename to 6_micro-cluster/example-1-multi-repo/product/third_party/google/api/system_parameter.proto diff --git a/6_micro-cluster/product/third_party/google/api/usage.proto b/6_micro-cluster/example-1-multi-repo/product/third_party/google/api/usage.proto similarity index 100% rename from 6_micro-cluster/product/third_party/google/api/usage.proto rename to 6_micro-cluster/example-1-multi-repo/product/third_party/google/api/usage.proto diff --git a/6_micro-cluster/product/third_party/google/protobuf/annotations.proto 
b/6_micro-cluster/example-1-multi-repo/product/third_party/google/protobuf/annotations.proto similarity index 100% rename from 6_micro-cluster/product/third_party/google/protobuf/annotations.proto rename to 6_micro-cluster/example-1-multi-repo/product/third_party/google/protobuf/annotations.proto diff --git a/6_micro-cluster/product/third_party/google/protobuf/any.proto b/6_micro-cluster/example-1-multi-repo/product/third_party/google/protobuf/any.proto similarity index 100% rename from 6_micro-cluster/product/third_party/google/protobuf/any.proto rename to 6_micro-cluster/example-1-multi-repo/product/third_party/google/protobuf/any.proto diff --git a/6_micro-cluster/product/third_party/google/protobuf/api.proto b/6_micro-cluster/example-1-multi-repo/product/third_party/google/protobuf/api.proto similarity index 100% rename from 6_micro-cluster/product/third_party/google/protobuf/api.proto rename to 6_micro-cluster/example-1-multi-repo/product/third_party/google/protobuf/api.proto diff --git a/6_micro-cluster/product/third_party/google/protobuf/compiler/plugin.proto b/6_micro-cluster/example-1-multi-repo/product/third_party/google/protobuf/compiler/plugin.proto similarity index 100% rename from 6_micro-cluster/product/third_party/google/protobuf/compiler/plugin.proto rename to 6_micro-cluster/example-1-multi-repo/product/third_party/google/protobuf/compiler/plugin.proto diff --git a/6_micro-cluster/product/third_party/google/protobuf/descriptor.proto b/6_micro-cluster/example-1-multi-repo/product/third_party/google/protobuf/descriptor.proto similarity index 100% rename from 6_micro-cluster/product/third_party/google/protobuf/descriptor.proto rename to 6_micro-cluster/example-1-multi-repo/product/third_party/google/protobuf/descriptor.proto diff --git a/6_micro-cluster/product/third_party/google/protobuf/duration.proto b/6_micro-cluster/example-1-multi-repo/product/third_party/google/protobuf/duration.proto similarity index 100% rename from 6_micro-cluster/product/third_party/google/protobuf/duration.proto rename to 6_micro-cluster/example-1-multi-repo/product/third_party/google/protobuf/duration.proto diff --git a/6_micro-cluster/product/third_party/google/protobuf/empty.proto b/6_micro-cluster/example-1-multi-repo/product/third_party/google/protobuf/empty.proto similarity index 100% rename from 6_micro-cluster/product/third_party/google/protobuf/empty.proto rename to 6_micro-cluster/example-1-multi-repo/product/third_party/google/protobuf/empty.proto diff --git a/6_micro-cluster/product/third_party/google/protobuf/field_mask.proto b/6_micro-cluster/example-1-multi-repo/product/third_party/google/protobuf/field_mask.proto similarity index 100% rename from 6_micro-cluster/product/third_party/google/protobuf/field_mask.proto rename to 6_micro-cluster/example-1-multi-repo/product/third_party/google/protobuf/field_mask.proto diff --git a/6_micro-cluster/product/third_party/google/protobuf/source_context.proto b/6_micro-cluster/example-1-multi-repo/product/third_party/google/protobuf/source_context.proto similarity index 100% rename from 6_micro-cluster/product/third_party/google/protobuf/source_context.proto rename to 6_micro-cluster/example-1-multi-repo/product/third_party/google/protobuf/source_context.proto diff --git a/6_micro-cluster/product/third_party/google/protobuf/struct.proto b/6_micro-cluster/example-1-multi-repo/product/third_party/google/protobuf/struct.proto similarity index 100% rename from 6_micro-cluster/product/third_party/google/protobuf/struct.proto rename to 
6_micro-cluster/example-1-multi-repo/product/third_party/google/protobuf/struct.proto diff --git a/6_micro-cluster/product/third_party/google/protobuf/timestamp.proto b/6_micro-cluster/example-1-multi-repo/product/third_party/google/protobuf/timestamp.proto similarity index 100% rename from 6_micro-cluster/product/third_party/google/protobuf/timestamp.proto rename to 6_micro-cluster/example-1-multi-repo/product/third_party/google/protobuf/timestamp.proto diff --git a/6_micro-cluster/product/third_party/google/protobuf/type.proto b/6_micro-cluster/example-1-multi-repo/product/third_party/google/protobuf/type.proto similarity index 100% rename from 6_micro-cluster/product/third_party/google/protobuf/type.proto rename to 6_micro-cluster/example-1-multi-repo/product/third_party/google/protobuf/type.proto diff --git a/6_micro-cluster/product/third_party/google/protobuf/wrappers.proto b/6_micro-cluster/example-1-multi-repo/product/third_party/google/protobuf/wrappers.proto similarity index 100% rename from 6_micro-cluster/product/third_party/google/protobuf/wrappers.proto rename to 6_micro-cluster/example-1-multi-repo/product/third_party/google/protobuf/wrappers.proto diff --git a/6_micro-cluster/product/third_party/protoc-gen-openapiv2/options/annotations.proto b/6_micro-cluster/example-1-multi-repo/product/third_party/protoc-gen-openapiv2/options/annotations.proto similarity index 100% rename from 6_micro-cluster/product/third_party/protoc-gen-openapiv2/options/annotations.proto rename to 6_micro-cluster/example-1-multi-repo/product/third_party/protoc-gen-openapiv2/options/annotations.proto diff --git a/6_micro-cluster/product/third_party/protoc-gen-openapiv2/options/openapiv2.proto b/6_micro-cluster/example-1-multi-repo/product/third_party/protoc-gen-openapiv2/options/openapiv2.proto similarity index 100% rename from 6_micro-cluster/product/third_party/protoc-gen-openapiv2/options/openapiv2.proto rename to 6_micro-cluster/example-1-multi-repo/product/third_party/protoc-gen-openapiv2/options/openapiv2.proto diff --git a/6_micro-cluster/product/third_party/tagger/tagger.proto b/6_micro-cluster/example-1-multi-repo/product/third_party/tagger/tagger.proto similarity index 100% rename from 6_micro-cluster/product/third_party/tagger/tagger.proto rename to 6_micro-cluster/example-1-multi-repo/product/third_party/tagger/tagger.proto diff --git a/6_micro-cluster/product/third_party/validate/validate.proto b/6_micro-cluster/example-1-multi-repo/product/third_party/validate/validate.proto similarity index 100% rename from 6_micro-cluster/product/third_party/validate/validate.proto rename to 6_micro-cluster/example-1-multi-repo/product/third_party/validate/validate.proto diff --git a/6_micro-cluster/shop_gw/.gitignore b/6_micro-cluster/example-1-multi-repo/shop_gw/.gitignore similarity index 100% rename from 6_micro-cluster/shop_gw/.gitignore rename to 6_micro-cluster/example-1-multi-repo/shop_gw/.gitignore diff --git a/6_micro-cluster/shop_gw/.golangci.yml b/6_micro-cluster/example-1-multi-repo/shop_gw/.golangci.yml similarity index 100% rename from 6_micro-cluster/shop_gw/.golangci.yml rename to 6_micro-cluster/example-1-multi-repo/shop_gw/.golangci.yml diff --git a/6_micro-cluster/shop_gw/Jenkinsfile b/6_micro-cluster/example-1-multi-repo/shop_gw/Jenkinsfile similarity index 100% rename from 6_micro-cluster/shop_gw/Jenkinsfile rename to 6_micro-cluster/example-1-multi-repo/shop_gw/Jenkinsfile diff --git a/6_micro-cluster/shop_gw/Makefile b/6_micro-cluster/example-1-multi-repo/shop_gw/Makefile 
similarity index 100% rename from 6_micro-cluster/shop_gw/Makefile rename to 6_micro-cluster/example-1-multi-repo/shop_gw/Makefile diff --git a/6_micro-cluster/shop_gw/README.md b/6_micro-cluster/example-1-multi-repo/shop_gw/README.md similarity index 100% rename from 6_micro-cluster/shop_gw/README.md rename to 6_micro-cluster/example-1-multi-repo/shop_gw/README.md diff --git a/6_micro-cluster/shop_gw/api/comment/v1/comment.pb.go b/6_micro-cluster/example-1-multi-repo/shop_gw/api/comment/v1/comment.pb.go similarity index 100% rename from 6_micro-cluster/shop_gw/api/comment/v1/comment.pb.go rename to 6_micro-cluster/example-1-multi-repo/shop_gw/api/comment/v1/comment.pb.go diff --git a/6_micro-cluster/shop_gw/api/comment/v1/comment.pb.validate.go b/6_micro-cluster/example-1-multi-repo/shop_gw/api/comment/v1/comment.pb.validate.go similarity index 100% rename from 6_micro-cluster/shop_gw/api/comment/v1/comment.pb.validate.go rename to 6_micro-cluster/example-1-multi-repo/shop_gw/api/comment/v1/comment.pb.validate.go diff --git a/6_micro-cluster/shop_gw/api/comment/v1/comment.proto b/6_micro-cluster/example-1-multi-repo/shop_gw/api/comment/v1/comment.proto similarity index 100% rename from 6_micro-cluster/shop_gw/api/comment/v1/comment.proto rename to 6_micro-cluster/example-1-multi-repo/shop_gw/api/comment/v1/comment.proto diff --git a/6_micro-cluster/shop_gw/api/comment/v1/comment_grpc.pb.go b/6_micro-cluster/example-1-multi-repo/shop_gw/api/comment/v1/comment_grpc.pb.go similarity index 100% rename from 6_micro-cluster/shop_gw/api/comment/v1/comment_grpc.pb.go rename to 6_micro-cluster/example-1-multi-repo/shop_gw/api/comment/v1/comment_grpc.pb.go diff --git a/6_micro-cluster/shop_gw/api/inventory/v1/inventory.pb.go b/6_micro-cluster/example-1-multi-repo/shop_gw/api/inventory/v1/inventory.pb.go similarity index 100% rename from 6_micro-cluster/shop_gw/api/inventory/v1/inventory.pb.go rename to 6_micro-cluster/example-1-multi-repo/shop_gw/api/inventory/v1/inventory.pb.go diff --git a/6_micro-cluster/shop_gw/api/inventory/v1/inventory.pb.validate.go b/6_micro-cluster/example-1-multi-repo/shop_gw/api/inventory/v1/inventory.pb.validate.go similarity index 100% rename from 6_micro-cluster/shop_gw/api/inventory/v1/inventory.pb.validate.go rename to 6_micro-cluster/example-1-multi-repo/shop_gw/api/inventory/v1/inventory.pb.validate.go diff --git a/6_micro-cluster/shop_gw/api/inventory/v1/inventory.proto b/6_micro-cluster/example-1-multi-repo/shop_gw/api/inventory/v1/inventory.proto similarity index 100% rename from 6_micro-cluster/shop_gw/api/inventory/v1/inventory.proto rename to 6_micro-cluster/example-1-multi-repo/shop_gw/api/inventory/v1/inventory.proto diff --git a/6_micro-cluster/shop_gw/api/inventory/v1/inventory_grpc.pb.go b/6_micro-cluster/example-1-multi-repo/shop_gw/api/inventory/v1/inventory_grpc.pb.go similarity index 100% rename from 6_micro-cluster/shop_gw/api/inventory/v1/inventory_grpc.pb.go rename to 6_micro-cluster/example-1-multi-repo/shop_gw/api/inventory/v1/inventory_grpc.pb.go diff --git a/6_micro-cluster/shop_gw/api/product/v1/product.pb.go b/6_micro-cluster/example-1-multi-repo/shop_gw/api/product/v1/product.pb.go similarity index 100% rename from 6_micro-cluster/shop_gw/api/product/v1/product.pb.go rename to 6_micro-cluster/example-1-multi-repo/shop_gw/api/product/v1/product.pb.go diff --git a/6_micro-cluster/shop_gw/api/product/v1/product.pb.validate.go b/6_micro-cluster/example-1-multi-repo/shop_gw/api/product/v1/product.pb.validate.go similarity index 100% rename from 
6_micro-cluster/shop_gw/api/product/v1/product.pb.validate.go rename to 6_micro-cluster/example-1-multi-repo/shop_gw/api/product/v1/product.pb.validate.go diff --git a/6_micro-cluster/shop_gw/api/product/v1/product.proto b/6_micro-cluster/example-1-multi-repo/shop_gw/api/product/v1/product.proto similarity index 100% rename from 6_micro-cluster/shop_gw/api/product/v1/product.proto rename to 6_micro-cluster/example-1-multi-repo/shop_gw/api/product/v1/product.proto diff --git a/6_micro-cluster/shop_gw/api/product/v1/product_grpc.pb.go b/6_micro-cluster/example-1-multi-repo/shop_gw/api/product/v1/product_grpc.pb.go similarity index 100% rename from 6_micro-cluster/shop_gw/api/product/v1/product_grpc.pb.go rename to 6_micro-cluster/example-1-multi-repo/shop_gw/api/product/v1/product_grpc.pb.go diff --git a/6_micro-cluster/shop_gw/api/shop_gw/v1/shop_gw.pb.go b/6_micro-cluster/example-1-multi-repo/shop_gw/api/shop_gw/v1/shop_gw.pb.go similarity index 100% rename from 6_micro-cluster/shop_gw/api/shop_gw/v1/shop_gw.pb.go rename to 6_micro-cluster/example-1-multi-repo/shop_gw/api/shop_gw/v1/shop_gw.pb.go diff --git a/6_micro-cluster/shop_gw/api/shop_gw/v1/shop_gw.pb.validate.go b/6_micro-cluster/example-1-multi-repo/shop_gw/api/shop_gw/v1/shop_gw.pb.validate.go similarity index 100% rename from 6_micro-cluster/shop_gw/api/shop_gw/v1/shop_gw.pb.validate.go rename to 6_micro-cluster/example-1-multi-repo/shop_gw/api/shop_gw/v1/shop_gw.pb.validate.go diff --git a/6_micro-cluster/shop_gw/api/shop_gw/v1/shop_gw.proto b/6_micro-cluster/example-1-multi-repo/shop_gw/api/shop_gw/v1/shop_gw.proto similarity index 100% rename from 6_micro-cluster/shop_gw/api/shop_gw/v1/shop_gw.proto rename to 6_micro-cluster/example-1-multi-repo/shop_gw/api/shop_gw/v1/shop_gw.proto diff --git a/6_micro-cluster/shop_gw/api/shop_gw/v1/shop_gw_grpc.pb.go b/6_micro-cluster/example-1-multi-repo/shop_gw/api/shop_gw/v1/shop_gw_grpc.pb.go similarity index 100% rename from 6_micro-cluster/shop_gw/api/shop_gw/v1/shop_gw_grpc.pb.go rename to 6_micro-cluster/example-1-multi-repo/shop_gw/api/shop_gw/v1/shop_gw_grpc.pb.go diff --git a/6_micro-cluster/shop_gw/api/shop_gw/v1/shop_gw_router.pb.go b/6_micro-cluster/example-1-multi-repo/shop_gw/api/shop_gw/v1/shop_gw_router.pb.go similarity index 100% rename from 6_micro-cluster/shop_gw/api/shop_gw/v1/shop_gw_router.pb.go rename to 6_micro-cluster/example-1-multi-repo/shop_gw/api/shop_gw/v1/shop_gw_router.pb.go diff --git a/6_micro-cluster/shop_gw/cmd/shop_gw/initial/initApp.go b/6_micro-cluster/example-1-multi-repo/shop_gw/cmd/shop_gw/initial/initApp.go similarity index 100% rename from 6_micro-cluster/shop_gw/cmd/shop_gw/initial/initApp.go rename to 6_micro-cluster/example-1-multi-repo/shop_gw/cmd/shop_gw/initial/initApp.go diff --git a/6_micro-cluster/shop_gw/cmd/shop_gw/initial/registerClose.go b/6_micro-cluster/example-1-multi-repo/shop_gw/cmd/shop_gw/initial/registerClose.go similarity index 100% rename from 6_micro-cluster/shop_gw/cmd/shop_gw/initial/registerClose.go rename to 6_micro-cluster/example-1-multi-repo/shop_gw/cmd/shop_gw/initial/registerClose.go diff --git a/6_micro-cluster/shop_gw/cmd/shop_gw/initial/registerServer.go b/6_micro-cluster/example-1-multi-repo/shop_gw/cmd/shop_gw/initial/registerServer.go similarity index 100% rename from 6_micro-cluster/shop_gw/cmd/shop_gw/initial/registerServer.go rename to 6_micro-cluster/example-1-multi-repo/shop_gw/cmd/shop_gw/initial/registerServer.go diff --git a/6_micro-cluster/shop_gw/cmd/shop_gw/main.go 
b/6_micro-cluster/example-1-multi-repo/shop_gw/cmd/shop_gw/main.go similarity index 100% rename from 6_micro-cluster/shop_gw/cmd/shop_gw/main.go rename to 6_micro-cluster/example-1-multi-repo/shop_gw/cmd/shop_gw/main.go diff --git a/6_micro-cluster/shop_gw/configs/location.go b/6_micro-cluster/example-1-multi-repo/shop_gw/configs/location.go similarity index 100% rename from 6_micro-cluster/shop_gw/configs/location.go rename to 6_micro-cluster/example-1-multi-repo/shop_gw/configs/location.go diff --git a/6_micro-cluster/shop_gw/configs/shop_gw.yml b/6_micro-cluster/example-1-multi-repo/shop_gw/configs/shop_gw.yml similarity index 100% rename from 6_micro-cluster/shop_gw/configs/shop_gw.yml rename to 6_micro-cluster/example-1-multi-repo/shop_gw/configs/shop_gw.yml diff --git a/6_micro-cluster/shop_gw/configs/shop_gw_cc.yml b/6_micro-cluster/example-1-multi-repo/shop_gw/configs/shop_gw_cc.yml similarity index 100% rename from 6_micro-cluster/shop_gw/configs/shop_gw_cc.yml rename to 6_micro-cluster/example-1-multi-repo/shop_gw/configs/shop_gw_cc.yml diff --git a/6_micro-cluster/shop_gw/deployments/binary/README.md b/6_micro-cluster/example-1-multi-repo/shop_gw/deployments/binary/README.md similarity index 100% rename from 6_micro-cluster/shop_gw/deployments/binary/README.md rename to 6_micro-cluster/example-1-multi-repo/shop_gw/deployments/binary/README.md diff --git a/6_micro-cluster/shop_gw/deployments/binary/deploy.sh b/6_micro-cluster/example-1-multi-repo/shop_gw/deployments/binary/deploy.sh similarity index 100% rename from 6_micro-cluster/shop_gw/deployments/binary/deploy.sh rename to 6_micro-cluster/example-1-multi-repo/shop_gw/deployments/binary/deploy.sh diff --git a/6_micro-cluster/shop_gw/deployments/binary/run.sh b/6_micro-cluster/example-1-multi-repo/shop_gw/deployments/binary/run.sh similarity index 100% rename from 6_micro-cluster/shop_gw/deployments/binary/run.sh rename to 6_micro-cluster/example-1-multi-repo/shop_gw/deployments/binary/run.sh diff --git a/6_micro-cluster/shop_gw/deployments/docker-compose/README.md b/6_micro-cluster/example-1-multi-repo/shop_gw/deployments/docker-compose/README.md similarity index 100% rename from 6_micro-cluster/shop_gw/deployments/docker-compose/README.md rename to 6_micro-cluster/example-1-multi-repo/shop_gw/deployments/docker-compose/README.md diff --git a/6_micro-cluster/shop_gw/deployments/docker-compose/docker-compose.yml b/6_micro-cluster/example-1-multi-repo/shop_gw/deployments/docker-compose/docker-compose.yml similarity index 100% rename from 6_micro-cluster/shop_gw/deployments/docker-compose/docker-compose.yml rename to 6_micro-cluster/example-1-multi-repo/shop_gw/deployments/docker-compose/docker-compose.yml diff --git a/6_micro-cluster/shop_gw/deployments/kubernetes/README.md b/6_micro-cluster/example-1-multi-repo/shop_gw/deployments/kubernetes/README.md similarity index 100% rename from 6_micro-cluster/shop_gw/deployments/kubernetes/README.md rename to 6_micro-cluster/example-1-multi-repo/shop_gw/deployments/kubernetes/README.md diff --git a/6_micro-cluster/shop_gw/deployments/kubernetes/projectNameExample-namespace.yml b/6_micro-cluster/example-1-multi-repo/shop_gw/deployments/kubernetes/projectNameExample-namespace.yml similarity index 100% rename from 6_micro-cluster/shop_gw/deployments/kubernetes/projectNameExample-namespace.yml rename to 6_micro-cluster/example-1-multi-repo/shop_gw/deployments/kubernetes/projectNameExample-namespace.yml diff --git a/6_micro-cluster/shop_gw/deployments/kubernetes/shop_gw-configmap.yml 
b/6_micro-cluster/example-1-multi-repo/shop_gw/deployments/kubernetes/shop_gw-configmap.yml similarity index 100% rename from 6_micro-cluster/shop_gw/deployments/kubernetes/shop_gw-configmap.yml rename to 6_micro-cluster/example-1-multi-repo/shop_gw/deployments/kubernetes/shop_gw-configmap.yml diff --git a/6_micro-cluster/shop_gw/deployments/kubernetes/shop_gw-deployment.yml b/6_micro-cluster/example-1-multi-repo/shop_gw/deployments/kubernetes/shop_gw-deployment.yml similarity index 100% rename from 6_micro-cluster/shop_gw/deployments/kubernetes/shop_gw-deployment.yml rename to 6_micro-cluster/example-1-multi-repo/shop_gw/deployments/kubernetes/shop_gw-deployment.yml diff --git a/6_micro-cluster/shop_gw/deployments/kubernetes/shop_gw-svc.yml b/6_micro-cluster/example-1-multi-repo/shop_gw/deployments/kubernetes/shop_gw-svc.yml similarity index 100% rename from 6_micro-cluster/shop_gw/deployments/kubernetes/shop_gw-svc.yml rename to 6_micro-cluster/example-1-multi-repo/shop_gw/deployments/kubernetes/shop_gw-svc.yml diff --git a/6_micro-cluster/shop_gw/docs/apis.go b/6_micro-cluster/example-1-multi-repo/shop_gw/docs/apis.go similarity index 100% rename from 6_micro-cluster/shop_gw/docs/apis.go rename to 6_micro-cluster/example-1-multi-repo/shop_gw/docs/apis.go diff --git a/6_micro-cluster/shop_gw/docs/apis.swagger.json b/6_micro-cluster/example-1-multi-repo/shop_gw/docs/apis.swagger.json similarity index 100% rename from 6_micro-cluster/shop_gw/docs/apis.swagger.json rename to 6_micro-cluster/example-1-multi-repo/shop_gw/docs/apis.swagger.json diff --git a/6_micro-cluster/shop_gw/docs/gen.info b/6_micro-cluster/example-1-multi-repo/shop_gw/docs/gen.info similarity index 100% rename from 6_micro-cluster/shop_gw/docs/gen.info rename to 6_micro-cluster/example-1-multi-repo/shop_gw/docs/gen.info diff --git a/6_micro-cluster/shop_gw/go.mod b/6_micro-cluster/example-1-multi-repo/shop_gw/go.mod similarity index 100% rename from 6_micro-cluster/shop_gw/go.mod rename to 6_micro-cluster/example-1-multi-repo/shop_gw/go.mod diff --git a/6_micro-cluster/shop_gw/go.sum b/6_micro-cluster/example-1-multi-repo/shop_gw/go.sum similarity index 100% rename from 6_micro-cluster/shop_gw/go.sum rename to 6_micro-cluster/example-1-multi-repo/shop_gw/go.sum diff --git a/6_micro-cluster/shop_gw/internal/config/shop_gw.go b/6_micro-cluster/example-1-multi-repo/shop_gw/internal/config/shop_gw.go similarity index 100% rename from 6_micro-cluster/shop_gw/internal/config/shop_gw.go rename to 6_micro-cluster/example-1-multi-repo/shop_gw/internal/config/shop_gw.go diff --git a/6_micro-cluster/shop_gw/internal/config/shop_gw_cc.go b/6_micro-cluster/example-1-multi-repo/shop_gw/internal/config/shop_gw_cc.go similarity index 100% rename from 6_micro-cluster/shop_gw/internal/config/shop_gw_cc.go rename to 6_micro-cluster/example-1-multi-repo/shop_gw/internal/config/shop_gw_cc.go diff --git a/6_micro-cluster/shop_gw/internal/config/shop_gw_test.go b/6_micro-cluster/example-1-multi-repo/shop_gw/internal/config/shop_gw_test.go similarity index 100% rename from 6_micro-cluster/shop_gw/internal/config/shop_gw_test.go rename to 6_micro-cluster/example-1-multi-repo/shop_gw/internal/config/shop_gw_test.go diff --git a/6_micro-cluster/shop_gw/internal/ecode/shop_gw_rpc.go b/6_micro-cluster/example-1-multi-repo/shop_gw/internal/ecode/shop_gw_rpc.go similarity index 100% rename from 6_micro-cluster/shop_gw/internal/ecode/shop_gw_rpc.go rename to 6_micro-cluster/example-1-multi-repo/shop_gw/internal/ecode/shop_gw_rpc.go diff --git 
a/6_micro-cluster/shop_gw/internal/ecode/systemCode_rpc.go b/6_micro-cluster/example-1-multi-repo/shop_gw/internal/ecode/systemCode_rpc.go similarity index 100% rename from 6_micro-cluster/shop_gw/internal/ecode/systemCode_rpc.go rename to 6_micro-cluster/example-1-multi-repo/shop_gw/internal/ecode/systemCode_rpc.go diff --git a/6_micro-cluster/shop_gw/internal/routers/routers.go b/6_micro-cluster/example-1-multi-repo/shop_gw/internal/routers/routers.go similarity index 100% rename from 6_micro-cluster/shop_gw/internal/routers/routers.go rename to 6_micro-cluster/example-1-multi-repo/shop_gw/internal/routers/routers.go diff --git a/6_micro-cluster/shop_gw/internal/routers/shop_gw_router.go b/6_micro-cluster/example-1-multi-repo/shop_gw/internal/routers/shop_gw_router.go similarity index 100% rename from 6_micro-cluster/shop_gw/internal/routers/shop_gw_router.go rename to 6_micro-cluster/example-1-multi-repo/shop_gw/internal/routers/shop_gw_router.go diff --git a/6_micro-cluster/shop_gw/internal/rpcclient/comment.go b/6_micro-cluster/example-1-multi-repo/shop_gw/internal/rpcclient/comment.go similarity index 100% rename from 6_micro-cluster/shop_gw/internal/rpcclient/comment.go rename to 6_micro-cluster/example-1-multi-repo/shop_gw/internal/rpcclient/comment.go diff --git a/6_micro-cluster/shop_gw/internal/rpcclient/inventory.go b/6_micro-cluster/example-1-multi-repo/shop_gw/internal/rpcclient/inventory.go similarity index 100% rename from 6_micro-cluster/shop_gw/internal/rpcclient/inventory.go rename to 6_micro-cluster/example-1-multi-repo/shop_gw/internal/rpcclient/inventory.go diff --git a/6_micro-cluster/shop_gw/internal/rpcclient/product.go b/6_micro-cluster/example-1-multi-repo/shop_gw/internal/rpcclient/product.go similarity index 100% rename from 6_micro-cluster/shop_gw/internal/rpcclient/product.go rename to 6_micro-cluster/example-1-multi-repo/shop_gw/internal/rpcclient/product.go diff --git a/6_micro-cluster/shop_gw/internal/server/http.go b/6_micro-cluster/example-1-multi-repo/shop_gw/internal/server/http.go similarity index 100% rename from 6_micro-cluster/shop_gw/internal/server/http.go rename to 6_micro-cluster/example-1-multi-repo/shop_gw/internal/server/http.go diff --git a/6_micro-cluster/shop_gw/internal/server/http_option.go b/6_micro-cluster/example-1-multi-repo/shop_gw/internal/server/http_option.go similarity index 100% rename from 6_micro-cluster/shop_gw/internal/server/http_option.go rename to 6_micro-cluster/example-1-multi-repo/shop_gw/internal/server/http_option.go diff --git a/6_micro-cluster/shop_gw/internal/server/http_test.go b/6_micro-cluster/example-1-multi-repo/shop_gw/internal/server/http_test.go similarity index 100% rename from 6_micro-cluster/shop_gw/internal/server/http_test.go rename to 6_micro-cluster/example-1-multi-repo/shop_gw/internal/server/http_test.go diff --git a/6_micro-cluster/shop_gw/internal/service/shop_gw.go b/6_micro-cluster/example-1-multi-repo/shop_gw/internal/service/shop_gw.go similarity index 100% rename from 6_micro-cluster/shop_gw/internal/service/shop_gw.go rename to 6_micro-cluster/example-1-multi-repo/shop_gw/internal/service/shop_gw.go diff --git a/6_micro-cluster/shop_gw/scripts/binary-package.sh b/6_micro-cluster/example-1-multi-repo/shop_gw/scripts/binary-package.sh similarity index 100% rename from 6_micro-cluster/shop_gw/scripts/binary-package.sh rename to 6_micro-cluster/example-1-multi-repo/shop_gw/scripts/binary-package.sh diff --git a/6_micro-cluster/shop_gw/scripts/build/Dockerfile 
b/6_micro-cluster/example-1-multi-repo/shop_gw/scripts/build/Dockerfile similarity index 100% rename from 6_micro-cluster/shop_gw/scripts/build/Dockerfile rename to 6_micro-cluster/example-1-multi-repo/shop_gw/scripts/build/Dockerfile diff --git a/6_micro-cluster/shop_gw/scripts/build/Dockerfile_build b/6_micro-cluster/example-1-multi-repo/shop_gw/scripts/build/Dockerfile_build similarity index 100% rename from 6_micro-cluster/shop_gw/scripts/build/Dockerfile_build rename to 6_micro-cluster/example-1-multi-repo/shop_gw/scripts/build/Dockerfile_build diff --git a/6_micro-cluster/shop_gw/scripts/build/Dockerfile_test b/6_micro-cluster/example-1-multi-repo/shop_gw/scripts/build/Dockerfile_test similarity index 100% rename from 6_micro-cluster/shop_gw/scripts/build/Dockerfile_test rename to 6_micro-cluster/example-1-multi-repo/shop_gw/scripts/build/Dockerfile_test diff --git a/6_micro-cluster/shop_gw/scripts/build/README.md b/6_micro-cluster/example-1-multi-repo/shop_gw/scripts/build/README.md similarity index 100% rename from 6_micro-cluster/shop_gw/scripts/build/README.md rename to 6_micro-cluster/example-1-multi-repo/shop_gw/scripts/build/README.md diff --git a/6_micro-cluster/shop_gw/scripts/deploy-binary.sh b/6_micro-cluster/example-1-multi-repo/shop_gw/scripts/deploy-binary.sh similarity index 100% rename from 6_micro-cluster/shop_gw/scripts/deploy-binary.sh rename to 6_micro-cluster/example-1-multi-repo/shop_gw/scripts/deploy-binary.sh diff --git a/6_micro-cluster/shop_gw/scripts/deploy-docker.sh b/6_micro-cluster/example-1-multi-repo/shop_gw/scripts/deploy-docker.sh similarity index 100% rename from 6_micro-cluster/shop_gw/scripts/deploy-docker.sh rename to 6_micro-cluster/example-1-multi-repo/shop_gw/scripts/deploy-docker.sh diff --git a/6_micro-cluster/shop_gw/scripts/deploy-k8s.sh b/6_micro-cluster/example-1-multi-repo/shop_gw/scripts/deploy-k8s.sh similarity index 100% rename from 6_micro-cluster/shop_gw/scripts/deploy-k8s.sh rename to 6_micro-cluster/example-1-multi-repo/shop_gw/scripts/deploy-k8s.sh diff --git a/6_micro-cluster/shop_gw/scripts/image-build-local.sh b/6_micro-cluster/example-1-multi-repo/shop_gw/scripts/image-build-local.sh similarity index 100% rename from 6_micro-cluster/shop_gw/scripts/image-build-local.sh rename to 6_micro-cluster/example-1-multi-repo/shop_gw/scripts/image-build-local.sh diff --git a/6_micro-cluster/shop_gw/scripts/image-build.sh b/6_micro-cluster/example-1-multi-repo/shop_gw/scripts/image-build.sh similarity index 100% rename from 6_micro-cluster/shop_gw/scripts/image-build.sh rename to 6_micro-cluster/example-1-multi-repo/shop_gw/scripts/image-build.sh diff --git a/6_micro-cluster/shop_gw/scripts/image-build2.sh b/6_micro-cluster/example-1-multi-repo/shop_gw/scripts/image-build2.sh similarity index 100% rename from 6_micro-cluster/shop_gw/scripts/image-build2.sh rename to 6_micro-cluster/example-1-multi-repo/shop_gw/scripts/image-build2.sh diff --git a/6_micro-cluster/shop_gw/scripts/image-push.sh b/6_micro-cluster/example-1-multi-repo/shop_gw/scripts/image-push.sh similarity index 100% rename from 6_micro-cluster/shop_gw/scripts/image-push.sh rename to 6_micro-cluster/example-1-multi-repo/shop_gw/scripts/image-push.sh diff --git a/6_micro-cluster/shop_gw/scripts/image-rpc-test.sh b/6_micro-cluster/example-1-multi-repo/shop_gw/scripts/image-rpc-test.sh similarity index 100% rename from 6_micro-cluster/shop_gw/scripts/image-rpc-test.sh rename to 6_micro-cluster/example-1-multi-repo/shop_gw/scripts/image-rpc-test.sh diff --git 
a/6_micro-cluster/shop_gw/scripts/patch.sh b/6_micro-cluster/example-1-multi-repo/shop_gw/scripts/patch.sh similarity index 100% rename from 6_micro-cluster/shop_gw/scripts/patch.sh rename to 6_micro-cluster/example-1-multi-repo/shop_gw/scripts/patch.sh diff --git a/6_micro-cluster/shop_gw/scripts/proto-doc.sh b/6_micro-cluster/example-1-multi-repo/shop_gw/scripts/proto-doc.sh similarity index 100% rename from 6_micro-cluster/shop_gw/scripts/proto-doc.sh rename to 6_micro-cluster/example-1-multi-repo/shop_gw/scripts/proto-doc.sh diff --git a/6_micro-cluster/shop_gw/scripts/protoc.sh b/6_micro-cluster/example-1-multi-repo/shop_gw/scripts/protoc.sh similarity index 100% rename from 6_micro-cluster/shop_gw/scripts/protoc.sh rename to 6_micro-cluster/example-1-multi-repo/shop_gw/scripts/protoc.sh diff --git a/6_micro-cluster/shop_gw/scripts/run-nohup.sh b/6_micro-cluster/example-1-multi-repo/shop_gw/scripts/run-nohup.sh similarity index 100% rename from 6_micro-cluster/shop_gw/scripts/run-nohup.sh rename to 6_micro-cluster/example-1-multi-repo/shop_gw/scripts/run-nohup.sh diff --git a/6_micro-cluster/shop_gw/scripts/run.sh b/6_micro-cluster/example-1-multi-repo/shop_gw/scripts/run.sh similarity index 100% rename from 6_micro-cluster/shop_gw/scripts/run.sh rename to 6_micro-cluster/example-1-multi-repo/shop_gw/scripts/run.sh diff --git a/6_micro-cluster/shop_gw/scripts/swag-docs.sh b/6_micro-cluster/example-1-multi-repo/shop_gw/scripts/swag-docs.sh similarity index 100% rename from 6_micro-cluster/shop_gw/scripts/swag-docs.sh rename to 6_micro-cluster/example-1-multi-repo/shop_gw/scripts/swag-docs.sh diff --git a/6_micro-cluster/shop_gw/third_party/gogo/protobuf/gogoproto/gogo.proto b/6_micro-cluster/example-1-multi-repo/shop_gw/third_party/gogo/protobuf/gogoproto/gogo.proto similarity index 100% rename from 6_micro-cluster/shop_gw/third_party/gogo/protobuf/gogoproto/gogo.proto rename to 6_micro-cluster/example-1-multi-repo/shop_gw/third_party/gogo/protobuf/gogoproto/gogo.proto diff --git a/6_micro-cluster/shop_gw/third_party/google/api/BUILD.bazel b/6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/api/BUILD.bazel similarity index 100% rename from 6_micro-cluster/shop_gw/third_party/google/api/BUILD.bazel rename to 6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/api/BUILD.bazel diff --git a/6_micro-cluster/shop_gw/third_party/google/api/README.md b/6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/api/README.md similarity index 100% rename from 6_micro-cluster/shop_gw/third_party/google/api/README.md rename to 6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/api/README.md diff --git a/6_micro-cluster/shop_gw/third_party/google/api/annotations.proto b/6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/api/annotations.proto similarity index 100% rename from 6_micro-cluster/shop_gw/third_party/google/api/annotations.proto rename to 6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/api/annotations.proto diff --git a/6_micro-cluster/shop_gw/third_party/google/api/auth.proto b/6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/api/auth.proto similarity index 100% rename from 6_micro-cluster/shop_gw/third_party/google/api/auth.proto rename to 6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/api/auth.proto diff --git a/6_micro-cluster/shop_gw/third_party/google/api/backend.proto b/6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/api/backend.proto similarity index 100% rename 
from 6_micro-cluster/shop_gw/third_party/google/api/backend.proto rename to 6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/api/backend.proto diff --git a/6_micro-cluster/shop_gw/third_party/google/api/billing.proto b/6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/api/billing.proto similarity index 100% rename from 6_micro-cluster/shop_gw/third_party/google/api/billing.proto rename to 6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/api/billing.proto diff --git a/6_micro-cluster/shop_gw/third_party/google/api/client.proto b/6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/api/client.proto similarity index 100% rename from 6_micro-cluster/shop_gw/third_party/google/api/client.proto rename to 6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/api/client.proto diff --git a/6_micro-cluster/shop_gw/third_party/google/api/config_change.proto b/6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/api/config_change.proto similarity index 100% rename from 6_micro-cluster/shop_gw/third_party/google/api/config_change.proto rename to 6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/api/config_change.proto diff --git a/6_micro-cluster/shop_gw/third_party/google/api/consumer.proto b/6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/api/consumer.proto similarity index 100% rename from 6_micro-cluster/shop_gw/third_party/google/api/consumer.proto rename to 6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/api/consumer.proto diff --git a/6_micro-cluster/shop_gw/third_party/google/api/context.proto b/6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/api/context.proto similarity index 100% rename from 6_micro-cluster/shop_gw/third_party/google/api/context.proto rename to 6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/api/context.proto diff --git a/6_micro-cluster/shop_gw/third_party/google/api/control.proto b/6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/api/control.proto similarity index 100% rename from 6_micro-cluster/shop_gw/third_party/google/api/control.proto rename to 6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/api/control.proto diff --git a/6_micro-cluster/shop_gw/third_party/google/api/distribution.proto b/6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/api/distribution.proto similarity index 100% rename from 6_micro-cluster/shop_gw/third_party/google/api/distribution.proto rename to 6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/api/distribution.proto diff --git a/6_micro-cluster/shop_gw/third_party/google/api/documentation.proto b/6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/api/documentation.proto similarity index 100% rename from 6_micro-cluster/shop_gw/third_party/google/api/documentation.proto rename to 6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/api/documentation.proto diff --git a/6_micro-cluster/shop_gw/third_party/google/api/endpoint.proto b/6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/api/endpoint.proto similarity index 100% rename from 6_micro-cluster/shop_gw/third_party/google/api/endpoint.proto rename to 6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/api/endpoint.proto diff --git a/6_micro-cluster/shop_gw/third_party/google/api/expr/BUILD.bazel b/6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/api/expr/BUILD.bazel similarity index 100% rename from 
6_micro-cluster/shop_gw/third_party/google/api/expr/BUILD.bazel rename to 6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/api/expr/BUILD.bazel diff --git a/6_micro-cluster/shop_gw/third_party/google/api/expr/cel.yaml b/6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/api/expr/cel.yaml similarity index 100% rename from 6_micro-cluster/shop_gw/third_party/google/api/expr/cel.yaml rename to 6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/api/expr/cel.yaml diff --git a/6_micro-cluster/shop_gw/third_party/google/api/expr/v1alpha1/BUILD.bazel b/6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/api/expr/v1alpha1/BUILD.bazel similarity index 100% rename from 6_micro-cluster/shop_gw/third_party/google/api/expr/v1alpha1/BUILD.bazel rename to 6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/api/expr/v1alpha1/BUILD.bazel diff --git a/6_micro-cluster/shop_gw/third_party/google/api/expr/v1alpha1/checked.proto b/6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/api/expr/v1alpha1/checked.proto similarity index 100% rename from 6_micro-cluster/shop_gw/third_party/google/api/expr/v1alpha1/checked.proto rename to 6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/api/expr/v1alpha1/checked.proto diff --git a/6_micro-cluster/shop_gw/third_party/google/api/expr/v1alpha1/conformance_service.proto b/6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/api/expr/v1alpha1/conformance_service.proto similarity index 100% rename from 6_micro-cluster/shop_gw/third_party/google/api/expr/v1alpha1/conformance_service.proto rename to 6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/api/expr/v1alpha1/conformance_service.proto diff --git a/6_micro-cluster/shop_gw/third_party/google/api/expr/v1alpha1/eval.proto b/6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/api/expr/v1alpha1/eval.proto similarity index 100% rename from 6_micro-cluster/shop_gw/third_party/google/api/expr/v1alpha1/eval.proto rename to 6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/api/expr/v1alpha1/eval.proto diff --git a/6_micro-cluster/shop_gw/third_party/google/api/expr/v1alpha1/explain.proto b/6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/api/expr/v1alpha1/explain.proto similarity index 100% rename from 6_micro-cluster/shop_gw/third_party/google/api/expr/v1alpha1/explain.proto rename to 6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/api/expr/v1alpha1/explain.proto diff --git a/6_micro-cluster/shop_gw/third_party/google/api/expr/v1alpha1/syntax.proto b/6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/api/expr/v1alpha1/syntax.proto similarity index 100% rename from 6_micro-cluster/shop_gw/third_party/google/api/expr/v1alpha1/syntax.proto rename to 6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/api/expr/v1alpha1/syntax.proto diff --git a/6_micro-cluster/shop_gw/third_party/google/api/expr/v1alpha1/value.proto b/6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/api/expr/v1alpha1/value.proto similarity index 100% rename from 6_micro-cluster/shop_gw/third_party/google/api/expr/v1alpha1/value.proto rename to 6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/api/expr/v1alpha1/value.proto diff --git a/6_micro-cluster/shop_gw/third_party/google/api/expr/v1beta1/BUILD.bazel b/6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/api/expr/v1beta1/BUILD.bazel similarity index 100% rename from 
6_micro-cluster/shop_gw/third_party/google/api/expr/v1beta1/BUILD.bazel rename to 6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/api/expr/v1beta1/BUILD.bazel diff --git a/6_micro-cluster/shop_gw/third_party/google/api/expr/v1beta1/decl.proto b/6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/api/expr/v1beta1/decl.proto similarity index 100% rename from 6_micro-cluster/shop_gw/third_party/google/api/expr/v1beta1/decl.proto rename to 6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/api/expr/v1beta1/decl.proto diff --git a/6_micro-cluster/shop_gw/third_party/google/api/expr/v1beta1/eval.proto b/6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/api/expr/v1beta1/eval.proto similarity index 100% rename from 6_micro-cluster/shop_gw/third_party/google/api/expr/v1beta1/eval.proto rename to 6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/api/expr/v1beta1/eval.proto diff --git a/6_micro-cluster/shop_gw/third_party/google/api/expr/v1beta1/expr.proto b/6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/api/expr/v1beta1/expr.proto similarity index 100% rename from 6_micro-cluster/shop_gw/third_party/google/api/expr/v1beta1/expr.proto rename to 6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/api/expr/v1beta1/expr.proto diff --git a/6_micro-cluster/shop_gw/third_party/google/api/expr/v1beta1/source.proto b/6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/api/expr/v1beta1/source.proto similarity index 100% rename from 6_micro-cluster/shop_gw/third_party/google/api/expr/v1beta1/source.proto rename to 6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/api/expr/v1beta1/source.proto diff --git a/6_micro-cluster/shop_gw/third_party/google/api/expr/v1beta1/value.proto b/6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/api/expr/v1beta1/value.proto similarity index 100% rename from 6_micro-cluster/shop_gw/third_party/google/api/expr/v1beta1/value.proto rename to 6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/api/expr/v1beta1/value.proto diff --git a/6_micro-cluster/shop_gw/third_party/google/api/field_behavior.proto b/6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/api/field_behavior.proto similarity index 100% rename from 6_micro-cluster/shop_gw/third_party/google/api/field_behavior.proto rename to 6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/api/field_behavior.proto diff --git a/6_micro-cluster/shop_gw/third_party/google/api/http.proto b/6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/api/http.proto similarity index 100% rename from 6_micro-cluster/shop_gw/third_party/google/api/http.proto rename to 6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/api/http.proto diff --git a/6_micro-cluster/shop_gw/third_party/google/api/httpbody.proto b/6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/api/httpbody.proto similarity index 100% rename from 6_micro-cluster/shop_gw/third_party/google/api/httpbody.proto rename to 6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/api/httpbody.proto diff --git a/6_micro-cluster/shop_gw/third_party/google/api/label.proto b/6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/api/label.proto similarity index 100% rename from 6_micro-cluster/shop_gw/third_party/google/api/label.proto rename to 6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/api/label.proto diff --git 
a/6_micro-cluster/shop_gw/third_party/google/api/launch_stage.proto b/6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/api/launch_stage.proto similarity index 100% rename from 6_micro-cluster/shop_gw/third_party/google/api/launch_stage.proto rename to 6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/api/launch_stage.proto diff --git a/6_micro-cluster/shop_gw/third_party/google/api/log.proto b/6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/api/log.proto similarity index 100% rename from 6_micro-cluster/shop_gw/third_party/google/api/log.proto rename to 6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/api/log.proto diff --git a/6_micro-cluster/shop_gw/third_party/google/api/logging.proto b/6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/api/logging.proto similarity index 100% rename from 6_micro-cluster/shop_gw/third_party/google/api/logging.proto rename to 6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/api/logging.proto diff --git a/6_micro-cluster/shop_gw/third_party/google/api/metric.proto b/6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/api/metric.proto similarity index 100% rename from 6_micro-cluster/shop_gw/third_party/google/api/metric.proto rename to 6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/api/metric.proto diff --git a/6_micro-cluster/shop_gw/third_party/google/api/monitored_resource.proto b/6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/api/monitored_resource.proto similarity index 100% rename from 6_micro-cluster/shop_gw/third_party/google/api/monitored_resource.proto rename to 6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/api/monitored_resource.proto diff --git a/6_micro-cluster/shop_gw/third_party/google/api/monitoring.proto b/6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/api/monitoring.proto similarity index 100% rename from 6_micro-cluster/shop_gw/third_party/google/api/monitoring.proto rename to 6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/api/monitoring.proto diff --git a/6_micro-cluster/shop_gw/third_party/google/api/quota.proto b/6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/api/quota.proto similarity index 100% rename from 6_micro-cluster/shop_gw/third_party/google/api/quota.proto rename to 6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/api/quota.proto diff --git a/6_micro-cluster/shop_gw/third_party/google/api/resource.proto b/6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/api/resource.proto similarity index 100% rename from 6_micro-cluster/shop_gw/third_party/google/api/resource.proto rename to 6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/api/resource.proto diff --git a/6_micro-cluster/shop_gw/third_party/google/api/service.proto b/6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/api/service.proto similarity index 100% rename from 6_micro-cluster/shop_gw/third_party/google/api/service.proto rename to 6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/api/service.proto diff --git a/6_micro-cluster/shop_gw/third_party/google/api/serviceconfig.yaml b/6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/api/serviceconfig.yaml similarity index 100% rename from 6_micro-cluster/shop_gw/third_party/google/api/serviceconfig.yaml rename to 6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/api/serviceconfig.yaml diff --git 
a/6_micro-cluster/shop_gw/third_party/google/api/servicecontrol/BUILD.bazel b/6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/api/servicecontrol/BUILD.bazel similarity index 100% rename from 6_micro-cluster/shop_gw/third_party/google/api/servicecontrol/BUILD.bazel rename to 6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/api/servicecontrol/BUILD.bazel diff --git a/6_micro-cluster/shop_gw/third_party/google/api/servicecontrol/README.md b/6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/api/servicecontrol/README.md similarity index 100% rename from 6_micro-cluster/shop_gw/third_party/google/api/servicecontrol/README.md rename to 6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/api/servicecontrol/README.md diff --git a/6_micro-cluster/shop_gw/third_party/google/api/servicecontrol/v1/BUILD.bazel b/6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/api/servicecontrol/v1/BUILD.bazel similarity index 100% rename from 6_micro-cluster/shop_gw/third_party/google/api/servicecontrol/v1/BUILD.bazel rename to 6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/api/servicecontrol/v1/BUILD.bazel diff --git a/6_micro-cluster/shop_gw/third_party/google/api/servicecontrol/v1/check_error.proto b/6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/api/servicecontrol/v1/check_error.proto similarity index 100% rename from 6_micro-cluster/shop_gw/third_party/google/api/servicecontrol/v1/check_error.proto rename to 6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/api/servicecontrol/v1/check_error.proto diff --git a/6_micro-cluster/shop_gw/third_party/google/api/servicecontrol/v1/distribution.proto b/6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/api/servicecontrol/v1/distribution.proto similarity index 100% rename from 6_micro-cluster/shop_gw/third_party/google/api/servicecontrol/v1/distribution.proto rename to 6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/api/servicecontrol/v1/distribution.proto diff --git a/6_micro-cluster/shop_gw/third_party/google/api/servicecontrol/v1/http_request.proto b/6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/api/servicecontrol/v1/http_request.proto similarity index 100% rename from 6_micro-cluster/shop_gw/third_party/google/api/servicecontrol/v1/http_request.proto rename to 6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/api/servicecontrol/v1/http_request.proto diff --git a/6_micro-cluster/shop_gw/third_party/google/api/servicecontrol/v1/log_entry.proto b/6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/api/servicecontrol/v1/log_entry.proto similarity index 100% rename from 6_micro-cluster/shop_gw/third_party/google/api/servicecontrol/v1/log_entry.proto rename to 6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/api/servicecontrol/v1/log_entry.proto diff --git a/6_micro-cluster/shop_gw/third_party/google/api/servicecontrol/v1/metric_value.proto b/6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/api/servicecontrol/v1/metric_value.proto similarity index 100% rename from 6_micro-cluster/shop_gw/third_party/google/api/servicecontrol/v1/metric_value.proto rename to 6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/api/servicecontrol/v1/metric_value.proto diff --git a/6_micro-cluster/shop_gw/third_party/google/api/servicecontrol/v1/operation.proto b/6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/api/servicecontrol/v1/operation.proto 
similarity index 100% rename from 6_micro-cluster/shop_gw/third_party/google/api/servicecontrol/v1/operation.proto rename to 6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/api/servicecontrol/v1/operation.proto diff --git a/6_micro-cluster/shop_gw/third_party/google/api/servicecontrol/v1/quota_controller.proto b/6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/api/servicecontrol/v1/quota_controller.proto similarity index 100% rename from 6_micro-cluster/shop_gw/third_party/google/api/servicecontrol/v1/quota_controller.proto rename to 6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/api/servicecontrol/v1/quota_controller.proto diff --git a/6_micro-cluster/shop_gw/third_party/google/api/servicecontrol/v1/service_controller.proto b/6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/api/servicecontrol/v1/service_controller.proto similarity index 100% rename from 6_micro-cluster/shop_gw/third_party/google/api/servicecontrol/v1/service_controller.proto rename to 6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/api/servicecontrol/v1/service_controller.proto diff --git a/6_micro-cluster/shop_gw/third_party/google/api/servicecontrol/v1/servicecontrol.yaml b/6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/api/servicecontrol/v1/servicecontrol.yaml similarity index 100% rename from 6_micro-cluster/shop_gw/third_party/google/api/servicecontrol/v1/servicecontrol.yaml rename to 6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/api/servicecontrol/v1/servicecontrol.yaml diff --git a/6_micro-cluster/shop_gw/third_party/google/api/servicemanagement/BUILD.bazel b/6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/api/servicemanagement/BUILD.bazel similarity index 100% rename from 6_micro-cluster/shop_gw/third_party/google/api/servicemanagement/BUILD.bazel rename to 6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/api/servicemanagement/BUILD.bazel diff --git a/6_micro-cluster/shop_gw/third_party/google/api/servicemanagement/README.md b/6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/api/servicemanagement/README.md similarity index 100% rename from 6_micro-cluster/shop_gw/third_party/google/api/servicemanagement/README.md rename to 6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/api/servicemanagement/README.md diff --git a/6_micro-cluster/shop_gw/third_party/google/api/servicemanagement/v1/BUILD.bazel b/6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/api/servicemanagement/v1/BUILD.bazel similarity index 100% rename from 6_micro-cluster/shop_gw/third_party/google/api/servicemanagement/v1/BUILD.bazel rename to 6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/api/servicemanagement/v1/BUILD.bazel diff --git a/6_micro-cluster/shop_gw/third_party/google/api/servicemanagement/v1/resources.proto b/6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/api/servicemanagement/v1/resources.proto similarity index 100% rename from 6_micro-cluster/shop_gw/third_party/google/api/servicemanagement/v1/resources.proto rename to 6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/api/servicemanagement/v1/resources.proto diff --git a/6_micro-cluster/shop_gw/third_party/google/api/servicemanagement/v1/servicemanagement_gapic.legacy.yaml b/6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/api/servicemanagement/v1/servicemanagement_gapic.legacy.yaml similarity index 100% rename from 
6_micro-cluster/shop_gw/third_party/google/api/servicemanagement/v1/servicemanagement_gapic.legacy.yaml rename to 6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/api/servicemanagement/v1/servicemanagement_gapic.legacy.yaml diff --git a/6_micro-cluster/shop_gw/third_party/google/api/servicemanagement/v1/servicemanagement_gapic.yaml b/6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/api/servicemanagement/v1/servicemanagement_gapic.yaml similarity index 100% rename from 6_micro-cluster/shop_gw/third_party/google/api/servicemanagement/v1/servicemanagement_gapic.yaml rename to 6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/api/servicemanagement/v1/servicemanagement_gapic.yaml diff --git a/6_micro-cluster/shop_gw/third_party/google/api/servicemanagement/v1/servicemanagement_grpc_service_config.json b/6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/api/servicemanagement/v1/servicemanagement_grpc_service_config.json similarity index 100% rename from 6_micro-cluster/shop_gw/third_party/google/api/servicemanagement/v1/servicemanagement_grpc_service_config.json rename to 6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/api/servicemanagement/v1/servicemanagement_grpc_service_config.json diff --git a/6_micro-cluster/shop_gw/third_party/google/api/servicemanagement/v1/servicemanagement_v1.yaml b/6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/api/servicemanagement/v1/servicemanagement_v1.yaml similarity index 100% rename from 6_micro-cluster/shop_gw/third_party/google/api/servicemanagement/v1/servicemanagement_v1.yaml rename to 6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/api/servicemanagement/v1/servicemanagement_v1.yaml diff --git a/6_micro-cluster/shop_gw/third_party/google/api/servicemanagement/v1/servicemanager.proto b/6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/api/servicemanagement/v1/servicemanager.proto similarity index 100% rename from 6_micro-cluster/shop_gw/third_party/google/api/servicemanagement/v1/servicemanager.proto rename to 6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/api/servicemanagement/v1/servicemanager.proto diff --git a/6_micro-cluster/shop_gw/third_party/google/api/source_info.proto b/6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/api/source_info.proto similarity index 100% rename from 6_micro-cluster/shop_gw/third_party/google/api/source_info.proto rename to 6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/api/source_info.proto diff --git a/6_micro-cluster/shop_gw/third_party/google/api/system_parameter.proto b/6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/api/system_parameter.proto similarity index 100% rename from 6_micro-cluster/shop_gw/third_party/google/api/system_parameter.proto rename to 6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/api/system_parameter.proto diff --git a/6_micro-cluster/shop_gw/third_party/google/api/usage.proto b/6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/api/usage.proto similarity index 100% rename from 6_micro-cluster/shop_gw/third_party/google/api/usage.proto rename to 6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/api/usage.proto diff --git a/6_micro-cluster/shop_gw/third_party/google/protobuf/annotations.proto b/6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/protobuf/annotations.proto similarity index 100% rename from 
6_micro-cluster/shop_gw/third_party/google/protobuf/annotations.proto rename to 6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/protobuf/annotations.proto diff --git a/6_micro-cluster/shop_gw/third_party/google/protobuf/any.proto b/6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/protobuf/any.proto similarity index 100% rename from 6_micro-cluster/shop_gw/third_party/google/protobuf/any.proto rename to 6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/protobuf/any.proto diff --git a/6_micro-cluster/shop_gw/third_party/google/protobuf/api.proto b/6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/protobuf/api.proto similarity index 100% rename from 6_micro-cluster/shop_gw/third_party/google/protobuf/api.proto rename to 6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/protobuf/api.proto diff --git a/6_micro-cluster/shop_gw/third_party/google/protobuf/compiler/plugin.proto b/6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/protobuf/compiler/plugin.proto similarity index 100% rename from 6_micro-cluster/shop_gw/third_party/google/protobuf/compiler/plugin.proto rename to 6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/protobuf/compiler/plugin.proto diff --git a/6_micro-cluster/shop_gw/third_party/google/protobuf/descriptor.proto b/6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/protobuf/descriptor.proto similarity index 100% rename from 6_micro-cluster/shop_gw/third_party/google/protobuf/descriptor.proto rename to 6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/protobuf/descriptor.proto diff --git a/6_micro-cluster/shop_gw/third_party/google/protobuf/duration.proto b/6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/protobuf/duration.proto similarity index 100% rename from 6_micro-cluster/shop_gw/third_party/google/protobuf/duration.proto rename to 6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/protobuf/duration.proto diff --git a/6_micro-cluster/shop_gw/third_party/google/protobuf/empty.proto b/6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/protobuf/empty.proto similarity index 100% rename from 6_micro-cluster/shop_gw/third_party/google/protobuf/empty.proto rename to 6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/protobuf/empty.proto diff --git a/6_micro-cluster/shop_gw/third_party/google/protobuf/field_mask.proto b/6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/protobuf/field_mask.proto similarity index 100% rename from 6_micro-cluster/shop_gw/third_party/google/protobuf/field_mask.proto rename to 6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/protobuf/field_mask.proto diff --git a/6_micro-cluster/shop_gw/third_party/google/protobuf/source_context.proto b/6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/protobuf/source_context.proto similarity index 100% rename from 6_micro-cluster/shop_gw/third_party/google/protobuf/source_context.proto rename to 6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/protobuf/source_context.proto diff --git a/6_micro-cluster/shop_gw/third_party/google/protobuf/struct.proto b/6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/protobuf/struct.proto similarity index 100% rename from 6_micro-cluster/shop_gw/third_party/google/protobuf/struct.proto rename to 6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/protobuf/struct.proto diff --git 
a/6_micro-cluster/shop_gw/third_party/google/protobuf/timestamp.proto b/6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/protobuf/timestamp.proto similarity index 100% rename from 6_micro-cluster/shop_gw/third_party/google/protobuf/timestamp.proto rename to 6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/protobuf/timestamp.proto diff --git a/6_micro-cluster/shop_gw/third_party/google/protobuf/type.proto b/6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/protobuf/type.proto similarity index 100% rename from 6_micro-cluster/shop_gw/third_party/google/protobuf/type.proto rename to 6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/protobuf/type.proto diff --git a/6_micro-cluster/shop_gw/third_party/google/protobuf/wrappers.proto b/6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/protobuf/wrappers.proto similarity index 100% rename from 6_micro-cluster/shop_gw/third_party/google/protobuf/wrappers.proto rename to 6_micro-cluster/example-1-multi-repo/shop_gw/third_party/google/protobuf/wrappers.proto diff --git a/6_micro-cluster/shop_gw/third_party/protoc-gen-openapiv2/options/annotations.proto b/6_micro-cluster/example-1-multi-repo/shop_gw/third_party/protoc-gen-openapiv2/options/annotations.proto similarity index 100% rename from 6_micro-cluster/shop_gw/third_party/protoc-gen-openapiv2/options/annotations.proto rename to 6_micro-cluster/example-1-multi-repo/shop_gw/third_party/protoc-gen-openapiv2/options/annotations.proto diff --git a/6_micro-cluster/shop_gw/third_party/protoc-gen-openapiv2/options/openapiv2.proto b/6_micro-cluster/example-1-multi-repo/shop_gw/third_party/protoc-gen-openapiv2/options/openapiv2.proto similarity index 100% rename from 6_micro-cluster/shop_gw/third_party/protoc-gen-openapiv2/options/openapiv2.proto rename to 6_micro-cluster/example-1-multi-repo/shop_gw/third_party/protoc-gen-openapiv2/options/openapiv2.proto diff --git a/6_micro-cluster/shop_gw/third_party/tagger/tagger.proto b/6_micro-cluster/example-1-multi-repo/shop_gw/third_party/tagger/tagger.proto similarity index 100% rename from 6_micro-cluster/shop_gw/third_party/tagger/tagger.proto rename to 6_micro-cluster/example-1-multi-repo/shop_gw/third_party/tagger/tagger.proto diff --git a/6_micro-cluster/shop_gw/third_party/validate/validate.proto b/6_micro-cluster/example-1-multi-repo/shop_gw/third_party/validate/validate.proto similarity index 100% rename from 6_micro-cluster/shop_gw/third_party/validate/validate.proto rename to 6_micro-cluster/example-1-multi-repo/shop_gw/third_party/validate/validate.proto diff --git a/6_micro-cluster/example-2-mono-repo/api/comment/v1/comment.pb.go b/6_micro-cluster/example-2-mono-repo/api/comment/v1/comment.pb.go new file mode 100644 index 0000000..4da2db8 --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/api/comment/v1/comment.pb.go @@ -0,0 +1,325 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// versions: +// protoc-gen-go v1.28.0 +// protoc v4.25.2 +// source: api/comment/v1/comment.proto + +package v1 + +import ( + + protoreflect "google.golang.org/protobuf/reflect/protoreflect" + protoimpl "google.golang.org/protobuf/runtime/protoimpl" + reflect "reflect" + sync "sync" +) + +const ( + // Verify that this generated code is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) + // Verify that runtime/protoimpl is sufficiently up-to-date. 
+	_ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20)
+)
+
+type ListByProductIDRequest struct {
+	state         protoimpl.MessageState
+	sizeCache     protoimpl.SizeCache
+	unknownFields protoimpl.UnknownFields
+
+	ProductID uint64 `protobuf:"varint,1,opt,name=productID,proto3" json:"productID"`
+}
+
+func (x *ListByProductIDRequest) Reset() {
+	*x = ListByProductIDRequest{}
+	if protoimpl.UnsafeEnabled {
+		mi := &file_api_comment_v1_comment_proto_msgTypes[0]
+		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+		ms.StoreMessageInfo(mi)
+	}
+}
+
+func (x *ListByProductIDRequest) String() string {
+	return protoimpl.X.MessageStringOf(x)
+}
+
+func (*ListByProductIDRequest) ProtoMessage() {}
+
+func (x *ListByProductIDRequest) ProtoReflect() protoreflect.Message {
+	mi := &file_api_comment_v1_comment_proto_msgTypes[0]
+	if protoimpl.UnsafeEnabled && x != nil {
+		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+		if ms.LoadMessageInfo() == nil {
+			ms.StoreMessageInfo(mi)
+		}
+		return ms
+	}
+	return mi.MessageOf(x)
+}
+
+// Deprecated: Use ListByProductIDRequest.ProtoReflect.Descriptor instead.
+func (*ListByProductIDRequest) Descriptor() ([]byte, []int) {
+	return file_api_comment_v1_comment_proto_rawDescGZIP(), []int{0}
+}
+
+func (x *ListByProductIDRequest) GetProductID() uint64 {
+	if x != nil {
+		return x.ProductID
+	}
+	return 0
+}
+
+type CommentDetail struct {
+	state         protoimpl.MessageState
+	sizeCache     protoimpl.SizeCache
+	unknownFields protoimpl.UnknownFields
+
+	Id       uint64 `protobuf:"varint,1,opt,name=id,proto3" json:"id"`
+	Username string `protobuf:"bytes,2,opt,name=username,proto3" json:"username"`
+	Content  string `protobuf:"bytes,3,opt,name=content,proto3" json:"content"`
+}
+
+func (x *CommentDetail) Reset() {
+	*x = CommentDetail{}
+	if protoimpl.UnsafeEnabled {
+		mi := &file_api_comment_v1_comment_proto_msgTypes[1]
+		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+		ms.StoreMessageInfo(mi)
+	}
+}
+
+func (x *CommentDetail) String() string {
+	return protoimpl.X.MessageStringOf(x)
+}
+
+func (*CommentDetail) ProtoMessage() {}
+
+func (x *CommentDetail) ProtoReflect() protoreflect.Message {
+	mi := &file_api_comment_v1_comment_proto_msgTypes[1]
+	if protoimpl.UnsafeEnabled && x != nil {
+		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+		if ms.LoadMessageInfo() == nil {
+			ms.StoreMessageInfo(mi)
+		}
+		return ms
+	}
+	return mi.MessageOf(x)
+}
+
+// Deprecated: Use CommentDetail.ProtoReflect.Descriptor instead.
+func (*CommentDetail) Descriptor() ([]byte, []int) { + return file_api_comment_v1_comment_proto_rawDescGZIP(), []int{1} +} + +func (x *CommentDetail) GetId() uint64 { + if x != nil { + return x.Id + } + return 0 +} + +func (x *CommentDetail) GetUsername() string { + if x != nil { + return x.Username + } + return "" +} + +func (x *CommentDetail) GetContent() string { + if x != nil { + return x.Content + } + return "" +} + +type ListByProductIDReply struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Total int32 `protobuf:"varint,1,opt,name=total,proto3" json:"total"` + ProductID uint64 `protobuf:"varint,2,opt,name=productID,proto3" json:"productID"` + CommentDetails []*CommentDetail `protobuf:"bytes,3,rep,name=commentDetails,proto3" json:"commentDetails"` +} + +func (x *ListByProductIDReply) Reset() { + *x = ListByProductIDReply{} + if protoimpl.UnsafeEnabled { + mi := &file_api_comment_v1_comment_proto_msgTypes[2] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ListByProductIDReply) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ListByProductIDReply) ProtoMessage() {} + +func (x *ListByProductIDReply) ProtoReflect() protoreflect.Message { + mi := &file_api_comment_v1_comment_proto_msgTypes[2] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ListByProductIDReply.ProtoReflect.Descriptor instead. +func (*ListByProductIDReply) Descriptor() ([]byte, []int) { + return file_api_comment_v1_comment_proto_rawDescGZIP(), []int{2} +} + +func (x *ListByProductIDReply) GetTotal() int32 { + if x != nil { + return x.Total + } + return 0 +} + +func (x *ListByProductIDReply) GetProductID() uint64 { + if x != nil { + return x.ProductID + } + return 0 +} + +func (x *ListByProductIDReply) GetCommentDetails() []*CommentDetail { + if x != nil { + return x.CommentDetails + } + return nil +} + +var File_api_comment_v1_comment_proto protoreflect.FileDescriptor + +var file_api_comment_v1_comment_proto_rawDesc = []byte{ + 0x0a, 0x1c, 0x61, 0x70, 0x69, 0x2f, 0x63, 0x6f, 0x6d, 0x6d, 0x65, 0x6e, 0x74, 0x2f, 0x76, 0x31, + 0x2f, 0x63, 0x6f, 0x6d, 0x6d, 0x65, 0x6e, 0x74, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x0e, + 0x61, 0x70, 0x69, 0x2e, 0x63, 0x6f, 0x6d, 0x6d, 0x65, 0x6e, 0x74, 0x2e, 0x76, 0x31, 0x1a, 0x17, + 0x76, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x65, 0x2f, 0x76, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, + 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, 0x3f, 0x0a, 0x16, 0x4c, 0x69, 0x73, 0x74, 0x42, + 0x79, 0x50, 0x72, 0x6f, 0x64, 0x75, 0x63, 0x74, 0x49, 0x44, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, + 0x74, 0x12, 0x25, 0x0a, 0x09, 0x70, 0x72, 0x6f, 0x64, 0x75, 0x63, 0x74, 0x49, 0x44, 0x18, 0x01, + 0x20, 0x01, 0x28, 0x04, 0x42, 0x07, 0xfa, 0x42, 0x04, 0x32, 0x02, 0x28, 0x01, 0x52, 0x09, 0x70, + 0x72, 0x6f, 0x64, 0x75, 0x63, 0x74, 0x49, 0x44, 0x22, 0x55, 0x0a, 0x0d, 0x43, 0x6f, 0x6d, 0x6d, + 0x65, 0x6e, 0x74, 0x44, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, + 0x01, 0x20, 0x01, 0x28, 0x04, 0x52, 0x02, 0x69, 0x64, 0x12, 0x1a, 0x0a, 0x08, 0x75, 0x73, 0x65, + 0x72, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x75, 0x73, 0x65, + 0x72, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x18, 0x0a, 0x07, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, + 0x18, 0x03, 0x20, 0x01, 
0x28, 0x09, 0x52, 0x07, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x22, + 0x91, 0x01, 0x0a, 0x14, 0x4c, 0x69, 0x73, 0x74, 0x42, 0x79, 0x50, 0x72, 0x6f, 0x64, 0x75, 0x63, + 0x74, 0x49, 0x44, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x74, 0x6f, 0x74, 0x61, + 0x6c, 0x18, 0x01, 0x20, 0x01, 0x28, 0x05, 0x52, 0x05, 0x74, 0x6f, 0x74, 0x61, 0x6c, 0x12, 0x1c, + 0x0a, 0x09, 0x70, 0x72, 0x6f, 0x64, 0x75, 0x63, 0x74, 0x49, 0x44, 0x18, 0x02, 0x20, 0x01, 0x28, + 0x04, 0x52, 0x09, 0x70, 0x72, 0x6f, 0x64, 0x75, 0x63, 0x74, 0x49, 0x44, 0x12, 0x45, 0x0a, 0x0e, + 0x63, 0x6f, 0x6d, 0x6d, 0x65, 0x6e, 0x74, 0x44, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x73, 0x18, 0x03, + 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x63, 0x6f, 0x6d, 0x6d, 0x65, + 0x6e, 0x74, 0x2e, 0x76, 0x31, 0x2e, 0x43, 0x6f, 0x6d, 0x6d, 0x65, 0x6e, 0x74, 0x44, 0x65, 0x74, + 0x61, 0x69, 0x6c, 0x52, 0x0e, 0x63, 0x6f, 0x6d, 0x6d, 0x65, 0x6e, 0x74, 0x44, 0x65, 0x74, 0x61, + 0x69, 0x6c, 0x73, 0x32, 0x6c, 0x0a, 0x07, 0x43, 0x6f, 0x6d, 0x6d, 0x65, 0x6e, 0x74, 0x12, 0x61, + 0x0a, 0x0f, 0x4c, 0x69, 0x73, 0x74, 0x42, 0x79, 0x50, 0x72, 0x6f, 0x64, 0x75, 0x63, 0x74, 0x49, + 0x44, 0x12, 0x26, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x63, 0x6f, 0x6d, 0x6d, 0x65, 0x6e, 0x74, 0x2e, + 0x76, 0x31, 0x2e, 0x4c, 0x69, 0x73, 0x74, 0x42, 0x79, 0x50, 0x72, 0x6f, 0x64, 0x75, 0x63, 0x74, + 0x49, 0x44, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x24, 0x2e, 0x61, 0x70, 0x69, 0x2e, + 0x63, 0x6f, 0x6d, 0x6d, 0x65, 0x6e, 0x74, 0x2e, 0x76, 0x31, 0x2e, 0x4c, 0x69, 0x73, 0x74, 0x42, + 0x79, 0x50, 0x72, 0x6f, 0x64, 0x75, 0x63, 0x74, 0x49, 0x44, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, + 0x00, 0x42, 0x19, 0x5a, 0x17, 0x65, 0x73, 0x68, 0x6f, 0x70, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x63, + 0x6f, 0x6d, 0x6d, 0x65, 0x6e, 0x74, 0x2f, 0x76, 0x31, 0x3b, 0x76, 0x31, 0x62, 0x06, 0x70, 0x72, + 0x6f, 0x74, 0x6f, 0x33, +} + +var ( + file_api_comment_v1_comment_proto_rawDescOnce sync.Once + file_api_comment_v1_comment_proto_rawDescData = file_api_comment_v1_comment_proto_rawDesc +) + +func file_api_comment_v1_comment_proto_rawDescGZIP() []byte { + file_api_comment_v1_comment_proto_rawDescOnce.Do(func() { + file_api_comment_v1_comment_proto_rawDescData = protoimpl.X.CompressGZIP(file_api_comment_v1_comment_proto_rawDescData) + }) + return file_api_comment_v1_comment_proto_rawDescData +} + +var file_api_comment_v1_comment_proto_msgTypes = make([]protoimpl.MessageInfo, 3) +var file_api_comment_v1_comment_proto_goTypes = []interface{}{ + (*ListByProductIDRequest)(nil), // 0: api.comment.v1.ListByProductIDRequest + (*CommentDetail)(nil), // 1: api.comment.v1.CommentDetail + (*ListByProductIDReply)(nil), // 2: api.comment.v1.ListByProductIDReply +} +var file_api_comment_v1_comment_proto_depIdxs = []int32{ + 1, // 0: api.comment.v1.ListByProductIDReply.commentDetails:type_name -> api.comment.v1.CommentDetail + 0, // 1: api.comment.v1.Comment.ListByProductID:input_type -> api.comment.v1.ListByProductIDRequest + 2, // 2: api.comment.v1.Comment.ListByProductID:output_type -> api.comment.v1.ListByProductIDReply + 2, // [2:3] is the sub-list for method output_type + 1, // [1:2] is the sub-list for method input_type + 1, // [1:1] is the sub-list for extension type_name + 1, // [1:1] is the sub-list for extension extendee + 0, // [0:1] is the sub-list for field type_name +} + +func init() { file_api_comment_v1_comment_proto_init() } +func file_api_comment_v1_comment_proto_init() { + if File_api_comment_v1_comment_proto != nil { + return + } + if !protoimpl.UnsafeEnabled { + 
file_api_comment_v1_comment_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*ListByProductIDRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_api_comment_v1_comment_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*CommentDetail); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_api_comment_v1_comment_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*ListByProductIDReply); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + } + type x struct{} + out := protoimpl.TypeBuilder{ + File: protoimpl.DescBuilder{ + GoPackagePath: reflect.TypeOf(x{}).PkgPath(), + RawDescriptor: file_api_comment_v1_comment_proto_rawDesc, + NumEnums: 0, + NumMessages: 3, + NumExtensions: 0, + NumServices: 1, + }, + GoTypes: file_api_comment_v1_comment_proto_goTypes, + DependencyIndexes: file_api_comment_v1_comment_proto_depIdxs, + MessageInfos: file_api_comment_v1_comment_proto_msgTypes, + }.Build() + File_api_comment_v1_comment_proto = out.File + file_api_comment_v1_comment_proto_rawDesc = nil + file_api_comment_v1_comment_proto_goTypes = nil + file_api_comment_v1_comment_proto_depIdxs = nil +} diff --git a/6_micro-cluster/example-2-mono-repo/api/comment/v1/comment.pb.validate.go b/6_micro-cluster/example-2-mono-repo/api/comment/v1/comment.pb.validate.go new file mode 100644 index 0000000..1a9a71b --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/api/comment/v1/comment.pb.validate.go @@ -0,0 +1,395 @@ +// Code generated by protoc-gen-validate. DO NOT EDIT. +// source: api/comment/v1/comment.proto + +package v1 + +import ( + "bytes" + "errors" + "fmt" + "net" + "net/mail" + "net/url" + "regexp" + "sort" + "strings" + "time" + "unicode/utf8" + + "google.golang.org/protobuf/types/known/anypb" +) + +// ensure the imports are used +var ( + _ = bytes.MinRead + _ = errors.New("") + _ = fmt.Print + _ = utf8.UTFMax + _ = (*regexp.Regexp)(nil) + _ = (*strings.Reader)(nil) + _ = net.IPv4len + _ = time.Duration(0) + _ = (*url.URL)(nil) + _ = (*mail.Address)(nil) + _ = anypb.Any{} + _ = sort.Sort +) + +// Validate checks the field values on ListByProductIDRequest with the rules +// defined in the proto definition for this message. If any rules are +// violated, the first error encountered is returned, or nil if there are no violations. +func (m *ListByProductIDRequest) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on ListByProductIDRequest with the rules +// defined in the proto definition for this message. If any rules are +// violated, the result is a list of violation errors wrapped in +// ListByProductIDRequestMultiError, or nil if none found. 
+func (m *ListByProductIDRequest) ValidateAll() error { + return m.validate(true) +} + +func (m *ListByProductIDRequest) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + if m.GetProductID() < 1 { + err := ListByProductIDRequestValidationError{ + field: "ProductID", + reason: "value must be greater than or equal to 1", + } + if !all { + return err + } + errors = append(errors, err) + } + + if len(errors) > 0 { + return ListByProductIDRequestMultiError(errors) + } + + return nil +} + +// ListByProductIDRequestMultiError is an error wrapping multiple validation +// errors returned by ListByProductIDRequest.ValidateAll() if the designated +// constraints aren't met. +type ListByProductIDRequestMultiError []error + +// Error returns a concatenation of all the error messages it wraps. +func (m ListByProductIDRequestMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m ListByProductIDRequestMultiError) AllErrors() []error { return m } + +// ListByProductIDRequestValidationError is the validation error returned by +// ListByProductIDRequest.Validate if the designated constraints aren't met. +type ListByProductIDRequestValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e ListByProductIDRequestValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e ListByProductIDRequestValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e ListByProductIDRequestValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e ListByProductIDRequestValidationError) Key() bool { return e.key } + +// ErrorName returns error name. +func (e ListByProductIDRequestValidationError) ErrorName() string { + return "ListByProductIDRequestValidationError" +} + +// Error satisfies the builtin error interface +func (e ListByProductIDRequestValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sListByProductIDRequest.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = ListByProductIDRequestValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = ListByProductIDRequestValidationError{} + +// Validate checks the field values on CommentDetail with the rules defined in +// the proto definition for this message. If any rules are violated, the first +// error encountered is returned, or nil if there are no violations. +func (m *CommentDetail) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on CommentDetail with the rules defined +// in the proto definition for this message. If any rules are violated, the +// result is a list of violation errors wrapped in CommentDetailMultiError, or +// nil if none found. 
+func (m *CommentDetail) ValidateAll() error { + return m.validate(true) +} + +func (m *CommentDetail) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + // no validation rules for Id + + // no validation rules for Username + + // no validation rules for Content + + if len(errors) > 0 { + return CommentDetailMultiError(errors) + } + + return nil +} + +// CommentDetailMultiError is an error wrapping multiple validation errors +// returned by CommentDetail.ValidateAll() if the designated constraints +// aren't met. +type CommentDetailMultiError []error + +// Error returns a concatenation of all the error messages it wraps. +func (m CommentDetailMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m CommentDetailMultiError) AllErrors() []error { return m } + +// CommentDetailValidationError is the validation error returned by +// CommentDetail.Validate if the designated constraints aren't met. +type CommentDetailValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e CommentDetailValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e CommentDetailValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e CommentDetailValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e CommentDetailValidationError) Key() bool { return e.key } + +// ErrorName returns error name. +func (e CommentDetailValidationError) ErrorName() string { return "CommentDetailValidationError" } + +// Error satisfies the builtin error interface +func (e CommentDetailValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sCommentDetail.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = CommentDetailValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = CommentDetailValidationError{} + +// Validate checks the field values on ListByProductIDReply with the rules +// defined in the proto definition for this message. If any rules are +// violated, the first error encountered is returned, or nil if there are no violations. +func (m *ListByProductIDReply) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on ListByProductIDReply with the rules +// defined in the proto definition for this message. If any rules are +// violated, the result is a list of violation errors wrapped in +// ListByProductIDReplyMultiError, or nil if none found. 
+func (m *ListByProductIDReply) ValidateAll() error { + return m.validate(true) +} + +func (m *ListByProductIDReply) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + // no validation rules for Total + + // no validation rules for ProductID + + for idx, item := range m.GetCommentDetails() { + _, _ = idx, item + + if all { + switch v := interface{}(item).(type) { + case interface{ ValidateAll() error }: + if err := v.ValidateAll(); err != nil { + errors = append(errors, ListByProductIDReplyValidationError{ + field: fmt.Sprintf("CommentDetails[%v]", idx), + reason: "embedded message failed validation", + cause: err, + }) + } + case interface{ Validate() error }: + if err := v.Validate(); err != nil { + errors = append(errors, ListByProductIDReplyValidationError{ + field: fmt.Sprintf("CommentDetails[%v]", idx), + reason: "embedded message failed validation", + cause: err, + }) + } + } + } else if v, ok := interface{}(item).(interface{ Validate() error }); ok { + if err := v.Validate(); err != nil { + return ListByProductIDReplyValidationError{ + field: fmt.Sprintf("CommentDetails[%v]", idx), + reason: "embedded message failed validation", + cause: err, + } + } + } + + } + + if len(errors) > 0 { + return ListByProductIDReplyMultiError(errors) + } + + return nil +} + +// ListByProductIDReplyMultiError is an error wrapping multiple validation +// errors returned by ListByProductIDReply.ValidateAll() if the designated +// constraints aren't met. +type ListByProductIDReplyMultiError []error + +// Error returns a concatenation of all the error messages it wraps. +func (m ListByProductIDReplyMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m ListByProductIDReplyMultiError) AllErrors() []error { return m } + +// ListByProductIDReplyValidationError is the validation error returned by +// ListByProductIDReply.Validate if the designated constraints aren't met. +type ListByProductIDReplyValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e ListByProductIDReplyValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e ListByProductIDReplyValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e ListByProductIDReplyValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e ListByProductIDReplyValidationError) Key() bool { return e.key } + +// ErrorName returns error name. 
+func (e ListByProductIDReplyValidationError) ErrorName() string { + return "ListByProductIDReplyValidationError" +} + +// Error satisfies the builtin error interface +func (e ListByProductIDReplyValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sListByProductIDReply.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = ListByProductIDReplyValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = ListByProductIDReplyValidationError{} diff --git a/6_micro-cluster/example-2-mono-repo/api/comment/v1/comment.proto b/6_micro-cluster/example-2-mono-repo/api/comment/v1/comment.proto new file mode 100644 index 0000000..3d4779d --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/api/comment/v1/comment.proto @@ -0,0 +1,28 @@ +syntax = "proto3"; + +package api.comment.v1; + +import "validate/validate.proto"; + +option go_package = "eshop/api/comment/v1;v1"; + +service Comment { + // list of comments by product id + rpc ListByProductID(ListByProductIDRequest) returns (ListByProductIDReply) {} +} + +message ListByProductIDRequest { + uint64 productID = 1 [(validate.rules).uint64.gte = 1]; +} + +message CommentDetail { + uint64 id=1; + string username = 2; + string content = 3; +} + +message ListByProductIDReply { + int32 total = 1; + uint64 productID = 2; + repeated CommentDetail commentDetails = 3; +} diff --git a/6_micro-cluster/example-2-mono-repo/api/comment/v1/comment_grpc.pb.go b/6_micro-cluster/example-2-mono-repo/api/comment/v1/comment_grpc.pb.go new file mode 100644 index 0000000..501e2ca --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/api/comment/v1/comment_grpc.pb.go @@ -0,0 +1,111 @@ +// Code generated by protoc-gen-go-grpc. DO NOT EDIT. +// versions: +// - protoc-gen-go-grpc v1.3.0 +// - protoc v4.25.2 +// source: api/comment/v1/comment.proto + +package v1 + +import ( + context "context" + grpc "google.golang.org/grpc" + codes "google.golang.org/grpc/codes" + status "google.golang.org/grpc/status" +) + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +// Requires gRPC-Go v1.32.0 or later. +const _ = grpc.SupportPackageIsVersion7 + +const ( + Comment_ListByProductID_FullMethodName = "/api.comment.v1.Comment/ListByProductID" +) + +// CommentClient is the client API for Comment service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://pkg.go.dev/google.golang.org/grpc/?tab=doc#ClientConn.NewStream. +type CommentClient interface { + // list of comments by product id + ListByProductID(ctx context.Context, in *ListByProductIDRequest, opts ...grpc.CallOption) (*ListByProductIDReply, error) +} + +type commentClient struct { + cc grpc.ClientConnInterface +} + +func NewCommentClient(cc grpc.ClientConnInterface) CommentClient { + return &commentClient{cc} +} + +func (c *commentClient) ListByProductID(ctx context.Context, in *ListByProductIDRequest, opts ...grpc.CallOption) (*ListByProductIDReply, error) { + out := new(ListByProductIDReply) + err := c.cc.Invoke(ctx, Comment_ListByProductID_FullMethodName, in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// CommentServer is the server API for Comment service. 
+// All implementations must embed UnimplementedCommentServer +// for forward compatibility +type CommentServer interface { + // list of comments by product id + ListByProductID(context.Context, *ListByProductIDRequest) (*ListByProductIDReply, error) + mustEmbedUnimplementedCommentServer() +} + +// UnimplementedCommentServer must be embedded to have forward compatible implementations. +type UnimplementedCommentServer struct { +} + +func (UnimplementedCommentServer) ListByProductID(context.Context, *ListByProductIDRequest) (*ListByProductIDReply, error) { + return nil, status.Errorf(codes.Unimplemented, "method ListByProductID not implemented") +} +func (UnimplementedCommentServer) mustEmbedUnimplementedCommentServer() {} + +// UnsafeCommentServer may be embedded to opt out of forward compatibility for this service. +// Use of this interface is not recommended, as added methods to CommentServer will +// result in compilation errors. +type UnsafeCommentServer interface { + mustEmbedUnimplementedCommentServer() +} + +func RegisterCommentServer(s grpc.ServiceRegistrar, srv CommentServer) { + s.RegisterService(&Comment_ServiceDesc, srv) +} + +func _Comment_ListByProductID_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListByProductIDRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CommentServer).ListByProductID(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: Comment_ListByProductID_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CommentServer).ListByProductID(ctx, req.(*ListByProductIDRequest)) + } + return interceptor(ctx, in, info, handler) +} + +// Comment_ServiceDesc is the grpc.ServiceDesc for Comment service. +// It's only intended for direct use with grpc.RegisterService, +// and not to be introspected or modified (even as a copy) +var Comment_ServiceDesc = grpc.ServiceDesc{ + ServiceName: "api.comment.v1.Comment", + HandlerType: (*CommentServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "ListByProductID", + Handler: _Comment_ListByProductID_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "api/comment/v1/comment.proto", +} diff --git a/6_micro-cluster/example-2-mono-repo/api/eshop_gw/v1/eshop_gw.pb.go b/6_micro-cluster/example-2-mono-repo/api/eshop_gw/v1/eshop_gw.pb.go new file mode 100644 index 0000000..815251e --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/api/eshop_gw/v1/eshop_gw.pb.go @@ -0,0 +1,291 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// versions: +// protoc-gen-go v1.28.0 +// protoc v4.25.2 +// source: api/eshop_gw/v1/eshop_gw.proto + +package v1 + +import ( + v12 "eshop/api/comment/v1" + v11 "eshop/api/inventory/v1" + v1 "eshop/api/product/v1" + + + + + protoreflect "google.golang.org/protobuf/reflect/protoreflect" + protoimpl "google.golang.org/protobuf/runtime/protoimpl" + reflect "reflect" + sync "sync" +) + +const ( + // Verify that this generated code is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) + // Verify that runtime/protoimpl is sufficiently up-to-date. 
+ _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) +) + +type GetDetailsByProductIDRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + ProductID uint64 `protobuf:"varint,1,opt,name=productID,proto3" json:"productID" form:"productID"` +} + +func (x *GetDetailsByProductIDRequest) Reset() { + *x = GetDetailsByProductIDRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_api_eshop_gw_v1_eshop_gw_proto_msgTypes[0] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *GetDetailsByProductIDRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*GetDetailsByProductIDRequest) ProtoMessage() {} + +func (x *GetDetailsByProductIDRequest) ProtoReflect() protoreflect.Message { + mi := &file_api_eshop_gw_v1_eshop_gw_proto_msgTypes[0] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use GetDetailsByProductIDRequest.ProtoReflect.Descriptor instead. +func (*GetDetailsByProductIDRequest) Descriptor() ([]byte, []int) { + return file_api_eshop_gw_v1_eshop_gw_proto_rawDescGZIP(), []int{0} +} + +func (x *GetDetailsByProductIDRequest) GetProductID() uint64 { + if x != nil { + return x.ProductID + } + return 0 +} + +type GetDetailsByProductIDReply struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + ProductDetail *v1.ProductDetail `protobuf:"bytes,1,opt,name=productDetail,proto3" json:"productDetail"` + InventoryDetail *v11.InventoryDetail `protobuf:"bytes,2,opt,name=inventoryDetail,proto3" json:"inventoryDetail"` + CommentDetails []*v12.CommentDetail `protobuf:"bytes,3,rep,name=commentDetails,proto3" json:"commentDetails"` +} + +func (x *GetDetailsByProductIDReply) Reset() { + *x = GetDetailsByProductIDReply{} + if protoimpl.UnsafeEnabled { + mi := &file_api_eshop_gw_v1_eshop_gw_proto_msgTypes[1] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *GetDetailsByProductIDReply) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*GetDetailsByProductIDReply) ProtoMessage() {} + +func (x *GetDetailsByProductIDReply) ProtoReflect() protoreflect.Message { + mi := &file_api_eshop_gw_v1_eshop_gw_proto_msgTypes[1] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use GetDetailsByProductIDReply.ProtoReflect.Descriptor instead. 
+func (*GetDetailsByProductIDReply) Descriptor() ([]byte, []int) { + return file_api_eshop_gw_v1_eshop_gw_proto_rawDescGZIP(), []int{1} +} + +func (x *GetDetailsByProductIDReply) GetProductDetail() *v1.ProductDetail { + if x != nil { + return x.ProductDetail + } + return nil +} + +func (x *GetDetailsByProductIDReply) GetInventoryDetail() *v11.InventoryDetail { + if x != nil { + return x.InventoryDetail + } + return nil +} + +func (x *GetDetailsByProductIDReply) GetCommentDetails() []*v12.CommentDetail { + if x != nil { + return x.CommentDetails + } + return nil +} + +var File_api_eshop_gw_v1_eshop_gw_proto protoreflect.FileDescriptor + +var file_api_eshop_gw_v1_eshop_gw_proto_rawDesc = []byte{ + 0x0a, 0x1e, 0x61, 0x70, 0x69, 0x2f, 0x65, 0x73, 0x68, 0x6f, 0x70, 0x5f, 0x67, 0x77, 0x2f, 0x76, + 0x31, 0x2f, 0x65, 0x73, 0x68, 0x6f, 0x70, 0x5f, 0x67, 0x77, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, + 0x12, 0x0f, 0x61, 0x70, 0x69, 0x2e, 0x65, 0x73, 0x68, 0x6f, 0x70, 0x5f, 0x67, 0x77, 0x2e, 0x76, + 0x31, 0x1a, 0x1c, 0x61, 0x70, 0x69, 0x2f, 0x70, 0x72, 0x6f, 0x64, 0x75, 0x63, 0x74, 0x2f, 0x76, + 0x31, 0x2f, 0x70, 0x72, 0x6f, 0x64, 0x75, 0x63, 0x74, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, + 0x1c, 0x61, 0x70, 0x69, 0x2f, 0x63, 0x6f, 0x6d, 0x6d, 0x65, 0x6e, 0x74, 0x2f, 0x76, 0x31, 0x2f, + 0x63, 0x6f, 0x6d, 0x6d, 0x65, 0x6e, 0x74, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x20, 0x61, + 0x70, 0x69, 0x2f, 0x69, 0x6e, 0x76, 0x65, 0x6e, 0x74, 0x6f, 0x72, 0x79, 0x2f, 0x76, 0x31, 0x2f, + 0x69, 0x6e, 0x76, 0x65, 0x6e, 0x74, 0x6f, 0x72, 0x79, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, + 0x1c, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x61, 0x6e, 0x6e, 0x6f, + 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x2e, 0x70, + 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x2d, 0x67, 0x65, 0x6e, 0x2d, 0x6f, 0x70, 0x65, 0x6e, 0x61, 0x70, + 0x69, 0x76, 0x32, 0x2f, 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2f, 0x61, 0x6e, 0x6e, 0x6f, + 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x17, 0x76, + 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x65, 0x2f, 0x76, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x65, + 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x13, 0x74, 0x61, 0x67, 0x67, 0x65, 0x72, 0x2f, 0x74, + 0x61, 0x67, 0x67, 0x65, 0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, 0x5a, 0x0a, 0x1c, 0x47, + 0x65, 0x74, 0x44, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x73, 0x42, 0x79, 0x50, 0x72, 0x6f, 0x64, 0x75, + 0x63, 0x74, 0x49, 0x44, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x3a, 0x0a, 0x09, 0x70, + 0x72, 0x6f, 0x64, 0x75, 0x63, 0x74, 0x49, 0x44, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x42, 0x1c, + 0xfa, 0x42, 0x04, 0x32, 0x02, 0x28, 0x01, 0x9a, 0x84, 0x9e, 0x03, 0x10, 0x66, 0x6f, 0x72, 0x6d, + 0x3a, 0x22, 0x70, 0x72, 0x6f, 0x64, 0x75, 0x63, 0x74, 0x49, 0x44, 0x22, 0x52, 0x09, 0x70, 0x72, + 0x6f, 0x64, 0x75, 0x63, 0x74, 0x49, 0x44, 0x22, 0xf5, 0x01, 0x0a, 0x1a, 0x47, 0x65, 0x74, 0x44, + 0x65, 0x74, 0x61, 0x69, 0x6c, 0x73, 0x42, 0x79, 0x50, 0x72, 0x6f, 0x64, 0x75, 0x63, 0x74, 0x49, + 0x44, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x12, 0x43, 0x0a, 0x0d, 0x70, 0x72, 0x6f, 0x64, 0x75, 0x63, + 0x74, 0x44, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1d, 0x2e, + 0x61, 0x70, 0x69, 0x2e, 0x70, 0x72, 0x6f, 0x64, 0x75, 0x63, 0x74, 0x2e, 0x76, 0x31, 0x2e, 0x50, + 0x72, 0x6f, 0x64, 0x75, 0x63, 0x74, 0x44, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x52, 0x0d, 0x70, 0x72, + 0x6f, 0x64, 0x75, 0x63, 0x74, 0x44, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x12, 0x4b, 0x0a, 0x0f, 0x69, + 0x6e, 
0x76, 0x65, 0x6e, 0x74, 0x6f, 0x72, 0x79, 0x44, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x18, 0x02, + 0x20, 0x01, 0x28, 0x0b, 0x32, 0x21, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x69, 0x6e, 0x76, 0x65, 0x6e, + 0x74, 0x6f, 0x72, 0x79, 0x2e, 0x76, 0x31, 0x2e, 0x49, 0x6e, 0x76, 0x65, 0x6e, 0x74, 0x6f, 0x72, + 0x79, 0x44, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x52, 0x0f, 0x69, 0x6e, 0x76, 0x65, 0x6e, 0x74, 0x6f, + 0x72, 0x79, 0x44, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x12, 0x45, 0x0a, 0x0e, 0x63, 0x6f, 0x6d, 0x6d, + 0x65, 0x6e, 0x74, 0x44, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, + 0x32, 0x1d, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x63, 0x6f, 0x6d, 0x6d, 0x65, 0x6e, 0x74, 0x2e, 0x76, + 0x31, 0x2e, 0x43, 0x6f, 0x6d, 0x6d, 0x65, 0x6e, 0x74, 0x44, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x52, + 0x0e, 0x63, 0x6f, 0x6d, 0x6d, 0x65, 0x6e, 0x74, 0x44, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x73, 0x32, + 0xd2, 0x01, 0x0a, 0x07, 0x45, 0x53, 0x68, 0x6f, 0x70, 0x47, 0x77, 0x12, 0xc6, 0x01, 0x0a, 0x15, + 0x47, 0x65, 0x74, 0x44, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x73, 0x42, 0x79, 0x50, 0x72, 0x6f, 0x64, + 0x75, 0x63, 0x74, 0x49, 0x44, 0x12, 0x2d, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x65, 0x73, 0x68, 0x6f, + 0x70, 0x5f, 0x67, 0x77, 0x2e, 0x76, 0x31, 0x2e, 0x47, 0x65, 0x74, 0x44, 0x65, 0x74, 0x61, 0x69, + 0x6c, 0x73, 0x42, 0x79, 0x50, 0x72, 0x6f, 0x64, 0x75, 0x63, 0x74, 0x49, 0x44, 0x52, 0x65, 0x71, + 0x75, 0x65, 0x73, 0x74, 0x1a, 0x2b, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x65, 0x73, 0x68, 0x6f, 0x70, + 0x5f, 0x67, 0x77, 0x2e, 0x76, 0x31, 0x2e, 0x47, 0x65, 0x74, 0x44, 0x65, 0x74, 0x61, 0x69, 0x6c, + 0x73, 0x42, 0x79, 0x50, 0x72, 0x6f, 0x64, 0x75, 0x63, 0x74, 0x49, 0x44, 0x52, 0x65, 0x70, 0x6c, + 0x79, 0x22, 0x51, 0x92, 0x41, 0x38, 0x12, 0x0a, 0x67, 0x65, 0x74, 0x20, 0x64, 0x65, 0x74, 0x61, + 0x69, 0x6c, 0x1a, 0x18, 0x67, 0x65, 0x74, 0x20, 0x64, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x20, 0x62, + 0x79, 0x20, 0x70, 0x72, 0x6f, 0x64, 0x75, 0x63, 0x74, 0x20, 0x69, 0x64, 0x62, 0x10, 0x0a, 0x0e, + 0x0a, 0x0a, 0x42, 0x65, 0x61, 0x72, 0x65, 0x72, 0x41, 0x75, 0x74, 0x68, 0x12, 0x00, 0x82, 0xd3, + 0xe4, 0x93, 0x02, 0x10, 0x12, 0x0e, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x76, 0x31, 0x2f, 0x64, 0x65, + 0x74, 0x61, 0x69, 0x6c, 0x42, 0xbb, 0x01, 0x5a, 0x18, 0x65, 0x73, 0x68, 0x6f, 0x70, 0x2f, 0x61, + 0x70, 0x69, 0x2f, 0x65, 0x73, 0x68, 0x6f, 0x70, 0x5f, 0x67, 0x77, 0x2f, 0x76, 0x31, 0x3b, 0x76, + 0x31, 0x92, 0x41, 0x9d, 0x01, 0x12, 0x14, 0x0a, 0x0d, 0x75, 0x73, 0x65, 0x72, 0x20, 0x61, 0x70, + 0x69, 0x20, 0x64, 0x6f, 0x63, 0x73, 0x32, 0x03, 0x32, 0x2e, 0x30, 0x1a, 0x0e, 0x6c, 0x6f, 0x63, + 0x61, 0x6c, 0x68, 0x6f, 0x73, 0x74, 0x3a, 0x38, 0x30, 0x38, 0x30, 0x2a, 0x02, 0x01, 0x02, 0x32, + 0x10, 0x61, 0x70, 0x70, 0x6c, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2f, 0x6a, 0x73, 0x6f, + 0x6e, 0x3a, 0x10, 0x61, 0x70, 0x70, 0x6c, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2f, 0x6a, + 0x73, 0x6f, 0x6e, 0x5a, 0x4d, 0x0a, 0x4b, 0x0a, 0x0a, 0x42, 0x65, 0x61, 0x72, 0x65, 0x72, 0x41, + 0x75, 0x74, 0x68, 0x12, 0x3d, 0x08, 0x02, 0x12, 0x28, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x20, 0x61, + 0x20, 0x22, 0x42, 0x65, 0x61, 0x72, 0x65, 0x72, 0x20, 0x79, 0x6f, 0x75, 0x72, 0x2d, 0x6a, 0x77, + 0x74, 0x2d, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x22, 0x20, 0x74, 0x6f, 0x20, 0x56, 0x61, 0x6c, 0x75, + 0x65, 0x1a, 0x0d, 0x41, 0x75, 0x74, 0x68, 0x6f, 0x72, 0x69, 0x7a, 0x61, 0x74, 0x69, 0x6f, 0x6e, + 0x20, 0x02, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, +} + +var ( + file_api_eshop_gw_v1_eshop_gw_proto_rawDescOnce sync.Once + file_api_eshop_gw_v1_eshop_gw_proto_rawDescData = file_api_eshop_gw_v1_eshop_gw_proto_rawDesc 
+) + +func file_api_eshop_gw_v1_eshop_gw_proto_rawDescGZIP() []byte { + file_api_eshop_gw_v1_eshop_gw_proto_rawDescOnce.Do(func() { + file_api_eshop_gw_v1_eshop_gw_proto_rawDescData = protoimpl.X.CompressGZIP(file_api_eshop_gw_v1_eshop_gw_proto_rawDescData) + }) + return file_api_eshop_gw_v1_eshop_gw_proto_rawDescData +} + +var file_api_eshop_gw_v1_eshop_gw_proto_msgTypes = make([]protoimpl.MessageInfo, 2) +var file_api_eshop_gw_v1_eshop_gw_proto_goTypes = []interface{}{ + (*GetDetailsByProductIDRequest)(nil), // 0: api.eshop_gw.v1.GetDetailsByProductIDRequest + (*GetDetailsByProductIDReply)(nil), // 1: api.eshop_gw.v1.GetDetailsByProductIDReply + (*v1.ProductDetail)(nil), // 2: api.product.v1.ProductDetail + (*v11.InventoryDetail)(nil), // 3: api.inventory.v1.InventoryDetail + (*v12.CommentDetail)(nil), // 4: api.comment.v1.CommentDetail +} +var file_api_eshop_gw_v1_eshop_gw_proto_depIdxs = []int32{ + 2, // 0: api.eshop_gw.v1.GetDetailsByProductIDReply.productDetail:type_name -> api.product.v1.ProductDetail + 3, // 1: api.eshop_gw.v1.GetDetailsByProductIDReply.inventoryDetail:type_name -> api.inventory.v1.InventoryDetail + 4, // 2: api.eshop_gw.v1.GetDetailsByProductIDReply.commentDetails:type_name -> api.comment.v1.CommentDetail + 0, // 3: api.eshop_gw.v1.EShopGw.GetDetailsByProductID:input_type -> api.eshop_gw.v1.GetDetailsByProductIDRequest + 1, // 4: api.eshop_gw.v1.EShopGw.GetDetailsByProductID:output_type -> api.eshop_gw.v1.GetDetailsByProductIDReply + 4, // [4:5] is the sub-list for method output_type + 3, // [3:4] is the sub-list for method input_type + 3, // [3:3] is the sub-list for extension type_name + 3, // [3:3] is the sub-list for extension extendee + 0, // [0:3] is the sub-list for field type_name +} + +func init() { file_api_eshop_gw_v1_eshop_gw_proto_init() } +func file_api_eshop_gw_v1_eshop_gw_proto_init() { + if File_api_eshop_gw_v1_eshop_gw_proto != nil { + return + } + if !protoimpl.UnsafeEnabled { + file_api_eshop_gw_v1_eshop_gw_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*GetDetailsByProductIDRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_api_eshop_gw_v1_eshop_gw_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*GetDetailsByProductIDReply); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + } + type x struct{} + out := protoimpl.TypeBuilder{ + File: protoimpl.DescBuilder{ + GoPackagePath: reflect.TypeOf(x{}).PkgPath(), + RawDescriptor: file_api_eshop_gw_v1_eshop_gw_proto_rawDesc, + NumEnums: 0, + NumMessages: 2, + NumExtensions: 0, + NumServices: 1, + }, + GoTypes: file_api_eshop_gw_v1_eshop_gw_proto_goTypes, + DependencyIndexes: file_api_eshop_gw_v1_eshop_gw_proto_depIdxs, + MessageInfos: file_api_eshop_gw_v1_eshop_gw_proto_msgTypes, + }.Build() + File_api_eshop_gw_v1_eshop_gw_proto = out.File + file_api_eshop_gw_v1_eshop_gw_proto_rawDesc = nil + file_api_eshop_gw_v1_eshop_gw_proto_goTypes = nil + file_api_eshop_gw_v1_eshop_gw_proto_depIdxs = nil +} diff --git a/6_micro-cluster/example-2-mono-repo/api/eshop_gw/v1/eshop_gw.pb.validate.go b/6_micro-cluster/example-2-mono-repo/api/eshop_gw/v1/eshop_gw.pb.validate.go new file mode 100644 index 0000000..9eb724d --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/api/eshop_gw/v1/eshop_gw.pb.validate.go @@ -0,0 +1,344 @@ +// Code generated by 
protoc-gen-validate. DO NOT EDIT. +// source: api/eshop_gw/v1/eshop_gw.proto + +package v1 + +import ( + "bytes" + "errors" + "fmt" + "net" + "net/mail" + "net/url" + "regexp" + "sort" + "strings" + "time" + "unicode/utf8" + + "google.golang.org/protobuf/types/known/anypb" +) + +// ensure the imports are used +var ( + _ = bytes.MinRead + _ = errors.New("") + _ = fmt.Print + _ = utf8.UTFMax + _ = (*regexp.Regexp)(nil) + _ = (*strings.Reader)(nil) + _ = net.IPv4len + _ = time.Duration(0) + _ = (*url.URL)(nil) + _ = (*mail.Address)(nil) + _ = anypb.Any{} + _ = sort.Sort +) + +// Validate checks the field values on GetDetailsByProductIDRequest with the +// rules defined in the proto definition for this message. If any rules are +// violated, the first error encountered is returned, or nil if there are no violations. +func (m *GetDetailsByProductIDRequest) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on GetDetailsByProductIDRequest with the +// rules defined in the proto definition for this message. If any rules are +// violated, the result is a list of violation errors wrapped in +// GetDetailsByProductIDRequestMultiError, or nil if none found. +func (m *GetDetailsByProductIDRequest) ValidateAll() error { + return m.validate(true) +} + +func (m *GetDetailsByProductIDRequest) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + if m.GetProductID() < 1 { + err := GetDetailsByProductIDRequestValidationError{ + field: "ProductID", + reason: "value must be greater than or equal to 1", + } + if !all { + return err + } + errors = append(errors, err) + } + + if len(errors) > 0 { + return GetDetailsByProductIDRequestMultiError(errors) + } + + return nil +} + +// GetDetailsByProductIDRequestMultiError is an error wrapping multiple +// validation errors returned by GetDetailsByProductIDRequest.ValidateAll() if +// the designated constraints aren't met. +type GetDetailsByProductIDRequestMultiError []error + +// Error returns a concatenation of all the error messages it wraps. +func (m GetDetailsByProductIDRequestMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m GetDetailsByProductIDRequestMultiError) AllErrors() []error { return m } + +// GetDetailsByProductIDRequestValidationError is the validation error returned +// by GetDetailsByProductIDRequest.Validate if the designated constraints +// aren't met. +type GetDetailsByProductIDRequestValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e GetDetailsByProductIDRequestValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e GetDetailsByProductIDRequestValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e GetDetailsByProductIDRequestValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e GetDetailsByProductIDRequestValidationError) Key() bool { return e.key } + +// ErrorName returns error name. 
+func (e GetDetailsByProductIDRequestValidationError) ErrorName() string { + return "GetDetailsByProductIDRequestValidationError" +} + +// Error satisfies the builtin error interface +func (e GetDetailsByProductIDRequestValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sGetDetailsByProductIDRequest.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = GetDetailsByProductIDRequestValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = GetDetailsByProductIDRequestValidationError{} + +// Validate checks the field values on GetDetailsByProductIDReply with the +// rules defined in the proto definition for this message. If any rules are +// violated, the first error encountered is returned, or nil if there are no violations. +func (m *GetDetailsByProductIDReply) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on GetDetailsByProductIDReply with the +// rules defined in the proto definition for this message. If any rules are +// violated, the result is a list of violation errors wrapped in +// GetDetailsByProductIDReplyMultiError, or nil if none found. +func (m *GetDetailsByProductIDReply) ValidateAll() error { + return m.validate(true) +} + +func (m *GetDetailsByProductIDReply) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + if all { + switch v := interface{}(m.GetProductDetail()).(type) { + case interface{ ValidateAll() error }: + if err := v.ValidateAll(); err != nil { + errors = append(errors, GetDetailsByProductIDReplyValidationError{ + field: "ProductDetail", + reason: "embedded message failed validation", + cause: err, + }) + } + case interface{ Validate() error }: + if err := v.Validate(); err != nil { + errors = append(errors, GetDetailsByProductIDReplyValidationError{ + field: "ProductDetail", + reason: "embedded message failed validation", + cause: err, + }) + } + } + } else if v, ok := interface{}(m.GetProductDetail()).(interface{ Validate() error }); ok { + if err := v.Validate(); err != nil { + return GetDetailsByProductIDReplyValidationError{ + field: "ProductDetail", + reason: "embedded message failed validation", + cause: err, + } + } + } + + if all { + switch v := interface{}(m.GetInventoryDetail()).(type) { + case interface{ ValidateAll() error }: + if err := v.ValidateAll(); err != nil { + errors = append(errors, GetDetailsByProductIDReplyValidationError{ + field: "InventoryDetail", + reason: "embedded message failed validation", + cause: err, + }) + } + case interface{ Validate() error }: + if err := v.Validate(); err != nil { + errors = append(errors, GetDetailsByProductIDReplyValidationError{ + field: "InventoryDetail", + reason: "embedded message failed validation", + cause: err, + }) + } + } + } else if v, ok := interface{}(m.GetInventoryDetail()).(interface{ Validate() error }); ok { + if err := v.Validate(); err != nil { + return GetDetailsByProductIDReplyValidationError{ + field: "InventoryDetail", + reason: "embedded message failed validation", + cause: err, + } + } + } + + for idx, item := range m.GetCommentDetails() { + _, _ = idx, item + + if all { + switch v := interface{}(item).(type) { + case interface{ ValidateAll() error }: + if err := v.ValidateAll(); err != nil { + errors = append(errors, GetDetailsByProductIDReplyValidationError{ + 
field: fmt.Sprintf("CommentDetails[%v]", idx), + reason: "embedded message failed validation", + cause: err, + }) + } + case interface{ Validate() error }: + if err := v.Validate(); err != nil { + errors = append(errors, GetDetailsByProductIDReplyValidationError{ + field: fmt.Sprintf("CommentDetails[%v]", idx), + reason: "embedded message failed validation", + cause: err, + }) + } + } + } else if v, ok := interface{}(item).(interface{ Validate() error }); ok { + if err := v.Validate(); err != nil { + return GetDetailsByProductIDReplyValidationError{ + field: fmt.Sprintf("CommentDetails[%v]", idx), + reason: "embedded message failed validation", + cause: err, + } + } + } + + } + + if len(errors) > 0 { + return GetDetailsByProductIDReplyMultiError(errors) + } + + return nil +} + +// GetDetailsByProductIDReplyMultiError is an error wrapping multiple +// validation errors returned by GetDetailsByProductIDReply.ValidateAll() if +// the designated constraints aren't met. +type GetDetailsByProductIDReplyMultiError []error + +// Error returns a concatenation of all the error messages it wraps. +func (m GetDetailsByProductIDReplyMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m GetDetailsByProductIDReplyMultiError) AllErrors() []error { return m } + +// GetDetailsByProductIDReplyValidationError is the validation error returned +// by GetDetailsByProductIDReply.Validate if the designated constraints aren't met. +type GetDetailsByProductIDReplyValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e GetDetailsByProductIDReplyValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e GetDetailsByProductIDReplyValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e GetDetailsByProductIDReplyValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e GetDetailsByProductIDReplyValidationError) Key() bool { return e.key } + +// ErrorName returns error name. 
+func (e GetDetailsByProductIDReplyValidationError) ErrorName() string { + return "GetDetailsByProductIDReplyValidationError" +} + +// Error satisfies the builtin error interface +func (e GetDetailsByProductIDReplyValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sGetDetailsByProductIDReply.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = GetDetailsByProductIDReplyValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = GetDetailsByProductIDReplyValidationError{} diff --git a/6_micro-cluster/example-2-mono-repo/api/eshop_gw/v1/eshop_gw.proto b/6_micro-cluster/example-2-mono-repo/api/eshop_gw/v1/eshop_gw.proto new file mode 100644 index 0000000..b99c59a --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/api/eshop_gw/v1/eshop_gw.proto @@ -0,0 +1,67 @@ +syntax = "proto3"; + +package api.eshop_gw.v1; + +import "api/product/v1/product.proto"; +import "api/comment/v1/comment.proto"; +import "api/inventory/v1/inventory.proto"; +import "google/api/annotations.proto"; +import "protoc-gen-openapiv2/options/annotations.proto"; +import "validate/validate.proto"; +import "tagger/tagger.proto"; + +option go_package = "eshop/api/eshop_gw/v1;v1"; + +// default settings for generating *.swagger.json documents +option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_swagger) = { + host: "localhost:8080" + base_path: "" + info: { + title: "user api docs"; + version: "2.0"; + } + schemes: HTTP; + schemes: HTTPS; + consumes: "application/json"; + produces: "application/json"; + security_definitions: { + security: { + key: "BearerAuth"; + value: { + type: TYPE_API_KEY; + in: IN_HEADER; + name: "Authorization"; + description: "Input a \"Bearer your-jwt-token\" to Value"; + } + } + } +}; + +service EShopGw { + // get page detail by product id + rpc GetDetailsByProductID(GetDetailsByProductIDRequest) returns (GetDetailsByProductIDReply) { + option (google.api.http) = { + get: "/api/v1/detail" + }; + option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { + summary: "get detail", + description: "get detail by product id", + security: { + security_requirement: { + key: "BearerAuth"; + value: {} + } + } + }; + } +} + +message GetDetailsByProductIDRequest { + uint64 productID = 1 [(validate.rules).uint64.gte = 1, (tagger.tags) = "form:\"productID\""]; +} + +message GetDetailsByProductIDReply { + api.product.v1.ProductDetail productDetail = 1; + api.inventory.v1.InventoryDetail inventoryDetail = 2; + repeated api.comment.v1.CommentDetail commentDetails = 3; +} diff --git a/6_micro-cluster/example-2-mono-repo/api/eshop_gw/v1/eshop_gw_grpc.pb.go b/6_micro-cluster/example-2-mono-repo/api/eshop_gw/v1/eshop_gw_grpc.pb.go new file mode 100644 index 0000000..dbb186c --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/api/eshop_gw/v1/eshop_gw_grpc.pb.go @@ -0,0 +1,111 @@ +// Code generated by protoc-gen-go-grpc. DO NOT EDIT. +// versions: +// - protoc-gen-go-grpc v1.3.0 +// - protoc v4.25.2 +// source: api/eshop_gw/v1/eshop_gw.proto + +package v1 + +import ( + context "context" + grpc "google.golang.org/grpc" + codes "google.golang.org/grpc/codes" + status "google.golang.org/grpc/status" +) + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. 
+// Requires gRPC-Go v1.32.0 or later. +const _ = grpc.SupportPackageIsVersion7 + +const ( + EShopGw_GetDetailsByProductID_FullMethodName = "/api.eshop_gw.v1.EShopGw/GetDetailsByProductID" +) + +// EShopGwClient is the client API for EShopGw service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://pkg.go.dev/google.golang.org/grpc/?tab=doc#ClientConn.NewStream. +type EShopGwClient interface { + // get page detail by product id + GetDetailsByProductID(ctx context.Context, in *GetDetailsByProductIDRequest, opts ...grpc.CallOption) (*GetDetailsByProductIDReply, error) +} + +type eShopGwClient struct { + cc grpc.ClientConnInterface +} + +func NewEShopGwClient(cc grpc.ClientConnInterface) EShopGwClient { + return &eShopGwClient{cc} +} + +func (c *eShopGwClient) GetDetailsByProductID(ctx context.Context, in *GetDetailsByProductIDRequest, opts ...grpc.CallOption) (*GetDetailsByProductIDReply, error) { + out := new(GetDetailsByProductIDReply) + err := c.cc.Invoke(ctx, EShopGw_GetDetailsByProductID_FullMethodName, in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// EShopGwServer is the server API for EShopGw service. +// All implementations must embed UnimplementedEShopGwServer +// for forward compatibility +type EShopGwServer interface { + // get page detail by product id + GetDetailsByProductID(context.Context, *GetDetailsByProductIDRequest) (*GetDetailsByProductIDReply, error) + mustEmbedUnimplementedEShopGwServer() +} + +// UnimplementedEShopGwServer must be embedded to have forward compatible implementations. +type UnimplementedEShopGwServer struct { +} + +func (UnimplementedEShopGwServer) GetDetailsByProductID(context.Context, *GetDetailsByProductIDRequest) (*GetDetailsByProductIDReply, error) { + return nil, status.Errorf(codes.Unimplemented, "method GetDetailsByProductID not implemented") +} +func (UnimplementedEShopGwServer) mustEmbedUnimplementedEShopGwServer() {} + +// UnsafeEShopGwServer may be embedded to opt out of forward compatibility for this service. +// Use of this interface is not recommended, as added methods to EShopGwServer will +// result in compilation errors. +type UnsafeEShopGwServer interface { + mustEmbedUnimplementedEShopGwServer() +} + +func RegisterEShopGwServer(s grpc.ServiceRegistrar, srv EShopGwServer) { + s.RegisterService(&EShopGw_ServiceDesc, srv) +} + +func _EShopGw_GetDetailsByProductID_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetDetailsByProductIDRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(EShopGwServer).GetDetailsByProductID(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: EShopGw_GetDetailsByProductID_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(EShopGwServer).GetDetailsByProductID(ctx, req.(*GetDetailsByProductIDRequest)) + } + return interceptor(ctx, in, info, handler) +} + +// EShopGw_ServiceDesc is the grpc.ServiceDesc for EShopGw service. 
+// It's only intended for direct use with grpc.RegisterService, +// and not to be introspected or modified (even as a copy) +var EShopGw_ServiceDesc = grpc.ServiceDesc{ + ServiceName: "api.eshop_gw.v1.EShopGw", + HandlerType: (*EShopGwServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "GetDetailsByProductID", + Handler: _EShopGw_GetDetailsByProductID_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "api/eshop_gw/v1/eshop_gw.proto", +} diff --git a/6_micro-cluster/example-2-mono-repo/api/eshop_gw/v1/eshop_gw_router.pb.go b/6_micro-cluster/example-2-mono-repo/api/eshop_gw/v1/eshop_gw_router.pb.go new file mode 100644 index 0000000..f1db500 --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/api/eshop_gw/v1/eshop_gw_router.pb.go @@ -0,0 +1,188 @@ +// Code generated by https://github.com/zhufuyi/sponge, DO NOT EDIT. + +package v1 + +import ( + "context" + "errors" + "strings" + + "github.com/gin-gonic/gin" + "go.uber.org/zap" + + "github.com/zhufuyi/sponge/pkg/errcode" + "github.com/zhufuyi/sponge/pkg/gin/middleware" + + +) + +type EShopGwLogicer interface { + GetDetailsByProductID(ctx context.Context, req *GetDetailsByProductIDRequest) (*GetDetailsByProductIDReply, error) +} + +type EShopGwOption func(*eShopGwOptions) + +type eShopGwOptions struct { + isFromRPC bool + responser errcode.Responser + zapLog *zap.Logger + httpErrors []*errcode.Error + rpcStatus []*errcode.RPCStatus + wrapCtxFn func(c *gin.Context) context.Context +} + +func (o *eShopGwOptions) apply(opts ...EShopGwOption) { + for _, opt := range opts { + opt(o) + } +} + +func WithEShopGwHTTPResponse() EShopGwOption { + return func(o *eShopGwOptions) { + o.isFromRPC = false + } +} + +func WithEShopGwRPCResponse() EShopGwOption { + return func(o *eShopGwOptions) { + o.isFromRPC = true + } +} + +func WithEShopGwResponser(responser errcode.Responser) EShopGwOption { + return func(o *eShopGwOptions) { + o.responser = responser + } +} + +func WithEShopGwLogger(zapLog *zap.Logger) EShopGwOption { + return func(o *eShopGwOptions) { + o.zapLog = zapLog + } +} + +func WithEShopGwErrorToHTTPCode(e ...*errcode.Error) EShopGwOption { + return func(o *eShopGwOptions) { + o.httpErrors = e + } +} + +func WithEShopGwRPCStatusToHTTPCode(s ...*errcode.RPCStatus) EShopGwOption { + return func(o *eShopGwOptions) { + o.rpcStatus = s + } +} + +func WithEShopGwWrapCtx(wrapCtxFn func(c *gin.Context) context.Context) EShopGwOption { + return func(o *eShopGwOptions) { + o.wrapCtxFn = wrapCtxFn + } +} + +func RegisterEShopGwRouter( + iRouter gin.IRouter, + groupPathMiddlewares map[string][]gin.HandlerFunc, + singlePathMiddlewares map[string][]gin.HandlerFunc, + iLogic EShopGwLogicer, + opts ...EShopGwOption) { + + o := &eShopGwOptions{} + o.apply(opts...) 
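+	// Note (editorial comment, not part of the generated output): the functional
+	// options have now been applied; any settings left unset fall back to the
+	// defaults chosen below (an errcode responser and a production zap logger).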
+ + if o.responser == nil { + o.responser = errcode.NewResponser(o.isFromRPC, o.httpErrors, o.rpcStatus) + } + if o.zapLog == nil { + o.zapLog,_ = zap.NewProduction() + } + + r := &eShopGwRouter { + iRouter: iRouter, + groupPathMiddlewares: groupPathMiddlewares, + singlePathMiddlewares: singlePathMiddlewares, + iLogic: iLogic, + iResponse: o.responser, + zapLog: o.zapLog, + wrapCtxFn: o.wrapCtxFn, + } + r.register() +} + +type eShopGwRouter struct { + iRouter gin.IRouter + groupPathMiddlewares map[string][]gin.HandlerFunc + singlePathMiddlewares map[string][]gin.HandlerFunc + iLogic EShopGwLogicer + iResponse errcode.Responser + zapLog *zap.Logger + wrapCtxFn func(c *gin.Context) context.Context +} + +func (r *eShopGwRouter) register() { + r.iRouter.Handle("GET", "/api/v1/detail", r.withMiddleware("GET", "/api/v1/detail", r.GetDetailsByProductID_0)...) + +} + +func (r *eShopGwRouter) withMiddleware(method string, path string, fn gin.HandlerFunc) []gin.HandlerFunc { + handlerFns := []gin.HandlerFunc{} + + // determine if a route group is hit or miss, left prefix rule + for groupPath, fns := range r.groupPathMiddlewares { + if groupPath == "" || groupPath == "/" { + handlerFns = append(handlerFns, fns...) + continue + } + size := len(groupPath) + if len(path) < size { + continue + } + if groupPath == path[:size] { + handlerFns = append(handlerFns, fns...) + } + } + + // determine if a single route has been hit + key := strings.ToUpper(method) + "->" + path + if fns, ok := r.singlePathMiddlewares[key]; ok { + handlerFns = append(handlerFns, fns...) + } + + return append(handlerFns, fn) +} + + +func (r *eShopGwRouter) GetDetailsByProductID_0 (c *gin.Context) { + req := &GetDetailsByProductIDRequest{} + var err error + + + + if err = c.ShouldBindQuery(req); err != nil { + r.zapLog.Warn("ShouldBindQuery error", zap.Error(err), middleware.GCtxRequestIDField(c)) + r.iResponse.ParamError(c, err) + return + } + + + + + var ctx context.Context + if r.wrapCtxFn != nil { + ctx = r.wrapCtxFn(c) + } else { + ctx = middleware.WrapCtx(c) + } + + + out, err := r.iLogic.GetDetailsByProductID(ctx, req) + if err != nil { + if errors.Is(err, errcode.SkipResponse) { + return + } + r.iResponse.Error(c, err) + return + } + + r.iResponse.Success(c, out) +} + diff --git a/6_micro-cluster/example-2-mono-repo/api/inventory/v1/inventory.pb.go b/6_micro-cluster/example-2-mono-repo/api/inventory/v1/inventory.pb.go new file mode 100644 index 0000000..8ec64c2 --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/api/inventory/v1/inventory.pb.go @@ -0,0 +1,303 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// versions: +// protoc-gen-go v1.28.0 +// protoc v4.25.2 +// source: api/inventory/v1/inventory.proto + +package v1 + +import ( + + protoreflect "google.golang.org/protobuf/reflect/protoreflect" + protoimpl "google.golang.org/protobuf/runtime/protoimpl" + reflect "reflect" + sync "sync" +) + +const ( + // Verify that this generated code is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) + // Verify that runtime/protoimpl is sufficiently up-to-date. 
+ _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) +) + +type GetByIDRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Id uint64 `protobuf:"varint,1,opt,name=id,proto3" json:"id"` +} + +func (x *GetByIDRequest) Reset() { + *x = GetByIDRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_api_inventory_v1_inventory_proto_msgTypes[0] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *GetByIDRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*GetByIDRequest) ProtoMessage() {} + +func (x *GetByIDRequest) ProtoReflect() protoreflect.Message { + mi := &file_api_inventory_v1_inventory_proto_msgTypes[0] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use GetByIDRequest.ProtoReflect.Descriptor instead. +func (*GetByIDRequest) Descriptor() ([]byte, []int) { + return file_api_inventory_v1_inventory_proto_rawDescGZIP(), []int{0} +} + +func (x *GetByIDRequest) GetId() uint64 { + if x != nil { + return x.Id + } + return 0 +} + +type InventoryDetail struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Id uint64 `protobuf:"varint,1,opt,name=id,proto3" json:"id"` + Num float32 `protobuf:"fixed32,4,opt,name=num,proto3" json:"num"` + SoldNum int32 `protobuf:"varint,3,opt,name=soldNum,proto3" json:"soldNum"` +} + +func (x *InventoryDetail) Reset() { + *x = InventoryDetail{} + if protoimpl.UnsafeEnabled { + mi := &file_api_inventory_v1_inventory_proto_msgTypes[1] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *InventoryDetail) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*InventoryDetail) ProtoMessage() {} + +func (x *InventoryDetail) ProtoReflect() protoreflect.Message { + mi := &file_api_inventory_v1_inventory_proto_msgTypes[1] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use InventoryDetail.ProtoReflect.Descriptor instead. 
+func (*InventoryDetail) Descriptor() ([]byte, []int) { + return file_api_inventory_v1_inventory_proto_rawDescGZIP(), []int{1} +} + +func (x *InventoryDetail) GetId() uint64 { + if x != nil { + return x.Id + } + return 0 +} + +func (x *InventoryDetail) GetNum() float32 { + if x != nil { + return x.Num + } + return 0 +} + +func (x *InventoryDetail) GetSoldNum() int32 { + if x != nil { + return x.SoldNum + } + return 0 +} + +type GetByIDReply struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + InventoryDetail *InventoryDetail `protobuf:"bytes,1,opt,name=inventoryDetail,proto3" json:"inventoryDetail"` +} + +func (x *GetByIDReply) Reset() { + *x = GetByIDReply{} + if protoimpl.UnsafeEnabled { + mi := &file_api_inventory_v1_inventory_proto_msgTypes[2] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *GetByIDReply) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*GetByIDReply) ProtoMessage() {} + +func (x *GetByIDReply) ProtoReflect() protoreflect.Message { + mi := &file_api_inventory_v1_inventory_proto_msgTypes[2] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use GetByIDReply.ProtoReflect.Descriptor instead. +func (*GetByIDReply) Descriptor() ([]byte, []int) { + return file_api_inventory_v1_inventory_proto_rawDescGZIP(), []int{2} +} + +func (x *GetByIDReply) GetInventoryDetail() *InventoryDetail { + if x != nil { + return x.InventoryDetail + } + return nil +} + +var File_api_inventory_v1_inventory_proto protoreflect.FileDescriptor + +var file_api_inventory_v1_inventory_proto_rawDesc = []byte{ + 0x0a, 0x20, 0x61, 0x70, 0x69, 0x2f, 0x69, 0x6e, 0x76, 0x65, 0x6e, 0x74, 0x6f, 0x72, 0x79, 0x2f, + 0x76, 0x31, 0x2f, 0x69, 0x6e, 0x76, 0x65, 0x6e, 0x74, 0x6f, 0x72, 0x79, 0x2e, 0x70, 0x72, 0x6f, + 0x74, 0x6f, 0x12, 0x10, 0x61, 0x70, 0x69, 0x2e, 0x69, 0x6e, 0x76, 0x65, 0x6e, 0x74, 0x6f, 0x72, + 0x79, 0x2e, 0x76, 0x31, 0x1a, 0x17, 0x76, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x65, 0x2f, 0x76, + 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, 0x29, 0x0a, + 0x0e, 0x47, 0x65, 0x74, 0x42, 0x79, 0x49, 0x44, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, + 0x17, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x42, 0x07, 0xfa, 0x42, 0x04, + 0x32, 0x02, 0x28, 0x01, 0x52, 0x02, 0x69, 0x64, 0x22, 0x4d, 0x0a, 0x0f, 0x49, 0x6e, 0x76, 0x65, + 0x6e, 0x74, 0x6f, 0x72, 0x79, 0x44, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x12, 0x0e, 0x0a, 0x02, 0x69, + 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x52, 0x02, 0x69, 0x64, 0x12, 0x10, 0x0a, 0x03, 0x6e, + 0x75, 0x6d, 0x18, 0x04, 0x20, 0x01, 0x28, 0x02, 0x52, 0x03, 0x6e, 0x75, 0x6d, 0x12, 0x18, 0x0a, + 0x07, 0x73, 0x6f, 0x6c, 0x64, 0x4e, 0x75, 0x6d, 0x18, 0x03, 0x20, 0x01, 0x28, 0x05, 0x52, 0x07, + 0x73, 0x6f, 0x6c, 0x64, 0x4e, 0x75, 0x6d, 0x22, 0x5b, 0x0a, 0x0c, 0x47, 0x65, 0x74, 0x42, 0x79, + 0x49, 0x44, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x12, 0x4b, 0x0a, 0x0f, 0x69, 0x6e, 0x76, 0x65, 0x6e, + 0x74, 0x6f, 0x72, 0x79, 0x44, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, + 0x32, 0x21, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x69, 0x6e, 0x76, 0x65, 0x6e, 0x74, 0x6f, 0x72, 0x79, + 0x2e, 0x76, 0x31, 0x2e, 0x49, 0x6e, 0x76, 0x65, 0x6e, 0x74, 0x6f, 0x72, 0x79, 0x44, 0x65, 0x74, + 0x61, 0x69, 0x6c, 0x52, 0x0f, 0x69, 0x6e, 0x76, 0x65, 
0x6e, 0x74, 0x6f, 0x72, 0x79, 0x44, 0x65, + 0x74, 0x61, 0x69, 0x6c, 0x32, 0x5a, 0x0a, 0x09, 0x49, 0x6e, 0x76, 0x65, 0x6e, 0x74, 0x6f, 0x72, + 0x79, 0x12, 0x4d, 0x0a, 0x07, 0x47, 0x65, 0x74, 0x42, 0x79, 0x49, 0x44, 0x12, 0x20, 0x2e, 0x61, + 0x70, 0x69, 0x2e, 0x69, 0x6e, 0x76, 0x65, 0x6e, 0x74, 0x6f, 0x72, 0x79, 0x2e, 0x76, 0x31, 0x2e, + 0x47, 0x65, 0x74, 0x42, 0x79, 0x49, 0x44, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1e, + 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x69, 0x6e, 0x76, 0x65, 0x6e, 0x74, 0x6f, 0x72, 0x79, 0x2e, 0x76, + 0x31, 0x2e, 0x47, 0x65, 0x74, 0x42, 0x79, 0x49, 0x44, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0x00, + 0x42, 0x1b, 0x5a, 0x19, 0x65, 0x73, 0x68, 0x6f, 0x70, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x69, 0x6e, + 0x76, 0x65, 0x6e, 0x74, 0x6f, 0x72, 0x79, 0x2f, 0x76, 0x31, 0x3b, 0x76, 0x31, 0x62, 0x06, 0x70, + 0x72, 0x6f, 0x74, 0x6f, 0x33, +} + +var ( + file_api_inventory_v1_inventory_proto_rawDescOnce sync.Once + file_api_inventory_v1_inventory_proto_rawDescData = file_api_inventory_v1_inventory_proto_rawDesc +) + +func file_api_inventory_v1_inventory_proto_rawDescGZIP() []byte { + file_api_inventory_v1_inventory_proto_rawDescOnce.Do(func() { + file_api_inventory_v1_inventory_proto_rawDescData = protoimpl.X.CompressGZIP(file_api_inventory_v1_inventory_proto_rawDescData) + }) + return file_api_inventory_v1_inventory_proto_rawDescData +} + +var file_api_inventory_v1_inventory_proto_msgTypes = make([]protoimpl.MessageInfo, 3) +var file_api_inventory_v1_inventory_proto_goTypes = []interface{}{ + (*GetByIDRequest)(nil), // 0: api.inventory.v1.GetByIDRequest + (*InventoryDetail)(nil), // 1: api.inventory.v1.InventoryDetail + (*GetByIDReply)(nil), // 2: api.inventory.v1.GetByIDReply +} +var file_api_inventory_v1_inventory_proto_depIdxs = []int32{ + 1, // 0: api.inventory.v1.GetByIDReply.inventoryDetail:type_name -> api.inventory.v1.InventoryDetail + 0, // 1: api.inventory.v1.Inventory.GetByID:input_type -> api.inventory.v1.GetByIDRequest + 2, // 2: api.inventory.v1.Inventory.GetByID:output_type -> api.inventory.v1.GetByIDReply + 2, // [2:3] is the sub-list for method output_type + 1, // [1:2] is the sub-list for method input_type + 1, // [1:1] is the sub-list for extension type_name + 1, // [1:1] is the sub-list for extension extendee + 0, // [0:1] is the sub-list for field type_name +} + +func init() { file_api_inventory_v1_inventory_proto_init() } +func file_api_inventory_v1_inventory_proto_init() { + if File_api_inventory_v1_inventory_proto != nil { + return + } + if !protoimpl.UnsafeEnabled { + file_api_inventory_v1_inventory_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*GetByIDRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_api_inventory_v1_inventory_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*InventoryDetail); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_api_inventory_v1_inventory_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*GetByIDReply); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + } + type x struct{} + out := protoimpl.TypeBuilder{ + File: protoimpl.DescBuilder{ + GoPackagePath: reflect.TypeOf(x{}).PkgPath(), + RawDescriptor: file_api_inventory_v1_inventory_proto_rawDesc, + 
NumEnums: 0, + NumMessages: 3, + NumExtensions: 0, + NumServices: 1, + }, + GoTypes: file_api_inventory_v1_inventory_proto_goTypes, + DependencyIndexes: file_api_inventory_v1_inventory_proto_depIdxs, + MessageInfos: file_api_inventory_v1_inventory_proto_msgTypes, + }.Build() + File_api_inventory_v1_inventory_proto = out.File + file_api_inventory_v1_inventory_proto_rawDesc = nil + file_api_inventory_v1_inventory_proto_goTypes = nil + file_api_inventory_v1_inventory_proto_depIdxs = nil +} diff --git a/6_micro-cluster/example-2-mono-repo/api/inventory/v1/inventory.pb.validate.go b/6_micro-cluster/example-2-mono-repo/api/inventory/v1/inventory.pb.validate.go new file mode 100644 index 0000000..337f2df --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/api/inventory/v1/inventory.pb.validate.go @@ -0,0 +1,381 @@ +// Code generated by protoc-gen-validate. DO NOT EDIT. +// source: api/inventory/v1/inventory.proto + +package v1 + +import ( + "bytes" + "errors" + "fmt" + "net" + "net/mail" + "net/url" + "regexp" + "sort" + "strings" + "time" + "unicode/utf8" + + "google.golang.org/protobuf/types/known/anypb" +) + +// ensure the imports are used +var ( + _ = bytes.MinRead + _ = errors.New("") + _ = fmt.Print + _ = utf8.UTFMax + _ = (*regexp.Regexp)(nil) + _ = (*strings.Reader)(nil) + _ = net.IPv4len + _ = time.Duration(0) + _ = (*url.URL)(nil) + _ = (*mail.Address)(nil) + _ = anypb.Any{} + _ = sort.Sort +) + +// Validate checks the field values on GetByIDRequest with the rules defined in +// the proto definition for this message. If any rules are violated, the first +// error encountered is returned, or nil if there are no violations. +func (m *GetByIDRequest) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on GetByIDRequest with the rules defined +// in the proto definition for this message. If any rules are violated, the +// result is a list of violation errors wrapped in GetByIDRequestMultiError, +// or nil if none found. +func (m *GetByIDRequest) ValidateAll() error { + return m.validate(true) +} + +func (m *GetByIDRequest) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + if m.GetId() < 1 { + err := GetByIDRequestValidationError{ + field: "Id", + reason: "value must be greater than or equal to 1", + } + if !all { + return err + } + errors = append(errors, err) + } + + if len(errors) > 0 { + return GetByIDRequestMultiError(errors) + } + + return nil +} + +// GetByIDRequestMultiError is an error wrapping multiple validation errors +// returned by GetByIDRequest.ValidateAll() if the designated constraints +// aren't met. +type GetByIDRequestMultiError []error + +// Error returns a concatenation of all the error messages it wraps. +func (m GetByIDRequestMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m GetByIDRequestMultiError) AllErrors() []error { return m } + +// GetByIDRequestValidationError is the validation error returned by +// GetByIDRequest.Validate if the designated constraints aren't met. +type GetByIDRequestValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e GetByIDRequestValidationError) Field() string { return e.field } + +// Reason function returns reason value. 
+func (e GetByIDRequestValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e GetByIDRequestValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e GetByIDRequestValidationError) Key() bool { return e.key } + +// ErrorName returns error name. +func (e GetByIDRequestValidationError) ErrorName() string { return "GetByIDRequestValidationError" } + +// Error satisfies the builtin error interface +func (e GetByIDRequestValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sGetByIDRequest.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = GetByIDRequestValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = GetByIDRequestValidationError{} + +// Validate checks the field values on InventoryDetail with the rules defined +// in the proto definition for this message. If any rules are violated, the +// first error encountered is returned, or nil if there are no violations. +func (m *InventoryDetail) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on InventoryDetail with the rules +// defined in the proto definition for this message. If any rules are +// violated, the result is a list of violation errors wrapped in +// InventoryDetailMultiError, or nil if none found. +func (m *InventoryDetail) ValidateAll() error { + return m.validate(true) +} + +func (m *InventoryDetail) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + // no validation rules for Id + + // no validation rules for Num + + // no validation rules for SoldNum + + if len(errors) > 0 { + return InventoryDetailMultiError(errors) + } + + return nil +} + +// InventoryDetailMultiError is an error wrapping multiple validation errors +// returned by InventoryDetail.ValidateAll() if the designated constraints +// aren't met. +type InventoryDetailMultiError []error + +// Error returns a concatenation of all the error messages it wraps. +func (m InventoryDetailMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m InventoryDetailMultiError) AllErrors() []error { return m } + +// InventoryDetailValidationError is the validation error returned by +// InventoryDetail.Validate if the designated constraints aren't met. +type InventoryDetailValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e InventoryDetailValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e InventoryDetailValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e InventoryDetailValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e InventoryDetailValidationError) Key() bool { return e.key } + +// ErrorName returns error name. 
+func (e InventoryDetailValidationError) ErrorName() string { return "InventoryDetailValidationError" } + +// Error satisfies the builtin error interface +func (e InventoryDetailValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sInventoryDetail.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = InventoryDetailValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = InventoryDetailValidationError{} + +// Validate checks the field values on GetByIDReply with the rules defined in +// the proto definition for this message. If any rules are violated, the first +// error encountered is returned, or nil if there are no violations. +func (m *GetByIDReply) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on GetByIDReply with the rules defined +// in the proto definition for this message. If any rules are violated, the +// result is a list of violation errors wrapped in GetByIDReplyMultiError, or +// nil if none found. +func (m *GetByIDReply) ValidateAll() error { + return m.validate(true) +} + +func (m *GetByIDReply) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + if all { + switch v := interface{}(m.GetInventoryDetail()).(type) { + case interface{ ValidateAll() error }: + if err := v.ValidateAll(); err != nil { + errors = append(errors, GetByIDReplyValidationError{ + field: "InventoryDetail", + reason: "embedded message failed validation", + cause: err, + }) + } + case interface{ Validate() error }: + if err := v.Validate(); err != nil { + errors = append(errors, GetByIDReplyValidationError{ + field: "InventoryDetail", + reason: "embedded message failed validation", + cause: err, + }) + } + } + } else if v, ok := interface{}(m.GetInventoryDetail()).(interface{ Validate() error }); ok { + if err := v.Validate(); err != nil { + return GetByIDReplyValidationError{ + field: "InventoryDetail", + reason: "embedded message failed validation", + cause: err, + } + } + } + + if len(errors) > 0 { + return GetByIDReplyMultiError(errors) + } + + return nil +} + +// GetByIDReplyMultiError is an error wrapping multiple validation errors +// returned by GetByIDReply.ValidateAll() if the designated constraints aren't met. +type GetByIDReplyMultiError []error + +// Error returns a concatenation of all the error messages it wraps. +func (m GetByIDReplyMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m GetByIDReplyMultiError) AllErrors() []error { return m } + +// GetByIDReplyValidationError is the validation error returned by +// GetByIDReply.Validate if the designated constraints aren't met. +type GetByIDReplyValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e GetByIDReplyValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e GetByIDReplyValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e GetByIDReplyValidationError) Cause() error { return e.cause } + +// Key function returns key value. 
+func (e GetByIDReplyValidationError) Key() bool { return e.key } + +// ErrorName returns error name. +func (e GetByIDReplyValidationError) ErrorName() string { return "GetByIDReplyValidationError" } + +// Error satisfies the builtin error interface +func (e GetByIDReplyValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sGetByIDReply.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = GetByIDReplyValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = GetByIDReplyValidationError{} diff --git a/6_micro-cluster/example-2-mono-repo/api/inventory/v1/inventory.proto b/6_micro-cluster/example-2-mono-repo/api/inventory/v1/inventory.proto new file mode 100644 index 0000000..973edbd --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/api/inventory/v1/inventory.proto @@ -0,0 +1,26 @@ +syntax = "proto3"; + +package api.inventory.v1; + +import "validate/validate.proto"; + +option go_package = "eshop/api/inventory/v1;v1"; + +service Inventory { + // get inventory by id + rpc GetByID(GetByIDRequest) returns (GetByIDReply) {} +} + +message GetByIDRequest { + uint64 id = 1 [(validate.rules).uint64.gte = 1]; +} + +message InventoryDetail { + uint64 id = 1; + float num = 4; + int32 soldNum =3; +} + +message GetByIDReply { + InventoryDetail inventoryDetail = 1; +} diff --git a/6_micro-cluster/example-2-mono-repo/api/inventory/v1/inventory_grpc.pb.go b/6_micro-cluster/example-2-mono-repo/api/inventory/v1/inventory_grpc.pb.go new file mode 100644 index 0000000..af02be9 --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/api/inventory/v1/inventory_grpc.pb.go @@ -0,0 +1,111 @@ +// Code generated by protoc-gen-go-grpc. DO NOT EDIT. +// versions: +// - protoc-gen-go-grpc v1.3.0 +// - protoc v4.25.2 +// source: api/inventory/v1/inventory.proto + +package v1 + +import ( + context "context" + grpc "google.golang.org/grpc" + codes "google.golang.org/grpc/codes" + status "google.golang.org/grpc/status" +) + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +// Requires gRPC-Go v1.32.0 or later. +const _ = grpc.SupportPackageIsVersion7 + +const ( + Inventory_GetByID_FullMethodName = "/api.inventory.v1.Inventory/GetByID" +) + +// InventoryClient is the client API for Inventory service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://pkg.go.dev/google.golang.org/grpc/?tab=doc#ClientConn.NewStream. +type InventoryClient interface { + // get inventory by id + GetByID(ctx context.Context, in *GetByIDRequest, opts ...grpc.CallOption) (*GetByIDReply, error) +} + +type inventoryClient struct { + cc grpc.ClientConnInterface +} + +func NewInventoryClient(cc grpc.ClientConnInterface) InventoryClient { + return &inventoryClient{cc} +} + +func (c *inventoryClient) GetByID(ctx context.Context, in *GetByIDRequest, opts ...grpc.CallOption) (*GetByIDReply, error) { + out := new(GetByIDReply) + err := c.cc.Invoke(ctx, Inventory_GetByID_FullMethodName, in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// InventoryServer is the server API for Inventory service. 
+// All implementations must embed UnimplementedInventoryServer +// for forward compatibility +type InventoryServer interface { + // get inventory by id + GetByID(context.Context, *GetByIDRequest) (*GetByIDReply, error) + mustEmbedUnimplementedInventoryServer() +} + +// UnimplementedInventoryServer must be embedded to have forward compatible implementations. +type UnimplementedInventoryServer struct { +} + +func (UnimplementedInventoryServer) GetByID(context.Context, *GetByIDRequest) (*GetByIDReply, error) { + return nil, status.Errorf(codes.Unimplemented, "method GetByID not implemented") +} +func (UnimplementedInventoryServer) mustEmbedUnimplementedInventoryServer() {} + +// UnsafeInventoryServer may be embedded to opt out of forward compatibility for this service. +// Use of this interface is not recommended, as added methods to InventoryServer will +// result in compilation errors. +type UnsafeInventoryServer interface { + mustEmbedUnimplementedInventoryServer() +} + +func RegisterInventoryServer(s grpc.ServiceRegistrar, srv InventoryServer) { + s.RegisterService(&Inventory_ServiceDesc, srv) +} + +func _Inventory_GetByID_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetByIDRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(InventoryServer).GetByID(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: Inventory_GetByID_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(InventoryServer).GetByID(ctx, req.(*GetByIDRequest)) + } + return interceptor(ctx, in, info, handler) +} + +// Inventory_ServiceDesc is the grpc.ServiceDesc for Inventory service. +// It's only intended for direct use with grpc.RegisterService, +// and not to be introspected or modified (even as a copy) +var Inventory_ServiceDesc = grpc.ServiceDesc{ + ServiceName: "api.inventory.v1.Inventory", + HandlerType: (*InventoryServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "GetByID", + Handler: _Inventory_GetByID_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "api/inventory/v1/inventory.proto", +} diff --git a/6_micro-cluster/example-2-mono-repo/api/product/v1/product.pb.go b/6_micro-cluster/example-2-mono-repo/api/product/v1/product.pb.go new file mode 100644 index 0000000..b375fa9 --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/api/product/v1/product.pb.go @@ -0,0 +1,321 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// versions: +// protoc-gen-go v1.28.0 +// protoc v4.25.2 +// source: api/product/v1/product.proto + +package v1 + +import ( + + protoreflect "google.golang.org/protobuf/reflect/protoreflect" + protoimpl "google.golang.org/protobuf/runtime/protoimpl" + reflect "reflect" + sync "sync" +) + +const ( + // Verify that this generated code is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) + // Verify that runtime/protoimpl is sufficiently up-to-date. 
+ _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) +) + +type GetByIDRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Id uint64 `protobuf:"varint,1,opt,name=id,proto3" json:"id"` +} + +func (x *GetByIDRequest) Reset() { + *x = GetByIDRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_api_product_v1_product_proto_msgTypes[0] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *GetByIDRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*GetByIDRequest) ProtoMessage() {} + +func (x *GetByIDRequest) ProtoReflect() protoreflect.Message { + mi := &file_api_product_v1_product_proto_msgTypes[0] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use GetByIDRequest.ProtoReflect.Descriptor instead. +func (*GetByIDRequest) Descriptor() ([]byte, []int) { + return file_api_product_v1_product_proto_rawDescGZIP(), []int{0} +} + +func (x *GetByIDRequest) GetId() uint64 { + if x != nil { + return x.Id + } + return 0 +} + +type ProductDetail struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Id uint64 `protobuf:"varint,1,opt,name=id,proto3" json:"id"` + Name string `protobuf:"bytes,2,opt,name=name,proto3" json:"name"` + Price float32 `protobuf:"fixed32,3,opt,name=price,proto3" json:"price"` + Description string `protobuf:"bytes,4,opt,name=description,proto3" json:"description"` +} + +func (x *ProductDetail) Reset() { + *x = ProductDetail{} + if protoimpl.UnsafeEnabled { + mi := &file_api_product_v1_product_proto_msgTypes[1] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ProductDetail) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ProductDetail) ProtoMessage() {} + +func (x *ProductDetail) ProtoReflect() protoreflect.Message { + mi := &file_api_product_v1_product_proto_msgTypes[1] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ProductDetail.ProtoReflect.Descriptor instead. 
+func (*ProductDetail) Descriptor() ([]byte, []int) { + return file_api_product_v1_product_proto_rawDescGZIP(), []int{1} +} + +func (x *ProductDetail) GetId() uint64 { + if x != nil { + return x.Id + } + return 0 +} + +func (x *ProductDetail) GetName() string { + if x != nil { + return x.Name + } + return "" +} + +func (x *ProductDetail) GetPrice() float32 { + if x != nil { + return x.Price + } + return 0 +} + +func (x *ProductDetail) GetDescription() string { + if x != nil { + return x.Description + } + return "" +} + +type GetByIDReply struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + ProductDetail *ProductDetail `protobuf:"bytes,1,opt,name=productDetail,proto3" json:"productDetail"` + InventoryID uint64 `protobuf:"varint,2,opt,name=inventoryID,proto3" json:"inventoryID"` +} + +func (x *GetByIDReply) Reset() { + *x = GetByIDReply{} + if protoimpl.UnsafeEnabled { + mi := &file_api_product_v1_product_proto_msgTypes[2] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *GetByIDReply) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*GetByIDReply) ProtoMessage() {} + +func (x *GetByIDReply) ProtoReflect() protoreflect.Message { + mi := &file_api_product_v1_product_proto_msgTypes[2] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use GetByIDReply.ProtoReflect.Descriptor instead. +func (*GetByIDReply) Descriptor() ([]byte, []int) { + return file_api_product_v1_product_proto_rawDescGZIP(), []int{2} +} + +func (x *GetByIDReply) GetProductDetail() *ProductDetail { + if x != nil { + return x.ProductDetail + } + return nil +} + +func (x *GetByIDReply) GetInventoryID() uint64 { + if x != nil { + return x.InventoryID + } + return 0 +} + +var File_api_product_v1_product_proto protoreflect.FileDescriptor + +var file_api_product_v1_product_proto_rawDesc = []byte{ + 0x0a, 0x1c, 0x61, 0x70, 0x69, 0x2f, 0x70, 0x72, 0x6f, 0x64, 0x75, 0x63, 0x74, 0x2f, 0x76, 0x31, + 0x2f, 0x70, 0x72, 0x6f, 0x64, 0x75, 0x63, 0x74, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x0e, + 0x61, 0x70, 0x69, 0x2e, 0x70, 0x72, 0x6f, 0x64, 0x75, 0x63, 0x74, 0x2e, 0x76, 0x31, 0x1a, 0x17, + 0x76, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x65, 0x2f, 0x76, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, + 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, 0x29, 0x0a, 0x0e, 0x47, 0x65, 0x74, 0x42, 0x79, + 0x49, 0x44, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x17, 0x0a, 0x02, 0x69, 0x64, 0x18, + 0x01, 0x20, 0x01, 0x28, 0x04, 0x42, 0x07, 0xfa, 0x42, 0x04, 0x32, 0x02, 0x28, 0x01, 0x52, 0x02, + 0x69, 0x64, 0x22, 0x6b, 0x0a, 0x0d, 0x50, 0x72, 0x6f, 0x64, 0x75, 0x63, 0x74, 0x44, 0x65, 0x74, + 0x61, 0x69, 0x6c, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x52, + 0x02, 0x69, 0x64, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, + 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x70, 0x72, 0x69, 0x63, 0x65, + 0x18, 0x03, 0x20, 0x01, 0x28, 0x02, 0x52, 0x05, 0x70, 0x72, 0x69, 0x63, 0x65, 0x12, 0x20, 0x0a, + 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x04, 0x20, 0x01, + 0x28, 0x09, 0x52, 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x22, + 0x75, 0x0a, 0x0c, 0x47, 0x65, 0x74, 0x42, 0x79, 0x49, 0x44, 0x52, 0x65, 0x70, 0x6c, 
0x79, 0x12, + 0x43, 0x0a, 0x0d, 0x70, 0x72, 0x6f, 0x64, 0x75, 0x63, 0x74, 0x44, 0x65, 0x74, 0x61, 0x69, 0x6c, + 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x70, 0x72, 0x6f, + 0x64, 0x75, 0x63, 0x74, 0x2e, 0x76, 0x31, 0x2e, 0x50, 0x72, 0x6f, 0x64, 0x75, 0x63, 0x74, 0x44, + 0x65, 0x74, 0x61, 0x69, 0x6c, 0x52, 0x0d, 0x70, 0x72, 0x6f, 0x64, 0x75, 0x63, 0x74, 0x44, 0x65, + 0x74, 0x61, 0x69, 0x6c, 0x12, 0x20, 0x0a, 0x0b, 0x69, 0x6e, 0x76, 0x65, 0x6e, 0x74, 0x6f, 0x72, + 0x79, 0x49, 0x44, 0x18, 0x02, 0x20, 0x01, 0x28, 0x04, 0x52, 0x0b, 0x69, 0x6e, 0x76, 0x65, 0x6e, + 0x74, 0x6f, 0x72, 0x79, 0x49, 0x44, 0x32, 0x54, 0x0a, 0x07, 0x50, 0x72, 0x6f, 0x64, 0x75, 0x63, + 0x74, 0x12, 0x49, 0x0a, 0x07, 0x47, 0x65, 0x74, 0x42, 0x79, 0x49, 0x44, 0x12, 0x1e, 0x2e, 0x61, + 0x70, 0x69, 0x2e, 0x70, 0x72, 0x6f, 0x64, 0x75, 0x63, 0x74, 0x2e, 0x76, 0x31, 0x2e, 0x47, 0x65, + 0x74, 0x42, 0x79, 0x49, 0x44, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1c, 0x2e, 0x61, + 0x70, 0x69, 0x2e, 0x70, 0x72, 0x6f, 0x64, 0x75, 0x63, 0x74, 0x2e, 0x76, 0x31, 0x2e, 0x47, 0x65, + 0x74, 0x42, 0x79, 0x49, 0x44, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0x00, 0x42, 0x19, 0x5a, 0x17, + 0x65, 0x73, 0x68, 0x6f, 0x70, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x70, 0x72, 0x6f, 0x64, 0x75, 0x63, + 0x74, 0x2f, 0x76, 0x31, 0x3b, 0x76, 0x31, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, +} + +var ( + file_api_product_v1_product_proto_rawDescOnce sync.Once + file_api_product_v1_product_proto_rawDescData = file_api_product_v1_product_proto_rawDesc +) + +func file_api_product_v1_product_proto_rawDescGZIP() []byte { + file_api_product_v1_product_proto_rawDescOnce.Do(func() { + file_api_product_v1_product_proto_rawDescData = protoimpl.X.CompressGZIP(file_api_product_v1_product_proto_rawDescData) + }) + return file_api_product_v1_product_proto_rawDescData +} + +var file_api_product_v1_product_proto_msgTypes = make([]protoimpl.MessageInfo, 3) +var file_api_product_v1_product_proto_goTypes = []interface{}{ + (*GetByIDRequest)(nil), // 0: api.product.v1.GetByIDRequest + (*ProductDetail)(nil), // 1: api.product.v1.ProductDetail + (*GetByIDReply)(nil), // 2: api.product.v1.GetByIDReply +} +var file_api_product_v1_product_proto_depIdxs = []int32{ + 1, // 0: api.product.v1.GetByIDReply.productDetail:type_name -> api.product.v1.ProductDetail + 0, // 1: api.product.v1.Product.GetByID:input_type -> api.product.v1.GetByIDRequest + 2, // 2: api.product.v1.Product.GetByID:output_type -> api.product.v1.GetByIDReply + 2, // [2:3] is the sub-list for method output_type + 1, // [1:2] is the sub-list for method input_type + 1, // [1:1] is the sub-list for extension type_name + 1, // [1:1] is the sub-list for extension extendee + 0, // [0:1] is the sub-list for field type_name +} + +func init() { file_api_product_v1_product_proto_init() } +func file_api_product_v1_product_proto_init() { + if File_api_product_v1_product_proto != nil { + return + } + if !protoimpl.UnsafeEnabled { + file_api_product_v1_product_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*GetByIDRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_api_product_v1_product_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*ProductDetail); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + 
file_api_product_v1_product_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*GetByIDReply); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + } + type x struct{} + out := protoimpl.TypeBuilder{ + File: protoimpl.DescBuilder{ + GoPackagePath: reflect.TypeOf(x{}).PkgPath(), + RawDescriptor: file_api_product_v1_product_proto_rawDesc, + NumEnums: 0, + NumMessages: 3, + NumExtensions: 0, + NumServices: 1, + }, + GoTypes: file_api_product_v1_product_proto_goTypes, + DependencyIndexes: file_api_product_v1_product_proto_depIdxs, + MessageInfos: file_api_product_v1_product_proto_msgTypes, + }.Build() + File_api_product_v1_product_proto = out.File + file_api_product_v1_product_proto_rawDesc = nil + file_api_product_v1_product_proto_goTypes = nil + file_api_product_v1_product_proto_depIdxs = nil +} diff --git a/6_micro-cluster/example-2-mono-repo/api/product/v1/product.pb.validate.go b/6_micro-cluster/example-2-mono-repo/api/product/v1/product.pb.validate.go new file mode 100644 index 0000000..456b6e3 --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/api/product/v1/product.pb.validate.go @@ -0,0 +1,385 @@ +// Code generated by protoc-gen-validate. DO NOT EDIT. +// source: api/product/v1/product.proto + +package v1 + +import ( + "bytes" + "errors" + "fmt" + "net" + "net/mail" + "net/url" + "regexp" + "sort" + "strings" + "time" + "unicode/utf8" + + "google.golang.org/protobuf/types/known/anypb" +) + +// ensure the imports are used +var ( + _ = bytes.MinRead + _ = errors.New("") + _ = fmt.Print + _ = utf8.UTFMax + _ = (*regexp.Regexp)(nil) + _ = (*strings.Reader)(nil) + _ = net.IPv4len + _ = time.Duration(0) + _ = (*url.URL)(nil) + _ = (*mail.Address)(nil) + _ = anypb.Any{} + _ = sort.Sort +) + +// Validate checks the field values on GetByIDRequest with the rules defined in +// the proto definition for this message. If any rules are violated, the first +// error encountered is returned, or nil if there are no violations. +func (m *GetByIDRequest) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on GetByIDRequest with the rules defined +// in the proto definition for this message. If any rules are violated, the +// result is a list of violation errors wrapped in GetByIDRequestMultiError, +// or nil if none found. +func (m *GetByIDRequest) ValidateAll() error { + return m.validate(true) +} + +func (m *GetByIDRequest) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + if m.GetId() < 1 { + err := GetByIDRequestValidationError{ + field: "Id", + reason: "value must be greater than or equal to 1", + } + if !all { + return err + } + errors = append(errors, err) + } + + if len(errors) > 0 { + return GetByIDRequestMultiError(errors) + } + + return nil +} + +// GetByIDRequestMultiError is an error wrapping multiple validation errors +// returned by GetByIDRequest.ValidateAll() if the designated constraints +// aren't met. +type GetByIDRequestMultiError []error + +// Error returns a concatenation of all the error messages it wraps. +func (m GetByIDRequestMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. 
+func (m GetByIDRequestMultiError) AllErrors() []error { return m } + +// GetByIDRequestValidationError is the validation error returned by +// GetByIDRequest.Validate if the designated constraints aren't met. +type GetByIDRequestValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e GetByIDRequestValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e GetByIDRequestValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e GetByIDRequestValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e GetByIDRequestValidationError) Key() bool { return e.key } + +// ErrorName returns error name. +func (e GetByIDRequestValidationError) ErrorName() string { return "GetByIDRequestValidationError" } + +// Error satisfies the builtin error interface +func (e GetByIDRequestValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sGetByIDRequest.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = GetByIDRequestValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = GetByIDRequestValidationError{} + +// Validate checks the field values on ProductDetail with the rules defined in +// the proto definition for this message. If any rules are violated, the first +// error encountered is returned, or nil if there are no violations. +func (m *ProductDetail) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on ProductDetail with the rules defined +// in the proto definition for this message. If any rules are violated, the +// result is a list of violation errors wrapped in ProductDetailMultiError, or +// nil if none found. +func (m *ProductDetail) ValidateAll() error { + return m.validate(true) +} + +func (m *ProductDetail) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + // no validation rules for Id + + // no validation rules for Name + + // no validation rules for Price + + // no validation rules for Description + + if len(errors) > 0 { + return ProductDetailMultiError(errors) + } + + return nil +} + +// ProductDetailMultiError is an error wrapping multiple validation errors +// returned by ProductDetail.ValidateAll() if the designated constraints +// aren't met. +type ProductDetailMultiError []error + +// Error returns a concatenation of all the error messages it wraps. +func (m ProductDetailMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m ProductDetailMultiError) AllErrors() []error { return m } + +// ProductDetailValidationError is the validation error returned by +// ProductDetail.Validate if the designated constraints aren't met. +type ProductDetailValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e ProductDetailValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e ProductDetailValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. 
+func (e ProductDetailValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e ProductDetailValidationError) Key() bool { return e.key } + +// ErrorName returns error name. +func (e ProductDetailValidationError) ErrorName() string { return "ProductDetailValidationError" } + +// Error satisfies the builtin error interface +func (e ProductDetailValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sProductDetail.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = ProductDetailValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = ProductDetailValidationError{} + +// Validate checks the field values on GetByIDReply with the rules defined in +// the proto definition for this message. If any rules are violated, the first +// error encountered is returned, or nil if there are no violations. +func (m *GetByIDReply) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on GetByIDReply with the rules defined +// in the proto definition for this message. If any rules are violated, the +// result is a list of violation errors wrapped in GetByIDReplyMultiError, or +// nil if none found. +func (m *GetByIDReply) ValidateAll() error { + return m.validate(true) +} + +func (m *GetByIDReply) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + if all { + switch v := interface{}(m.GetProductDetail()).(type) { + case interface{ ValidateAll() error }: + if err := v.ValidateAll(); err != nil { + errors = append(errors, GetByIDReplyValidationError{ + field: "ProductDetail", + reason: "embedded message failed validation", + cause: err, + }) + } + case interface{ Validate() error }: + if err := v.Validate(); err != nil { + errors = append(errors, GetByIDReplyValidationError{ + field: "ProductDetail", + reason: "embedded message failed validation", + cause: err, + }) + } + } + } else if v, ok := interface{}(m.GetProductDetail()).(interface{ Validate() error }); ok { + if err := v.Validate(); err != nil { + return GetByIDReplyValidationError{ + field: "ProductDetail", + reason: "embedded message failed validation", + cause: err, + } + } + } + + // no validation rules for InventoryID + + if len(errors) > 0 { + return GetByIDReplyMultiError(errors) + } + + return nil +} + +// GetByIDReplyMultiError is an error wrapping multiple validation errors +// returned by GetByIDReply.ValidateAll() if the designated constraints aren't met. +type GetByIDReplyMultiError []error + +// Error returns a concatenation of all the error messages it wraps. +func (m GetByIDReplyMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m GetByIDReplyMultiError) AllErrors() []error { return m } + +// GetByIDReplyValidationError is the validation error returned by +// GetByIDReply.Validate if the designated constraints aren't met. +type GetByIDReplyValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e GetByIDReplyValidationError) Field() string { return e.field } + +// Reason function returns reason value. 
+func (e GetByIDReplyValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e GetByIDReplyValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e GetByIDReplyValidationError) Key() bool { return e.key } + +// ErrorName returns error name. +func (e GetByIDReplyValidationError) ErrorName() string { return "GetByIDReplyValidationError" } + +// Error satisfies the builtin error interface +func (e GetByIDReplyValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sGetByIDReply.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = GetByIDReplyValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = GetByIDReplyValidationError{} diff --git a/6_micro-cluster/example-2-mono-repo/api/product/v1/product.proto b/6_micro-cluster/example-2-mono-repo/api/product/v1/product.proto new file mode 100644 index 0000000..826fceb --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/api/product/v1/product.proto @@ -0,0 +1,28 @@ +syntax = "proto3"; + +package api.product.v1; + +import "validate/validate.proto"; + +option go_package = "eshop/api/product/v1;v1"; + +service Product { + // get product by id + rpc GetByID(GetByIDRequest) returns (GetByIDReply) {} +} + +message GetByIDRequest { + uint64 id = 1 [(validate.rules).uint64.gte = 1]; +} + +message ProductDetail { + uint64 id = 1; + string name = 2; + float price = 3; + string description = 4; +} + +message GetByIDReply { + ProductDetail productDetail = 1; + uint64 inventoryID = 2; +} diff --git a/6_micro-cluster/example-2-mono-repo/api/product/v1/product_grpc.pb.go b/6_micro-cluster/example-2-mono-repo/api/product/v1/product_grpc.pb.go new file mode 100644 index 0000000..13e797e --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/api/product/v1/product_grpc.pb.go @@ -0,0 +1,111 @@ +// Code generated by protoc-gen-go-grpc. DO NOT EDIT. +// versions: +// - protoc-gen-go-grpc v1.3.0 +// - protoc v4.25.2 +// source: api/product/v1/product.proto + +package v1 + +import ( + context "context" + grpc "google.golang.org/grpc" + codes "google.golang.org/grpc/codes" + status "google.golang.org/grpc/status" +) + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +// Requires gRPC-Go v1.32.0 or later. +const _ = grpc.SupportPackageIsVersion7 + +const ( + Product_GetByID_FullMethodName = "/api.product.v1.Product/GetByID" +) + +// ProductClient is the client API for Product service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://pkg.go.dev/google.golang.org/grpc/?tab=doc#ClientConn.NewStream. +type ProductClient interface { + // get product by id + GetByID(ctx context.Context, in *GetByIDRequest, opts ...grpc.CallOption) (*GetByIDReply, error) +} + +type productClient struct { + cc grpc.ClientConnInterface +} + +func NewProductClient(cc grpc.ClientConnInterface) ProductClient { + return &productClient{cc} +} + +func (c *productClient) GetByID(ctx context.Context, in *GetByIDRequest, opts ...grpc.CallOption) (*GetByIDReply, error) { + out := new(GetByIDReply) + err := c.cc.Invoke(ctx, Product_GetByID_FullMethodName, in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +// ProductServer is the server API for Product service. +// All implementations must embed UnimplementedProductServer +// for forward compatibility +type ProductServer interface { + // get product by id + GetByID(context.Context, *GetByIDRequest) (*GetByIDReply, error) + mustEmbedUnimplementedProductServer() +} + +// UnimplementedProductServer must be embedded to have forward compatible implementations. +type UnimplementedProductServer struct { +} + +func (UnimplementedProductServer) GetByID(context.Context, *GetByIDRequest) (*GetByIDReply, error) { + return nil, status.Errorf(codes.Unimplemented, "method GetByID not implemented") +} +func (UnimplementedProductServer) mustEmbedUnimplementedProductServer() {} + +// UnsafeProductServer may be embedded to opt out of forward compatibility for this service. +// Use of this interface is not recommended, as added methods to ProductServer will +// result in compilation errors. +type UnsafeProductServer interface { + mustEmbedUnimplementedProductServer() +} + +func RegisterProductServer(s grpc.ServiceRegistrar, srv ProductServer) { + s.RegisterService(&Product_ServiceDesc, srv) +} + +func _Product_GetByID_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetByIDRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ProductServer).GetByID(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: Product_GetByID_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ProductServer).GetByID(ctx, req.(*GetByIDRequest)) + } + return interceptor(ctx, in, info, handler) +} + +// Product_ServiceDesc is the grpc.ServiceDesc for Product service. +// It's only intended for direct use with grpc.RegisterService, +// and not to be introspected or modified (even as a copy) +var Product_ServiceDesc = grpc.ServiceDesc{ + ServiceName: "api.product.v1.Product", + HandlerType: (*ProductServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "GetByID", + Handler: _Product_GetByID_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "api/product/v1/product.proto", +} diff --git a/b_sponge-dtm-msg/.gitignore b/6_micro-cluster/example-2-mono-repo/comment/.gitignore similarity index 93% rename from b_sponge-dtm-msg/.gitignore rename to 6_micro-cluster/example-2-mono-repo/comment/.gitignore index 7d1533d..d3b12ee 100644 --- a/b_sponge-dtm-msg/.gitignore +++ b/6_micro-cluster/example-2-mono-repo/comment/.gitignore @@ -22,5 +22,5 @@ dist/ *.ipr *.iws -cmd/transfer/transfer +cmd/comment/comment diff --git a/a_micro-grpc-http-protobuf/.golangci.yml b/6_micro-cluster/example-2-mono-repo/comment/.golangci.yml similarity index 99% rename from a_micro-grpc-http-protobuf/.golangci.yml rename to 6_micro-cluster/example-2-mono-repo/comment/.golangci.yml index 543ad88..d17ff22 100644 --- a/a_micro-grpc-http-protobuf/.golangci.yml +++ b/6_micro-cluster/example-2-mono-repo/comment/.golangci.yml @@ -1,4 +1,4 @@ -# This file configures user. +# This file configures eshop. run: # timeout for analysis, e.g. 
30s, 5m, default is 1m @@ -42,7 +42,7 @@ linters: - goimports - gofmt - unused - - depguard + #- depguard - dogsled - errcheck #- gochecknoinits @@ -153,9 +153,9 @@ linters-settings: goconst: # minimal length of string constant, 3 by default - min-len: 3 + min-len: 4 # minimal occurrences count to trigger, 3 by default - min-occurrences: 3 + min-occurrences: 4 gocyclo: # minimal code complexity to report, 30 by default (but we recommend 10-20) @@ -176,7 +176,7 @@ linters-settings: goimports: # put imports beginning with prefix after 3rd-party packages; # it's a comma-separated list of prefixes - local-prefixes: user + local-prefixes: eshop gomnd: settings: diff --git a/a_micro-grpc-http-protobuf/Jenkinsfile b/6_micro-cluster/example-2-mono-repo/comment/Jenkinsfile similarity index 100% rename from a_micro-grpc-http-protobuf/Jenkinsfile rename to 6_micro-cluster/example-2-mono-repo/comment/Jenkinsfile diff --git a/6_micro-cluster/example-2-mono-repo/comment/Makefile b/6_micro-cluster/example-2-mono-repo/comment/Makefile new file mode 100644 index 0000000..9bbd4f9 --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/comment/Makefile @@ -0,0 +1,183 @@ +SHELL := /bin/bash + +PROJECT_NAME := "eshop" +PKG := "$(PROJECT_NAME)" +PKG_LIST := $(shell go list ${PKG}/... | grep -v /vendor/ | grep -v /api/) + + + + +.PHONY: ci-lint +# Check code formatting, naming conventions, security, maintainability, etc. the rules in the .golangci.yml file +ci-lint: + @gofmt -s -w . + golangci-lint run ./... + + +.PHONY: test +# Test *_test.go files, the parameter -count=1 means that caching is disabled +test: + go test -count=1 -short ${PKG_LIST} + + +.PHONY: cover +# Generate test coverage +cover: + go test -short -coverprofile=cover.out -covermode=atomic ${PKG_LIST} + go tool cover -html=cover.out + + +.PHONY: graph +# Generate interactive visual function dependency graphs +graph: + @echo "generating graph ......" + @cp -f cmd/comment/main.go . + go-callvis -skipbrowser -format=svg -nostd -file=comment eshop + @rm -f main.go comment.gv + + + +.PHONY: proto +# Generate *.go and template code by proto files, the default is all the proto files in the api directory. you can specify the proto file, multiple files are separated by commas, e.g. make proto FILES=api/user/v1/user.proto +proto: + @bash scripts/protoc.sh $(FILES) + go mod tidy + @gofmt -s -w . + + +.PHONY: proto-doc +# Generate doc from *.proto files +proto-doc: + @bash scripts/proto-doc.sh + + +.PHONY: build +# Build comment for linux amd64 binary +build: + @echo "building 'comment', linux binary file will output to 'cmd/comment'" + @cd cmd/comment && CGO_ENABLED=0 GOOS=linux GOARCH=amd64 go build + + + +.PHONY: run +# Build and run service +run: + @bash scripts/run.sh + + +.PHONY: run-nohup +# Run service with nohup in local, if you want to stop the server, pass the parameter stop, e.g. make run-nohup CMD=stop +run-nohup: + @bash scripts/run-nohup.sh $(CMD) + + +.PHONY: run-docker +# Run service in local docker, if you want to update the service, run the make run-docker command again +run-docker: image-build-local + @bash scripts/deploy-docker.sh + + +.PHONY: binary-package +# Packaged binary files +binary-package: build + @bash scripts/binary-package.sh + + +.PHONY: deploy-binary +# Deploy binary to remote linux server, e.g. 
make deploy-binary USER=root PWD=123456 IP=192.168.1.10
+deploy-binary: binary-package
+	@expect scripts/deploy-binary.sh $(USER) $(PWD) $(IP)
+
+
+.PHONY: image-build-local
+# Build image for local docker, tag=latest, use binary files to build
+image-build-local: build
+	@bash scripts/image-build-local.sh
+
+
+.PHONY: image-build
+# Build image for remote repositories, use binary files to build, e.g. make image-build REPO_HOST=addr TAG=latest
+image-build:
+	@bash scripts/image-build.sh $(REPO_HOST) $(TAG)
+
+
+.PHONY: image-build2
+# Build image for remote repositories, phase II build, e.g. make image-build2 REPO_HOST=addr TAG=latest
+image-build2:
+	@bash scripts/image-build2.sh $(REPO_HOST) $(TAG)
+
+
+.PHONY: image-push
+# Push docker image to remote repositories, e.g. make image-push REPO_HOST=addr TAG=latest
+image-push:
+	@bash scripts/image-push.sh $(REPO_HOST) $(TAG)
+
+
+.PHONY: deploy-k8s
+# Deploy service to k8s
+deploy-k8s:
+	@bash scripts/deploy-k8s.sh
+
+
+.PHONY: image-build-rpc-test
+# Build grpc test image for remote repositories, e.g. make image-build-rpc-test REPO_HOST=addr TAG=latest
+image-build-rpc-test:
+	@bash scripts/image-rpc-test.sh $(REPO_HOST) $(TAG)
+
+
+.PHONY: patch
+# Patch some dependent code, e.g. make patch TYPE=types-pb, make patch TYPE=init-<your_db_driver>, your_db_driver is mysql, mongodb, postgresql, tidb, sqlite, for example: make patch TYPE=init-mysql
+patch:
+	@bash scripts/patch.sh $(TYPE)
+
+
+.PHONY: copy-proto
+# Copy proto file from the grpc server directory, multiple directories or proto files separated by commas. default is to copy all proto files, e.g. make copy-proto SERVER=yourServerDir, copy specified proto files, e.g. make copy-proto SERVER=yourServerDir PROTO_FILE=yourProtoFile1,yourProtoFile2
+copy-proto:
+	@sponge patch copy-proto --server-dir=$(SERVER) --proto-file=$(PROTO_FILE)
+
+
+.PHONY: modify-proto-pkg-name
+# Modify the 'package' and 'go_package' names of all proto files in the 'api' directory
+modify-proto-pkg-name:
+	@sponge patch modify-proto-package --dir=api --server-dir=.
+
+
+.PHONY: update-config
+# Update internal/config code based on yaml file
+update-config:
+	@sponge config --server-dir=.
+ + +.PHONY: clean +# Clean binary file, cover.out, template file +clean: + @rm -vrf cmd/comment/comment* + @rm -vrf cover.out + @rm -vrf main.go comment.gv + @rm -vrf internal/ecode/*.go.gen* + @rm -vrf internal/routers/*.go.gen* + @rm -vrf internal/handler/*.go.gen* + @rm -vrf internal/service/*.go.gen* + @rm -rf comment-binary.tar.gz + @echo "clean finished" + + +# Show help +help: + @echo '' + @echo 'Usage:' + @echo ' make ' + @echo '' + @echo 'Targets:' + @awk '/^[a-zA-Z\-_0-9]+:/ { \ + helpMessage = match(lastLine, /^# (.*)/); \ + if (helpMessage) { \ + helpCommand = substr($$1, 0, index($$1, ":")-1); \ + helpMessage = substr(lastLine, RSTART + 2, RLENGTH); \ + printf "\033[1;36m %-22s\033[0m %s\n", helpCommand,helpMessage; \ + } \ + } \ + { lastLine = $$0 }' $(MAKEFILE_LIST) + +.DEFAULT_GOAL := all diff --git a/6_micro-cluster/example-2-mono-repo/comment/README.md b/6_micro-cluster/example-2-mono-repo/comment/README.md new file mode 100644 index 0000000..5a24e34 --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/comment/README.md @@ -0,0 +1,9 @@ +## comment + +| Feature | Value | +| :----------------: | :-----------: | +| Server name | `comment` | +| Server type | `grpc-pb` | +| Go module name | `eshop` | +| Repository type | `mono-repo` | + diff --git a/b_sponge-dtm-msg/cmd/transfer/initial/close.go b/6_micro-cluster/example-2-mono-repo/comment/cmd/comment/initial/close.go similarity index 87% rename from b_sponge-dtm-msg/cmd/transfer/initial/close.go rename to 6_micro-cluster/example-2-mono-repo/comment/cmd/comment/initial/close.go index 1e3772f..b78b338 100644 --- a/b_sponge-dtm-msg/cmd/transfer/initial/close.go +++ b/6_micro-cluster/example-2-mono-repo/comment/cmd/comment/initial/close.go @@ -7,8 +7,8 @@ import ( "github.com/zhufuyi/sponge/pkg/app" "github.com/zhufuyi/sponge/pkg/tracer" - "transfer/internal/config" - //"transfer/internal/model" + "eshop/comment/internal/config" + //"eshop/comment/internal/model" ) // Close releasing resources after service exit @@ -20,9 +20,9 @@ func Close(servers []app.IServer) []app.Close { closes = append(closes, s.Stop) } - // close mysql + // close database //closes = append(closes, func() error { - // return model.CloseMysql() + // return model.CloseDB() //}) // close redis diff --git a/b_sponge-dtm-msg/cmd/transfer/initial/createService.go b/6_micro-cluster/example-2-mono-repo/comment/cmd/comment/initial/createService.go similarity index 83% rename from b_sponge-dtm-msg/cmd/transfer/initial/createService.go rename to 6_micro-cluster/example-2-mono-repo/comment/cmd/comment/initial/createService.go index e1b42aa..91fe609 100644 --- a/b_sponge-dtm-msg/cmd/transfer/initial/createService.go +++ b/6_micro-cluster/example-2-mono-repo/comment/cmd/comment/initial/createService.go @@ -3,7 +3,6 @@ package initial import ( "fmt" "strconv" - "time" "github.com/zhufuyi/sponge/pkg/app" "github.com/zhufuyi/sponge/pkg/logger" @@ -12,8 +11,8 @@ import ( "github.com/zhufuyi/sponge/pkg/servicerd/registry/etcd" "github.com/zhufuyi/sponge/pkg/servicerd/registry/nacos" - "transfer/internal/config" - "transfer/internal/server" + "eshop/comment/internal/config" + "eshop/comment/internal/server" ) // CreateServices create grpc or http service @@ -23,10 +22,8 @@ func CreateServices() []app.IServer { // creating grpc service grpcAddr := ":" + strconv.Itoa(cfg.Grpc.Port) - grpcRegistry, grpcInstance := registryService("grpc", cfg.App.Host, cfg.Grpc.Port) + grpcRegistry, grpcInstance := registerService("grpc", cfg.App.Host, cfg.Grpc.Port) grpcServer := 
server.NewGRPCServer(grpcAddr, - server.WithGrpcReadTimeout(time.Duration(cfg.Grpc.ReadTimeout)*time.Second), - server.WithGrpcWriteTimeout(time.Duration(cfg.Grpc.WriteTimeout)*time.Second), server.WithGrpcRegistry(grpcRegistry, grpcInstance), ) servers = append(servers, grpcServer) @@ -34,7 +31,7 @@ func CreateServices() []app.IServer { return servers } -func registryService(scheme string, host string, port int) (registry.Registry, *registry.ServiceInstance) { +func registerService(scheme string, host string, port int) (registry.Registry, *registry.ServiceInstance) { var ( instanceEndpoint = fmt.Sprintf("%s://%s:%d", scheme, host, port) cfg = config.Get() @@ -43,7 +40,7 @@ func registryService(scheme string, host string, port int) (registry.Registry, * instance *registry.ServiceInstance err error - id = cfg.App.Name + "_" + scheme + "_" + host + "_" + strconv.Itoa(port) + id = cfg.App.Name + "_" + scheme + "_" + host logField logger.Field ) diff --git a/6_micro-cluster/example-2-mono-repo/comment/cmd/comment/initial/initApp.go b/6_micro-cluster/example-2-mono-repo/comment/cmd/comment/initial/initApp.go new file mode 100644 index 0000000..4a19381 --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/comment/cmd/comment/initial/initApp.go @@ -0,0 +1,132 @@ +// Package initial is the package that starts the service to initialize the service, including +// the initialization configuration, service configuration, connecting to the database, and +// resource release needed when shutting down the service. +package initial + +import ( + "flag" + "fmt" + "strconv" + + "github.com/jinzhu/copier" + + "github.com/zhufuyi/sponge/pkg/conf" + "github.com/zhufuyi/sponge/pkg/logger" + "github.com/zhufuyi/sponge/pkg/nacoscli" + "github.com/zhufuyi/sponge/pkg/stat" + "github.com/zhufuyi/sponge/pkg/tracer" + + "eshop/comment/configs" + "eshop/comment/internal/config" + //"eshop/comment/internal/model" +) + +var ( + version string + configFile string + enableConfigCenter bool +) + +// InitApp initial app configuration +func InitApp() { + initConfig() + cfg := config.Get() + + // initializing log + _, err := logger.Init( + logger.WithLevel(cfg.Logger.Level), + logger.WithFormat(cfg.Logger.Format), + logger.WithSave( + cfg.Logger.IsSave, + //logger.WithFileName(cfg.Logger.LogFileConfig.Filename), + //logger.WithFileMaxSize(cfg.Logger.LogFileConfig.MaxSize), + //logger.WithFileMaxBackups(cfg.Logger.LogFileConfig.MaxBackups), + //logger.WithFileMaxAge(cfg.Logger.LogFileConfig.MaxAge), + //logger.WithFileIsCompression(cfg.Logger.LogFileConfig.IsCompression), + ), + ) + if err != nil { + panic(err) + } + logger.Debug(config.Show()) + logger.Info("[logger] was initialized") + + // initializing tracing + if cfg.App.EnableTrace { + tracer.InitWithConfig( + cfg.App.Name, + cfg.App.Env, + cfg.App.Version, + cfg.Jaeger.AgentHost, + strconv.Itoa(cfg.Jaeger.AgentPort), + cfg.App.TracingSamplingRate, + ) + logger.Info("[tracer] was initialized") + } + + // initializing the print system and process resources + if cfg.App.EnableStat { + stat.Init( + stat.WithLog(logger.Get()), + stat.WithAlarm(), // invalid if it is windows, the default threshold for cpu and memory is 0.8, you can modify them + stat.WithPrintField(logger.String("service_name", cfg.App.Name), logger.String("host", cfg.App.Host)), + ) + logger.Info("[resource statistics] was initialized") + } + + // initializing database + //model.InitDB() + //logger.Infof("[%s] was initialized", cfg.Database.Driver) + //model.InitCache(cfg.App.CacheType) + //if 
cfg.App.CacheType != "" { + // logger.Infof("[%s] was initialized", cfg.App.CacheType) + //} +} + +func initConfig() { + flag.StringVar(&version, "version", "", "service Version Number") + flag.BoolVar(&enableConfigCenter, "enable-cc", false, "whether to get from the configuration center, "+ + "if true, the '-c' parameter indicates the configuration center") + flag.StringVar(&configFile, "c", "", "configuration file") + flag.Parse() + + if enableConfigCenter { + // get the configuration from the configuration center (first get the nacos configuration, + // then read the service configuration according to the nacos configuration center) + if configFile == "" { + configFile = configs.Path("comment_cc.yml") + } + nacosConfig, err := config.NewCenter(configFile) + if err != nil { + panic(err) + } + appConfig := &config.Config{} + params := &nacoscli.Params{} + _ = copier.Copy(params, &nacosConfig.Nacos) + format, data, err := nacoscli.GetConfig(params) + if err != nil { + panic(fmt.Sprintf("connect to configuration center err, %v", err)) + } + err = conf.ParseConfigData(data, format, appConfig) + if err != nil { + panic(fmt.Sprintf("parse configuration data err, %v", err)) + } + if appConfig.App.Name == "" { + panic("read the config from center error, config data is empty") + } + config.Set(appConfig) + } else { + // get configuration from local configuration file + if configFile == "" { + configFile = configs.Path("comment.yml") + } + err := config.Init(configFile) + if err != nil { + panic("init config error: " + err.Error()) + } + } + + if version != "" { + config.Get().App.Version = version + } +} diff --git a/b_sponge-dtm-msg/cmd/transfer/main.go b/6_micro-cluster/example-2-mono-repo/comment/cmd/comment/main.go similarity index 53% rename from b_sponge-dtm-msg/cmd/transfer/main.go rename to 6_micro-cluster/example-2-mono-repo/comment/cmd/comment/main.go index ea8ed3c..df6a9aa 100644 --- a/b_sponge-dtm-msg/cmd/transfer/main.go +++ b/6_micro-cluster/example-2-mono-repo/comment/cmd/comment/main.go @@ -4,14 +4,14 @@ package main import ( "github.com/zhufuyi/sponge/pkg/app" - "transfer/cmd/transfer/initial" + "eshop/comment/cmd/comment/initial" ) func main() { initial.InitApp() - servers := initial.CreateServices() - closes := initial.Close(servers) + services := initial.CreateServices() + closes := initial.Close(services) - a := app.New(servers, closes) + a := app.New(services, closes) a.Run() } diff --git a/a_micro-grpc-http-protobuf/configs/user.yml b/6_micro-cluster/example-2-mono-repo/comment/configs/comment.yml similarity index 92% rename from a_micro-grpc-http-protobuf/configs/user.yml rename to 6_micro-cluster/example-2-mono-repo/comment/configs/comment.yml index 8e55cfe..8c32a22 100644 --- a/a_micro-grpc-http-protobuf/configs/user.yml +++ b/6_micro-cluster/example-2-mono-repo/comment/configs/comment.yml @@ -2,7 +2,7 @@ # app settings app: - name: "user" # server name + name: "comment" # server name env: "dev" # runtime environment, dev: development environment, prod: production environment, test: test environment version: "v0.0.0" host: "127.0.0.1" # domain or ip, for service registration @@ -17,16 +17,10 @@ app: cacheType: "" # cache type, if empty, the cache is not used, support for "memory" and "redis", if set to redis, must set redis configuration -# http server settings -http: - port: 8080 # listen port - timeout: 0 # request timeout, unit(second), if 0 means not set, if greater than 0 means set timeout, if enableHTTPProfile is true, it needs to set 0 or greater than 60s - - # 
grpc server settings grpc: - port: 8282 # listen port - httpPort: 8283 # profile and metrics ports + port: 18282 # listen port + httpPort: 18283 # profile and metrics ports enableToken: false # whether to enable server-side token authentication, default appID=grpc, appKey=123456 # serverSecure parameter setting # if type="", it means no secure connection, no need to fill in any parameters @@ -77,7 +71,7 @@ logger: #isCompression: true # Whether to compress/archive old files (default is false) -# set database configuration. Reference: https://github.com/zhufuyi/sponge/blob/main/configs/serverNameExample.yml#L87 +# set database configuration. reference-db-config-url database: driver: "mysql" # database driver # mysql settings diff --git a/6_micro-cluster/example-2-mono-repo/comment/configs/comment_cc.yml b/6_micro-cluster/example-2-mono-repo/comment/configs/comment_cc.yml new file mode 100644 index 0000000..bb4bc98 --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/comment/configs/comment_cc.yml @@ -0,0 +1,13 @@ +# Generate the go struct command: sponge config --server-dir=./serverDir +# App config from nacos + +# nacos settings +nacos: + ipAddr: "192.168.3.37" # server address + port: 8848 # listening port + scheme: "http" # http or grpc + contextPath: "/nacos" # path + namespaceID: "3454d2b5-2455-4d0e-bf6d-e033b086bb4c" # namespace id + group: "dev" # group name: dev, prod, test + dataID: "comment.yml" # config file id + format: "yaml" # configuration file type: json,yaml,toml diff --git a/a_micro-grpc-http-protobuf/configs/location.go b/6_micro-cluster/example-2-mono-repo/comment/configs/location.go similarity index 100% rename from a_micro-grpc-http-protobuf/configs/location.go rename to 6_micro-cluster/example-2-mono-repo/comment/configs/location.go diff --git a/6_micro-cluster/example-2-mono-repo/comment/deployments/binary/README.md b/6_micro-cluster/example-2-mono-repo/comment/deployments/binary/README.md new file mode 100644 index 0000000..23298f8 --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/comment/deployments/binary/README.md @@ -0,0 +1,26 @@ + +copy the configuration file to the configs directory and binary file before starting the service. + +``` +├── configs +│ └── comment.yml +├── comment +├── deploy.sh +└── run.sh +``` + +### Running and stopping service manually + +Running service: + +> ./run.sh + +Stopping the service: + +> ./run.sh stop + +
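+Viewing the service log (run.sh starts the service with nohup and redirects its output to comment.log in this directory), for example: + +> tail -f comment.log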
+ +### Automated deployment service + +> ./deploy.sh diff --git a/6_micro-cluster/example-2-mono-repo/comment/deployments/binary/deploy.sh b/6_micro-cluster/example-2-mono-repo/comment/deployments/binary/deploy.sh new file mode 100644 index 0000000..9fe9518 --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/comment/deployments/binary/deploy.sh @@ -0,0 +1,33 @@ +#!/bin/bash + +serviceName="comment" + +function checkResult() { + result=$1 + if [ ${result} -ne 0 ]; then + exit ${result} + fi +} + +# determine if the startup service script run.sh exists +runFile="~/app/${serviceName}/run.sh" +if [ ! -f "$runFile" ]; then + # if it does not exist, copy the entire directory + mkdir -p ~/app + cp -rf /tmp/${serviceName}-binary ~/app/ + checkResult $? + rm -rf /tmp/${serviceName}-binary* +else + # replace only the binary file if it exists + cp -f ${serviceName}-binary/${serviceName} ~/app/${serviceName}-binary/${serviceName} + checkResult $? + rm -rf /tmp/${serviceName}-binary* +fi + +# running service +cd ~/app/${serviceName}-binary +chmod +x run.sh +./run.sh +checkResult $? + +echo "server directory is ~/app/${serviceName}-binary" diff --git a/6_micro-cluster/example-2-mono-repo/comment/deployments/binary/run.sh b/6_micro-cluster/example-2-mono-repo/comment/deployments/binary/run.sh new file mode 100644 index 0000000..d03ecb4 --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/comment/deployments/binary/run.sh @@ -0,0 +1,46 @@ +#!/bin/bash + +serviceName="comment" +cmdStr="./${serviceName} -c configs/${serviceName}.yml" + +chmod +x ./${serviceName} + +stopService(){ + NAME=$1 + + ID=`ps -ef | grep "$NAME" | grep -v "$0" | grep -v "grep" | awk '{print $2}'` + if [ -n "$ID" ]; then + for id in $ID + do + kill -9 $id + echo "Stopped ${NAME} service successfully, process ID=${ID}" + done + fi +} + +startService() { + NAME=$1 + + nohup ${cmdStr} > ${serviceName}.log 2>&1 & + sleep 1 + + ID=`ps -ef | grep "$NAME" | grep -v "$0" | grep -v "grep" | awk '{print $2}'` + if [ -n "$ID" ]; then + echo "Start the ${NAME} service ...... process ID=${ID}" + else + echo "Failed to start ${NAME} service" + return 1 + fi + return 0 +} + + +stopService ${serviceName} +if [ "$1"x != "stop"x ] ;then + sleep 1 + startService ${serviceName} + exit $? + echo "" +else + echo "Service ${serviceName} has stopped" +fi diff --git a/6_micro-cluster/example-2-mono-repo/comment/deployments/docker-compose/README.md b/6_micro-cluster/example-2-mono-repo/comment/deployments/docker-compose/README.md new file mode 100644 index 0000000..3ea34c0 --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/comment/deployments/docker-compose/README.md @@ -0,0 +1,12 @@ + +copy the configuration file to the configs directory before starting the service. 
+ +``` +├── configs +│ └── comment.yml +└── docker-compose.yml +``` + +Running the service: + +> docker-compose up -d diff --git a/6_micro-cluster/example-2-mono-repo/comment/deployments/docker-compose/docker-compose.yml b/6_micro-cluster/example-2-mono-repo/comment/deployments/docker-compose/docker-compose.yml new file mode 100644 index 0000000..d404209 --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/comment/deployments/docker-compose/docker-compose.yml @@ -0,0 +1,21 @@ +version: "3.7" + +services: + comment: + image: eshop/comment:latest + container_name: comment + restart: always + command: ["./comment", "-c", "/app/configs/comment.yml"] + volumes: + - $PWD/configs:/app/configs + + ports: + - "8282:8282" # grpc port + - "8283:8283" # grpc metrics or pprof port + healthcheck: + test: ["CMD", "grpc_health_probe", "-addr=localhost:8282"] # grpc health check, note: the image must contain the grpc_health_probe command + + interval: 10s # interval time + timeout: 5s # timeout time + retries: 3 # number of retries + start_period: 10s # how long after start-up does the check begin diff --git a/6_micro-cluster/example-2-mono-repo/comment/deployments/kubernetes/README.md b/6_micro-cluster/example-2-mono-repo/comment/deployments/kubernetes/README.md new file mode 100644 index 0000000..94cc9e6 --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/comment/deployments/kubernetes/README.md @@ -0,0 +1,32 @@ +Before deploying the service to k8s, create a Secret that gives k8s permission to pull the image. Run the following command on a docker host that is already logged in to the image repository. + +```bash +kubectl create secret generic docker-auth-secret \ + --from-file=.dockerconfigjson=/root/.docker/config.json \ + --type=kubernetes.io/dockerconfigjson +``` + +
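+Note: Secrets are namespaced, so docker-auth-secret must exist in the same namespace as the Deployment (eshop in this example). A quick check, for example: + +```bash +kubectl get secret docker-auth-secret -n eshop +```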
+ +run server: + +```bash +cd deployments + +kubectl apply -f ./*namespace.yml + +kubectl apply -f ./ +``` + +view the start-up status. + +> kubectl get all -n eshop + +
+ +simple test of the http port + +```bash +# mapping to the http port of the service on the local port +kubectl port-forward --address=0.0.0.0 service/<service-name> 8080:8080 -n <namespace> +``` diff --git a/6_micro-cluster/example-2-mono-repo/comment/deployments/kubernetes/comment-configmap.yml b/6_micro-cluster/example-2-mono-repo/comment/deployments/kubernetes/comment-configmap.yml new file mode 100644 index 0000000..07ca740 --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/comment/deployments/kubernetes/comment-configmap.yml @@ -0,0 +1,124 @@ +kind: ConfigMap +apiVersion: v1 +metadata: + name: comment-config + namespace: eshop +data: + comment.yml: |- + # Generate the go struct command: sponge config --server-dir=./serverDir + + # app settings + app: + name: "comment" # server name + env: "dev" # runtime environment, dev: development environment, prod: production environment, test: test environment + version: "v0.0.0" + host: "127.0.0.1" # domain or ip, for service registration + enableStat: true # whether to turn on printing statistics, true:enable, false:disable + enableMetrics: true # whether to turn on indicator collection, true:enable, false:disable + enableHTTPProfile: false # whether to turn on performance analysis, true:enable, false:disable + enableLimit: false # whether to turn on rate limiting (adaptive), true:on, false:off + enableCircuitBreaker: false # whether to turn on circuit breaker(adaptive), true:on, false:off + enableTrace: false # whether to turn on trace, true:enable, false:disable, if true jaeger configuration must be set + tracingSamplingRate: 1.0 # tracing sampling rate, between 0 and 1, 0 means no sampling, 1 means sampling all links + registryDiscoveryType: "" # registry and discovery types: consul, etcd, nacos, if empty, registration and discovery are not used + cacheType: "" # cache type, if empty, the cache is not used, support for "memory" and "redis", if set to redis, must set redis configuration
+ grpcClient: + - name: "your_grpc_service_name" # grpc service name, used for service discovery + host: "127.0.0.1" # grpc service address, used for direct connection + port: 8282 # grpc service port + timeout: 0 # request timeout, unit(second), if 0 means not set, if greater than 0 means set timeout, valid only for unary grpc type + registryDiscoveryType: "" # registration and discovery types: consul, etcd, nacos, if empty, connecting to server using host and port + enableLoadBalance: true # whether to turn on the load balancer + # clientSecure parameter setting + # if type="", it means no secure connection, no need to fill in any parameters + # if type="one-way", it means server-side certification, only the fields 'serverName' and 'certFile' should be filled in + # if type="two-way", it means both client and server side certification, fill in all fields + clientSecure: + type: "" # secures type, "", "one-way", "two-way" + serverName: "" # server name, e.g. *.foo.com + caFile: "" # client side ca file, valid only in "two-way", absolute path + certFile: "" # client side cert file, absolute path, if secureType="one-way", fill in server side cert file here + keyFile: "" # client side key file, valid only in "two-way", absolute path + clientToken: + enable: false # whether to enable token authentication + appID: "" # app id + appKey: "" # app key + + + + # logger settings + logger: + level: "info" # output log levels debug, info, warn, error, default is debug + format: "console" # output format, console or json, default is console + isSave: false # false:output to terminal, true:output to file, default is false + #logFileConfig: # Effective when isSave=true + #filename: "out.log" # File name (default is out.log) + #maxSize: 20 # Maximum file size (MB, default is 10MB) + #maxBackups: 50 # Maximum number of old files to retain (default is 100) + #maxAge: 15 # Maximum number of days to retain old files (default is 30 days) + #isCompression: true # Whether to compress/archive old files (default is false) + + + # set database configuration. reference-db-config-url + database: + driver: "mysql" # database driver + # mysql settings + mysql: + # dsn format, :@(:)/?[k=v& ......] + dsn: "root:123456@(192.168.3.37:3306)/account?parseTime=true&loc=Local&charset=utf8,utf8mb4" + enableLog: true # whether to turn on printing of all logs + maxIdleConns: 10 # set the maximum number of connections in the idle connection pool + maxOpenConns: 100 # set the maximum number of open database connections + connMaxLifetime: 30 # sets the maximum time for which the connection can be reused, in minutes + + + + # redis settings + redis: + # dsn format, [user]:@127.0.0.1:6379/[db], the default user is default, redis version 6.0 and above only supports user. 
+ dsn: "default:123456@192.168.3.37:6379/0" + dialTimeout: 10 # connection timeout, unit(second) + readTimeout: 2 # read timeout, unit(second) + writeTimeout: 2 # write timeout, unit(second) + + + # jaeger settings + jaeger: + agentHost: "192.168.3.37" + agentPort: 6831 + + + # consul settings + consul: + addr: "192.168.3.37:8500" + + + # etcd settings + etcd: + addrs: ["192.168.3.37:2379"] + + + # nacos settings, used in service registration discovery + nacosRd: + ipAddr: "192.168.3.37" + port: 8848 + namespaceID: "3454d2b5-2455-4d0e-bf6d-e033b086bb4c" # namespace id diff --git a/6_micro-cluster/example-2-mono-repo/comment/deployments/kubernetes/comment-deployment.yml b/6_micro-cluster/example-2-mono-repo/comment/deployments/kubernetes/comment-deployment.yml new file mode 100644 index 0000000..44d3b8f --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/comment/deployments/kubernetes/comment-deployment.yml @@ -0,0 +1,63 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: comment-dm + namespace: eshop +spec: + replicas: 1 + selector: + matchLabels: + app: comment + template: + metadata: + name: comment-pod + labels: + app: comment + spec: + containers: + - name: comment + image: /eshop/comment:latest + # If using a local image, use Never, default is Always + #imagePullPolicy: Never + command: ["./comment", "-c", "/app/configs/comment.yml"] + resources: + requests: + cpu: 10m + memory: 10Mi + limits: + cpu: 1000m + memory: 1000Mi + volumeMounts: + - name: comment-vl + mountPath: /app/configs/ + readOnly: true + + ports: + - name: grpc-port + containerPort: 8282 + - name: metrics-port + containerPort: 8283 + readinessProbe: + exec: + command: ["/bin/grpc_health_probe", "-addr=:8282"] + initialDelaySeconds: 10 + timeoutSeconds: 2 + periodSeconds: 10 + successThreshold: 1 + failureThreshold: 3 + livenessProbe: + exec: + command: ["/bin/grpc_health_probe", "-addr=:8282"] + + initialDelaySeconds: 10 + timeoutSeconds: 2 + periodSeconds: 10 + successThreshold: 1 + failureThreshold: 3 + # todo for private repositories, you need to create a secret (here docker-auth-secret) to store the account and password to log into docker + imagePullSecrets: + - name: docker-auth-secret + volumes: + - name: comment-vl + configMap: + name: comment-config diff --git a/6_micro-cluster/example-2-mono-repo/comment/deployments/kubernetes/comment-svc.yml b/6_micro-cluster/example-2-mono-repo/comment/deployments/kubernetes/comment-svc.yml new file mode 100644 index 0000000..4158615 --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/comment/deployments/kubernetes/comment-svc.yml @@ -0,0 +1,17 @@ +apiVersion: v1 +kind: Service +metadata: + name: comment-svc + namespace: eshop +spec: + selector: + app: comment + type: ClusterIP + ports: + - name: comment-svc-grpc-port + port: 8282 + targetPort: 8282 + - name: comment-svc-grpc-metrics-port + port: 8283 + targetPort: 8283 + diff --git a/a_micro-grpc-http-protobuf/deployments/kubernetes/edusys-namespace.yml b/6_micro-cluster/example-2-mono-repo/comment/deployments/kubernetes/eshop-namespace.yml similarity index 73% rename from a_micro-grpc-http-protobuf/deployments/kubernetes/edusys-namespace.yml rename to 6_micro-cluster/example-2-mono-repo/comment/deployments/kubernetes/eshop-namespace.yml index 2b9bbc8..eba474f 100644 --- a/a_micro-grpc-http-protobuf/deployments/kubernetes/edusys-namespace.yml +++ b/6_micro-cluster/example-2-mono-repo/comment/deployments/kubernetes/eshop-namespace.yml @@ -1,4 +1,4 @@ apiVersion: v1 kind: Namespace metadata: - name: edusys + 
name: eshop diff --git a/6_micro-cluster/example-2-mono-repo/comment/docs/gen.info b/6_micro-cluster/example-2-mono-repo/comment/docs/gen.info new file mode 100644 index 0000000..c8e7c7a --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/comment/docs/gen.info @@ -0,0 +1 @@ +eshop,comment,true \ No newline at end of file diff --git a/a_micro-grpc-http-protobuf/internal/config/user.go b/6_micro-cluster/example-2-mono-repo/comment/internal/config/comment.go similarity index 99% rename from a_micro-grpc-http-protobuf/internal/config/user.go rename to 6_micro-cluster/example-2-mono-repo/comment/internal/config/comment.go index b4090b5..2991648 100644 --- a/a_micro-grpc-http-protobuf/internal/config/user.go +++ b/6_micro-cluster/example-2-mono-repo/comment/internal/config/comment.go @@ -1,4 +1,4 @@ -// code generated by https://user +// code generated by https://eshop package config diff --git a/a_micro-grpc-http-protobuf/internal/config/user_cc.go b/6_micro-cluster/example-2-mono-repo/comment/internal/config/comment_cc.go similarity index 95% rename from a_micro-grpc-http-protobuf/internal/config/user_cc.go rename to 6_micro-cluster/example-2-mono-repo/comment/internal/config/comment_cc.go index c01b1b0..326e1bc 100644 --- a/a_micro-grpc-http-protobuf/internal/config/user_cc.go +++ b/6_micro-cluster/example-2-mono-repo/comment/internal/config/comment_cc.go @@ -1,4 +1,4 @@ -// code generated by https://user +// code generated by https://eshop package config diff --git a/6_micro-cluster/example-2-mono-repo/comment/internal/config/comment_test.go b/6_micro-cluster/example-2-mono-repo/comment/internal/config/comment_test.go new file mode 100644 index 0000000..d4340e5 --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/comment/internal/config/comment_test.go @@ -0,0 +1,45 @@ +package config + +import ( + "testing" + + "github.com/stretchr/testify/assert" + + "github.com/zhufuyi/sponge/pkg/gofile" + + "eshop/comment/configs" +) + +func TestInit(t *testing.T) { + configFile := configs.Path("comment.yml") + err := Init(configFile) + if gofile.IsExists(configFile) { + assert.NoError(t, err) + } else { + assert.Error(t, err) + } + + c := Get() + assert.NotNil(t, c) + + str := Show() + assert.NotEmpty(t, str) + t.Log(str) + + // set nil + Set(nil) + defer func() { + recover() + }() + Get() +} + +func TestInitNacos(t *testing.T) { + configFile := configs.Path("comment_cc.yml") + _, err := NewCenter(configFile) + if gofile.IsExists(configFile) { + assert.NoError(t, err) + } else { + assert.Error(t, err) + } +} diff --git a/6_micro-cluster/example-2-mono-repo/comment/internal/ecode/comment_rpc.go b/6_micro-cluster/example-2-mono-repo/comment/internal/ecode/comment_rpc.go new file mode 100644 index 0000000..1d9cb4a --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/comment/internal/ecode/comment_rpc.go @@ -0,0 +1,19 @@ +// Code generated by https://github.com/zhufuyi/sponge + +package ecode + +import ( + "github.com/zhufuyi/sponge/pkg/errcode" +) + +// comment business-level rpc error codes. +// the _commentNO value range is 1~100, if the same error code is used, it will cause panic. 
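+// For example, a second error code for this service (the name below is hypothetical) would be added as: +// StatusCreateComment = errcode.NewRPCStatus(_commentBaseCode+2, "failed to create "+_commentName)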
+var ( + _commentNO = 24 + _commentName = "comment" + _commentBaseCode = errcode.RCode(_commentNO) + + StatusListByProductIDComment = errcode.NewRPCStatus(_commentBaseCode+1, "failed to ListByProductID "+_commentName) + + // error codes are globally unique, adding 1 to the previous error code +) diff --git a/a_micro-grpc-http-protobuf/internal/ecode/systemCode_rpc.go b/6_micro-cluster/example-2-mono-repo/comment/internal/ecode/systemCode_rpc.go similarity index 85% rename from a_micro-grpc-http-protobuf/internal/ecode/systemCode_rpc.go rename to 6_micro-cluster/example-2-mono-repo/comment/internal/ecode/systemCode_rpc.go index e8d872b..8a88afd 100644 --- a/a_micro-grpc-http-protobuf/internal/ecode/systemCode_rpc.go +++ b/6_micro-cluster/example-2-mono-repo/comment/internal/ecode/systemCode_rpc.go @@ -31,9 +31,16 @@ var ( StatusLimitExceed = errcode.StatusLimitExceed StatusMethodNotAllowed = errcode.StatusMethodNotAllowed StatusAccessDenied = errcode.StatusAccessDenied + StatusConflict = errcode.StatusConflict ) // Any kev-value func Any(key string, val interface{}) errcode.Detail { return errcode.Any(key, val) } + +// StatusSkipResponse is only use for grpc-gateway +var StatusSkipResponse = errcode.SkipResponse + +// GetStatusCode get status code from error returned by RPC invoke +var GetStatusCode = errcode.GetStatusCode diff --git a/b_sponge-dtm-msg/internal/server/grpc.go b/6_micro-cluster/example-2-mono-repo/comment/internal/server/grpc.go similarity index 87% rename from b_sponge-dtm-msg/internal/server/grpc.go rename to 6_micro-cluster/example-2-mono-repo/comment/internal/server/grpc.go index 85ad78e..2599ab6 100644 --- a/b_sponge-dtm-msg/internal/server/grpc.go +++ b/6_micro-cluster/example-2-mono-repo/comment/internal/server/grpc.go @@ -22,16 +22,16 @@ import ( "github.com/zhufuyi/sponge/pkg/prof" "github.com/zhufuyi/sponge/pkg/servicerd/registry" - "transfer/internal/config" - "transfer/internal/ecode" - "transfer/internal/service" + "eshop/comment/internal/config" + "eshop/comment/internal/ecode" + "eshop/comment/internal/service" ) var _ app.IServer = (*grpcServer)(nil) var ( defaultTokenAppID = "grpc" - defaultTokenAppKey = "123456" + defaultTokenAppKey = "mko09ijn" ) type grpcServer struct { @@ -124,7 +124,7 @@ func (s *grpcServer) secureServerOption() grpc.ServerOption { if err != nil { panic(err) } - logger.Info("rpc security type: sever-side certification") + logger.Info("grpc security type: sever-side certification") return grpc.Creds(credentials) case "two-way": // both client and server side certification @@ -136,11 +136,11 @@ func (s *grpcServer) secureServerOption() grpc.ServerOption { if err != nil { panic(err) } - logger.Info("rpc security type: both client-side and server-side certification") + logger.Info("grpc security type: both client-side and server-side certification") return grpc.Creds(credentials) } - logger.Info("rpc security type: insecure") + logger.Info("grpc security type: insecure") return nil } @@ -151,7 +151,7 @@ func (s *grpcServer) unaryServerOptions() grpc.ServerOption { interceptor.UnaryServerRequestID(), } - // logger interceptor + // logger interceptor, to print simple messages, replace interceptor.UnaryServerLog with interceptor.UnaryServerSimpleLog unaryServerInterceptors = append(unaryServerInterceptors, interceptor.UnaryServerLog( logger.Get(), interceptor.WithReplaceGRPCLogger(), @@ -171,8 +171,11 @@ func (s *grpcServer) unaryServerOptions() grpc.ServerOption { // jwt token interceptor //unaryServerInterceptors = 
append(unaryServerInterceptors, interceptor.UnaryServerJwtAuth( - // // set ignore rpc methods(full path) for jwt token - // interceptor.WithAuthIgnoreMethods("/api.user.v1.User/Register", "/api.user.v1.User/Login"), + // // choose a verification method as needed + //interceptor.WithStandardVerify(standardVerifyFn), // standard verify (default), you can set standardVerifyFn to nil if you don't need it + //interceptor.WithCustomVerify(customVerifyFn), // custom verify + // // specify the grpc API to ignore token verification(full path) + //interceptor.WithAuthIgnoreMethods("/api.user.v1.User/Register", "/api.user.v1.User/Login"), //)) // metrics interceptor @@ -210,7 +213,7 @@ func (s *grpcServer) streamServerOptions() grpc.ServerOption { //interceptor.StreamServerRequestID(), } - // logger interceptor + // logger interceptor, to print simple messages, replace interceptor.StreamServerLog with interceptor.StreamServerSimpleLog streamServerInterceptors = append(streamServerInterceptors, interceptor.StreamServerLog( logger.Get(), interceptor.WithReplaceGRPCLogger(), @@ -230,14 +233,16 @@ func (s *grpcServer) streamServerOptions() grpc.ServerOption { // jwt token interceptor //streamServerInterceptors = append(streamServerInterceptors, interceptor.StreamServerJwtAuth( - // // set ignore rpc methods(full path) for jwt token + // // choose a verification method as needed + //interceptor.WithStandardVerify(standardVerifyFn), // standard verify (default), you can set standardVerifyFn to nil if you don't need it + //interceptor.WithCustomVerify(customVerifyFn), // custom verify + // // specify the grpc API to ignore token verification(full path) // interceptor.WithAuthIgnoreMethods("/api.user.v1.User/Register", "/api.user.v1.User/Login"), //)) // metrics interceptor if config.Get().App.EnableMetrics { streamServerInterceptors = append(streamServerInterceptors, interceptor.StreamServerMetrics()) - s.registerMetricsMuxAndMethodFunc = s.registerMetricsMuxAndMethod() } // limit interceptor diff --git a/a_micro-grpc-http-protobuf/internal/server/grpc_option.go b/6_micro-cluster/example-2-mono-repo/comment/internal/server/grpc_option.go similarity index 100% rename from a_micro-grpc-http-protobuf/internal/server/grpc_option.go rename to 6_micro-cluster/example-2-mono-repo/comment/internal/server/grpc_option.go diff --git a/b_sponge-dtm-msg/internal/server/grpc_test.go b/6_micro-cluster/example-2-mono-repo/comment/internal/server/grpc_test.go similarity index 91% rename from b_sponge-dtm-msg/internal/server/grpc_test.go rename to 6_micro-cluster/example-2-mono-repo/comment/internal/server/grpc_test.go index 050b858..186fbf7 100644 --- a/b_sponge-dtm-msg/internal/server/grpc_test.go +++ b/6_micro-cluster/example-2-mono-repo/comment/internal/server/grpc_test.go @@ -14,12 +14,12 @@ import ( "github.com/zhufuyi/sponge/pkg/servicerd/registry" "github.com/zhufuyi/sponge/pkg/utils" - "transfer/configs" - "transfer/internal/config" + "eshop/comment/configs" + "eshop/comment/internal/config" ) func TestGRPCServer(t *testing.T) { - err := config.Init(configs.Path("transfer.yml")) + err := config.Init(configs.Path("comment.yml")) if err != nil { t.Fatal(err) } @@ -37,8 +37,6 @@ func TestGRPCServer(t *testing.T) { utils.SafeRunWithTimeout(time.Second*2, func(cancel context.CancelFunc) { server := NewGRPCServer(addr, - WithGrpcReadTimeout(time.Second), - WithGrpcWriteTimeout(time.Second), WithGrpcRegistry(nil, instance), ) assert.NotNil(t, server) @@ -47,7 +45,7 @@ func TestGRPCServer(t *testing.T) { } func 
TestGRPCServerMock(t *testing.T) { - err := config.Init(configs.Path("transfer.yml")) + err := config.Init(configs.Path("comment.yml")) if err != nil { t.Fatal(err) } @@ -101,7 +99,7 @@ func (g gRegistry) Deregister(ctx context.Context, service *registry.ServiceInst } func Test_grpcServer_getOptions(t *testing.T) { - err := config.Init(configs.Path("transfer.yml")) + err := config.Init(configs.Path("comment.yml")) if err != nil { t.Fatal(err) } diff --git a/6_micro-cluster/example-2-mono-repo/comment/internal/service/comment.go b/6_micro-cluster/example-2-mono-repo/comment/internal/service/comment.go new file mode 100644 index 0000000..924469d --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/comment/internal/service/comment.go @@ -0,0 +1,74 @@ +// Code generated by https://github.com/zhufuyi/sponge + +package service + +import ( + "context" + + "google.golang.org/grpc" + + "github.com/zhufuyi/sponge/pkg/grpc/interceptor" + "github.com/zhufuyi/sponge/pkg/logger" + + commentV1 "eshop/api/comment/v1" + "eshop/comment/internal/ecode" +) + +func init() { + registerFns = append(registerFns, func(server *grpc.Server) { + commentV1.RegisterCommentServer(server, NewCommentServer()) + }) +} + +var _ commentV1.CommentServer = (*comment)(nil) + +type comment struct { + commentV1.UnimplementedCommentServer + + // example: + // iDao dao.CommentDao +} + +// NewCommentServer create a server +func NewCommentServer() commentV1.CommentServer { + return &comment{ + // example: + // iDao: dao.NewCommentDao( + // model.GetDB(), + // cache.NewCommentCache(model.GetCacheType()), + // ), + } +} + +// ListByProductID list of comments by product id +func (s *comment) ListByProductID(ctx context.Context, req *commentV1.ListByProductIDRequest) (*commentV1.ListByProductIDReply, error) { + err := req.Validate() + if err != nil { + logger.Warn("req.Validate error", logger.Err(err), logger.Any("req", req), interceptor.ServerCtxRequestIDField(ctx)) + return nil, ecode.StatusInvalidParams.Err() + } + + // fill in the business logic code here + + return &commentV1.ListByProductIDReply{ + Total: 3, + ProductID: 1, + CommentDetails: []*commentV1.CommentDetail{ + { + Id: 1, + Username: "Mr Zhang", + Content: "good", + }, + { + Id: 2, + Username: "Mr Li", + Content: "good", + }, + { + Id: 3, + Username: "Mr Wang", + Content: "not good", + }, + }, + }, nil +} diff --git a/6_micro-cluster/example-2-mono-repo/comment/internal/service/comment_client_test.go b/6_micro-cluster/example-2-mono-repo/comment/internal/service/comment_client_test.go new file mode 100644 index 0000000..d56c45b --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/comment/internal/service/comment_client_test.go @@ -0,0 +1,112 @@ +// Code generated by https://github.com/zhufuyi/sponge +// Test_service_comment_methods is used to test the comment api +// Test_service_comment_benchmark is used to performance test the comment api + +package service + +import ( + "context" + "encoding/json" + "fmt" + "testing" + "time" + + "github.com/zhufuyi/sponge/pkg/grpc/benchmark" + + commentV1 "eshop/api/comment/v1" + "eshop/comment/configs" + "eshop/comment/internal/config" +) + +// Test service comment api via grpc client +func Test_service_comment_methods(t *testing.T) { + conn := getRPCClientConnForTest() + cli := commentV1.NewCommentClient(conn) + ctx, _ := context.WithTimeout(context.Background(), time.Second*30) + + tests := []struct { + name string + fn func() (interface{}, error) + wantErr bool + }{ + + { + name: "ListByProductID", + fn: func() (interface{}, 
error) { + // todo type in the parameters before testing + req := &commentV1.ListByProductIDRequest{ + ProductID: 0, + } + + return cli.ListByProductID(ctx, req) + }, + wantErr: false, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got, err := tt.fn() + if (err != nil) != tt.wantErr { + t.Errorf("test '%s' error = %v, wantErr %v", tt.name, err, tt.wantErr) + return + } + data, _ := json.MarshalIndent(got, "", " ") + fmt.Println(string(data)) + }) + } +} + +// performance test service comment api, copy the report to +// the browser to view when the pressure test is finished. +func Test_service_comment_benchmark(t *testing.T) { + err := config.Init(configs.Path("comment.yml")) + if err != nil { + panic(err) + } + + grpcClientCfg := getGRPCClientCfg() + host := fmt.Sprintf("%s:%d", grpcClientCfg.Host, grpcClientCfg.Port) + protoFile := configs.Path("../api/comment/v1/comment.proto") + // If third-party dependencies are missing during the press test, + // copy them to the project's third_party directory. + dependentProtoFilePath := []string{ + configs.Path("../third_party"), // third_party directory + configs.Path(".."), // Previous level of third_party + } + + tests := []struct { + name string + fn func() error + wantErr bool + }{ + + { + name: "ListByProductID", + fn: func() error { + // todo type in the parameters before benchmark testing + message := &commentV1.ListByProductIDRequest{ + ProductID: 0, + } + total := 1000 // total number of requests + + b, err := benchmark.New(host, protoFile, "ListByProductID", message, dependentProtoFilePath, total) + if err != nil { + return err + } + return b.Run() + }, + wantErr: false, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + err := tt.fn() + if (err != nil) != tt.wantErr { + t.Errorf("test '%s' error = %v, wantErr %v", tt.name, err, tt.wantErr) + return + } + }) + } +} diff --git a/a_micro-grpc-http-protobuf/internal/service/service.go b/6_micro-cluster/example-2-mono-repo/comment/internal/service/service.go similarity index 100% rename from a_micro-grpc-http-protobuf/internal/service/service.go rename to 6_micro-cluster/example-2-mono-repo/comment/internal/service/service.go diff --git a/b_sponge-dtm-msg/internal/service/service_test.go b/6_micro-cluster/example-2-mono-repo/comment/internal/service/service_test.go similarity index 69% rename from b_sponge-dtm-msg/internal/service/service_test.go rename to 6_micro-cluster/example-2-mono-repo/comment/internal/service/service_test.go index de50dc7..738aef4 100644 --- a/b_sponge-dtm-msg/internal/service/service_test.go +++ b/6_micro-cluster/example-2-mono-repo/comment/internal/service/service_test.go @@ -2,6 +2,7 @@ package service import ( "context" + "io" "strconv" "testing" "time" @@ -19,10 +20,12 @@ import ( "github.com/zhufuyi/sponge/pkg/servicerd/registry/nacos" "github.com/zhufuyi/sponge/pkg/utils" - "transfer/configs" - "transfer/internal/config" + "eshop/comment/configs" + "eshop/comment/internal/config" ) +var ioEOF = io.EOF + func TestRegisterAllService(t *testing.T) { utils.SafeRunWithTimeout(time.Second*2, func(cancel context.CancelFunc) { server := grpc.NewServer() @@ -31,38 +34,42 @@ func TestRegisterAllService(t *testing.T) { }) } +// The default is to connect to the local grpc server, if you want to connect to a remote grpc server, +// pass in the parameter grpcClient. 
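+// e.g. getRPCClientConnForTest(config.GrpcClient{Name: "comment", Host: "192.168.3.37", Port: 8282}) // hypothetical remote address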
func getRPCClientConnForTest(grpcClient ...config.GrpcClient) *grpc.ClientConn { - err := config.Init(configs.Path("transfer.yml")) + err := config.Init(configs.Path("comment.yml")) if err != nil { panic(err) } + grpcClientCfg := getGRPCClientCfg(grpcClient...) - var grpcClientCfg config.GrpcClient + var cliOptions []grpccli.Option - if len(grpcClient) == 0 { - // default config from configuration file serverNameExample.yml - grpcClientCfg = config.GrpcClient{ - Host: config.Get().App.Host, - Port: config.Get().Grpc.Port, - // If RegistryDiscoveryType is not empty, service discovery is used, and Host and Port values are invalid - RegistryDiscoveryType: config.Get().App.RegistryDiscoveryType, // supports consul, etcd and nacos - Name: config.Get().App.Name, - } - if grpcClientCfg.RegistryDiscoveryType != "" { - grpcClientCfg.EnableLoadBalance = true - } - } else { - // custom config - grpcClientCfg = grpcClient[0] + if grpcClientCfg.Timeout > 0 { + cliOptions = append(cliOptions, grpccli.WithTimeout(time.Second*time.Duration(grpcClientCfg.Timeout))) } - var cliOptions []grpccli.Option - // load balance if grpcClientCfg.EnableLoadBalance { cliOptions = append(cliOptions, grpccli.WithEnableLoadBalance()) } + // secure + cliOptions = append(cliOptions, grpccli.WithSecure( + grpcClientCfg.ClientSecure.Type, + grpcClientCfg.ClientSecure.ServerName, + grpcClientCfg.ClientSecure.CaFile, + grpcClientCfg.ClientSecure.CertFile, + grpcClientCfg.ClientSecure.KeyFile, + )) + + // token + cliOptions = append(cliOptions, grpccli.WithToken( + grpcClientCfg.ClientToken.Enable, + grpcClientCfg.ClientToken.AppID, + grpcClientCfg.ClientToken.AppKey, + )) + cliOptions = append(cliOptions, grpccli.WithEnableRequestID(), grpccli.WithEnableLog(logger.Get()), @@ -128,3 +135,39 @@ func getRPCClientConnForTest(grpcClient ...config.GrpcClient) *grpc.ClientConn { return conn } + +func getGRPCClientCfg(grpcClient ...config.GrpcClient) config.GrpcClient { + var grpcClientCfg config.GrpcClient + + // custom config + if len(grpcClient) > 0 { + // parameter config, highest priority + grpcClientCfg = grpcClient[0] + } else { + // grpcClient config in the yaml file, second priority + if len(config.Get().GrpcClient) > 0 { + for _, v := range config.Get().GrpcClient { + if v.Name == config.Get().App.Name { // match the current app name + grpcClientCfg = v + break + } + } + } + } + + // if there is no custom configuration, use the default configuration + if grpcClientCfg.Name == "" { + grpcClientCfg = config.GrpcClient{ + Host: config.Get().App.Host, + Port: config.Get().Grpc.Port, + // If RegistryDiscoveryType is not empty, service discovery is used, and Host and Port values are invalid + RegistryDiscoveryType: config.Get().App.RegistryDiscoveryType, // supports consul, etcd and nacos + Name: config.Get().App.Name, + } + if grpcClientCfg.RegistryDiscoveryType != "" { + grpcClientCfg.EnableLoadBalance = true + } + } + + return grpcClientCfg +} diff --git a/6_micro-cluster/example-2-mono-repo/comment/scripts/binary-package.sh b/6_micro-cluster/example-2-mono-repo/comment/scripts/binary-package.sh new file mode 100644 index 0000000..ecc33f4 --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/comment/scripts/binary-package.sh @@ -0,0 +1,24 @@ +#!/bin/bash + +serviceName="comment" + +mkdir -p ${serviceName}-binary/configs + +cp -f deployments/binary/run.sh ${serviceName}-binary +chmod +x ${serviceName}-binary/run.sh + +cp -f deployments/binary/deploy.sh ${serviceName}-binary +chmod +x ${serviceName}-binary/deploy.sh + +cp -f 
cmd/${serviceName}/${serviceName} ${serviceName}-binary +cp -f configs/${serviceName}.yml ${serviceName}-binary/configs +cp -f configs/${serviceName}_cc.yml ${serviceName}-binary/configs + +# compressing binary file +#upx -9 ${serviceName} + +tar zcvf ${serviceName}-binary.tar.gz ${serviceName}-binary +rm -rf ${serviceName}-binary + +echo "" +echo "package binary successfully, output file = ${serviceName}-binary.tar.gz" diff --git a/6_micro-cluster/example-2-mono-repo/comment/scripts/build/Dockerfile b/6_micro-cluster/example-2-mono-repo/comment/scripts/build/Dockerfile new file mode 100644 index 0000000..084820f --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/comment/scripts/build/Dockerfile @@ -0,0 +1,26 @@ +FROM alpine:latest +MAINTAINER zhufuyi "g.zhufuyi@gmail.com" + +# set the time zone to Shanghai +RUN apk add tzdata \ + && cp /usr/share/zoneinfo/Asia/Shanghai /etc/localtime \ + && echo "Asia/Shanghai" > /etc/timezone \ + && apk del tzdata + +# add grpc_health_probe for health check of grpc services +COPY grpc_health_probe /bin/grpc_health_probe +RUN chmod +x /bin/grpc_health_probe + +COPY configs/ /app/configs/ +COPY comment /app/comment +RUN chmod +x /app/comment + +# grpc and http port +EXPOSE 8282 8283 + + +WORKDIR /app + +CMD ["./comment", "-c", "configs/comment.yml"] +# if you use the Configuration Center, comment.yml is changed to the Configuration Center configuration. +#CMD ["./comment", "-c", "configs/comment.yml", "-enable-cc"] diff --git a/6_micro-cluster/example-2-mono-repo/comment/scripts/build/Dockerfile_build b/6_micro-cluster/example-2-mono-repo/comment/scripts/build/Dockerfile_build new file mode 100644 index 0000000..eebda67 --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/comment/scripts/build/Dockerfile_build @@ -0,0 +1,47 @@ +# Need to package the code first `tar zcf comment.tar.gz $(ls)` and move it to the same directory as Dokerfile + +# Compile the go code, you can specify the golang version +FROM golang:1.21-alpine as build +COPY . /go/src/comment +WORKDIR /go/src/comment +RUN tar zxf comment.tar.gz +RUN go env -w GOPROXY=https://goproxy.cn,direct +RUN go mod download +RUN CGO_ENABLED=0 GOOS=linux GOARCH=amd64 go build -o /comment cmd/comment/main.go + +# install grpc-health-probe, for health check of grpc service +RUN go install github.com/grpc-ecosystem/grpc-health-probe@v0.4.12 +RUN cd $GOPATH/pkg/mod/github.com/grpc-ecosystem/grpc-health-probe@v0.4.12 \ + && go mod download \ + && CGO_ENABLED=0 GOOS=linux GOARCH=amd64 go build -ldflags "all=-s -w" -o /grpc_health_probe + +# compressing binary files +#cd / +#upx -9 comment +#upx -9 grpc_health_probe + + +# building images with binary +FROM alpine:latest +MAINTAINER zhufuyi "g.zhufuyi@gmail.com" + +# set the time zone to Shanghai +RUN apk add tzdata \ + && cp /usr/share/zoneinfo/Asia/Shanghai /etc/localtime \ + && echo "Asia/Shanghai" > /etc/timezone \ + && apk del tzdata + +# add grpc_health_probe for health check of grpc services +COPY --from=build /grpc_health_probe /bin/grpc_health_probe +COPY --from=build /comment /app/comment +COPY --from=build /go/src/comment/configs/comment.yml /app/configs/comment.yml + +# grpc and http port +EXPOSE 8282 8283 + + +WORKDIR /app + +CMD ["./comment", "-c", "configs/comment.yml"] +# if you use the Configuration Center, comment.yml is changed to the Configuration Center configuration. 
+#CMD ["./comment", "-c", "configs/comment.yml", "-enable-cc"] diff --git a/6_micro-cluster/example-2-mono-repo/comment/scripts/build/Dockerfile_test b/6_micro-cluster/example-2-mono-repo/comment/scripts/build/Dockerfile_test new file mode 100644 index 0000000..5491e60 --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/comment/scripts/build/Dockerfile_test @@ -0,0 +1,16 @@ +# Need to package the code first `tar zcf comment.tar.gz $(ls)` and move it to the same directory as Dokerfile +# rpc server source code, used to test rpc methods +FROM golang:1.21-alpine +MAINTAINER zhufuyi "g.zhufuyi@gmail.com" + +# go test dependency packages +RUN apk add bash alpine-sdk build-base gcc + +COPY . /go/src/comment +WORKDIR /go/src/comment +RUN tar zxf comment.tar.gz +RUN go env -w GOPROXY=https://goproxy.cn,direct +RUN go mod download +RUN rm -f comment.tar.gz + +CMD ["sleep","86400"] diff --git a/a_micro-grpc-http-protobuf/scripts/build/README.md b/6_micro-cluster/example-2-mono-repo/comment/scripts/build/README.md similarity index 100% rename from a_micro-grpc-http-protobuf/scripts/build/README.md rename to 6_micro-cluster/example-2-mono-repo/comment/scripts/build/README.md diff --git a/6_micro-cluster/example-2-mono-repo/comment/scripts/deploy-binary.sh b/6_micro-cluster/example-2-mono-repo/comment/scripts/deploy-binary.sh new file mode 100644 index 0000000..a6b3d3f --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/comment/scripts/deploy-binary.sh @@ -0,0 +1,35 @@ +#!/usr/bin/expect + +set serviceName "comment" + +# parameters +set username [lindex $argv 0] +set password [lindex $argv 1] +set hostname [lindex $argv 2] + +set timeout 30 + +spawn scp -r ./${serviceName}-binary.tar.gz ${username}@${hostname}:/tmp/ +#expect "*yes/no*" +#send "yes\r" +expect "*password:*" +send "${password}\r" +expect eof + +spawn ssh ${username}@${hostname} +#expect "*yes/no*" +#send "yes\r" +expect "*password:*" +send "${password}\r" + +# execute a command or script +expect "*${username}@*" +send "cd /tmp && tar zxvf ${serviceName}-binary.tar.gz\r" +expect "*${username}@*" +send "bash /tmp/${serviceName}-binary/deploy.sh\r" + +# logging out of a session +expect "*${username}@*" +send "exit\r" + +expect eof diff --git a/6_micro-cluster/example-2-mono-repo/comment/scripts/deploy-docker.sh b/6_micro-cluster/example-2-mono-repo/comment/scripts/deploy-docker.sh new file mode 100644 index 0000000..88d7d31 --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/comment/scripts/deploy-docker.sh @@ -0,0 +1,32 @@ +#!/bin/bash + +dockerComposeFilePath="deployments/docker-compose" + +function checkResult() { + result=$1 + if [ ${result} -ne 0 ]; then + exit ${result} + fi +} + +mkdir -p ${dockerComposeFilePath}/configs +if [ ! -f "${dockerComposeFilePath}/configs/comment.yml" ];then + cp configs/comment.yml ${dockerComposeFilePath}/configs +fi + +# shellcheck disable=SC2164 +cd ${dockerComposeFilePath} + +docker-compose down +checkResult $? + +docker-compose up -d +checkResult $? + +colorCyan='\033[1;36m' +highBright='\033[1m' +markEnd='\033[0m' + +echo "" +echo -e "run service successfully, if you want to stop the service, go into the ${highBright}${dockerComposeFilePath}${markEnd} directory and execute the command ${colorCyan}docker-compose down${markEnd}." 
+echo "" diff --git a/6_micro-cluster/example-2-mono-repo/comment/scripts/deploy-k8s.sh b/6_micro-cluster/example-2-mono-repo/comment/scripts/deploy-k8s.sh new file mode 100644 index 0000000..48f8141 --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/comment/scripts/deploy-k8s.sh @@ -0,0 +1,31 @@ +#!/bin/bash + +SERVER_NAME="comment" +DEPLOY_FILE="deployments/kubernetes/${SERVER_NAME}-deployment.yml" + +function checkResult() { + result=$1 + if [ ${result} -ne 0 ]; then + exit ${result} + fi +} + +# Determining whether a file exists +if [ ! -f "${DEPLOY_FILE}" ];then + echo "Deployment file file ${DEPLOY_FILE} does not exist" + checkResult 1 +fi + +# Check if you are authorised to operate k8s +echo "kubectl version" +kubectl version +checkResult $? + +echo "kubectl delete -f ${DEPLOY_FILE} --ignore-not-found" +kubectl delete -f ${DEPLOY_FILE} --ignore-not-found +checkResult $? + +sleep 1 + +echo "kubectl apply -f ${DEPLOY_FILE}" +kubectl apply -f ${DEPLOY_FILE} diff --git a/6_micro-cluster/example-2-mono-repo/comment/scripts/image-build-local.sh b/6_micro-cluster/example-2-mono-repo/comment/scripts/image-build-local.sh new file mode 100644 index 0000000..3de9b9d --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/comment/scripts/image-build-local.sh @@ -0,0 +1,51 @@ +#!/bin/bash + +# build the image for local docker, using the binaries, if you want to reduce the size of the image, +# use upx to compress the binaries before building the image. + +serverName="comment" +# image name of the service, prohibit uppercase letters in names. +IMAGE_NAME="eshop/comment" +# Dockerfile file directory +DOCKERFILE_PATH="scripts/build" +DOCKERFILE="${DOCKERFILE_PATH}/Dockerfile" + +mv -f cmd/${serverName}/${serverName} ${DOCKERFILE_PATH}/${serverName} + +# install grpc-health-probe, for health check of grpc service +rootDockerFilePath=$(pwd)/${DOCKERFILE_PATH} +go install github.com/grpc-ecosystem/grpc-health-probe@v0.4.12 +cd $GOPATH/pkg/mod/github.com/grpc-ecosystem/grpc-health-probe@v0.4.12 \ + && go mod download \ + && CGO_ENABLED=0 GOOS=linux GOARCH=amd64 go build -ldflags "all=-s -w" -o "${rootDockerFilePath}/grpc_health_probe" +cd - + +# compressing binary file +#cd ${DOCKERFILE_PATH} +#upx -9 ${serverName} +#upx -9 grpc_health_probe +#cd - + +mkdir -p ${DOCKERFILE_PATH}/configs && cp -f configs/${serverName}.yml ${DOCKERFILE_PATH}/configs/ +echo "docker build -f ${DOCKERFILE} -t ${IMAGE_NAME}:latest ${DOCKERFILE_PATH}" +docker build -f ${DOCKERFILE} -t ${IMAGE_NAME}:latest ${DOCKERFILE_PATH} + +if [ -f "${DOCKERFILE_PATH}/grpc_health_probe" ]; then + rm -f ${DOCKERFILE_PATH}/grpc_health_probe +fi + + +if [ -f "${DOCKERFILE_PATH}/${serverName}" ]; then + rm -f ${DOCKERFILE_PATH}/${serverName} +fi + +if [ -d "${DOCKERFILE_PATH}/configs" ]; then + rm -rf ${DOCKERFILE_PATH}/configs +fi + +# delete none image +noneImages=$(docker images | grep "" | awk '{print $3}') +if [ "X${noneImages}" != "X" ]; then + docker rmi ${noneImages} > /dev/null +fi +exit 0 diff --git a/6_micro-cluster/example-2-mono-repo/comment/scripts/image-build.sh b/6_micro-cluster/example-2-mono-repo/comment/scripts/image-build.sh new file mode 100644 index 0000000..83bbec8 --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/comment/scripts/image-build.sh @@ -0,0 +1,71 @@ +#!/bin/bash + +# build the docker image using the binaries, if you want to reduce the size of the image, +# use upx to compress the binaries before building the image. 
+ +serverName="comment" +# image name of the service, prohibit uppercase letters in names. +IMAGE_NAME="eshop/comment" +# Dockerfile file directory +DOCKERFILE_PATH="scripts/build" +DOCKERFILE="${DOCKERFILE_PATH}/Dockerfile" + +# image repo address, REPO_HOST="ip or domain", passed in via the first parameter +REPO_HOST=$1 +if [ "X${REPO_HOST}" = "X" ];then + echo "param 'repo host' cannot be empty, example: ./image-build.sh hub.docker.com v1.0.0" + exit 1 +fi +# the version tag, which defaults to latest if empty, is passed in via the second parameter +TAG=$2 +if [ "X${TAG}" = "X" ];then + TAG="latest" +fi +# image name and tag +IMAGE_NAME_TAG="${REPO_HOST}/${IMAGE_NAME}:${TAG}" + +# binary executable files +BIN_FILE="cmd/${serverName}/${serverName}" +# configuration file directory +CONFIG_PATH="configs" + +CGO_ENABLED=0 GOOS=linux GOARCH=amd64 go build -o ${BIN_FILE} cmd/${serverName}/*.go +mv -f ${BIN_FILE} ${DOCKERFILE_PATH} +mkdir -p ${DOCKERFILE_PATH}/${CONFIG_PATH} && cp -f ${CONFIG_PATH}/${serverName}.yml ${DOCKERFILE_PATH}/${CONFIG_PATH} + +# install grpc-health-probe, for health check of grpc service +rootDockerFilePath=$(pwd)/${DOCKERFILE_PATH} +go install github.com/grpc-ecosystem/grpc-health-probe@v0.4.12 +cd $GOPATH/pkg/mod/github.com/grpc-ecosystem/grpc-health-probe@v0.4.12 \ + && go mod download \ + && CGO_ENABLED=0 GOOS=linux GOARCH=amd64 go build -ldflags "all=-s -w" -o "${rootDockerFilePath}/grpc_health_probe" +cd - + +# compressing binary file +#cd ${DOCKERFILE_PATH} +#upx -9 ${serverName} +#upx -9 grpc_health_probe +#cd - + +echo "docker build -f ${DOCKERFILE} -t ${IMAGE_NAME_TAG} ${DOCKERFILE_PATH}" +docker build -f ${DOCKERFILE} -t ${IMAGE_NAME_TAG} ${DOCKERFILE_PATH} + +if [ -f "${DOCKERFILE_PATH}/grpc_health_probe" ]; then + rm -f ${DOCKERFILE_PATH}/grpc_health_probe +fi + + +if [ -f "${DOCKERFILE_PATH}/${serverName}" ]; then + rm -f ${DOCKERFILE_PATH}/${serverName} +fi + +if [ -d "${DOCKERFILE_PATH}/configs" ]; then + rm -rf ${DOCKERFILE_PATH}/configs +fi + +# delete none image +noneImages=$(docker images | grep "" | awk '{print $3}') +if [ "X${noneImages}" != "X" ]; then + docker rmi ${noneImages} > /dev/null +fi +exit 0 diff --git a/6_micro-cluster/example-2-mono-repo/comment/scripts/image-build2.sh b/6_micro-cluster/example-2-mono-repo/comment/scripts/image-build2.sh new file mode 100644 index 0000000..d9d6f9c --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/comment/scripts/image-build2.sh @@ -0,0 +1,38 @@ +#!/bin/bash + +# two-stage build docker image + +serverName="comment" +# image name of the service, prohibit uppercase letters in names. 
+IMAGE_NAME="eshop/comment" +# Dockerfile file directory +DOCKERFILE_PATH="scripts/build" +DOCKERFILE="${DOCKERFILE_PATH}/Dockerfile_build" + +# image repo address, REPO_HOST="ip or domain", passed in via the first parameter +REPO_HOST=$1 +if [ "X${REPO_HOST}" = "X" ];then + echo "param 'repo host' cannot be empty, example: ./image-build.sh hub.docker.com v1.0.0" + exit 1 +fi +# the version tag, which defaults to latest if empty, is passed in via the second parameter +TAG=$2 +if [ "X${TAG}" = "X" ];then + TAG="latest" +fi +# image name and tag +IMAGE_NAME_TAG="${REPO_HOST}/${IMAGE_NAME}:${TAG}" + +PROJECT_FILES=$(ls) +tar zcf ${serverName}.tar.gz ${PROJECT_FILES} +mv -f ${serverName}.tar.gz ${DOCKERFILE_PATH} +echo "docker build --force-rm -f ${DOCKERFILE} -t ${IMAGE_NAME_TAG} ${DOCKERFILE_PATH}" +docker build --force-rm -f ${DOCKERFILE} -t ${IMAGE_NAME_TAG} ${DOCKERFILE_PATH} +rm -rf ${DOCKERFILE_PATH}/${serverName}.tar.gz +# delete none image +noneImages=$(docker images | grep "" | awk '{print $3}') +if [ "X${noneImages}" != "X" ]; then + docker rmi ${noneImages} > /dev/null +fi +exit 0 + diff --git a/6_micro-cluster/example-2-mono-repo/comment/scripts/image-push.sh b/6_micro-cluster/example-2-mono-repo/comment/scripts/image-push.sh new file mode 100644 index 0000000..28a213b --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/comment/scripts/image-push.sh @@ -0,0 +1,53 @@ +#!/bin/bash + +# image name, prohibit uppercase letters in names. +IMAGE_NAME="eshop/comment" + +# image repo address, passed in via the first parameter +REPO_HOST=$1 +if [ "X${REPO_HOST}" = "X" ];then + echo "param 'repo host' cannot be empty, example: ./image-push.sh hub.docker.com v1.0.0" + exit 1 +fi + +# version tag, passed in via the second parameter, if empty, defaults to latest +TAG=$2 +if [ "X${TAG}" = "X" ];then + TAG="latest" +fi +# image name and tag +IMAGE_NAME_TAG="${REPO_HOST}/${IMAGE_NAME}:${TAG}" + +function checkResult() { + result=$1 + if [ ${result} -ne 0 ]; then + exit ${result} + fi +} + +# image repository host, https://index.docker.io/v1 is the official docker image repository +IMAGE_REPO_HOST="https://index.docker.io/v1" +# check if you are authorized to log into docker +function checkLogin() { + loginStatus=$(cat /root/.docker/config.json | grep "${IMAGE_REPO_HOST}") + if [ "X${loginStatus}" = "X" ];then + echo "docker is not logged into the image repository" + checkResult 1 + fi +} + +checkLogin + +# push image to image repository +echo "docker push ${IMAGE_NAME_TAG}" +docker push ${IMAGE_NAME_TAG} +checkResult $? +echo "docker push image success." + +sleep 1 + +# delete image +echo "docker rmi -f ${IMAGE_NAME_TAG}" +docker rmi -f ${IMAGE_NAME_TAG} +checkResult $? +echo "docker remove image success." diff --git a/6_micro-cluster/example-2-mono-repo/comment/scripts/image-rpc-test.sh b/6_micro-cluster/example-2-mono-repo/comment/scripts/image-rpc-test.sh new file mode 100644 index 0000000..cf6b53c --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/comment/scripts/image-rpc-test.sh @@ -0,0 +1,33 @@ +#!/bin/bash + +# build rpc service test image + +serverName="comment" +# image name of the service, prohibit uppercase letters in names. 
+IMAGE_NAME="eshop/comment.rpc-test" +# Dockerfile file directory +DOCKERFILE_PATH="scripts/build" +DOCKERFILE="${DOCKERFILE_PATH}/Dockerfile_test" + +# image repo address, REPO_HOST="ip or domain", passed in via the first parameter +REPO_HOST=$1 +if [ "X${REPO_HOST}" = "X" ];then + echo "param 'repo host' cannot be empty, example: ./image-rpc-test.sh hub.docker.com v1.0.0" + exit 1 +fi +# the version tag, which defaults to latest if empty, is passed in via the second parameter +TAG=$2 +if [ "X${TAG}" = "X" ];then + TAG="latest" +fi +# image name and tag +IMAGE_NAME_TAG="${REPO_HOST}/${IMAGE_NAME}:${TAG}" + +PROJECT_FILES=$(ls) +tar zcf ${serverName}.tar.gz ${PROJECT_FILES} +mv -f ${serverName}.tar.gz ${DOCKERFILE_PATH} + +echo "docker build -f ${DOCKERFILE} -t ${IMAGE_NAME_TAG} ${DOCKERFILE_PATH}" +docker build --force-rm -f ${DOCKERFILE} -t ${IMAGE_NAME_TAG} ${DOCKERFILE_PATH} + +rm -rf ${DOCKERFILE_PATH}/${serverName}.tar.gz diff --git a/6_micro-cluster/example-2-mono-repo/comment/scripts/patch-mono.sh b/6_micro-cluster/example-2-mono-repo/comment/scripts/patch-mono.sh new file mode 100644 index 0000000..7d00974 --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/comment/scripts/patch-mono.sh @@ -0,0 +1,36 @@ +#!/bin/bash + +goModFile="go.mod" +thirdPartyProtoDir="third_party" +genServerType="grpc-pb" + +function checkResult() { + result=$1 + if [ ${result} -ne 0 ]; then + exit ${result} + fi +} + +if [ ! -f "../$goModFile" ]; then + sponge patch copy-go-mod -f + checkResult $? + mv -f go.mod .. + mv -f go.sum .. +fi + +if [ "$genServerType"x != "http"x ]; then + if [ ! -d "../$thirdPartyProtoDir" ]; then + sponge patch copy-third-party-proto + checkResult $? + mv -f $thirdPartyProtoDir .. + fi +fi + +if [ "$genServerType"x = "grpc"x ]; then + if [ ! -d "../api/types" ]; then + sponge patch gen-types-pb --out=. + checkResult $? + mv -f api/types ../api + rmdir api + fi +fi diff --git a/6_micro-cluster/example-2-mono-repo/comment/scripts/patch.sh b/6_micro-cluster/example-2-mono-repo/comment/scripts/patch.sh new file mode 100644 index 0000000..f06f10a --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/comment/scripts/patch.sh @@ -0,0 +1,81 @@ +#!/bin/bash + +patchType=$1 +typesPb="types-pb" +initMysql="init-mysql" +initMongodb="init-mongodb" +initTidb="init-tidb" +initPostgresql="init-postgresql" +initSqlite="init-sqlite" + +function checkResult() { + result=$1 + if [ ${result} -ne 0 ]; then + exit ${result} + fi +} + +function importPkg() { + go mod tidy +} + +function generateTypesPbCode() { + + if [ ! -d "../api/types" ]; then + sponge patch gen-types-pb --out=./ + checkResult $? + mv -f api/types ../api + rmdir api + fi + checkResult $? +} + +function generateInitMysqlCode() { + sponge patch gen-db-init --db-driver=mysql --out=./ + checkResult $? + importPkg +} + +function generateInitMongodbCode() { + sponge patch gen-db-init --db-driver=mongodb --out=./ + checkResult $? + importPkg +} + +function generateInitTidbCode() { + sponge patch gen-db-init --db-driver=tidb --out=./ + checkResult $? + importPkg +} + +function generateInitPostgresqlCode() { + sponge patch gen-db-init --db-driver=postgresql --out=./ + checkResult $? + importPkg +} + +function generateInitSqliteCode() { + sponge patch gen-db-init --db-driver=sqlite --out=./ + checkResult $? 
+ importPkg +} + +if [ "$patchType" = "$typesPb" ]; then + generateTypesPbCode +elif [ "$patchType" = "$initMysql" ]; then + generateInitMysqlCode +elif [ "$patchType" = "$initMongodb" ]; then + generateInitMongodbCode +elif [ "$patchType" = "$initTidb" ]; then + generateInitTidbCode +elif [ "$patchType" = "$initPostgresql" ]; then + generateInitPostgresqlCode +elif [ "$patchType" = "$initSqlite" ]; then + generateInitSqliteCode +else + echo "invalid patch type: '$patchType'" + echo "supported types: $initMysql, $initMongodb, $initTidb, $initPostgresql, $initSqlite, $typesPb" + echo "e.g. make patch TYPE=init-mysql" + echo "" + exit 1 +fi diff --git a/6_micro-cluster/example-2-mono-repo/comment/scripts/proto-doc.sh b/6_micro-cluster/example-2-mono-repo/comment/scripts/proto-doc.sh new file mode 100644 index 0000000..ad2b8d1 --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/comment/scripts/proto-doc.sh @@ -0,0 +1,46 @@ +#!/bin/bash + +# the directory where the proto files are located +bash scripts/patch-mono.sh +cd .. + +protoBasePath="api" +allProtoFiles="" + +function checkResult() { + result=$1 + if [ ${result} -ne 0 ]; then + exit ${result} + fi +} + +function listFiles(){ + cd $1 + items=$(ls) + + for item in $items; do + if [ -d "$item" ]; then + listFiles $item + else + if [ "${item#*.}"x = "proto"x ];then + file=$(pwd)/${item} + protoFile="${protoBasePath}${file#*${protoBasePath}}" + allProtoFiles="${allProtoFiles} ${protoFile}" + fi + fi + done + cd .. +} + +# get all proto file paths +listFiles $protoBasePath + +protoc --proto_path=. --proto_path=./third_party \ + --doc_out=. --doc_opt=html,apis.html \ + $allProtoFiles + +checkResult $? + +mv -f apis.html comment/docs/apis.html + +echo "generate proto doc file successfully, view in comment/docs/apis.html" diff --git a/6_micro-cluster/example-2-mono-repo/comment/scripts/protoc.sh b/6_micro-cluster/example-2-mono-repo/comment/scripts/protoc.sh new file mode 100644 index 0000000..8b919a6 --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/comment/scripts/protoc.sh @@ -0,0 +1,211 @@ +#!/bin/bash + +bash scripts/patch-mono.sh +cd .. + +protoBasePath="api" +allProtoFiles="" + +specifiedProtoFilePath=$1 +specifiedProtoFilePaths="" + +colorGray='\033[1;30m' +colorGreen='\033[1;32m' +colorMagenta='\033[1;35m' +colorCyan='\033[1;36m' +highBright='\033[1m' +markEnd='\033[0m' + +tipMsg="" + +function checkResult() { + result=$1 + if [ ${result} -ne 0 ]; then + exit ${result} + fi +} + +# get specified proto files, if empty, return 0 else return 1 +function getSpecifiedProtoFiles() { + if [ "$specifiedProtoFilePath"x = x ];then + return 0 + fi + + specifiedProtoFilePaths=${specifiedProtoFilePath//,/ } + + for v in $specifiedProtoFilePaths; do + if [ ! 
-f "$v" ];then + echo "Error: not found specified proto file $v" + echo "example: make proto FILES=api/user/v1/user.proto,api/types/types.proto" + checkResult 1 + fi + done + + return 1 +} + +# add the import of useless packages from the generated *.pb.go code here +function deleteUnusedPkg() { + file=$1 + osType=$(uname -s) + if [ "${osType}"x = "Darwin"x ];then + sed -i '' 's#_ \"github.com/envoyproxy/protoc-gen-validate/validate\"##g' ${file} + sed -i '' 's#_ \"github.com/grpc-ecosystem/grpc-gateway/v2/protoc-gen-openapiv2/options\"##g' ${file} + sed -i '' 's#_ \"github.com/srikrsna/protoc-gen-gotag/tagger\"##g' ${file} + sed -i '' 's#_ \"google.golang.org/genproto/googleapis/api/annotations\"##g' ${file} + else + sed -i "s#_ \"github.com/envoyproxy/protoc-gen-validate/validate\"##g" ${file} + sed -i "s#_ \"github.com/grpc-ecosystem/grpc-gateway/v2/protoc-gen-openapiv2/options\"##g" ${file} + sed -i "s#_ \"github.com/srikrsna/protoc-gen-gotag/tagger\"##g" ${file} + sed -i "s#_ \"google.golang.org/genproto/googleapis/api/annotations\"##g" ${file} + fi + checkResult $? +} + +function listProtoFiles(){ + cd $1 + items=$(ls) + + for item in $items; do + if [ -d "$item" ]; then + listProtoFiles $item + else + if [ "${item#*.}"x = "proto"x ];then + file=$(pwd)/${item} + protoFile="${protoBasePath}${file#*${protoBasePath}}" + allProtoFiles="${allProtoFiles} ${protoFile}" + fi + fi + done + cd .. +} + +function handlePbGoFiles(){ + cd $1 + items=$(ls) + + for item in $items; do + if [ -d "$item" ]; then + handlePbGoFiles $item + else + if [ "${item#*.}"x = "pb.go"x ];then + deleteUnusedPkg $item + fi + fi + done + cd .. +} + +function generateByAllProto(){ + getSpecifiedProtoFiles + if [ $? -eq 0 ]; then + listProtoFiles $protoBasePath + else + allProtoFiles=$specifiedProtoFilePaths + fi + + if [ "$allProtoFiles"x = x ];then + echo "Error: not found proto file in path $protoBasePath" + exit 1 + fi + echo -e "generate *pb.go by proto files: ${colorGray}$allProtoFiles${markEnd}" + echo "" + + # generate files *_pb.go + protoc --proto_path=. --proto_path=./third_party \ + --go_out=. --go_opt=paths=source_relative \ + $allProtoFiles + + checkResult $? + + # generate files *_grpc_pb.go + protoc --proto_path=. --proto_path=./third_party \ + --go-grpc_out=. --go-grpc_opt=paths=source_relative \ + $allProtoFiles + + checkResult $? + + + # generate the file *_pb.validate.go + protoc --proto_path=. --proto_path=./third_party \ + --validate_out=lang=go:. --validate_opt=paths=source_relative \ + $allProtoFiles + + checkResult $? + + # embed the tag field into *_pb.go + protoc --proto_path=. --proto_path=./third_party \ + --gotag_out=:. --gotag_opt=paths=source_relative \ + $allProtoFiles + + checkResult $? +} + +function generateBySpecifiedProto(){ + # get the proto file of the comment server + allProtoFiles="" + listProtoFiles ${protoBasePath}/comment + cd .. + specifiedProtoFiles="" + getSpecifiedProtoFiles + if [ $? 
-eq 0 ]; then + specifiedProtoFiles=$allProtoFiles + else + for v1 in $specifiedProtoFilePaths; do + for v2 in $allProtoFiles; do + if [ "$v1"x = "$v2"x ];then + specifiedProtoFiles="$specifiedProtoFiles $v1" + fi + done + done + fi + + if [ "$specifiedProtoFiles"x = x ];then + return + fi + echo -e "generate template code by proto files: ${colorMagenta}$specifiedProtoFiles${markEnd}" + echo "" + + moduleName=$(cat comment/docs/gen.info | head -1 | cut -d , -f 1) + serverName=$(cat comment/docs/gen.info | head -1 | cut -d , -f 2) + suitedMonoRepo=$(cat comment/docs/gen.info | head -1 | cut -d , -f 3) + + protoc --proto_path=. --proto_path=./third_party \ + --go-rpc-tmpl_out=. --go-rpc-tmpl_opt=paths=source_relative \ + --go-rpc-tmpl_opt=moduleName=${moduleName} --go-rpc-tmpl_opt=serverName=${serverName} --go-rpc-tmpl_opt=suitedMonoRepo=${suitedMonoRepo} \ + $specifiedProtoFiles + + checkResult $? + + sponge merge rpc-pb --dir=comment + checkResult $? + + tipMsg="${highBright}Tip:${markEnd} execute the command ${colorCyan}make run${markEnd} and then test grpc api in the file ${colorCyan}internal/service/xxx_client_test.go${markEnd}." + + + + if [ "$suitedMonoRepo" == "true" ]; then + sponge patch adapt-mono-repo --dir=comment + fi +} + +# generate pb.go by all proto files +generateByAllProto + +# generate pb.go by specified proto files +generateBySpecifiedProto + +# delete unused packages in pb.go +handlePbGoFiles $protoBasePath + +# delete json tag omitempty +sponge patch del-omitempty --dir=$protoBasePath --suffix-name=pb.go > /dev/null + +# modify duplicate numbers and error codes +sponge patch modify-dup-num --dir=comment/internal/ecode +sponge patch modify-dup-err-code --dir=comment/internal/ecode + +echo -e "${colorGreen}generated code done.${markEnd}" +echo "" +echo -e $tipMsg +echo "" diff --git a/b_sponge-dtm-msg/scripts/run-nohup.sh b/6_micro-cluster/example-2-mono-repo/comment/scripts/run-nohup.sh similarity index 96% rename from b_sponge-dtm-msg/scripts/run-nohup.sh rename to 6_micro-cluster/example-2-mono-repo/comment/scripts/run-nohup.sh index 408ed8f..de84526 100644 --- a/b_sponge-dtm-msg/scripts/run-nohup.sh +++ b/6_micro-cluster/example-2-mono-repo/comment/scripts/run-nohup.sh @@ -1,9 +1,9 @@ #!/bin/bash # chkconfig: - 85 15 -# description: transfer +# description: comment -serverName="transfer" +serverName="comment" cmdStr="cmd/${serverName}/${serverName}" diff --git a/6_micro-cluster/example-2-mono-repo/comment/scripts/run.sh b/6_micro-cluster/example-2-mono-repo/comment/scripts/run.sh new file mode 100644 index 0000000..563e868 --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/comment/scripts/run.sh @@ -0,0 +1,29 @@ +#!/bin/bash + +serverName="comment" + +binaryFile="cmd/${serverName}/${serverName}" + +osType=$(uname -s) +if [ "${osType%%_*}"x = "MINGW64"x ];then + binaryFile="${binaryFile}.exe" +fi + +if [ -f "${binaryFile}" ] ;then + rm "${binaryFile}" +fi + +function checkResult() { + result=$1 + if [ ${result} -ne 0 ]; then + exit ${result} + fi +} + +sleep 0.2 + +go build -o ${binaryFile} cmd/${serverName}/main.go +checkResult $? 
+ +# running server +./${binaryFile} diff --git a/6_micro-cluster/example-2-mono-repo/eshop_gw/.gitignore b/6_micro-cluster/example-2-mono-repo/eshop_gw/.gitignore new file mode 100644 index 0000000..17fcf76 --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/eshop_gw/.gitignore @@ -0,0 +1,26 @@ +# Binaries for programs and plugins +*.exe +*.exe~ +*.dll +*.so +*.dylib +*.log + +# Test binary, built with `go test -c` +*.test + +# Output of the go coverage tool, specifically when used with LiteIDE +*.out + +# Dependency directories (remove the comment below to include it) +vendor/ +dist/ + +# idea +.idea +*.iml +*.ipr +*.iws + +cmd/eshop_gw/eshop_gw + diff --git a/6_micro-cluster/example-2-mono-repo/eshop_gw/.golangci.yml b/6_micro-cluster/example-2-mono-repo/eshop_gw/.golangci.yml new file mode 100644 index 0000000..d17ff22 --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/eshop_gw/.golangci.yml @@ -0,0 +1,342 @@ +# This file configures eshop. + +run: + # timeout for analysis, e.g. 30s, 5m, default is 1m + timeout: 10m + # default concurrency is available CPU number + concurrency: 4 + # include test files or not, default is true + tests: false + # which dirs to skip: issues from them won't be reported; + # can use regexp here: generated.*, regexp is applied on full path; + # default value is empty list, but default dirs are skipped independently + # from this option's value (see skip-dirs-use-default). + skip-dirs: + - docs + - api + # which files to skip: they will be analyzed, but issues from them + # won't be reported. Default value is empty list, but there is + # no need to include all autogenerated files, we confidently recognize + # autogenerated files. If it's not please let us know. + skip-files: + - _test.go + + # exit code when at least one issue was found, default is 1 + issues-exit-code: 1 + + # list of build tags, all linters use it. Default is empty list. + build-tags: + - mytag + + # default is true. Enables skipping of directories: + # vendor$, third_party$, testdata$, examples$, Godeps$, builtin$ + skip-dirs-use-default: true + + +linters: + # please, do not use `enable-all`: it's deprecated and will be removed soon. 
+ # inverted configuration with `enable-all` and `disable` is not scalable during updates of golangci-lint + disable-all: true + enable: + - revive + - goimports + - gofmt + - unused + #- depguard + - dogsled + - errcheck + #- gochecknoinits + - goconst + - gocyclo + - gosimple + - govet + - lll + - misspell + - typecheck + - unconvert + - whitespace + - staticcheck + #- bodyclose + #- dupl + #- goprintffuncname + #- gosec + #- unparam + #- ineffassign + + +linters-settings: + revive: + rules: + - name: argument-limit + arguments: [ 8 ] + - name: atomic + - name: bare-return + - name: blank-imports + - name: bool-literal-in-expr + - name: call-to-gc + - name: confusing-naming + - name: confusing-results + - name: constant-logical-expr + - name: context-as-argument + - name: context-keys-type + - name: deep-exit + - name: defer + - name: dot-imports + - name: duplicated-imports + - name: early-return + - name: empty-block + #- name: empty-lines + - name: error-naming + - name: error-return + - name: error-strings + - name: errorf + - name: function-result-limit + arguments: [ 3 ] + - name: identical-branches + - name: if-return + - name: import-shadowing + - name: increment-decrement + - name: indent-error-flow + - name: modifies-parameter + - name: modifies-value-receiver + - name: package-comments + - name: range + - name: range-val-address + - name: range-val-in-closure + - name: receiver-naming + - name: redefines-builtin-id + - name: string-of-int + - name: struct-tag + - name: superfluous-else + - name: time-naming + - name: unconditional-recursion + - name: unexported-naming + - name: unnecessary-stmt + - name: unreachable-code + - name: unused-parameter + - name: var-declaration + - name: var-naming + - name: waitgroup-by-value + + dogsled: + # checks assignments with too many blank identifiers; default is 2 + max-blank-identifiers: 2 + + dupl: + # tokens count to trigger issue, 150 by default + threshold: 100 + + errcheck: + # report about not checking of errors in type assertions: `a := b.(MyStruct)`; + # default is false: such cases aren't reported by default. + check-type-assertions: false + + # report about assignment of errors to blank identifier: `num, _ := strconv.Atoi(numStr)`; + # default is false: such cases aren't reported by default. + check-blank: false + + # [deprecated] comma-separated list of pairs of the form pkg:regex + # the regex is used to ignore names within pkg. (default "fmt:.*"). 
+ # see https://github.com/kisielk/errcheck#the-deprecated-method for details + ignore: fmt:.*,io/ioutil:^Read.* + + # path to a file containing a list of functions to exclude from checking + # see https://github.com/kisielk/errcheck#excluding-functions for details + # exclude: /path/to/file.txt + funlen: + lines: 60 + statements: 40 + + gocognit: + # minimal code complexity to report, 30 by default (but we recommend 10-20) + min-complexity: 10 + + goconst: + # minimal length of string constant, 3 by default + min-len: 4 + # minimal occurrences count to trigger, 3 by default + min-occurrences: 4 + + gocyclo: + # minimal code complexity to report, 30 by default (but we recommend 10-20) + min-complexity: 20 + + godox: + # report any comments starting with keywords, this is useful for TODO or FIXME comments that + # might be left in the code accidentally and should be resolved before merging + keywords: # default keywords are TODO, BUG, and FIXME, these can be overwritten by this setting + - NOTE + - OPTIMIZE # marks code that should be optimized before merging + - HACK # marks hack-arounds that should be removed before merging + + gofmt: + # simplify code: gofmt with `-s` option, true by default + simplify: true + + goimports: + # put imports beginning with prefix after 3rd-party packages; + # it's a comma-separated list of prefixes + local-prefixes: eshop + + gomnd: + settings: + mnd: + # the list of enabled checks, see https://github.com/tommy-muehle/go-mnd/#checks for description. + checks: argument,case,condition,operation,return,assign + + govet: + # report about shadowed variables + check-shadowing: true + + # settings per analyzer + settings: + printf: # analyzer name, run `go tool vet help` to see all analyzers + funcs: # run `go tool vet help printf` to see available settings for `printf` analyzer + - (github.com/golangci/golangci-lint/pkg/logutils.Log).Infof + - (github.com/golangci/golangci-lint/pkg/logutils.Log).Warnf + - (github.com/golangci/golangci-lint/pkg/logutils.Log).Errorf + - (github.com/golangci/golangci-lint/pkg/logutils.Log).Fatalf + + # enable or disable analyzers by name + enable: + - atomicalign + enable-all: false + disable: + - shadow + disable-all: false + + depguard: + list-type: blacklist + include-go-root: false + #packages: + # - github.com/user/name + #packages-with-error-message: + # specify an error message to output when a blacklisted package is used + # - github.com/user/name: "logging is allowed only by logutils.Log" + + lll: + # max line length, lines longer will be reported. Default is 120. + # '\t' is counted as 1 character by default, and can be changed with the tab-width option + line-length: 200 + # tab width in spaces. Default to 1. + tab-width: 1 + + maligned: + # print struct with more effective memory layout or not, false by default + suggest-new: true + + misspell: + # Correct spellings using locale preferences for US or UK. + # Default is to use a neutral variety of English. + # Setting locale to US will correct the British spelling of 'colour' to 'color'. + locale: US + ignore-words: + - someword + + nakedret: + # make an issue if func has more lines of code than this setting and it has naked returns; default is 30 + max-func-lines: 30 + + prealloc: + # XXX: we don't recommend using this linter before doing performance profiling. + # For most programs usage of prealloc will be a premature optimization. + + # Report preallocation suggestions only on simple loops that have no returns/breaks/continues/gotos in them. + # True by default. 
+ simple: true + range-loops: true # Report preallocation suggestions on range loops, true by default + for-loops: false # Report preallocation suggestions on for loops, false by default + + #rowserrcheck: + # packages: + # - github.com/user/name + + unparam: + # Inspect exported functions, default is false. Set to true if no external program/library imports your code. + # XXX: if you enable this setting, unparam will report a lot of false-positives in text editors: + # if it's called for subdir of a project it can't find external interfaces. All text editor integrations + # with golangci-lint call it on a directory with the changed file. + check-exported: false + + unused: + # treat code as a program (not a library) and report unused exported identifiers; default is false. + # XXX: if you enable this setting, unused will report a lot of false-positives in text editors: + # if it's called for subdir of a project it can't find funcs usages. All text editor integrations + # with golangci-lint call it on a directory with the changed file. + check-exported: false + + whitespace: + multi-if: false # Enforces newlines (or comments) after every multi-line if statement + multi-func: false # Enforces newlines (or comments) after every multi-line function signature + + wsl: + # If true append is only allowed to be cuddled if appending value is + # matching variables, fields or types on line above. Default is true. + strict-append: true + # Allow calls and assignments to be cuddled as long as the lines have any + # matching variables, fields or types. Default is true. + allow-assign-and-call: true + # Allow multiline assignments to be cuddled. Default is true. + allow-multiline-assign: true + # Allow declarations (var) to be cuddled. + allow-cuddle-declarations: false + # Allow trailing comments in ending of blocks + allow-trailing-comment: false + # Force newlines in end of case at this limit (0 = never). + force-case-trailing-whitespace: 0 + +issues: + # List of regexps of issue texts to exclude, empty list by default. + # But independently from this option we use default exclude patterns, + # it can be disabled by `exclude-use-default: false`. To list all + # excluded by default patterns execute `golangci-lint run --help` + exclude: + - abcdef + + # Excluding configuration per-path, per-linter, per-text and per-source + exclude-rules: + # Exclude some linters from running on tests files. + - path: _test\.go + linters: + - gocyclo + - errcheck + - dupl + - gosec + + # Exclude known linters from partially hard-vendored code, + # which is impossible to exclude via "nolint" comments. + - path: internal/hmac/ + text: "weak cryptographic primitive" + linters: + - gosec + + # Exclude lll issues for long lines with go:generate + - linters: + - lll + source: "^//go:generate " + + # Independently from option `exclude` we use default exclude patterns, + # it can be disabled by this option. To list all + # excluded by default patterns execute `golangci-lint run --help`. + # Default value for this option is true. + exclude-use-default: false + + # Maximum issues count per one linter. Set to 0 to disable. Default is 50. + max-issues-per-linter: 0 + + # Maximum count of issues with the same text. Set to 0 to disable. Default is 3. + max-same-issues: 0 + + # Show only new issues: if there are unstaged changes or untracked files, + # only those changes are analyzed, else only changes in HEAD~ are analyzed. + # It's a super-useful option for integration of golangci-lint into existing + # large codebase. 
It's not practical to fix all existing issues at the moment + # of integration: much better don't allow issues in new code. + # Default is false. + new: false + + # Show only new issues created after git revision `REV` + new-from-rev: "" + +service: + golangci-lint-version: 1.48.0 # use the fixed version to not introduce new linters unexpectedly diff --git a/6_micro-cluster/example-2-mono-repo/eshop_gw/Jenkinsfile b/6_micro-cluster/example-2-mono-repo/eshop_gw/Jenkinsfile new file mode 100644 index 0000000..cc76915 --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/eshop_gw/Jenkinsfile @@ -0,0 +1,200 @@ +pipeline { + agent any + + stages { + stage("Check Build Branch") { + steps { + echo "Checking build branch in progress ......" + script { + if (env.GIT_BRANCH ==~ /^v([0-9])+\.([0-9])+\.([0-9])+.*/) { + echo "building production environment, tag=${env.GIT_BRANCH}" + } else if (env.GIT_BRANCH ==~ /^test-([0-9])+\.([0-9])+\.([0-9])+.*/) { + echo "building test environment, tag=${env.GIT_BRANCH}" + } else if (env.GIT_BRANCH ==~ /(origin\/develop)/) { + echo "building development environment, /origin/develop" + } else { + echo "The build branch ${env.GIT_BRANCH} is not legal, allowing to build the development environment branch (/origin/develop), the test environment branch (e.g. test-1.0.0), and the production environment branch (e.g. v1.0.0)" + sh 'exit 1' + } + } + echo "Check build branch complete." + } + } + + stage("Check Code") { + steps { + echo "Checking code in progress ......" + sh 'make ci-lint' + echo "Check code complete." + } + } + + stage("Unit Testing") { + steps { + echo "Unit testing in progress ......" + sh 'make test' + echo "Unit testing complete." + } + } + + stage("Compile Code") { + steps { + echo "Compiling code in progress ......" + sh 'make build' + echo "compile code complete." + } + } + + stage("Build Image") { + steps { + echo "building image in progress ......" + script { + registryHost="" + tagName="" + if (env.GIT_BRANCH ==~ /^v([0-9])+\.([0-9])+\.([0-9])+.*/) { + if (env.PROD_REPO_HOST == null) { + echo "The value of environment variable PROD_REPO_HOST is empty, please set the value of PROD_REPO_HOST in [Jenkins Management] --> [System Settings] --> [Environment Variables]." + sh 'exit 1' + } + echo "Use the production environment image repository ${env.PROD_REPO_HOST}" + registryHost=env.PROD_REPO_HOST + tagName=env.GIT_BRANCH + } + else if (env.GIT_BRANCH ==~ /^test-([0-9])+\.([0-9])+\.([0-9])+.*/) { + if (env.TEST_REPO_HOST == null) { + echo "The value of environment variable TEST_REPO_HOST is empty, please set the value of TEST_REPO_HOST in [Jenkins Management] --> [System Settings] --> [Environment Variables]." + sh 'exit 1' + } + echo "Use the test environment image repository ${env.TEST_REPO_HOST}" + registryHost=env.TEST_REPO_HOST + tagName=env.GIT_BRANCH + } + else { + if (env.DEV_REPO_HOST == null) { + echo "The value of environment variable DEV_REPO_HOST is empty, please set the value of DEV_REPO_HOST in [Jenkins Management] --> [System Settings] --> [Environment Variables]." + sh 'exit 1' + } + echo "Using the development environment ${env.DEV_REPO_HOST}" + registryHost=env.DEV_REPO_HOST + } + sh "make image-build REPO_HOST=$registryHost TAG=$tagName" + } + echo "Build image complete" + } + } + + stage("Push Image") { + steps { + echo "pushing image in progress ......" 
+ script { + registryHost="" + tagName="" + if (env.GIT_BRANCH ==~ /^v([0-9])+\.([0-9])+\.([0-9])+.*/) { + if (env.PROD_REPO_HOST == null) { + echo "The value of environment variable PROD_REPO_HOST is empty, please set the value of PROD_REPO_HOST in [Jenkins Management] --> [System Settings] --> [Environment Variables]." + sh 'exit 1' + } + echo "Use the production environment image repository ${env.PROD_REPO_HOST}" + registryHost=env.PROD_REPO_HOST + tagName=env.GIT_BRANCH + } + else if (env.GIT_BRANCH ==~ /^test-([0-9])+\.([0-9])+\.([0-9])+.*/) { + if (env.TEST_REPO_HOST == null) { + echo "The value of environment variable TEST_REPO_HOST is empty, please set the value of TEST_REPO_HOST in [Jenkins Management] --> [System Settings] --> [Environment Variables]." + sh 'exit 1' + } + echo "Use the test environment image repository ${env.TEST_REPO_HOST}" + registryHost=env.TEST_REPO_HOST + tagName=env.GIT_BRANCH + } + else { + if (env.DEV_REPO_HOST == null) { + echo "The value of environment variable DEV_REPO_HOST is empty, please set the value of DEV_REPO_HOST in [Jenkins Management] --> [System Settings] --> [Environment Variables]." + sh 'exit 1' + } + echo "Using the development environment ${env.DEV_REPO_HOST}" + registryHost=env.DEV_REPO_HOST + } + sh "make image-push REPO_HOST=$registryHost TAG=$tagName" + } + echo "push image complete, clear image complete." + } + } + + stage("Deploy to k8s") { + when { expression { return env.GIT_BRANCH ==~ /(origin\/staging|origin\/develop)/ } } + steps { + echo "Deploying to k8s in progress ......" + sh 'make deploy-k8s' + echo "Deploy to k8s complete." + } + } + } + + post { + always { + echo 'One way or another, I have finished' + echo sh(returnStdout: true, script: 'env') + deleteDir() /* clean up our workspace */ + } + success { + SendDingding("success") + //SendEmail("success") + echo 'structure success' + } + failure { + SendDingding("failure") + //SendEmail("failure") + echo 'structure failure' + } + } +} + +// Notifications using dingding +void SendDingding(res) +{ + // Fill in the corresponding cell phone number and specify a person to be notified in the pinned group + tel_num="xxxxxxxxxxx" + dingding_url="https://oapi.dingtalk.com/robot/send\\?access_token\\=your dingding robot token" + + branchName="" + if (env.GIT_BRANCH ==~ /^v([0-9])+\.([0-9])+\.([0-9])+.*/) { + branchName="${env.SERVER_PLATFORM} production environment, tag=${env.GIT_BRANCH}, ${env.JOB_NAME}" + } + else if (env.GIT_BRANCH ==~ /^test-([0-9])+\.([0-9])+\.([0-9])+.*/){ + branchName="${env.SERVER_PLATFORM} test environment, tag=${env.GIT_BRANCH}, ${env.JOB_NAME}" + } + else { + branchName="${env.SERVER_PLATFORM} develop environment, branch=${env.GIT_BRANCH}, ${env.JOB_NAME}" + } + + json_msg="" + if( res == "success" ) { + json_msg='{\\"msgtype\\":\\"text\\",\\"text\\":{\\"content\\":\\"@' + tel_num +' [OK] ' + "${branchName} ${env.BUILD_NUMBER}th " + 'build success. \\"},\\"at\\":{\\"atMobiles\\":[\\"' + tel_num + '\\"],\\"isAtAll\\":false}}' + } + else { + json_msg='{\\"msgtype\\":\\"text\\",\\"text\\":{\\"content\\":\\"@' + tel_num +' [cry] ' + "${branchName} ${env.BUILD_NUMBER}th " + 'build failed, please deal with it promptly! 
\\"},\\"at\\":{\\"atMobiles\\":[\\"' + tel_num + '\\"],\\"isAtAll\\":false}}' + } + + post_header="Content-Type:application/json;charset=utf-8" + sh_cmd="curl -X POST " + dingding_url + " -H " + "\'" + post_header + "\'" + " -d " + "\"" + json_msg + "\"" + sh sh_cmd +} + +// Notifications using email +void SendEmail(res) +{ + emailAddr="xxx@xxx.com" + if( res == "success" ) + { + mail to: emailAddr, + subject: "Build Success: ${currentBuild.fullDisplayName}", + body: "\nJob name: ${env.JOB_NAME} ${env.BUILD_NUMBER}th build. \n\n For more information, please see: ${env.BUILD_URL}" + } + else + { + mail to: emailAddr, + subject: "Build Failed: ${currentBuild.fullDisplayName}", + body: "\nJob name: ${env.JOB_NAME} ${env.BUILD_NUMBER}th build. \n\n For more information, please see: ${env.BUILD_URL}" + } +} diff --git a/6_micro-cluster/example-2-mono-repo/eshop_gw/Makefile b/6_micro-cluster/example-2-mono-repo/eshop_gw/Makefile new file mode 100644 index 0000000..97fa14b --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/eshop_gw/Makefile @@ -0,0 +1,183 @@ +SHELL := /bin/bash + +PROJECT_NAME := "eshop" +PKG := "$(PROJECT_NAME)" +PKG_LIST := $(shell go list ${PKG}/... | grep -v /vendor/ | grep -v /api/) + + + + +.PHONY: ci-lint +# Check code formatting, naming conventions, security, maintainability, etc. the rules in the .golangci.yml file +ci-lint: + @gofmt -s -w . + golangci-lint run ./... + + +.PHONY: test +# Test *_test.go files, the parameter -count=1 means that caching is disabled +test: + go test -count=1 -short ${PKG_LIST} + + +.PHONY: cover +# Generate test coverage +cover: + go test -short -coverprofile=cover.out -covermode=atomic ${PKG_LIST} + go tool cover -html=cover.out + + +.PHONY: graph +# Generate interactive visual function dependency graphs +graph: + @echo "generating graph ......" + @cp -f cmd/eshop_gw/main.go . + go-callvis -skipbrowser -format=svg -nostd -file=eshop_gw eshop + @rm -f main.go eshop_gw.gv + + + +.PHONY: proto +# Generate *.go and template code by proto files, the default is all the proto files in the api directory. you can specify the proto file, multiple files are separated by commas, e.g. make proto FILES=api/user/v1/user.proto +proto: + @bash scripts/protoc.sh $(FILES) + go mod tidy + @gofmt -s -w . + + +.PHONY: proto-doc +# Generate doc from *.proto files +proto-doc: + @bash scripts/proto-doc.sh + + +.PHONY: build +# Build eshop_gw for linux amd64 binary +build: + @echo "building 'eshop_gw', linux binary file will output to 'cmd/eshop_gw'" + @cd cmd/eshop_gw && CGO_ENABLED=0 GOOS=linux GOARCH=amd64 go build + + + +.PHONY: run +# Build and run service +run: + @bash scripts/run.sh + + +.PHONY: run-nohup +# Run service with nohup in local, if you want to stop the server, pass the parameter stop, e.g. make run-nohup CMD=stop +run-nohup: + @bash scripts/run-nohup.sh $(CMD) + + +.PHONY: run-docker +# Run service in local docker, if you want to update the service, run the make run-docker command again +run-docker: image-build-local + @bash scripts/deploy-docker.sh + + +.PHONY: binary-package +# Packaged binary files +binary-package: build + @bash scripts/binary-package.sh + + +.PHONY: deploy-binary +# Deploy binary to remote linux server, e.g. 
make deploy-binary USER=root PWD=123456 IP=192.168.1.10 +deploy-binary: binary-package + @expect scripts/deploy-binary.sh $(USER) $(PWD) $(IP) + + +.PHONY: image-build-local +# Build image for local docker, tag=latest, use binary files to build +image-build-local: build + @bash scripts/image-build-local.sh + + +.PHONY: image-build +# Build image for remote repositories, use binary files to build, e.g. make image-build REPO_HOST=addr TAG=latest +image-build: + @bash scripts/image-build.sh $(REPO_HOST) $(TAG) + + +.PHONY: image-build2 +# Build image for remote repositories, two-stage build, e.g. make image-build2 REPO_HOST=addr TAG=latest +image-build2: + @bash scripts/image-build2.sh $(REPO_HOST) $(TAG) + + +.PHONY: image-push +# Push docker image to remote repositories, e.g. make image-push REPO_HOST=addr TAG=latest +image-push: + @bash scripts/image-push.sh $(REPO_HOST) $(TAG) + + +.PHONY: deploy-k8s +# Deploy service to k8s +deploy-k8s: + @bash scripts/deploy-k8s.sh + + +.PHONY: image-build-rpc-test +# Build grpc test image for remote repositories, e.g. make image-build-rpc-test REPO_HOST=addr TAG=latest +image-build-rpc-test: + @bash scripts/image-rpc-test.sh $(REPO_HOST) $(TAG) + + +.PHONY: patch +# Patch some dependent code, e.g. make patch TYPE=types-pb , make patch TYPE=init-<your_db_driver>, your_db_driver is mysql, mongodb, postgresql, tidb, sqlite, for example: make patch TYPE=init-mysql +patch: + @bash scripts/patch.sh $(TYPE) + + +.PHONY: copy-proto +# Copy proto file from the grpc server directory, multiple directories or proto files separated by commas. default is to copy all proto files, e.g. make copy-proto SERVER=yourServerDir, copy specified proto files, e.g. make copy-proto SERVER=yourServerDir PROTO_FILE=yourProtoFile1,yourProtoFile2 +copy-proto: + @sponge patch copy-proto --server-dir=$(SERVER) --proto-file=$(PROTO_FILE) + + +.PHONY: modify-proto-pkg-name +# Modify the 'package' and 'go_package' names of all proto files in the 'api' directory +modify-proto-pkg-name: + @sponge patch modify-proto-package --dir=api --server-dir=. + + +.PHONY: update-config +# Update internal/config code based on the yaml file +update-config: + @sponge config --server-dir=.
+ + +.PHONY: clean +# Clean binary file, cover.out, template file +clean: + @rm -vrf cmd/eshop_gw/eshop_gw* + @rm -vrf cover.out + @rm -vrf main.go eshop_gw.gv + @rm -vrf internal/ecode/*.go.gen* + @rm -vrf internal/routers/*.go.gen* + @rm -vrf internal/handler/*.go.gen* + @rm -vrf internal/service/*.go.gen* + @rm -rf eshop_gw-binary.tar.gz + @echo "clean finished" + + +# Show help +help: + @echo '' + @echo 'Usage:' + @echo ' make <target>' + @echo '' + @echo 'Targets:' + @awk '/^[a-zA-Z\-_0-9]+:/ { \ + helpMessage = match(lastLine, /^# (.*)/); \ + if (helpMessage) { \ + helpCommand = substr($$1, 0, index($$1, ":")-1); \ + helpMessage = substr(lastLine, RSTART + 2, RLENGTH); \ + printf "\033[1;36m %-22s\033[0m %s\n", helpCommand,helpMessage; \ + } \ + } \ + { lastLine = $$0 }' $(MAKEFILE_LIST) + +.DEFAULT_GOAL := all diff --git a/6_micro-cluster/example-2-mono-repo/eshop_gw/README.md b/6_micro-cluster/example-2-mono-repo/eshop_gw/README.md new file mode 100644 index 0000000..15c5b65 --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/eshop_gw/README.md @@ -0,0 +1,9 @@ +## eshop_gw + +| Feature | Value | +| :----------------: | :-----------: | +| Server name | `eshop_gw` | +| Server type | `grpc-gw-pb` | +| Go module name | `eshop` | +| Repository type | `mono-repo` | + diff --git a/6_micro-cluster/example-2-mono-repo/eshop_gw/cmd/eshop_gw/initial/close.go b/6_micro-cluster/example-2-mono-repo/eshop_gw/cmd/eshop_gw/initial/close.go new file mode 100644 index 0000000..61aaa40 --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/eshop_gw/cmd/eshop_gw/initial/close.go @@ -0,0 +1,38 @@ +package initial + +import ( + "context" + "time" + + "github.com/zhufuyi/sponge/pkg/app" + "github.com/zhufuyi/sponge/pkg/tracer" + + "eshop/eshop_gw/internal/config" + //"eshop/eshop_gw/internal/rpcclient" +) + +// Close releases resources after the service exits +func Close(servers []app.IServer) []app.Close { + var closes []app.Close + + // close server + for _, s := range servers { + closes = append(closes, s.Stop) + } + + // close the rpc client connection + // example: + //closes = append(closes, func() error { + // return rpcclient.CloseEshop_gwRPCConn() + //}) + + // close tracing + if config.Get().App.EnableTrace { + closes = append(closes, func() error { + ctx, _ := context.WithTimeout(context.Background(), 2*time.Second) //nolint + return tracer.Close(ctx) + }) + } + + return closes +} diff --git a/6_micro-cluster/example-2-mono-repo/eshop_gw/cmd/eshop_gw/initial/createService.go b/6_micro-cluster/example-2-mono-repo/eshop_gw/cmd/eshop_gw/initial/createService.go new file mode 100644 index 0000000..5fc8467 --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/eshop_gw/cmd/eshop_gw/initial/createService.go @@ -0,0 +1,98 @@ +package initial + +import ( + "fmt" + "strconv" + + "github.com/zhufuyi/sponge/pkg/app" + "github.com/zhufuyi/sponge/pkg/logger" + "github.com/zhufuyi/sponge/pkg/servicerd/registry" + "github.com/zhufuyi/sponge/pkg/servicerd/registry/consul" + "github.com/zhufuyi/sponge/pkg/servicerd/registry/etcd" + "github.com/zhufuyi/sponge/pkg/servicerd/registry/nacos" + + "eshop/eshop_gw/internal/config" + "eshop/eshop_gw/internal/server" +) + +// CreateServices creates grpc or http services +func CreateServices() []app.IServer { + var cfg = config.Get() + var servers []app.IServer + + // creating http service + httpAddr := ":" + strconv.Itoa(cfg.HTTP.Port) + httpRegistry, httpInstance := registerService("http", cfg.App.Host, cfg.HTTP.Port) + httpServer := server.NewHTTPServer(httpAddr, + 
server.WithHTTPRegistry(httpRegistry, httpInstance), + server.WithHTTPIsProd(cfg.App.Env == "prod"), + ) + servers = append(servers, httpServer) + + return servers +} + +func registerService(scheme string, host string, port int) (registry.Registry, *registry.ServiceInstance) { + var ( + instanceEndpoint = fmt.Sprintf("%s://%s:%d", scheme, host, port) + cfg = config.Get() + + iRegistry registry.Registry + instance *registry.ServiceInstance + err error + + id = cfg.App.Name + "_" + scheme + "_" + host + logField logger.Field + ) + + switch cfg.App.RegistryDiscoveryType { + // registering service with consul + case "consul": + iRegistry, instance, err = consul.NewRegistry( + cfg.Consul.Addr, + id, + cfg.App.Name, + []string{instanceEndpoint}, + ) + if err != nil { + panic(err) + } + logField = logger.Any("consulAddress", cfg.Consul.Addr) + + // registering service with etcd + case "etcd": + iRegistry, instance, err = etcd.NewRegistry( + cfg.Etcd.Addrs, + id, + cfg.App.Name, + []string{instanceEndpoint}, + ) + if err != nil { + panic(err) + } + logField = logger.Any("etcdAddress", cfg.Etcd.Addrs) + + // registering service with nacos + case "nacos": + iRegistry, instance, err = nacos.NewRegistry( + cfg.NacosRd.IPAddr, + cfg.NacosRd.Port, + cfg.NacosRd.NamespaceID, + id, + cfg.App.Name, + []string{instanceEndpoint}, + ) + if err != nil { + panic(err) + } + logField = logger.String("nacosAddress", fmt.Sprintf("%v:%d", cfg.NacosRd.IPAddr, cfg.NacosRd.Port)) + } + + if instance != nil { + msg := fmt.Sprintf("register service address to %s", cfg.App.RegistryDiscoveryType) + logger.Info(msg, logField, logger.String("id", id), logger.String("name", cfg.App.Name), logger.String("endpoint", instanceEndpoint)) + return iRegistry, instance + } + + return nil, nil +} diff --git a/6_micro-cluster/example-2-mono-repo/eshop_gw/cmd/eshop_gw/initial/initApp.go b/6_micro-cluster/example-2-mono-repo/eshop_gw/cmd/eshop_gw/initial/initApp.go new file mode 100644 index 0000000..db7b1a4 --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/eshop_gw/cmd/eshop_gw/initial/initApp.go @@ -0,0 +1,128 @@ +// Package initial is the package that starts the service to initialize the service, including +// the initialization configuration, service configuration, connecting to the database, and +// resource release needed when shutting down the service. 
+package initial + +import ( + "flag" + "fmt" + "strconv" + + "github.com/jinzhu/copier" + + "github.com/zhufuyi/sponge/pkg/conf" + "github.com/zhufuyi/sponge/pkg/logger" + "github.com/zhufuyi/sponge/pkg/nacoscli" + "github.com/zhufuyi/sponge/pkg/stat" + "github.com/zhufuyi/sponge/pkg/tracer" + + "eshop/eshop_gw/configs" + "eshop/eshop_gw/internal/config" + //"eshop/eshop_gw/internal/rpcclient" +) + +var ( + version string + configFile string + enableConfigCenter bool +) + +// InitApp initial app configuration +func InitApp() { + initConfig() + cfg := config.Get() + + // initializing log + _, err := logger.Init( + logger.WithLevel(cfg.Logger.Level), + logger.WithFormat(cfg.Logger.Format), + logger.WithSave( + cfg.Logger.IsSave, + //logger.WithFileName(cfg.Logger.LogFileConfig.Filename), + //logger.WithFileMaxSize(cfg.Logger.LogFileConfig.MaxSize), + //logger.WithFileMaxBackups(cfg.Logger.LogFileConfig.MaxBackups), + //logger.WithFileMaxAge(cfg.Logger.LogFileConfig.MaxAge), + //logger.WithFileIsCompression(cfg.Logger.LogFileConfig.IsCompression), + ), + ) + if err != nil { + panic(err) + } + logger.Debug(config.Show()) + logger.Info("[logger] was initialized") + + // initializing tracing + if cfg.App.EnableTrace { + tracer.InitWithConfig( + cfg.App.Name, + cfg.App.Env, + cfg.App.Version, + cfg.Jaeger.AgentHost, + strconv.Itoa(cfg.Jaeger.AgentPort), + cfg.App.TracingSamplingRate, + ) + logger.Info("[tracer] was initialized") + } + + // initializing the print system and process resources + if cfg.App.EnableStat { + stat.Init( + stat.WithLog(logger.Get()), + stat.WithAlarm(), // invalid if it is windows, the default threshold for cpu and memory is 0.8, you can modify them + stat.WithPrintField(logger.String("service_name", cfg.App.Name), logger.String("host", cfg.App.Host)), + ) + logger.Info("[resource statistics] was initialized") + } + + // initializing the rpc server connection + // example: + //rpcclient.NewEshop_gwRPCConn() +} + +func initConfig() { + flag.StringVar(&version, "version", "", "service Version Number") + flag.BoolVar(&enableConfigCenter, "enable-cc", false, "whether to get from the configuration center, "+ + "if true, the '-c' parameter indicates the configuration center") + flag.StringVar(&configFile, "c", "", "configuration file") + flag.Parse() + + if enableConfigCenter { + // get the configuration from the configuration center (first get the nacos configuration, + // then read the service configuration according to the nacos configuration center) + if configFile == "" { + configFile = configs.Path("eshop_gw_cc.yml") + } + nacosConfig, err := config.NewCenter(configFile) + if err != nil { + panic(err) + } + appConfig := &config.Config{} + params := &nacoscli.Params{} + _ = copier.Copy(params, &nacosConfig.Nacos) + format, data, err := nacoscli.GetConfig(params) + if err != nil { + panic(fmt.Sprintf("connect to configuration center err, %v", err)) + } + err = conf.ParseConfigData(data, format, appConfig) + if err != nil { + panic(fmt.Sprintf("parse configuration data err, %v", err)) + } + if appConfig.App.Name == "" { + panic("read the config from center error, config data is empty") + } + config.Set(appConfig) + } else { + // get configuration from local configuration file + if configFile == "" { + configFile = configs.Path("eshop_gw.yml") + } + err := config.Init(configFile) + if err != nil { + panic("init config error: " + err.Error()) + } + } + + if version != "" { + config.Get().App.Version = version + } +} diff --git 
a/6_micro-cluster/example-2-mono-repo/eshop_gw/cmd/eshop_gw/main.go b/6_micro-cluster/example-2-mono-repo/eshop_gw/cmd/eshop_gw/main.go new file mode 100644 index 0000000..fe5e3a5 --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/eshop_gw/cmd/eshop_gw/main.go @@ -0,0 +1,17 @@ +// Package main is the grpc gateway server of the application. +package main + +import ( + "github.com/zhufuyi/sponge/pkg/app" + + "eshop/eshop_gw/cmd/eshop_gw/initial" +) + +func main() { + initial.InitApp() + services := initial.CreateServices() + closes := initial.Close(services) + + a := app.New(services, closes) + a.Run() +} diff --git a/6_micro-cluster/example-2-mono-repo/eshop_gw/configs/eshop_gw.yml b/6_micro-cluster/example-2-mono-repo/eshop_gw/configs/eshop_gw.yml new file mode 100644 index 0000000..0a2a13f --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/eshop_gw/configs/eshop_gw.yml @@ -0,0 +1,101 @@ +# Generate the go struct command: sponge config --server-dir=./serverDir + +# app settings +app: + name: "eshop_gw" # server name + env: "dev" # runtime environment, dev: development environment, prod: production environment, test: test environment + version: "v0.0.0" + host: "127.0.0.1" # domain or ip, for service registration + enableStat: true # whether to turn on printing statistics, true:enable, false:disable + enableMetrics: true # whether to turn on indicator collection, true:enable, false:disable + enableHTTPProfile: false # whether to turn on performance analysis, true:enable, false:disable + enableLimit: false # whether to turn on rate limiting (adaptive), true:on, false:off + enableCircuitBreaker: false # whether to turn on circuit breaker(adaptive), true:on, false:off + enableTrace: false # whether to turn on trace, true:enable, false:disable, if true jaeger configuration must be set + tracingSamplingRate: 1.0 # tracing sampling rate, between 0 and 1, 0 means no sampling, 1 means sampling all links + registryDiscoveryType: "" # registry and discovery types: consul, etcd, nacos, if empty, registration and discovery are not used + cacheType: "" # cache type, if empty, the cache is not used, support for "memory" and "redis", if set to redis, must set redis configuration + + +# http server settings +http: + port: 8080 # listen port + timeout: 0 # request timeout, unit(second), if 0 means not set, if greater than 0 means set timeout, if enableHTTPProfile is true, it needs to set 0 or greater than 60s + + +# grpc client-side settings, support for setting up multiple grpc clients. 
+grpcClient: + - name: "comment" # rpc service name, used for service discovery + host: "127.0.0.1" # rpc service address, used for direct connection + port: 18282 # rpc service port + registryDiscoveryType: "" # registration and discovery types: consul, etcd, nacos, if empty, connecting to server using host and port + enableLoadBalance: true # whether to turn on the load balancer + - name: "inventory" # rpc service name, used for service discovery + host: "127.0.0.1" # rpc service address, used for direct connection + port: 28282 # rpc service port + registryDiscoveryType: "" # registration and discovery types: consul, etcd, nacos, if empty, connecting to server using host and port + enableLoadBalance: true # whether to turn on the load balancer + - name: "product" # rpc service name, used for service discovery + host: "127.0.0.1" # rpc service address, used for direct connection + port: 38282 # rpc service port + registryDiscoveryType: "" # registration and discovery types: consul, etcd, nacos, if empty, connecting to server using host and port + enableLoadBalance: true # whether to turn on the load balancer + + +# logger settings +logger: + level: "info" # output log levels debug, info, warn, error, default is debug + format: "console" # output format, console or json, default is console + isSave: false # false:output to terminal, true:output to file, default is false + #logFileConfig: # Effective when isSave=true + #filename: "out.log" # File name (default is out.log) + #maxSize: 20 # Maximum file size (MB, default is 10MB) + #maxBackups: 50 # Maximum number of old files to retain (default is 100) + #maxAge: 15 # Maximum number of days to retain old files (default is 30 days) + #isCompression: true # Whether to compress/archive old files (default is false) + + +# set database configuration. reference-db-config-url +database: + driver: "mysql" # database driver + # mysql settings + mysql: + # dsn format, <user>:<password>@(<host>:<port>)/<db>?[k=v& ......] + dsn: "root:123456@(192.168.3.37:3306)/account?parseTime=true&loc=Local&charset=utf8,utf8mb4" + enableLog: true # whether to turn on printing of all logs + maxIdleConns: 10 # set the maximum number of connections in the idle connection pool + maxOpenConns: 100 # set the maximum number of open database connections + connMaxLifetime: 30 # sets the maximum time for which the connection can be reused, in minutes + + + +# redis settings +redis: + # dsn format, [user]:<password>@127.0.0.1:6379/[db], the default user is default, redis version 6.0 and above only supports user.
+ dsn: "default:123456@192.168.3.37:6379/0" + dialTimeout: 10 # connection timeout, unit(second) + readTimeout: 2 # read timeout, unit(second) + writeTimeout: 2 # write timeout, unit(second) + + +# jaeger settings +jaeger: + agentHost: "192.168.3.37" + agentPort: 6831 + + +# consul settings +consul: + addr: "192.168.3.37:8500" + + +# etcd settings +etcd: + addrs: ["192.168.3.37:2379"] + + +# nacos settings, used in service registration discovery +nacosRd: + ipAddr: "192.168.3.37" + port: 8848 + namespaceID: "3454d2b5-2455-4d0e-bf6d-e033b086bb4c" # namespace id diff --git a/6_micro-cluster/example-2-mono-repo/eshop_gw/configs/eshop_gw_cc.yml b/6_micro-cluster/example-2-mono-repo/eshop_gw/configs/eshop_gw_cc.yml new file mode 100644 index 0000000..97c3e3d --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/eshop_gw/configs/eshop_gw_cc.yml @@ -0,0 +1,13 @@ +# Generate the go struct command: sponge config --server-dir=./serverDir +# App config from nacos + +# nacos settings +nacos: + ipAddr: "192.168.3.37" # server address + port: 8848 # listening port + scheme: "http" # http or grpc + contextPath: "/nacos" # path + namespaceID: "3454d2b5-2455-4d0e-bf6d-e033b086bb4c" # namespace id + group: "dev" # group name: dev, prod, test + dataID: "eshop_gw.yml" # config file id + format: "yaml" # configuration file type: json,yaml,toml diff --git a/b_sponge-dtm-msg/configs/location.go b/6_micro-cluster/example-2-mono-repo/eshop_gw/configs/location.go similarity index 100% rename from b_sponge-dtm-msg/configs/location.go rename to 6_micro-cluster/example-2-mono-repo/eshop_gw/configs/location.go diff --git a/6_micro-cluster/example-2-mono-repo/eshop_gw/deployments/binary/README.md b/6_micro-cluster/example-2-mono-repo/eshop_gw/deployments/binary/README.md new file mode 100644 index 0000000..5bb0159 --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/eshop_gw/deployments/binary/README.md @@ -0,0 +1,26 @@ + +copy the configuration file to the configs directory and binary file before starting the service. + +``` +├── configs +│ └── eshop_gw.yml +├── eshop_gw +├── deploy.sh +└── run.sh +``` + +### Running and stopping service manually + +Running service: + +> ./run.sh + +Stopping the service: + +> ./run.sh stop + +
+ +### Automated deployment service + +> ./deploy.sh diff --git a/6_micro-cluster/example-2-mono-repo/eshop_gw/deployments/binary/deploy.sh b/6_micro-cluster/example-2-mono-repo/eshop_gw/deployments/binary/deploy.sh new file mode 100644 index 0000000..bff7edb --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/eshop_gw/deployments/binary/deploy.sh @@ -0,0 +1,33 @@ +#!/bin/bash + +serviceName="eshop_gw" + +function checkResult() { + result=$1 + if [ ${result} -ne 0 ]; then + exit ${result} + fi +} + +# determine if the startup service script run.sh exists +runFile="~/app/${serviceName}/run.sh" +if [ ! -f "$runFile" ]; then + # if it does not exist, copy the entire directory + mkdir -p ~/app + cp -rf /tmp/${serviceName}-binary ~/app/ + checkResult $? + rm -rf /tmp/${serviceName}-binary* +else + # replace only the binary file if it exists + cp -f ${serviceName}-binary/${serviceName} ~/app/${serviceName}-binary/${serviceName} + checkResult $? + rm -rf /tmp/${serviceName}-binary* +fi + +# running service +cd ~/app/${serviceName}-binary +chmod +x run.sh +./run.sh +checkResult $? + +echo "server directory is ~/app/${serviceName}-binary" diff --git a/6_micro-cluster/example-2-mono-repo/eshop_gw/deployments/binary/run.sh b/6_micro-cluster/example-2-mono-repo/eshop_gw/deployments/binary/run.sh new file mode 100644 index 0000000..8571e61 --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/eshop_gw/deployments/binary/run.sh @@ -0,0 +1,46 @@ +#!/bin/bash + +serviceName="eshop_gw" +cmdStr="./${serviceName} -c configs/${serviceName}.yml" + +chmod +x ./${serviceName} + +stopService(){ + NAME=$1 + + ID=`ps -ef | grep "$NAME" | grep -v "$0" | grep -v "grep" | awk '{print $2}'` + if [ -n "$ID" ]; then + for id in $ID + do + kill -9 $id + echo "Stopped ${NAME} service successfully, process ID=${ID}" + done + fi +} + +startService() { + NAME=$1 + + nohup ${cmdStr} > ${serviceName}.log 2>&1 & + sleep 1 + + ID=`ps -ef | grep "$NAME" | grep -v "$0" | grep -v "grep" | awk '{print $2}'` + if [ -n "$ID" ]; then + echo "Start the ${NAME} service ...... process ID=${ID}" + else + echo "Failed to start ${NAME} service" + return 1 + fi + return 0 +} + + +stopService ${serviceName} +if [ "$1"x != "stop"x ] ;then + sleep 1 + startService ${serviceName} + exit $? + echo "" +else + echo "Service ${serviceName} has stopped" +fi diff --git a/6_micro-cluster/example-2-mono-repo/eshop_gw/deployments/docker-compose/README.md b/6_micro-cluster/example-2-mono-repo/eshop_gw/deployments/docker-compose/README.md new file mode 100644 index 0000000..691418b --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/eshop_gw/deployments/docker-compose/README.md @@ -0,0 +1,12 @@ + +copy the configuration file to the configs directory before starting the service. 
+ +``` +├── configs +│ └── eshop_gw.yml +└── docker-compose.yml +``` + +running service: + +> docker-compose up -d diff --git a/6_micro-cluster/example-2-mono-repo/eshop_gw/deployments/docker-compose/docker-compose.yml b/6_micro-cluster/example-2-mono-repo/eshop_gw/deployments/docker-compose/docker-compose.yml new file mode 100644 index 0000000..16f77b7 --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/eshop_gw/deployments/docker-compose/docker-compose.yml @@ -0,0 +1,19 @@ +version: "3.7" + +services: + eshop-gw: + image: eshop/eshop-gw:latest + container_name: eshop-gw + restart: always + command: ["./eshop_gw", "-c", "/app/configs/eshop_gw.yml"] + volumes: + - $PWD/configs:/app/configs + ports: + - "8080:8080" # http port + healthcheck: + test: ["CMD", "curl", "-f", "http://localhost:8080/health"] # http health check, note: the image must contain the curl command + + interval: 10s # interval time + timeout: 5s # timeout time + retries: 3 # number of retries + start_period: 10s # how long after start-up does the check begin diff --git a/6_micro-cluster/example-2-mono-repo/eshop_gw/deployments/kubernetes/README.md b/6_micro-cluster/example-2-mono-repo/eshop_gw/deployments/kubernetes/README.md new file mode 100644 index 0000000..ce3586b --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/eshop_gw/deployments/kubernetes/README.md @@ -0,0 +1,32 @@ +Before deploying the service to k8s, create an image-pull Secret for k8s on a docker host that is already logged in to the image repository, using the following command. + +```bash +kubectl create secret generic docker-auth-secret \ + --from-file=.dockerconfigjson=/root/.docker/config.json \ + --type=kubernetes.io/dockerconfigjson +``` + +
+ +run the server: + +```bash +cd deployments + +kubectl apply -f ./*namespace.yml + +kubectl apply -f ./ +``` + +view the start-up status: + +> kubectl get all -n eshop + +
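+ +if the pods are not ready yet, you can wait for the rollout to finish (the deployment name and namespace below come from the manifests in this directory): + +> kubectl rollout status deployment/eshop-gw-dm -n eshop +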
+ +simple test of http port + +```bash +# mapping to the http port of the service on the local port +kubectl port-forward --address=0.0.0.0 service/eshop-gw-svc 8080:8080 -n eshop +``` diff --git a/6_micro-cluster/example-2-mono-repo/eshop_gw/deployments/kubernetes/eshop-namespace.yml b/6_micro-cluster/example-2-mono-repo/eshop_gw/deployments/kubernetes/eshop-namespace.yml new file mode 100644 index 0000000..eba474f --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/eshop_gw/deployments/kubernetes/eshop-namespace.yml @@ -0,0 +1,4 @@ +apiVersion: v1 +kind: Namespace +metadata: + name: eshop diff --git a/6_micro-cluster/example-2-mono-repo/eshop_gw/deployments/kubernetes/eshop_gw-configmap.yml b/6_micro-cluster/example-2-mono-repo/eshop_gw/deployments/kubernetes/eshop_gw-configmap.yml new file mode 100644 index 0000000..c516e99 --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/eshop_gw/deployments/kubernetes/eshop_gw-configmap.yml @@ -0,0 +1,114 @@ +kind: ConfigMap +apiVersion: v1 +metadata: + name: eshop-gw-config + namespace: eshop +data: + eshop_gw.yml: |- + # Generate the go struct command: sponge config --server-dir=./serverDir + + # app settings + app: + name: "eshop_gw" # server name + env: "dev" # runtime environment, dev: development environment, prod: production environment, test: test environment + version: "v0.0.0" + host: "127.0.0.1" # domain or ip, for service registration + enableStat: true # whether to turn on printing statistics, true:enable, false:disable + enableMetrics: true # whether to turn on indicator collection, true:enable, false:disable + enableHTTPProfile: false # whether to turn on performance analysis, true:enable, false:disable + enableLimit: false # whether to turn on rate limiting (adaptive), true:on, false:off + enableCircuitBreaker: false # whether to turn on circuit breaker(adaptive), true:on, false:off + enableTrace: false # whether to turn on trace, true:enable, false:disable, if true jaeger configuration must be set + tracingSamplingRate: 1.0 # tracing sampling rate, between 0 and 1, 0 means no sampling, 1 means sampling all links + registryDiscoveryType: "" # registry and discovery types: consul, etcd, nacos, if empty, registration and discovery are not used + cacheType: "" # cache type, if empty, the cache is not used, support for "memory" and "redis", if set to redis, must set redis configuration + + + # http server settings + http: + port: 8080 # listen port + timeout: 0 # request timeout, unit(second), if 0 means not set, if greater than 0 means set timeout, if enableHTTPProfile is true, it needs to set 0 or greater than 60s + + + # grpc client-side settings, support for setting up multiple grpc clients. 
+ grpcClient: + - name: "your_grpc_service_name" # grpc service name, used for service discovery + host: "127.0.0.1" # grpc service address, used for direct connection + port: 8282 # grpc service port + timeout: 0 # request timeout, unit(second), if 0 means not set, if greater than 0 means set timeout, valid only for unary grpc type + registryDiscoveryType: "" # registration and discovery types: consul, etcd, nacos, if empty, connecting to server using host and port + enableLoadBalance: true # whether to turn on the load balancer + # clientSecure parameter setting + # if type="", it means no secure connection, no need to fill in any parameters + # if type="one-way", it means server-side certification, only the fields 'serverName' and 'certFile' should be filled in + # if type="two-way", it means both client and server side certification, fill in all fields + clientSecure: + type: "" # secure type, "", "one-way", "two-way" + serverName: "" # server name, e.g. *.foo.com + caFile: "" # client side ca file, valid only in "two-way", absolute path + certFile: "" # client side cert file, absolute path, if secureType="one-way", fill in server side cert file here + keyFile: "" # client side key file, valid only in "two-way", absolute path + clientToken: + enable: false # whether to enable token authentication + appID: "" # app id + appKey: "" # app key + + + + # logger settings + logger: + level: "info" # output log levels debug, info, warn, error, default is debug + format: "console" # output format, console or json, default is console + isSave: false # false:output to terminal, true:output to file, default is false + #logFileConfig: # Effective when isSave=true + #filename: "out.log" # File name (default is out.log) + #maxSize: 20 # Maximum file size (MB, default is 10MB) + #maxBackups: 50 # Maximum number of old files to retain (default is 100) + #maxAge: 15 # Maximum number of days to retain old files (default is 30 days) + #isCompression: true # Whether to compress/archive old files (default is false) + + + # set database configuration. reference-db-config-url + database: + driver: "mysql" # database driver + # mysql settings + mysql: + # dsn format, <user>:<password>@(<host>:<port>)/<db>?[k=v& ......] + dsn: "root:123456@(192.168.3.37:3306)/account?parseTime=true&loc=Local&charset=utf8,utf8mb4" + enableLog: true # whether to turn on printing of all logs + maxIdleConns: 10 # set the maximum number of connections in the idle connection pool + maxOpenConns: 100 # set the maximum number of open database connections + connMaxLifetime: 30 # sets the maximum time for which the connection can be reused, in minutes + + + + # redis settings + redis: + # dsn format, [user]:<password>@127.0.0.1:6379/[db], the default user is default, redis version 6.0 and above only supports user. 
+ dsn: "default:123456@192.168.3.37:6379/0" + dialTimeout: 10 # connection timeout, unit(second) + readTimeout: 2 # read timeout, unit(second) + writeTimeout: 2 # write timeout, unit(second) + + + # jaeger settings + jaeger: + agentHost: "192.168.3.37" + agentPort: 6831 + + + # consul settings + consul: + addr: "192.168.3.37:8500" + + + # etcd settings + etcd: + addrs: ["192.168.3.37:2379"] + + + # nacos settings, used in service registration discovery + nacosRd: + ipAddr: "192.168.3.37" + port: 8848 + namespaceID: "3454d2b5-2455-4d0e-bf6d-e033b086bb4c" # namespace id diff --git a/6_micro-cluster/example-2-mono-repo/eshop_gw/deployments/kubernetes/eshop_gw-deployment.yml b/6_micro-cluster/example-2-mono-repo/eshop_gw/deployments/kubernetes/eshop_gw-deployment.yml new file mode 100644 index 0000000..611c81a --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/eshop_gw/deployments/kubernetes/eshop_gw-deployment.yml @@ -0,0 +1,63 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: eshop-gw-dm + namespace: eshop +spec: + replicas: 1 + selector: + matchLabels: + app: eshop-gw + template: + metadata: + name: eshop-gw-pod + labels: + app: eshop-gw + spec: + containers: + - name: eshop-gw + image: /eshop/eshop-gw:latest + # If using a local image, use Never, default is Always + #imagePullPolicy: Never + command: ["./eshop_gw", "-c", "/app/configs/eshop_gw.yml"] + resources: + requests: + cpu: 10m + memory: 10Mi + limits: + cpu: 1000m + memory: 1000Mi + volumeMounts: + - name: eshop-gw-vl + mountPath: /app/configs/ + readOnly: true + + ports: + - name: http-port + containerPort: 8080 + readinessProbe: + httpGet: + port: http-port + path: /health + initialDelaySeconds: 10 + timeoutSeconds: 2 + periodSeconds: 10 + successThreshold: 1 + failureThreshold: 3 + livenessProbe: + httpGet: + port: http-port + path: /health + + initialDelaySeconds: 10 + timeoutSeconds: 2 + periodSeconds: 10 + successThreshold: 1 + failureThreshold: 3 + # todo for private repositories, you need to create a secret (here docker-auth-secret) to store the account and password to log into docker + imagePullSecrets: + - name: docker-auth-secret + volumes: + - name: eshop-gw-vl + configMap: + name: eshop-gw-config diff --git a/6_micro-cluster/example-2-mono-repo/eshop_gw/deployments/kubernetes/eshop_gw-svc.yml b/6_micro-cluster/example-2-mono-repo/eshop_gw/deployments/kubernetes/eshop_gw-svc.yml new file mode 100644 index 0000000..8e307f1 --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/eshop_gw/deployments/kubernetes/eshop_gw-svc.yml @@ -0,0 +1,14 @@ +apiVersion: v1 +kind: Service +metadata: + name: eshop-gw-svc + namespace: eshop +spec: + selector: + app: eshop-gw + type: ClusterIP + ports: + - name: eshop-gw-svc-http-port + port: 8080 + targetPort: 8080 + diff --git a/a_micro-grpc-http-protobuf/docs/apis.go b/6_micro-cluster/example-2-mono-repo/eshop_gw/docs/apis.go similarity index 100% rename from a_micro-grpc-http-protobuf/docs/apis.go rename to 6_micro-cluster/example-2-mono-repo/eshop_gw/docs/apis.go diff --git a/6_micro-cluster/example-2-mono-repo/eshop_gw/docs/apis.swagger.json b/6_micro-cluster/example-2-mono-repo/eshop_gw/docs/apis.swagger.json new file mode 100644 index 0000000..e9d4d08 --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/eshop_gw/docs/apis.swagger.json @@ -0,0 +1,168 @@ +{ + "swagger": "2.0", + "info": { + "title": "user api docs", + "version": "2.0" + }, + "tags": [ + { + "name": "EShopGw" + } + ], + "host": "localhost:8080", + "schemes": [ + "http", + "https" + ], + "consumes": 
[ + "application/json" + ], + "produces": [ + "application/json" + ], + "paths": { + "/api/v1/detail": { + "get": { + "summary": "get detail", + "description": "get detail by product id", + "operationId": "EShopGw_GetDetailsByProductID", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "$ref": "#/definitions/v1GetDetailsByProductIDReply" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/rpcStatus" + } + } + }, + "parameters": [ + { + "name": "productID", + "in": "query", + "required": false, + "type": "integer", + "format": "uint64" + } + ], + "tags": [ + "EShopGw" + ], + "security": [ + { + "BearerAuth": [] + } + ] + } + } + }, + "definitions": { + "protobufAny": { + "type": "object", + "properties": { + "@type": { + "type": "string" + } + }, + "additionalProperties": {} + }, + "rpcStatus": { + "type": "object", + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "details": { + "type": "array", + "items": { + "$ref": "#/definitions/protobufAny" + } + } + } + }, + "v1CommentDetail": { + "type": "object", + "properties": { + "id": { + "type": "integer", + "format": "uint64" + }, + "username": { + "type": "string" + }, + "content": { + "type": "string" + } + } + }, + "v1GetDetailsByProductIDReply": { + "type": "object", + "properties": { + "productDetail": { + "$ref": "#/definitions/v1ProductDetail" + }, + "inventoryDetail": { + "$ref": "#/definitions/v1InventoryDetail" + }, + "commentDetails": { + "type": "array", + "items": { + "$ref": "#/definitions/v1CommentDetail" + } + } + } + }, + "v1InventoryDetail": { + "type": "object", + "properties": { + "id": { + "type": "integer", + "format": "uint64" + }, + "num": { + "type": "number", + "format": "float" + }, + "soldNum": { + "type": "integer", + "format": "int32" + } + } + }, + "v1ProductDetail": { + "type": "object", + "properties": { + "id": { + "type": "integer", + "format": "uint64" + }, + "name": { + "type": "string" + }, + "price": { + "type": "number", + "format": "float" + }, + "description": { + "type": "string" + } + } + } + }, + "securityDefinitions": { + "BearerAuth": { + "type": "apiKey", + "description": "Input a \"Bearer your-jwt-token\" to Value", + "name": "Authorization", + "in": "header" + } + } +} diff --git a/6_micro-cluster/example-2-mono-repo/eshop_gw/docs/gen.info b/6_micro-cluster/example-2-mono-repo/eshop_gw/docs/gen.info new file mode 100644 index 0000000..90ecd02 --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/eshop_gw/docs/gen.info @@ -0,0 +1 @@ +eshop,eshop_gw,true \ No newline at end of file diff --git a/6_micro-cluster/example-2-mono-repo/eshop_gw/internal/config/eshop_gw.go b/6_micro-cluster/example-2-mono-repo/eshop_gw/internal/config/eshop_gw.go new file mode 100644 index 0000000..2991648 --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/eshop_gw/internal/config/eshop_gw.go @@ -0,0 +1,173 @@ +// code generated by https://eshop + +package config + +import ( + "github.com/zhufuyi/sponge/pkg/conf" +) + +var config *Config + +func Init(configFile string, fs ...func()) error { + config = &Config{} + return conf.Parse(configFile, config, fs...) +} + +func Show(hiddenFields ...string) string { + return conf.Show(config, hiddenFields...) 
+} + +func Get() *Config { + if config == nil { + panic("config is nil, please call config.Init() first") + } + return config +} + +func Set(conf *Config) { + config = conf +} + +type Config struct { + App App `yaml:"app" json:"app"` + Consul Consul `yaml:"consul" json:"consul"` + Database Database `yaml:"database" json:"database"` + Etcd Etcd `yaml:"etcd" json:"etcd"` + Grpc Grpc `yaml:"grpc" json:"grpc"` + GrpcClient []GrpcClient `yaml:"grpcClient" json:"grpcClient"` + HTTP HTTP `yaml:"http" json:"http"` + Jaeger Jaeger `yaml:"jaeger" json:"jaeger"` + Logger Logger `yaml:"logger" json:"logger"` + NacosRd NacosRd `yaml:"nacosRd" json:"nacosRd"` + Redis Redis `yaml:"redis" json:"redis"` +} + +type Consul struct { + Addr string `yaml:"addr" json:"addr"` +} + +type Etcd struct { + Addrs []string `yaml:"addrs" json:"addrs"` +} + +type Jaeger struct { + AgentHost string `yaml:"agentHost" json:"agentHost"` + AgentPort int `yaml:"agentPort" json:"agentPort"` +} + +type ClientToken struct { + AppID string `yaml:"appID" json:"appID"` + AppKey string `yaml:"appKey" json:"appKey"` + Enable bool `yaml:"enable" json:"enable"` +} + +type ClientSecure struct { + CaFile string `yaml:"caFile" json:"caFile"` + CertFile string `yaml:"certFile" json:"certFile"` + KeyFile string `yaml:"keyFile" json:"keyFile"` + ServerName string `yaml:"serverName" json:"serverName"` + Type string `yaml:"type" json:"type"` +} + +type ServerSecure struct { + CaFile string `yaml:"caFile" json:"caFile"` + CertFile string `yaml:"certFile" json:"certFile"` + KeyFile string `yaml:"keyFile" json:"keyFile"` + Type string `yaml:"type" json:"type"` +} + +type App struct { + CacheType string `yaml:"cacheType" json:"cacheType"` + EnableCircuitBreaker bool `yaml:"enableCircuitBreaker" json:"enableCircuitBreaker"` + EnableHTTPProfile bool `yaml:"enableHTTPProfile" json:"enableHTTPProfile"` + EnableLimit bool `yaml:"enableLimit" json:"enableLimit"` + EnableMetrics bool `yaml:"enableMetrics" json:"enableMetrics"` + EnableStat bool `yaml:"enableStat" json:"enableStat"` + EnableTrace bool `yaml:"enableTrace" json:"enableTrace"` + Env string `yaml:"env" json:"env"` + Host string `yaml:"host" json:"host"` + Name string `yaml:"name" json:"name"` + RegistryDiscoveryType string `yaml:"registryDiscoveryType" json:"registryDiscoveryType"` + TracingSamplingRate float64 `yaml:"tracingSamplingRate" json:"tracingSamplingRate"` + Version string `yaml:"version" json:"version"` +} + +type GrpcClient struct { + ClientSecure ClientSecure `yaml:"clientSecure" json:"clientSecure"` + ClientToken ClientToken `yaml:"clientToken" json:"clientToken"` + EnableLoadBalance bool `yaml:"enableLoadBalance" json:"enableLoadBalance"` + Host string `yaml:"host" json:"host"` + Name string `yaml:"name" json:"name"` + Port int `yaml:"port" json:"port"` + RegistryDiscoveryType string `yaml:"registryDiscoveryType" json:"registryDiscoveryType"` + Timeout int `yaml:"timeout" json:"timeout"` +} + +type Sqlite struct { + ConnMaxLifetime int `yaml:"connMaxLifetime" json:"connMaxLifetime"` + DBFile string `yaml:"dbFile" json:"dbFile"` + EnableLog bool `yaml:"enableLog" json:"enableLog"` + MaxIdleConns int `yaml:"maxIdleConns" json:"maxIdleConns"` + MaxOpenConns int `yaml:"maxOpenConns" json:"maxOpenConns"` +} + +type Mysql struct { + ConnMaxLifetime int `yaml:"connMaxLifetime" json:"connMaxLifetime"` + Dsn string `yaml:"dsn" json:"dsn"` + EnableLog bool `yaml:"enableLog" json:"enableLog"` + MastersDsn []string `yaml:"mastersDsn" json:"mastersDsn"` + MaxIdleConns int 
`yaml:"maxIdleConns" json:"maxIdleConns"` + MaxOpenConns int `yaml:"maxOpenConns" json:"maxOpenConns"` + SlavesDsn []string `yaml:"slavesDsn" json:"slavesDsn"` +} + +type Postgresql struct { + ConnMaxLifetime int `yaml:"connMaxLifetime" json:"connMaxLifetime"` + Dsn string `yaml:"dsn" json:"dsn"` + EnableLog bool `yaml:"enableLog" json:"enableLog"` + MaxIdleConns int `yaml:"maxIdleConns" json:"maxIdleConns"` + MaxOpenConns int `yaml:"maxOpenConns" json:"maxOpenConns"` +} + +type Redis struct { + DialTimeout int `yaml:"dialTimeout" json:"dialTimeout"` + Dsn string `yaml:"dsn" json:"dsn"` + ReadTimeout int `yaml:"readTimeout" json:"readTimeout"` + WriteTimeout int `yaml:"writeTimeout" json:"writeTimeout"` +} + +type Database struct { + Driver string `yaml:"driver" json:"driver"` + Mongodb Mongodb `yaml:"mongodb" json:"mongodb"` + Mysql Mysql `yaml:"mysql" json:"mysql"` + Postgresql Mysql `yaml:"postgresql" json:"postgresql"` + Sqlite Sqlite `yaml:"sqlite" json:"sqlite"` +} + +type Mongodb struct { + Dsn string `yaml:"dsn" json:"dsn"` +} + +type Grpc struct { + EnableToken bool `yaml:"enableToken" json:"enableToken"` + HTTPPort int `yaml:"httpPort" json:"httpPort"` + Port int `yaml:"port" json:"port"` + ServerSecure ServerSecure `yaml:"serverSecure" json:"serverSecure"` +} + +type Logger struct { + Format string `yaml:"format" json:"format"` + IsSave bool `yaml:"isSave" json:"isSave"` + Level string `yaml:"level" json:"level"` +} + +type NacosRd struct { + IPAddr string `yaml:"ipAddr" json:"ipAddr"` + NamespaceID string `yaml:"namespaceID" json:"namespaceID"` + Port int `yaml:"port" json:"port"` +} + +type HTTP struct { + Port int `yaml:"port" json:"port"` + Timeout int `yaml:"timeout" json:"timeout"` +} diff --git a/6_micro-cluster/example-2-mono-repo/eshop_gw/internal/config/eshop_gw_cc.go b/6_micro-cluster/example-2-mono-repo/eshop_gw/internal/config/eshop_gw_cc.go new file mode 100644 index 0000000..326e1bc --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/eshop_gw/internal/config/eshop_gw_cc.go @@ -0,0 +1,28 @@ +// code generated by https://eshop + +package config + +import ( + "github.com/zhufuyi/sponge/pkg/conf" +) + +func NewCenter(configFile string) (*Center, error) { + nacosConf := &Center{} + err := conf.Parse(configFile, nacosConf) + return nacosConf, err +} + +type Center struct { + Nacos Nacos `yaml:"nacos" json:"nacos"` +} + +type Nacos struct { + ContextPath string `yaml:"contextPath" json:"contextPath"` + DataID string `yaml:"dataID" json:"dataID"` + Format string `yaml:"format" json:"format"` + Group string `yaml:"group" json:"group"` + IPAddr string `yaml:"ipAddr" json:"ipAddr"` + NamespaceID string `yaml:"namespaceID" json:"namespaceID"` + Port int `yaml:"port" json:"port"` + Scheme string `yaml:"scheme" json:"scheme"` +} diff --git a/6_micro-cluster/example-2-mono-repo/eshop_gw/internal/config/eshop_gw_test.go b/6_micro-cluster/example-2-mono-repo/eshop_gw/internal/config/eshop_gw_test.go new file mode 100644 index 0000000..f653a80 --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/eshop_gw/internal/config/eshop_gw_test.go @@ -0,0 +1,45 @@ +package config + +import ( + "testing" + + "github.com/stretchr/testify/assert" + + "github.com/zhufuyi/sponge/pkg/gofile" + + "eshop/eshop_gw/configs" +) + +func TestInit(t *testing.T) { + configFile := configs.Path("eshop_gw.yml") + err := Init(configFile) + if gofile.IsExists(configFile) { + assert.NoError(t, err) + } else { + assert.Error(t, err) + } + + c := Get() + assert.NotNil(t, c) + + str := Show() + 
assert.NotEmpty(t, str) + t.Log(str) + + // set nil + Set(nil) + defer func() { + recover() + }() + Get() +} + +func TestInitNacos(t *testing.T) { + configFile := configs.Path("eshop_gw_cc.yml") + _, err := NewCenter(configFile) + if gofile.IsExists(configFile) { + assert.NoError(t, err) + } else { + assert.Error(t, err) + } +} diff --git a/6_micro-cluster/example-2-mono-repo/eshop_gw/internal/ecode/eshop_gw_rpc.go b/6_micro-cluster/example-2-mono-repo/eshop_gw/internal/ecode/eshop_gw_rpc.go new file mode 100644 index 0000000..c91a187 --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/eshop_gw/internal/ecode/eshop_gw_rpc.go @@ -0,0 +1,19 @@ +// Code generated by https://github.com/zhufuyi/sponge + +package ecode + +import ( + "github.com/zhufuyi/sponge/pkg/errcode" +) + +// eShopGw business-level rpc error codes. +// the eShopGwNO value range is 1~100, if the same error code is used, it will cause panic. +var ( + _eShopGwNO = 75 + _eShopGwName = "eShopGw" + _eShopGwBaseCode = errcode.RCode(_eShopGwNO) + + StatusGetDetailsByProductIDEShopGw = errcode.NewRPCStatus(_eShopGwBaseCode+1, "failed to GetDetailsByProductID "+_eShopGwName) + + // error codes are globally unique, adding 1 to the previous error code +) diff --git a/6_micro-cluster/example-2-mono-repo/eshop_gw/internal/ecode/systemCode_rpc.go b/6_micro-cluster/example-2-mono-repo/eshop_gw/internal/ecode/systemCode_rpc.go new file mode 100644 index 0000000..8a88afd --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/eshop_gw/internal/ecode/systemCode_rpc.go @@ -0,0 +1,46 @@ +package ecode + +import ( + "github.com/zhufuyi/sponge/pkg/errcode" +) + +// rpc system level error code, with status prefix, error code range 30000~40000 +var ( + StatusSuccess = errcode.StatusSuccess + + StatusCanceled = errcode.StatusCanceled + StatusUnknown = errcode.StatusUnknown + StatusInvalidParams = errcode.StatusInvalidParams + StatusDeadlineExceeded = errcode.StatusDeadlineExceeded + StatusNotFound = errcode.StatusNotFound + StatusAlreadyExists = errcode.StatusAlreadyExists + StatusPermissionDenied = errcode.StatusPermissionDenied + StatusResourceExhausted = errcode.StatusResourceExhausted + StatusFailedPrecondition = errcode.StatusFailedPrecondition + StatusAborted = errcode.StatusAborted + StatusOutOfRange = errcode.StatusOutOfRange + StatusUnimplemented = errcode.StatusUnimplemented + StatusInternalServerError = errcode.StatusInternalServerError + StatusServiceUnavailable = errcode.StatusServiceUnavailable + StatusDataLoss = errcode.StatusDataLoss + StatusUnauthorized = errcode.StatusUnauthorized + + StatusTimeout = errcode.StatusTimeout + StatusTooManyRequests = errcode.StatusTooManyRequests + StatusForbidden = errcode.StatusForbidden + StatusLimitExceed = errcode.StatusLimitExceed + StatusMethodNotAllowed = errcode.StatusMethodNotAllowed + StatusAccessDenied = errcode.StatusAccessDenied + StatusConflict = errcode.StatusConflict +) + +// Any kev-value +func Any(key string, val interface{}) errcode.Detail { + return errcode.Any(key, val) +} + +// StatusSkipResponse is only use for grpc-gateway +var StatusSkipResponse = errcode.SkipResponse + +// GetStatusCode get status code from error returned by RPC invoke +var GetStatusCode = errcode.GetStatusCode diff --git a/6_micro-cluster/example-2-mono-repo/eshop_gw/internal/routers/eshop_gw_router.go b/6_micro-cluster/example-2-mono-repo/eshop_gw/internal/routers/eshop_gw_router.go new file mode 100644 index 0000000..9eeae04 --- /dev/null +++ 
b/6_micro-cluster/example-2-mono-repo/eshop_gw/internal/routers/eshop_gw_router.go @@ -0,0 +1,70 @@ +// Code generated by https://github.com/zhufuyi/sponge + +package routers + +import ( + "context" + + "github.com/gin-gonic/gin" + "google.golang.org/grpc/metadata" + + "github.com/zhufuyi/sponge/pkg/gin/middleware" + "github.com/zhufuyi/sponge/pkg/logger" + + eshop_gwV1 "eshop/api/eshop_gw/v1" + "eshop/eshop_gw/internal/service" +) + +func init() { + allMiddlewareFns = append(allMiddlewareFns, func(c *middlewareConfig) { + eShopGwMiddlewares(c) + }) + + allRouteFns = append(allRouteFns, + func(r *gin.Engine, groupPathMiddlewares map[string][]gin.HandlerFunc, singlePathMiddlewares map[string][]gin.HandlerFunc) { + eShopGwRouter(r, groupPathMiddlewares, singlePathMiddlewares, service.NewEShopGwClient()) + }) +} + +func eShopGwRouter( + r *gin.Engine, + groupPathMiddlewares map[string][]gin.HandlerFunc, + singlePathMiddlewares map[string][]gin.HandlerFunc, + iService eshop_gwV1.EShopGwLogicer) { + ctxFn := func(c *gin.Context) context.Context { + md := metadata.New(map[string]string{ + // set metadata to be passed from http to rpc + middleware.ContextRequestIDKey: middleware.GCtxRequestID(c), // request_id + //middleware.HeaderAuthorizationKey: c.GetHeader(middleware.HeaderAuthorizationKey), // authorization + }) + return metadata.NewOutgoingContext(c.Request.Context(), md) + } + + eshop_gwV1.RegisterEShopGwRouter( + r, + groupPathMiddlewares, + singlePathMiddlewares, + iService, + eshop_gwV1.WithEShopGwLogger(logger.Get()), + eshop_gwV1.WithEShopGwRPCResponse(), + eshop_gwV1.WithEShopGwWrapCtx(ctxFn), + eshop_gwV1.WithEShopGwRPCStatusToHTTPCode( + // Set some error codes to standard http return codes, + // by default there is already ecode.StatusInternalServerError and ecode.StatusServiceUnavailable + // example: + // ecode.StatusUnimplemented, ecode.StatusAborted, + ), + ) +} + +// you can set the middleware of a route group, or set the middleware of a single route, +// or you can mix them, pay attention to the duplication of middleware when mixing them, +// it is recommended to set the middleware of a single route in preference +func eShopGwMiddlewares(c *middlewareConfig) { + // set up group route middleware, group path is left prefix rules, + // if the left prefix is hit, the middleware will take effect, e.g. 
group route is /api/v1, route /api/v1/eShopGw/:id will take effect + // c.setGroupPath("/api/v1/eShopGw", middleware.Auth()) + + // set up single route middleware, just uncomment the code and fill in the middlewares, nothing else needs to be changed + //c.setSinglePath("GET", "/api/v1/detail", middleware.Auth()) // GetDetailsByProductID get page detail by product id +} diff --git a/6_micro-cluster/example-2-mono-repo/eshop_gw/internal/routers/routers.go b/6_micro-cluster/example-2-mono-repo/eshop_gw/internal/routers/routers.go new file mode 100644 index 0000000..2d1ae63 --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/eshop_gw/internal/routers/routers.go @@ -0,0 +1,167 @@ +package routers + +import ( + "net/http" + "strings" + "time" + + "github.com/gin-gonic/gin" + "github.com/gin-gonic/gin/binding" + + "github.com/zhufuyi/sponge/pkg/errcode" + "github.com/zhufuyi/sponge/pkg/gin/handlerfunc" + "github.com/zhufuyi/sponge/pkg/gin/middleware" + "github.com/zhufuyi/sponge/pkg/gin/middleware/metrics" + "github.com/zhufuyi/sponge/pkg/gin/prof" + "github.com/zhufuyi/sponge/pkg/gin/swagger" + "github.com/zhufuyi/sponge/pkg/gin/validator" + "github.com/zhufuyi/sponge/pkg/jwt" + "github.com/zhufuyi/sponge/pkg/logger" + + "eshop/eshop_gw/docs" + "eshop/eshop_gw/internal/config" +) + +type routeFns = []func(r *gin.Engine, groupPathMiddlewares map[string][]gin.HandlerFunc, singlePathMiddlewares map[string][]gin.HandlerFunc) + +var ( + // all route functions + allRouteFns = make(routeFns, 0) + // all middleware functions + allMiddlewareFns = []func(c *middlewareConfig){} +) + +// NewRouter create a new router +func NewRouter() *gin.Engine { //nolint + r := gin.New() + + r.Use(gin.Recovery()) + r.Use(middleware.Cors()) + + if config.Get().HTTP.Timeout > 0 { + // if you need more fine-grained control over your routes, set the timeout in your routes, unsetting the timeout globally here. 
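+ // for example (a sketch, not generated code): leave http.timeout at 0 in the yaml and attach the timeout + // middleware only to a specific route group, e.g. r.Group("/api/v1").Use(middleware.Timeout(30*time.Second))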
+ r.Use(middleware.Timeout(time.Second * time.Duration(config.Get().HTTP.Timeout))) + } + + // request id middleware + r.Use(middleware.RequestID()) + + // logger middleware, to print simple messages, replace middleware.Logging with middleware.SimpleLog + r.Use(middleware.Logging( + middleware.WithLog(logger.Get()), + middleware.WithRequestIDFromContext(), + middleware.WithIgnoreRoutes("/metrics"), // ignore path + )) + + // init jwt middleware + jwt.Init( + //jwt.WithExpire(time.Hour*24), + //jwt.WithSigningKey("123456"), + //jwt.WithSigningMethod(jwt.HS384), + ) + + // metrics middleware + if config.Get().App.EnableMetrics { + r.Use(metrics.Metrics(r, + //metrics.WithMetricsPath("/metrics"), // default is /metrics + metrics.WithIgnoreStatusCodes(http.StatusNotFound), // ignore 404 status codes + )) + } + + // limit middleware + if config.Get().App.EnableLimit { + r.Use(middleware.RateLimit()) + } + + // circuit breaker middleware + if config.Get().App.EnableCircuitBreaker { + r.Use(middleware.CircuitBreaker( + // set http code for circuit breaker, default already includes 500 and 503 + middleware.WithValidCode(errcode.InternalServerError.Code()), + middleware.WithValidCode(errcode.ServiceUnavailable.Code()), + )) + } + + // trace middleware + if config.Get().App.EnableTrace { + r.Use(middleware.Tracing(config.Get().App.Name)) + } + + // profile performance analysis + if config.Get().App.EnableHTTPProfile { + prof.Register(r, prof.WithIOWaitTime()) + } + + // validator + binding.Validator = validator.Init() + + r.GET("/health", handlerfunc.CheckHealth) + r.GET("/ping", handlerfunc.Ping) + r.GET("/codes", handlerfunc.ListCodes) + r.GET("/config", gin.WrapF(errcode.ShowConfig([]byte(config.Show())))) + + // access path /apis/swagger/index.html + swagger.CustomRouter(r, "apis", docs.ApiDocs) + + c := newMiddlewareConfig() + + // set up all middlewares + for _, fn := range allMiddlewareFns { + fn(c) + } + + // register all routes + for _, fn := range allRouteFns { + fn(r, c.groupPathMiddlewares, c.singlePathMiddlewares) + } + + return r +} + +type middlewareConfig struct { + groupPathMiddlewares map[string][]gin.HandlerFunc // middleware functions corresponding to route group + singlePathMiddlewares map[string][]gin.HandlerFunc // middleware functions corresponding to a single route +} + +func newMiddlewareConfig() *middlewareConfig { + return &middlewareConfig{ + groupPathMiddlewares: make(map[string][]gin.HandlerFunc), + singlePathMiddlewares: make(map[string][]gin.HandlerFunc), + } +} + +func (c *middlewareConfig) setGroupPath(groupPath string, handlers ...gin.HandlerFunc) { //nolint + if groupPath == "" { + return + } + if groupPath[0] != '/' { + groupPath = "/" + groupPath + } + + handlerFns, ok := c.groupPathMiddlewares[groupPath] + if !ok { + c.groupPathMiddlewares[groupPath] = handlers + return + } + + c.groupPathMiddlewares[groupPath] = append(handlerFns, handlers...) +} + +func (c *middlewareConfig) setSinglePath(method string, singlePath string, handlers ...gin.HandlerFunc) { //nolint + if method == "" || singlePath == "" { + return + } + + key := getSinglePathKey(method, singlePath) + handlerFns, ok := c.singlePathMiddlewares[key] + if !ok { + c.singlePathMiddlewares[key] = handlers + return + } + + c.singlePathMiddlewares[key] = append(handlerFns, handlers...) 
+} + +func getSinglePathKey(method string, singlePath string) string { //nolint + return strings.ToUpper(method) + "->" + singlePath +} diff --git a/6_micro-cluster/example-2-mono-repo/eshop_gw/internal/rpcclient/comment.go b/6_micro-cluster/example-2-mono-repo/eshop_gw/internal/rpcclient/comment.go new file mode 100644 index 0000000..4ee532d --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/eshop_gw/internal/rpcclient/comment.go @@ -0,0 +1,159 @@ +package rpcclient + +import ( + "context" + "fmt" + "strings" + "sync" + "time" + + "google.golang.org/grpc" + + "github.com/zhufuyi/sponge/pkg/consulcli" + "github.com/zhufuyi/sponge/pkg/etcdcli" + "github.com/zhufuyi/sponge/pkg/grpc/grpccli" + "github.com/zhufuyi/sponge/pkg/logger" + "github.com/zhufuyi/sponge/pkg/nacoscli" + "github.com/zhufuyi/sponge/pkg/servicerd/registry/consul" + "github.com/zhufuyi/sponge/pkg/servicerd/registry/etcd" + "github.com/zhufuyi/sponge/pkg/servicerd/registry/nacos" + + "eshop/eshop_gw/internal/config" +) + +var ( + commentConn *grpc.ClientConn + commentOnce sync.Once +) + +// NewCommentRPCConn instantiate rpc client connection +func NewCommentRPCConn() { + cfg := config.Get() + + serverName := "comment" + var grpcClientCfg config.GrpcClient + for _, cli := range cfg.GrpcClient { + if strings.EqualFold(cli.Name, serverName) { + grpcClientCfg = cli + break + } + } + if grpcClientCfg.Name == "" { + panic(fmt.Sprintf("not found grpc service name '%v' in configuration file(yaml), "+ + "please add gprc service configuration in the configuration file(yaml) under the field grpcClient.", serverName)) + } + + var cliOptions = []grpccli.Option{ + grpccli.WithEnableRequestID(), + grpccli.WithEnableLog(logger.Get()), + } + + if grpcClientCfg.Timeout > 0 { + cliOptions = append(cliOptions, grpccli.WithTimeout(time.Second*time.Duration(grpcClientCfg.Timeout))) + } + + // load balance + if grpcClientCfg.EnableLoadBalance { + cliOptions = append(cliOptions, grpccli.WithEnableLoadBalance()) + } + + // secure + cliOptions = append(cliOptions, grpccli.WithSecure( + grpcClientCfg.ClientSecure.Type, + grpcClientCfg.ClientSecure.ServerName, + grpcClientCfg.ClientSecure.CaFile, + grpcClientCfg.ClientSecure.CertFile, + grpcClientCfg.ClientSecure.KeyFile, + )) + + // token + cliOptions = append(cliOptions, grpccli.WithToken( + grpcClientCfg.ClientToken.Enable, + grpcClientCfg.ClientToken.AppID, + grpcClientCfg.ClientToken.AppKey, + )) + + // if service discovery is not used, connect directly to the rpc service using the ip and port + endpoint := fmt.Sprintf("%s:%d", grpcClientCfg.Host, grpcClientCfg.Port) + + isUseDiscover := false + switch grpcClientCfg.RegistryDiscoveryType { + // discovering services using consul + case "consul": + endpoint = "discovery:///" + grpcClientCfg.Name // connecting to grpc services by service name + cli, err := consulcli.Init(cfg.Consul.Addr, consulcli.WithWaitTime(time.Second*5)) + if err != nil { + panic(fmt.Sprintf("consulcli.Init error: %v, addr: %s", err, cfg.Consul.Addr)) + } + iDiscovery := consul.New(cli) + cliOptions = append(cliOptions, grpccli.WithDiscovery(iDiscovery)) + isUseDiscover = true + // discovering services using etcd + case "etcd": + endpoint = "discovery:///" + grpcClientCfg.Name // Connecting to grpc services by service name + cli, err := etcdcli.Init(cfg.Etcd.Addrs, etcdcli.WithDialTimeout(time.Second*5)) + if err != nil { + panic(fmt.Sprintf("etcdcli.Init error: %v, addr: %v", err, cfg.Etcd.Addrs)) + } + iDiscovery := etcd.New(cli) + cliOptions = append(cliOptions, 
grpccli.WithDiscovery(iDiscovery)) + isUseDiscover = true + // discovering services using nacos + case "nacos": + // example: endpoint = "discovery:///serverName.scheme" + endpoint = "discovery:///" + grpcClientCfg.Name + ".grpc" // Connecting to grpc services by service name + cli, err := nacoscli.NewNamingClient( + cfg.NacosRd.IPAddr, + cfg.NacosRd.Port, + cfg.NacosRd.NamespaceID) + if err != nil { + panic(fmt.Sprintf("nacoscli.NewNamingClient error: %v, ipAddr: %s, port: %d", + err, cfg.NacosRd.IPAddr, cfg.NacosRd.Port)) + } + iDiscovery := nacos.New(cli) + cliOptions = append(cliOptions, grpccli.WithDiscovery(iDiscovery)) + isUseDiscover = true + } + + if cfg.App.EnableTrace { + cliOptions = append(cliOptions, grpccli.WithEnableTrace()) + } + if cfg.App.EnableCircuitBreaker { + cliOptions = append(cliOptions, grpccli.WithEnableCircuitBreaker()) + } + if cfg.App.EnableMetrics { + cliOptions = append(cliOptions, grpccli.WithEnableMetrics()) + } + + msg := "dial grpc server" + if isUseDiscover { + msg += " with service discovery from " + grpcClientCfg.RegistryDiscoveryType + } + logger.Info(msg, logger.String("name", serverName), logger.String("endpoint", endpoint)) + + var err error + commentConn, err = grpccli.Dial(context.Background(), endpoint, cliOptions...) + if err != nil { + panic(fmt.Sprintf("dial rpc server failed: %v, name: %s, endpoint: %s", err, serverName, endpoint)) + } +} + +// GetCommentRPCConn get client conn +func GetCommentRPCConn() *grpc.ClientConn { + if commentConn == nil { + commentOnce.Do(func() { + NewCommentRPCConn() + }) + } + + return commentConn +} + +// CloseCommentRPCConn Close tears down the ClientConn and all underlying connections. +func CloseCommentRPCConn() error { + if commentConn == nil { + return nil + } + + return commentConn.Close() +} diff --git a/6_micro-cluster/example-2-mono-repo/eshop_gw/internal/rpcclient/inventory.go b/6_micro-cluster/example-2-mono-repo/eshop_gw/internal/rpcclient/inventory.go new file mode 100644 index 0000000..b702bab --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/eshop_gw/internal/rpcclient/inventory.go @@ -0,0 +1,159 @@ +package rpcclient + +import ( + "context" + "fmt" + "strings" + "sync" + "time" + + "google.golang.org/grpc" + + "github.com/zhufuyi/sponge/pkg/consulcli" + "github.com/zhufuyi/sponge/pkg/etcdcli" + "github.com/zhufuyi/sponge/pkg/grpc/grpccli" + "github.com/zhufuyi/sponge/pkg/logger" + "github.com/zhufuyi/sponge/pkg/nacoscli" + "github.com/zhufuyi/sponge/pkg/servicerd/registry/consul" + "github.com/zhufuyi/sponge/pkg/servicerd/registry/etcd" + "github.com/zhufuyi/sponge/pkg/servicerd/registry/nacos" + + "eshop/eshop_gw/internal/config" +) + +var ( + inventoryConn *grpc.ClientConn + inventoryOnce sync.Once +) + +// NewInventoryRPCConn instantiate rpc client connection +func NewInventoryRPCConn() { + cfg := config.Get() + + serverName := "inventory" + var grpcClientCfg config.GrpcClient + for _, cli := range cfg.GrpcClient { + if strings.EqualFold(cli.Name, serverName) { + grpcClientCfg = cli + break + } + } + if grpcClientCfg.Name == "" { + panic(fmt.Sprintf("not found grpc service name '%v' in configuration file(yaml), "+ + "please add gprc service configuration in the configuration file(yaml) under the field grpcClient.", serverName)) + } + + var cliOptions = []grpccli.Option{ + grpccli.WithEnableRequestID(), + grpccli.WithEnableLog(logger.Get()), + } + + if grpcClientCfg.Timeout > 0 { + cliOptions = append(cliOptions, grpccli.WithTimeout(time.Second*time.Duration(grpcClientCfg.Timeout))) + } 
+ + // load balance + if grpcClientCfg.EnableLoadBalance { + cliOptions = append(cliOptions, grpccli.WithEnableLoadBalance()) + } + + // secure + cliOptions = append(cliOptions, grpccli.WithSecure( + grpcClientCfg.ClientSecure.Type, + grpcClientCfg.ClientSecure.ServerName, + grpcClientCfg.ClientSecure.CaFile, + grpcClientCfg.ClientSecure.CertFile, + grpcClientCfg.ClientSecure.KeyFile, + )) + + // token + cliOptions = append(cliOptions, grpccli.WithToken( + grpcClientCfg.ClientToken.Enable, + grpcClientCfg.ClientToken.AppID, + grpcClientCfg.ClientToken.AppKey, + )) + + // if service discovery is not used, connect directly to the rpc service using the ip and port + endpoint := fmt.Sprintf("%s:%d", grpcClientCfg.Host, grpcClientCfg.Port) + + isUseDiscover := false + switch grpcClientCfg.RegistryDiscoveryType { + // discovering services using consul + case "consul": + endpoint = "discovery:///" + grpcClientCfg.Name // connecting to grpc services by service name + cli, err := consulcli.Init(cfg.Consul.Addr, consulcli.WithWaitTime(time.Second*5)) + if err != nil { + panic(fmt.Sprintf("consulcli.Init error: %v, addr: %s", err, cfg.Consul.Addr)) + } + iDiscovery := consul.New(cli) + cliOptions = append(cliOptions, grpccli.WithDiscovery(iDiscovery)) + isUseDiscover = true + // discovering services using etcd + case "etcd": + endpoint = "discovery:///" + grpcClientCfg.Name // Connecting to grpc services by service name + cli, err := etcdcli.Init(cfg.Etcd.Addrs, etcdcli.WithDialTimeout(time.Second*5)) + if err != nil { + panic(fmt.Sprintf("etcdcli.Init error: %v, addr: %v", err, cfg.Etcd.Addrs)) + } + iDiscovery := etcd.New(cli) + cliOptions = append(cliOptions, grpccli.WithDiscovery(iDiscovery)) + isUseDiscover = true + // discovering services using nacos + case "nacos": + // example: endpoint = "discovery:///serverName.scheme" + endpoint = "discovery:///" + grpcClientCfg.Name + ".grpc" // Connecting to grpc services by service name + cli, err := nacoscli.NewNamingClient( + cfg.NacosRd.IPAddr, + cfg.NacosRd.Port, + cfg.NacosRd.NamespaceID) + if err != nil { + panic(fmt.Sprintf("nacoscli.NewNamingClient error: %v, ipAddr: %s, port: %d", + err, cfg.NacosRd.IPAddr, cfg.NacosRd.Port)) + } + iDiscovery := nacos.New(cli) + cliOptions = append(cliOptions, grpccli.WithDiscovery(iDiscovery)) + isUseDiscover = true + } + + if cfg.App.EnableTrace { + cliOptions = append(cliOptions, grpccli.WithEnableTrace()) + } + if cfg.App.EnableCircuitBreaker { + cliOptions = append(cliOptions, grpccli.WithEnableCircuitBreaker()) + } + if cfg.App.EnableMetrics { + cliOptions = append(cliOptions, grpccli.WithEnableMetrics()) + } + + msg := "dial grpc server" + if isUseDiscover { + msg += " with service discovery from " + grpcClientCfg.RegistryDiscoveryType + } + logger.Info(msg, logger.String("name", serverName), logger.String("endpoint", endpoint)) + + var err error + inventoryConn, err = grpccli.Dial(context.Background(), endpoint, cliOptions...) + if err != nil { + panic(fmt.Sprintf("dial rpc server failed: %v, name: %s, endpoint: %s", err, serverName, endpoint)) + } +} + +// GetInventoryRPCConn get client conn +func GetInventoryRPCConn() *grpc.ClientConn { + if inventoryConn == nil { + inventoryOnce.Do(func() { + NewInventoryRPCConn() + }) + } + + return inventoryConn +} + +// CloseInventoryRPCConn Close tears down the ClientConn and all underlying connections. 
+func CloseInventoryRPCConn() error { + if inventoryConn == nil { + return nil + } + + return inventoryConn.Close() +} diff --git a/6_micro-cluster/example-2-mono-repo/eshop_gw/internal/rpcclient/product.go b/6_micro-cluster/example-2-mono-repo/eshop_gw/internal/rpcclient/product.go new file mode 100644 index 0000000..185fc6b --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/eshop_gw/internal/rpcclient/product.go @@ -0,0 +1,159 @@ +package rpcclient + +import ( + "context" + "fmt" + "strings" + "sync" + "time" + + "google.golang.org/grpc" + + "github.com/zhufuyi/sponge/pkg/consulcli" + "github.com/zhufuyi/sponge/pkg/etcdcli" + "github.com/zhufuyi/sponge/pkg/grpc/grpccli" + "github.com/zhufuyi/sponge/pkg/logger" + "github.com/zhufuyi/sponge/pkg/nacoscli" + "github.com/zhufuyi/sponge/pkg/servicerd/registry/consul" + "github.com/zhufuyi/sponge/pkg/servicerd/registry/etcd" + "github.com/zhufuyi/sponge/pkg/servicerd/registry/nacos" + + "eshop/eshop_gw/internal/config" +) + +var ( + productConn *grpc.ClientConn + productOnce sync.Once +) + +// NewProductRPCConn instantiate rpc client connection +func NewProductRPCConn() { + cfg := config.Get() + + serverName := "product" + var grpcClientCfg config.GrpcClient + for _, cli := range cfg.GrpcClient { + if strings.EqualFold(cli.Name, serverName) { + grpcClientCfg = cli + break + } + } + if grpcClientCfg.Name == "" { + panic(fmt.Sprintf("not found grpc service name '%v' in configuration file(yaml), "+ + "please add gprc service configuration in the configuration file(yaml) under the field grpcClient.", serverName)) + } + + var cliOptions = []grpccli.Option{ + grpccli.WithEnableRequestID(), + grpccli.WithEnableLog(logger.Get()), + } + + if grpcClientCfg.Timeout > 0 { + cliOptions = append(cliOptions, grpccli.WithTimeout(time.Second*time.Duration(grpcClientCfg.Timeout))) + } + + // load balance + if grpcClientCfg.EnableLoadBalance { + cliOptions = append(cliOptions, grpccli.WithEnableLoadBalance()) + } + + // secure + cliOptions = append(cliOptions, grpccli.WithSecure( + grpcClientCfg.ClientSecure.Type, + grpcClientCfg.ClientSecure.ServerName, + grpcClientCfg.ClientSecure.CaFile, + grpcClientCfg.ClientSecure.CertFile, + grpcClientCfg.ClientSecure.KeyFile, + )) + + // token + cliOptions = append(cliOptions, grpccli.WithToken( + grpcClientCfg.ClientToken.Enable, + grpcClientCfg.ClientToken.AppID, + grpcClientCfg.ClientToken.AppKey, + )) + + // if service discovery is not used, connect directly to the rpc service using the ip and port + endpoint := fmt.Sprintf("%s:%d", grpcClientCfg.Host, grpcClientCfg.Port) + + isUseDiscover := false + switch grpcClientCfg.RegistryDiscoveryType { + // discovering services using consul + case "consul": + endpoint = "discovery:///" + grpcClientCfg.Name // connecting to grpc services by service name + cli, err := consulcli.Init(cfg.Consul.Addr, consulcli.WithWaitTime(time.Second*5)) + if err != nil { + panic(fmt.Sprintf("consulcli.Init error: %v, addr: %s", err, cfg.Consul.Addr)) + } + iDiscovery := consul.New(cli) + cliOptions = append(cliOptions, grpccli.WithDiscovery(iDiscovery)) + isUseDiscover = true + // discovering services using etcd + case "etcd": + endpoint = "discovery:///" + grpcClientCfg.Name // Connecting to grpc services by service name + cli, err := etcdcli.Init(cfg.Etcd.Addrs, etcdcli.WithDialTimeout(time.Second*5)) + if err != nil { + panic(fmt.Sprintf("etcdcli.Init error: %v, addr: %v", err, cfg.Etcd.Addrs)) + } + iDiscovery := etcd.New(cli) + cliOptions = append(cliOptions, 
grpccli.WithDiscovery(iDiscovery)) + isUseDiscover = true + // discovering services using nacos + case "nacos": + // example: endpoint = "discovery:///serverName.scheme" + endpoint = "discovery:///" + grpcClientCfg.Name + ".grpc" // Connecting to grpc services by service name + cli, err := nacoscli.NewNamingClient( + cfg.NacosRd.IPAddr, + cfg.NacosRd.Port, + cfg.NacosRd.NamespaceID) + if err != nil { + panic(fmt.Sprintf("nacoscli.NewNamingClient error: %v, ipAddr: %s, port: %d", + err, cfg.NacosRd.IPAddr, cfg.NacosRd.Port)) + } + iDiscovery := nacos.New(cli) + cliOptions = append(cliOptions, grpccli.WithDiscovery(iDiscovery)) + isUseDiscover = true + } + + if cfg.App.EnableTrace { + cliOptions = append(cliOptions, grpccli.WithEnableTrace()) + } + if cfg.App.EnableCircuitBreaker { + cliOptions = append(cliOptions, grpccli.WithEnableCircuitBreaker()) + } + if cfg.App.EnableMetrics { + cliOptions = append(cliOptions, grpccli.WithEnableMetrics()) + } + + msg := "dial grpc server" + if isUseDiscover { + msg += " with service discovery from " + grpcClientCfg.RegistryDiscoveryType + } + logger.Info(msg, logger.String("name", serverName), logger.String("endpoint", endpoint)) + + var err error + productConn, err = grpccli.Dial(context.Background(), endpoint, cliOptions...) + if err != nil { + panic(fmt.Sprintf("dial rpc server failed: %v, name: %s, endpoint: %s", err, serverName, endpoint)) + } +} + +// GetProductRPCConn get client conn +func GetProductRPCConn() *grpc.ClientConn { + if productConn == nil { + productOnce.Do(func() { + NewProductRPCConn() + }) + } + + return productConn +} + +// CloseProductRPCConn Close tears down the ClientConn and all underlying connections. +func CloseProductRPCConn() error { + if productConn == nil { + return nil + } + + return productConn.Close() +} diff --git a/6_micro-cluster/example-2-mono-repo/eshop_gw/internal/server/http.go b/6_micro-cluster/example-2-mono-repo/eshop_gw/internal/server/http.go new file mode 100644 index 0000000..6497e5e --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/eshop_gw/internal/server/http.go @@ -0,0 +1,88 @@ +package server + +import ( + "context" + "fmt" + "net/http" + "time" + + "github.com/gin-gonic/gin" + + "github.com/zhufuyi/sponge/pkg/app" + "github.com/zhufuyi/sponge/pkg/servicerd/registry" + + "eshop/eshop_gw/internal/routers" +) + +var _ app.IServer = (*httpServer)(nil) + +type httpServer struct { + addr string + server *http.Server + + instance *registry.ServiceInstance + iRegistry registry.Registry +} + +// Start http service +func (s *httpServer) Start() error { + if s.iRegistry != nil { + ctx, _ := context.WithTimeout(context.Background(), 5*time.Second) //nolint + if err := s.iRegistry.Register(ctx, s.instance); err != nil { + return err + } + } + + if err := s.server.ListenAndServe(); err != nil && err != http.ErrServerClosed { + return fmt.Errorf("listen server error: %v", err) + } + return nil +} + +// Stop http service +func (s *httpServer) Stop() error { + if s.iRegistry != nil { + ctx, cancel := context.WithTimeout(context.Background(), 2*time.Second) + go func() { + _ = s.iRegistry.Deregister(ctx, s.instance) + cancel() + }() + <-ctx.Done() + } + + ctx, _ := context.WithTimeout(context.Background(), 3*time.Second) //nolint + return s.server.Shutdown(ctx) +} + +// String comment +func (s *httpServer) String() string { + return "http service address " + s.addr +} + +// NewHTTPServer creates a new http server +func NewHTTPServer(addr string, opts ...HTTPOption) app.IServer { + o := defaultHTTPOptions() + 
o.apply(opts...) + + if o.isProd { + gin.SetMode(gin.ReleaseMode) + } else { + gin.SetMode(gin.DebugMode) + } + + router := routers.NewRouter() + server := &http.Server{ + Addr: addr, + Handler: router, + //ReadTimeout: time.Second*30, + //WriteTimeout: time.Second*60, + MaxHeaderBytes: 1 << 20, + } + + return &httpServer{ + addr: addr, + server: server, + iRegistry: o.iRegistry, + instance: o.instance, + } +} diff --git a/a_micro-grpc-http-protobuf/internal/server/http_option.go b/6_micro-cluster/example-2-mono-repo/eshop_gw/internal/server/http_option.go similarity index 100% rename from a_micro-grpc-http-protobuf/internal/server/http_option.go rename to 6_micro-cluster/example-2-mono-repo/eshop_gw/internal/server/http_option.go diff --git a/6_micro-cluster/example-2-mono-repo/eshop_gw/internal/server/http_test.go b/6_micro-cluster/example-2-mono-repo/eshop_gw/internal/server/http_test.go new file mode 100644 index 0000000..f28e786 --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/eshop_gw/internal/server/http_test.go @@ -0,0 +1,115 @@ +package server + +import ( + "context" + "fmt" + "net/http" + "testing" + "time" + + "github.com/gin-gonic/gin" + "github.com/stretchr/testify/assert" + + "github.com/zhufuyi/sponge/pkg/servicerd/registry" + "github.com/zhufuyi/sponge/pkg/utils" + + "eshop/eshop_gw/configs" + "eshop/eshop_gw/internal/config" +) + +// need real database to test +func TestHTTPServer(t *testing.T) { + err := config.Init(configs.Path("eshop_gw.yml")) + if err != nil { + t.Fatal(err) + } + config.Get().App.EnableMetrics = true + config.Get().App.EnableTrace = true + config.Get().App.EnableHTTPProfile = true + config.Get().App.EnableLimit = true + config.Get().App.EnableCircuitBreaker = true + + port, _ := utils.GetAvailablePort() + addr := fmt.Sprintf(":%d", port) + gin.SetMode(gin.ReleaseMode) + + utils.SafeRunWithTimeout(time.Second*2, func(cancel context.CancelFunc) { + server := NewHTTPServer(addr, + WithHTTPIsProd(true), + WithHTTPRegistry(&iRegistry{}, ®istry.ServiceInstance{}), + ) + assert.NotNil(t, server) + cancel() + }) + utils.SafeRunWithTimeout(time.Second, func(cancel context.CancelFunc) { + server := NewHTTPServer(addr) + assert.NotNil(t, server) + cancel() + }) + + utils.SafeRunWithTimeout(time.Second*2, func(cancel context.CancelFunc) { + server := NewHTTPServer(addr, + WithHTTPIsProd(true), + WithHTTPRegistry(&iRegistry{}, ®istry.ServiceInstance{}), + ) + assert.NotNil(t, server) + cancel() + }) + utils.SafeRunWithTimeout(time.Second, func(cancel context.CancelFunc) { + server := NewHTTPServer(addr) + assert.NotNil(t, server) + cancel() + }) +} + +func TestHTTPServerMock(t *testing.T) { + err := config.Init(configs.Path("eshop_gw.yml")) + if err != nil { + t.Fatal(err) + } + config.Get().App.EnableMetrics = true + config.Get().App.EnableTrace = true + config.Get().App.EnableHTTPProfile = true + config.Get().App.EnableLimit = true + config.Get().App.EnableCircuitBreaker = true + + port, _ := utils.GetAvailablePort() + addr := fmt.Sprintf(":%d", port) + + o := defaultHTTPOptions() + if o.isProd { + gin.SetMode(gin.ReleaseMode) + } + s := &httpServer{ + addr: addr, + instance: ®istry.ServiceInstance{}, + iRegistry: &iRegistry{}, + } + s.server = &http.Server{ + Addr: addr, + Handler: http.NewServeMux(), + MaxHeaderBytes: 1 << 20, + } + + go func() { + time.Sleep(time.Second * 3) + _ = s.server.Shutdown(context.Background()) + }() + + str := s.String() + assert.NotEmpty(t, str) + err = s.Start() + assert.NoError(t, err) + err = s.Stop() + assert.NoError(t, 
err) +} + +type iRegistry struct{} + +func (i *iRegistry) Register(ctx context.Context, service *registry.ServiceInstance) error { + return nil +} + +func (i *iRegistry) Deregister(ctx context.Context, service *registry.ServiceInstance) error { + return nil +} diff --git a/6_micro-cluster/example-2-mono-repo/eshop_gw/internal/service/eshop_gw.go b/6_micro-cluster/example-2-mono-repo/eshop_gw/internal/service/eshop_gw.go new file mode 100644 index 0000000..ffc51a7 --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/eshop_gw/internal/service/eshop_gw.go @@ -0,0 +1,75 @@ +// Code generated by https://github.com/zhufuyi/sponge + +package service + +import ( + "context" + + "github.com/zhufuyi/sponge/pkg/grpc/interceptor" + "github.com/zhufuyi/sponge/pkg/logger" + + commentV1 "eshop/api/comment/v1" + eshop_gwV1 "eshop/api/eshop_gw/v1" + inventoryV1 "eshop/api/inventory/v1" + productV1 "eshop/api/product/v1" + "eshop/eshop_gw/internal/ecode" + "eshop/eshop_gw/internal/rpcclient" +) + +var _ eshop_gwV1.EShopGwLogicer = (*eShopGwClient)(nil) + +type eShopGwClient struct { + commentCli commentV1.CommentClient + inventoryCli inventoryV1.InventoryClient + productCli productV1.ProductClient +} + +// NewEShopGwClient create a client +func NewEShopGwClient() eshop_gwV1.EShopGwLogicer { + return &eShopGwClient{ + commentCli: commentV1.NewCommentClient(rpcclient.GetCommentRPCConn()), + inventoryCli: inventoryV1.NewInventoryClient(rpcclient.GetInventoryRPCConn()), + productCli: productV1.NewProductClient(rpcclient.GetProductRPCConn()), + } +} + +// GetDetailsByProductID get page detail by product id +func (c *eShopGwClient) GetDetailsByProductID(ctx context.Context, req *eshop_gwV1.GetDetailsByProductIDRequest) (*eshop_gwV1.GetDetailsByProductIDReply, error) { + err := req.Validate() + if err != nil { + logger.Warn("req.Validate error", logger.Err(err), logger.Any("req", req), interceptor.CtxRequestIDField(ctx)) + return nil, ecode.StatusInvalidParams.Err() + } + + // fill in the business logic code here + + productReply, err := c.productCli.GetByID(ctx, &productV1.GetByIDRequest{ + Id: req.ProductID, + }) + if err != nil { + return nil, err + } + logger.Info("get product info successfully", interceptor.CtxRequestIDField(ctx)) + + inventoryReply, err := c.inventoryCli.GetByID(ctx, &inventoryV1.GetByIDRequest{ + Id: productReply.InventoryID, + }) + if err != nil { + return nil, err + } + logger.Info("get inventory info successfully", interceptor.CtxRequestIDField(ctx)) + + commentReply, err := c.commentCli.ListByProductID(ctx, &commentV1.ListByProductIDRequest{ + ProductID: req.ProductID, + }) + if err != nil { + return nil, err + } + logger.Info("list comments info successfully", interceptor.CtxRequestIDField(ctx)) + + return &eshop_gwV1.GetDetailsByProductIDReply{ + ProductDetail: productReply.ProductDetail, + InventoryDetail: inventoryReply.InventoryDetail, + CommentDetails: commentReply.CommentDetails, + }, nil +} diff --git a/6_micro-cluster/example-2-mono-repo/eshop_gw/scripts/binary-package.sh b/6_micro-cluster/example-2-mono-repo/eshop_gw/scripts/binary-package.sh new file mode 100644 index 0000000..ecb2226 --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/eshop_gw/scripts/binary-package.sh @@ -0,0 +1,24 @@ +#!/bin/bash + +serviceName="eshop_gw" + +mkdir -p ${serviceName}-binary/configs + +cp -f deployments/binary/run.sh ${serviceName}-binary +chmod +x ${serviceName}-binary/run.sh + +cp -f deployments/binary/deploy.sh ${serviceName}-binary +chmod +x ${serviceName}-binary/deploy.sh + +cp -f 
cmd/${serviceName}/${serviceName} ${serviceName}-binary +cp -f configs/${serviceName}.yml ${serviceName}-binary/configs +cp -f configs/${serviceName}_cc.yml ${serviceName}-binary/configs + +# compressing binary file +#upx -9 ${serviceName} + +tar zcvf ${serviceName}-binary.tar.gz ${serviceName}-binary +rm -rf ${serviceName}-binary + +echo "" +echo "packaged binary successfully, output file = ${serviceName}-binary.tar.gz" diff --git a/6_micro-cluster/example-2-mono-repo/eshop_gw/scripts/build/Dockerfile b/6_micro-cluster/example-2-mono-repo/eshop_gw/scripts/build/Dockerfile new file mode 100644 index 0000000..8c0c767 --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/eshop_gw/scripts/build/Dockerfile @@ -0,0 +1,25 @@ +FROM alpine:latest +MAINTAINER zhufuyi "g.zhufuyi@gmail.com" + +# set the time zone to Shanghai +RUN apk add tzdata \ + && cp /usr/share/zoneinfo/Asia/Shanghai /etc/localtime \ + && echo "Asia/Shanghai" > /etc/timezone \ + && apk del tzdata + +# add curl, used for http service checking; it can be omitted if deployed in k8s +RUN apk add curl + +COPY configs/ /app/configs/ +COPY eshop_gw /app/eshop_gw +RUN chmod +x /app/eshop_gw + +# http port +EXPOSE 8080 + + +WORKDIR /app + +CMD ["./eshop_gw", "-c", "configs/eshop_gw.yml"] +# if you use the Configuration Center, replace eshop_gw.yml with the Configuration Center configuration. +#CMD ["./eshop_gw", "-c", "configs/eshop_gw.yml", "-enable-cc"] diff --git a/6_micro-cluster/example-2-mono-repo/eshop_gw/scripts/build/Dockerfile_build b/6_micro-cluster/example-2-mono-repo/eshop_gw/scripts/build/Dockerfile_build new file mode 100644 index 0000000..805c2f6 --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/eshop_gw/scripts/build/Dockerfile_build @@ -0,0 +1,41 @@ +# Need to package the code first `tar zcf eshop_gw.tar.gz $(ls)` and move it to the same directory as Dockerfile + +# Compile the go code; you can specify the golang version +FROM golang:1.21-alpine as build +COPY . /go/src/eshop_gw +WORKDIR /go/src/eshop_gw +RUN tar zxf eshop_gw.tar.gz +RUN go env -w GOPROXY=https://goproxy.cn,direct +RUN go mod download +RUN CGO_ENABLED=0 GOOS=linux GOARCH=amd64 go build -o /eshop_gw cmd/eshop_gw/main.go + +# compressing binary files +#cd / +#upx -9 eshop_gw + + +# building images with binary +FROM alpine:latest +MAINTAINER zhufuyi "g.zhufuyi@gmail.com" + +# set the time zone to Shanghai +RUN apk add tzdata \ + && cp /usr/share/zoneinfo/Asia/Shanghai /etc/localtime \ + && echo "Asia/Shanghai" > /etc/timezone \ + && apk del tzdata + +# add curl, used for http service checking; it can be omitted if deployed in k8s +RUN apk add curl + +COPY --from=build /eshop_gw /app/eshop_gw +COPY --from=build /go/src/eshop_gw/configs/eshop_gw.yml /app/configs/eshop_gw.yml + +# http port +EXPOSE 8080 + + +WORKDIR /app + +CMD ["./eshop_gw", "-c", "configs/eshop_gw.yml"] +# if you use the Configuration Center, replace eshop_gw.yml with the Configuration Center configuration.
+#CMD ["./eshop_gw", "-c", "configs/eshop_gw.yml", "-enable-cc"] diff --git a/6_micro-cluster/example-2-mono-repo/eshop_gw/scripts/build/Dockerfile_test b/6_micro-cluster/example-2-mono-repo/eshop_gw/scripts/build/Dockerfile_test new file mode 100644 index 0000000..302e926 --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/eshop_gw/scripts/build/Dockerfile_test @@ -0,0 +1,16 @@ +# Need to package the code first `tar zcf eshop_gw.tar.gz $(ls)` and move it to the same directory as Dockerfile +# rpc server source code, used to test rpc methods +FROM golang:1.21-alpine +MAINTAINER zhufuyi "g.zhufuyi@gmail.com" + +# go test dependency packages +RUN apk add bash alpine-sdk build-base gcc + +COPY . /go/src/eshop_gw +WORKDIR /go/src/eshop_gw +RUN tar zxf eshop_gw.tar.gz +RUN go env -w GOPROXY=https://goproxy.cn,direct +RUN go mod download +RUN rm -f eshop_gw.tar.gz + +CMD ["sleep","86400"] diff --git a/6_micro-cluster/example-2-mono-repo/eshop_gw/scripts/build/README.md b/6_micro-cluster/example-2-mono-repo/eshop_gw/scripts/build/README.md new file mode 100644 index 0000000..ba0f3e8 --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/eshop_gw/scripts/build/README.md @@ -0,0 +1,4 @@ + +- `Dockerfile`: build the image by directly copying the compiled binaries, fast build speed. +- `Dockerfile_build`: two-stage build of the image, slower build speed, you can specify the golang version. +- `Dockerfile_test`: container for testing rpc services. diff --git a/6_micro-cluster/example-2-mono-repo/eshop_gw/scripts/deploy-binary.sh b/6_micro-cluster/example-2-mono-repo/eshop_gw/scripts/deploy-binary.sh new file mode 100644 index 0000000..7bd09a6 --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/eshop_gw/scripts/deploy-binary.sh @@ -0,0 +1,35 @@ +#!/usr/bin/expect + +set serviceName "eshop_gw" + +# parameters +set username [lindex $argv 0] +set password [lindex $argv 1] +set hostname [lindex $argv 2] + +set timeout 30 + +spawn scp -r ./${serviceName}-binary.tar.gz ${username}@${hostname}:/tmp/ +#expect "*yes/no*" +#send "yes\r" +expect "*password:*" +send "${password}\r" +expect eof + +spawn ssh ${username}@${hostname} +#expect "*yes/no*" +#send "yes\r" +expect "*password:*" +send "${password}\r" + +# execute a command or script +expect "*${username}@*" +send "cd /tmp && tar zxvf ${serviceName}-binary.tar.gz\r" +expect "*${username}@*" +send "bash /tmp/${serviceName}-binary/deploy.sh\r" + +# logging out of a session +expect "*${username}@*" +send "exit\r" + +expect eof diff --git a/6_micro-cluster/example-2-mono-repo/eshop_gw/scripts/deploy-docker.sh b/6_micro-cluster/example-2-mono-repo/eshop_gw/scripts/deploy-docker.sh new file mode 100644 index 0000000..df12ecd --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/eshop_gw/scripts/deploy-docker.sh @@ -0,0 +1,32 @@ +#!/bin/bash + +dockerComposeFilePath="deployments/docker-compose" + +function checkResult() { + result=$1 + if [ ${result} -ne 0 ]; then + exit ${result} + fi +} + +mkdir -p ${dockerComposeFilePath}/configs +if [ ! -f "${dockerComposeFilePath}/configs/eshop_gw.yml" ];then + cp configs/eshop_gw.yml ${dockerComposeFilePath}/configs +fi + +# shellcheck disable=SC2164 +cd ${dockerComposeFilePath} + +docker-compose down +checkResult $? + +docker-compose up -d +checkResult $?
+ +colorCyan='\033[1;36m' +highBright='\033[1m' +markEnd='\033[0m' + +echo "" +echo -e "run service successfully, if you want to stop the service, go into the ${highBright}${dockerComposeFilePath}${markEnd} directory and execute the command ${colorCyan}docker-compose down${markEnd}." +echo "" diff --git a/6_micro-cluster/example-2-mono-repo/eshop_gw/scripts/deploy-k8s.sh b/6_micro-cluster/example-2-mono-repo/eshop_gw/scripts/deploy-k8s.sh new file mode 100644 index 0000000..adef045 --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/eshop_gw/scripts/deploy-k8s.sh @@ -0,0 +1,31 @@ +#!/bin/bash + +SERVER_NAME="eshop_gw" +DEPLOY_FILE="deployments/kubernetes/${SERVER_NAME}-deployment.yml" + +function checkResult() { + result=$1 + if [ ${result} -ne 0 ]; then + exit ${result} + fi +} + +# Determine whether the deployment file exists +if [ ! -f "${DEPLOY_FILE}" ];then + echo "Deployment file ${DEPLOY_FILE} does not exist" + checkResult 1 +fi + +# Check if you are authorised to operate k8s +echo "kubectl version" +kubectl version +checkResult $? + +echo "kubectl delete -f ${DEPLOY_FILE} --ignore-not-found" +kubectl delete -f ${DEPLOY_FILE} --ignore-not-found +checkResult $? + +sleep 1 + +echo "kubectl apply -f ${DEPLOY_FILE}" +kubectl apply -f ${DEPLOY_FILE} diff --git a/6_micro-cluster/example-2-mono-repo/eshop_gw/scripts/image-build-local.sh b/6_micro-cluster/example-2-mono-repo/eshop_gw/scripts/image-build-local.sh new file mode 100644 index 0000000..31accb4 --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/eshop_gw/scripts/image-build-local.sh @@ -0,0 +1,38 @@ +#!/bin/bash + +# build the image for local docker, using the binaries, if you want to reduce the size of the image, +# use upx to compress the binaries before building the image. + +serverName="eshop_gw" +# image name of the service, prohibit uppercase letters in names. +IMAGE_NAME="eshop/eshop-gw" +# Dockerfile file directory +DOCKERFILE_PATH="scripts/build" +DOCKERFILE="${DOCKERFILE_PATH}/Dockerfile" + +mv -f cmd/${serverName}/${serverName} ${DOCKERFILE_PATH}/${serverName} + +# compressing binary file +#cd ${DOCKERFILE_PATH} +#upx -9 ${serverName} +#cd - + +mkdir -p ${DOCKERFILE_PATH}/configs && cp -f configs/${serverName}.yml ${DOCKERFILE_PATH}/configs/ +echo "docker build -f ${DOCKERFILE} -t ${IMAGE_NAME}:latest ${DOCKERFILE_PATH}" +docker build -f ${DOCKERFILE} -t ${IMAGE_NAME}:latest ${DOCKERFILE_PATH} + + +if [ -f "${DOCKERFILE_PATH}/${serverName}" ]; then + rm -f ${DOCKERFILE_PATH}/${serverName} +fi + +if [ -d "${DOCKERFILE_PATH}/configs" ]; then + rm -rf ${DOCKERFILE_PATH}/configs +fi + +# delete none image +noneImages=$(docker images | grep "<none>" | awk '{print $3}') +if [ "X${noneImages}" != "X" ]; then + docker rmi ${noneImages} > /dev/null +fi +exit 0 diff --git a/6_micro-cluster/example-2-mono-repo/eshop_gw/scripts/image-build.sh b/6_micro-cluster/example-2-mono-repo/eshop_gw/scripts/image-build.sh new file mode 100644 index 0000000..3517a8e --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/eshop_gw/scripts/image-build.sh @@ -0,0 +1,58 @@ +#!/bin/bash + +# build the docker image using the binaries, if you want to reduce the size of the image, +# use upx to compress the binaries before building the image. + +serverName="eshop_gw" +# image name of the service, prohibit uppercase letters in names.
+IMAGE_NAME="eshop/eshop-gw" +# Dockerfile file directory +DOCKERFILE_PATH="scripts/build" +DOCKERFILE="${DOCKERFILE_PATH}/Dockerfile" + +# image repo address, REPO_HOST="ip or domain", passed in via the first parameter +REPO_HOST=$1 +if [ "X${REPO_HOST}" = "X" ];then + echo "param 'repo host' cannot be empty, example: ./image-build.sh hub.docker.com v1.0.0" + exit 1 +fi +# the version tag, which defaults to latest if empty, is passed in via the second parameter +TAG=$2 +if [ "X${TAG}" = "X" ];then + TAG="latest" +fi +# image name and tag +IMAGE_NAME_TAG="${REPO_HOST}/${IMAGE_NAME}:${TAG}" + +# binary executable files +BIN_FILE="cmd/${serverName}/${serverName}" +# configuration file directory +CONFIG_PATH="configs" + +CGO_ENABLED=0 GOOS=linux GOARCH=amd64 go build -o ${BIN_FILE} cmd/${serverName}/*.go +mv -f ${BIN_FILE} ${DOCKERFILE_PATH} +mkdir -p ${DOCKERFILE_PATH}/${CONFIG_PATH} && cp -f ${CONFIG_PATH}/${serverName}.yml ${DOCKERFILE_PATH}/${CONFIG_PATH} + +# compressing binary file +#cd ${DOCKERFILE_PATH} +#upx -9 ${serverName} +#cd - + +echo "docker build -f ${DOCKERFILE} -t ${IMAGE_NAME_TAG} ${DOCKERFILE_PATH}" +docker build -f ${DOCKERFILE} -t ${IMAGE_NAME_TAG} ${DOCKERFILE_PATH} + + +if [ -f "${DOCKERFILE_PATH}/${serverName}" ]; then + rm -f ${DOCKERFILE_PATH}/${serverName} +fi + +if [ -d "${DOCKERFILE_PATH}/configs" ]; then + rm -rf ${DOCKERFILE_PATH}/configs +fi + +# delete none image
+noneImages=$(docker images | grep "<none>" | awk '{print $3}') +if [ "X${noneImages}" != "X" ]; then + docker rmi ${noneImages} > /dev/null +fi +exit 0 diff --git a/6_micro-cluster/example-2-mono-repo/eshop_gw/scripts/image-build2.sh b/6_micro-cluster/example-2-mono-repo/eshop_gw/scripts/image-build2.sh new file mode 100644 index 0000000..15e4d40 --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/eshop_gw/scripts/image-build2.sh @@ -0,0 +1,38 @@ +#!/bin/bash + +# two-stage build docker image + +serverName="eshop_gw" +# image name of the service, prohibit uppercase letters in names. +IMAGE_NAME="eshop/eshop-gw" +# Dockerfile file directory +DOCKERFILE_PATH="scripts/build" +DOCKERFILE="${DOCKERFILE_PATH}/Dockerfile_build" + +# image repo address, REPO_HOST="ip or domain", passed in via the first parameter +REPO_HOST=$1 +if [ "X${REPO_HOST}" = "X" ];then + echo "param 'repo host' cannot be empty, example: ./image-build.sh hub.docker.com v1.0.0" + exit 1 +fi +# the version tag, which defaults to latest if empty, is passed in via the second parameter +TAG=$2 +if [ "X${TAG}" = "X" ];then + TAG="latest" +fi +# image name and tag +IMAGE_NAME_TAG="${REPO_HOST}/${IMAGE_NAME}:${TAG}" + +PROJECT_FILES=$(ls) +tar zcf ${serverName}.tar.gz ${PROJECT_FILES} +mv -f ${serverName}.tar.gz ${DOCKERFILE_PATH} +echo "docker build --force-rm -f ${DOCKERFILE} -t ${IMAGE_NAME_TAG} ${DOCKERFILE_PATH}" +docker build --force-rm -f ${DOCKERFILE} -t ${IMAGE_NAME_TAG} ${DOCKERFILE_PATH} +rm -rf ${DOCKERFILE_PATH}/${serverName}.tar.gz +# delete none image +noneImages=$(docker images | grep "<none>" | awk '{print $3}') +if [ "X${noneImages}" != "X" ]; then + docker rmi ${noneImages} > /dev/null +fi +exit 0 + diff --git a/6_micro-cluster/example-2-mono-repo/eshop_gw/scripts/image-push.sh b/6_micro-cluster/example-2-mono-repo/eshop_gw/scripts/image-push.sh new file mode 100644 index 0000000..e49ce1a --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/eshop_gw/scripts/image-push.sh @@ -0,0 +1,53 @@ +#!/bin/bash + +# image name, prohibit uppercase letters in names.
+IMAGE_NAME="eshop/eshop-gw" + +# image repo address, passed in via the first parameter +REPO_HOST=$1 +if [ "X${REPO_HOST}" = "X" ];then + echo "param 'repo host' cannot be empty, example: ./image-push.sh hub.docker.com v1.0.0" + exit 1 +fi + +# version tag, passed in via the second parameter, if empty, defaults to latest +TAG=$2 +if [ "X${TAG}" = "X" ];then + TAG="latest" +fi +# image name and tag +IMAGE_NAME_TAG="${REPO_HOST}/${IMAGE_NAME}:${TAG}" + +function checkResult() { + result=$1 + if [ ${result} -ne 0 ]; then + exit ${result} + fi +} + +# image repository host, https://index.docker.io/v1 is the official docker image repository +IMAGE_REPO_HOST="https://index.docker.io/v1" +# check if you are authorized to log into docker +function checkLogin() { + loginStatus=$(cat /root/.docker/config.json | grep "${IMAGE_REPO_HOST}") + if [ "X${loginStatus}" = "X" ];then + echo "docker is not logged into the image repository" + checkResult 1 + fi +} + +checkLogin + +# push image to image repository +echo "docker push ${IMAGE_NAME_TAG}" +docker push ${IMAGE_NAME_TAG} +checkResult $? +echo "docker push image success." + +sleep 1 + +# delete image +echo "docker rmi -f ${IMAGE_NAME_TAG}" +docker rmi -f ${IMAGE_NAME_TAG} +checkResult $? +echo "docker remove image success." diff --git a/6_micro-cluster/example-2-mono-repo/eshop_gw/scripts/image-rpc-test.sh b/6_micro-cluster/example-2-mono-repo/eshop_gw/scripts/image-rpc-test.sh new file mode 100644 index 0000000..e181f8a --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/eshop_gw/scripts/image-rpc-test.sh @@ -0,0 +1,33 @@ +#!/bin/bash + +# build rpc service test image + +serverName="eshop_gw" +# image name of the service, prohibit uppercase letters in names. +IMAGE_NAME="eshop/eshop-gw.rpc-test" +# Dockerfile file directory +DOCKERFILE_PATH="scripts/build" +DOCKERFILE="${DOCKERFILE_PATH}/Dockerfile_test" + +# image repo address, REPO_HOST="ip or domain", passed in via the first parameter +REPO_HOST=$1 +if [ "X${REPO_HOST}" = "X" ];then + echo "param 'repo host' cannot be empty, example: ./image-rpc-test.sh hub.docker.com v1.0.0" + exit 1 +fi +# the version tag, which defaults to latest if empty, is passed in via the second parameter +TAG=$2 +if [ "X${TAG}" = "X" ];then + TAG="latest" +fi +# image name and tag +IMAGE_NAME_TAG="${REPO_HOST}/${IMAGE_NAME}:${TAG}" + +PROJECT_FILES=$(ls) +tar zcf ${serverName}.tar.gz ${PROJECT_FILES} +mv -f ${serverName}.tar.gz ${DOCKERFILE_PATH} + +echo "docker build -f ${DOCKERFILE} -t ${IMAGE_NAME_TAG} ${DOCKERFILE_PATH}" +docker build --force-rm -f ${DOCKERFILE} -t ${IMAGE_NAME_TAG} ${DOCKERFILE_PATH} + +rm -rf ${DOCKERFILE_PATH}/${serverName}.tar.gz diff --git a/6_micro-cluster/example-2-mono-repo/eshop_gw/scripts/patch-mono.sh b/6_micro-cluster/example-2-mono-repo/eshop_gw/scripts/patch-mono.sh new file mode 100644 index 0000000..ef49030 --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/eshop_gw/scripts/patch-mono.sh @@ -0,0 +1,36 @@ +#!/bin/bash + +goModFile="go.mod" +thirdPartyProtoDir="third_party" +genServerType="grpc-gw-pb" + +function checkResult() { + result=$1 + if [ ${result} -ne 0 ]; then + exit ${result} + fi +} + +if [ ! -f "../$goModFile" ]; then + sponge patch copy-go-mod -f + checkResult $? + mv -f go.mod .. + mv -f go.sum .. +fi + +if [ "$genServerType"x != "http"x ]; then + if [ ! -d "../$thirdPartyProtoDir" ]; then + sponge patch copy-third-party-proto + checkResult $? + mv -f $thirdPartyProtoDir .. + fi +fi + +if [ "$genServerType"x = "grpc"x ]; then + if [ ! 
-d "../api/types" ]; then + sponge patch gen-types-pb --out=. + checkResult $? + mv -f api/types ../api + rmdir api + fi +fi diff --git a/6_micro-cluster/example-2-mono-repo/eshop_gw/scripts/patch.sh b/6_micro-cluster/example-2-mono-repo/eshop_gw/scripts/patch.sh new file mode 100644 index 0000000..f06f10a --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/eshop_gw/scripts/patch.sh @@ -0,0 +1,81 @@ +#!/bin/bash + +patchType=$1 +typesPb="types-pb" +initMysql="init-mysql" +initMongodb="init-mongodb" +initTidb="init-tidb" +initPostgresql="init-postgresql" +initSqlite="init-sqlite" + +function checkResult() { + result=$1 + if [ ${result} -ne 0 ]; then + exit ${result} + fi +} + +function importPkg() { + go mod tidy +} + +function generateTypesPbCode() { + + if [ ! -d "../api/types" ]; then + sponge patch gen-types-pb --out=./ + checkResult $? + mv -f api/types ../api + rmdir api + fi + checkResult $? +} + +function generateInitMysqlCode() { + sponge patch gen-db-init --db-driver=mysql --out=./ + checkResult $? + importPkg +} + +function generateInitMongodbCode() { + sponge patch gen-db-init --db-driver=mongodb --out=./ + checkResult $? + importPkg +} + +function generateInitTidbCode() { + sponge patch gen-db-init --db-driver=tidb --out=./ + checkResult $? + importPkg +} + +function generateInitPostgresqlCode() { + sponge patch gen-db-init --db-driver=postgresql --out=./ + checkResult $? + importPkg +} + +function generateInitSqliteCode() { + sponge patch gen-db-init --db-driver=sqlite --out=./ + checkResult $? + importPkg +} + +if [ "$patchType" = "$typesPb" ]; then + generateTypesPbCode +elif [ "$patchType" = "$initMysql" ]; then + generateInitMysqlCode +elif [ "$patchType" = "$initMongodb" ]; then + generateInitMongodbCode +elif [ "$patchType" = "$initTidb" ]; then + generateInitTidbCode +elif [ "$patchType" = "$initPostgresql" ]; then + generateInitPostgresqlCode +elif [ "$patchType" = "$initSqlite" ]; then + generateInitSqliteCode +else + echo "invalid patch type: '$patchType'" + echo "supported types: $initMysql, $initMongodb, $initTidb, $initPostgresql, $initSqlite, $typesPb" + echo "e.g. make patch TYPE=init-mysql" + echo "" + exit 1 +fi diff --git a/6_micro-cluster/example-2-mono-repo/eshop_gw/scripts/proto-doc.sh b/6_micro-cluster/example-2-mono-repo/eshop_gw/scripts/proto-doc.sh new file mode 100644 index 0000000..1618483 --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/eshop_gw/scripts/proto-doc.sh @@ -0,0 +1,46 @@ +#!/bin/bash + +# the directory where the proto files are located +bash scripts/patch-mono.sh +cd .. + +protoBasePath="api" +allProtoFiles="" + +function checkResult() { + result=$1 + if [ ${result} -ne 0 ]; then + exit ${result} + fi +} + +function listFiles(){ + cd $1 + items=$(ls) + + for item in $items; do + if [ -d "$item" ]; then + listFiles $item + else + if [ "${item#*.}"x = "proto"x ];then + file=$(pwd)/${item} + protoFile="${protoBasePath}${file#*${protoBasePath}}" + allProtoFiles="${allProtoFiles} ${protoFile}" + fi + fi + done + cd .. +} + +# get all proto file paths +listFiles $protoBasePath + +protoc --proto_path=. --proto_path=./third_party \ + --doc_out=. --doc_opt=html,apis.html \ + $allProtoFiles + +checkResult $? 
+ +mv -f apis.html eshop_gw/docs/apis.html + +echo "generate proto doc file successfully, view in eshop_gw/docs/apis.html" diff --git a/6_micro-cluster/example-2-mono-repo/eshop_gw/scripts/protoc.sh b/6_micro-cluster/example-2-mono-repo/eshop_gw/scripts/protoc.sh new file mode 100644 index 0000000..fec3b8f --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/eshop_gw/scripts/protoc.sh @@ -0,0 +1,221 @@ +#!/bin/bash + +bash scripts/patch-mono.sh +cd .. + +protoBasePath="api" +allProtoFiles="" + +specifiedProtoFilePath=$1 +specifiedProtoFilePaths="" + +colorGray='\033[1;30m' +colorGreen='\033[1;32m' +colorMagenta='\033[1;35m' +colorCyan='\033[1;36m' +highBright='\033[1m' +markEnd='\033[0m' + +tipMsg="" + +function checkResult() { + result=$1 + if [ ${result} -ne 0 ]; then + exit ${result} + fi +} + +# get specified proto files, if empty, return 0 else return 1 +function getSpecifiedProtoFiles() { + if [ "$specifiedProtoFilePath"x = x ];then + return 0 + fi + + specifiedProtoFilePaths=${specifiedProtoFilePath//,/ } + + for v in $specifiedProtoFilePaths; do + if [ ! -f "$v" ];then + echo "Error: not found specified proto file $v" + echo "example: make proto FILES=api/user/v1/user.proto,api/types/types.proto" + checkResult 1 + fi + done + + return 1 +} + +# add the import of useless packages from the generated *.pb.go code here +function deleteUnusedPkg() { + file=$1 + osType=$(uname -s) + if [ "${osType}"x = "Darwin"x ];then + sed -i '' 's#_ \"github.com/envoyproxy/protoc-gen-validate/validate\"##g' ${file} + sed -i '' 's#_ \"github.com/grpc-ecosystem/grpc-gateway/v2/protoc-gen-openapiv2/options\"##g' ${file} + sed -i '' 's#_ \"github.com/srikrsna/protoc-gen-gotag/tagger\"##g' ${file} + sed -i '' 's#_ \"google.golang.org/genproto/googleapis/api/annotations\"##g' ${file} + else + sed -i "s#_ \"github.com/envoyproxy/protoc-gen-validate/validate\"##g" ${file} + sed -i "s#_ \"github.com/grpc-ecosystem/grpc-gateway/v2/protoc-gen-openapiv2/options\"##g" ${file} + sed -i "s#_ \"github.com/srikrsna/protoc-gen-gotag/tagger\"##g" ${file} + sed -i "s#_ \"google.golang.org/genproto/googleapis/api/annotations\"##g" ${file} + fi + checkResult $? +} + +function listProtoFiles(){ + cd $1 + items=$(ls) + + for item in $items; do + if [ -d "$item" ]; then + listProtoFiles $item + else + if [ "${item#*.}"x = "proto"x ];then + file=$(pwd)/${item} + protoFile="${protoBasePath}${file#*${protoBasePath}}" + allProtoFiles="${allProtoFiles} ${protoFile}" + fi + fi + done + cd .. +} + +function handlePbGoFiles(){ + cd $1 + items=$(ls) + + for item in $items; do + if [ -d "$item" ]; then + handlePbGoFiles $item + else + if [ "${item#*.}"x = "pb.go"x ];then + deleteUnusedPkg $item + fi + fi + done + cd .. +} + +function generateByAllProto(){ + getSpecifiedProtoFiles + if [ $? -eq 0 ]; then + listProtoFiles $protoBasePath + else + allProtoFiles=$specifiedProtoFilePaths + fi + + if [ "$allProtoFiles"x = x ];then + echo "Error: not found proto file in path $protoBasePath" + exit 1 + fi + echo -e "generate *pb.go by proto files: ${colorGray}$allProtoFiles${markEnd}" + echo "" + + # generate files *_pb.go + protoc --proto_path=. --proto_path=./third_party \ + --go_out=. --go_opt=paths=source_relative \ + $allProtoFiles + + checkResult $? + + # generate files *_grpc_pb.go + protoc --proto_path=. --proto_path=./third_party \ + --go-grpc_out=. --go-grpc_opt=paths=source_relative \ + $allProtoFiles + + checkResult $? + + + # generate the file *_pb.validate.go + protoc --proto_path=. 
--proto_path=./third_party \ + --validate_out=lang=go:. --validate_opt=paths=source_relative \ + $allProtoFiles + + checkResult $? + + # embed the tag field into *_pb.go + protoc --proto_path=. --proto_path=./third_party \ + --gotag_out=:. --gotag_opt=paths=source_relative \ + $allProtoFiles + + checkResult $? +} + +function generateBySpecifiedProto(){ + # get the proto file of the eshop_gw server + allProtoFiles="" + listProtoFiles ${protoBasePath}/eshop_gw + cd .. + specifiedProtoFiles="" + getSpecifiedProtoFiles + if [ $? -eq 0 ]; then + specifiedProtoFiles=$allProtoFiles + else + for v1 in $specifiedProtoFilePaths; do + for v2 in $allProtoFiles; do + if [ "$v1"x = "$v2"x ];then + specifiedProtoFiles="$specifiedProtoFiles $v1" + fi + done + done + fi + + if [ "$specifiedProtoFiles"x = x ];then + return + fi + echo -e "generate template code by proto files: ${colorMagenta}$specifiedProtoFiles${markEnd}" + echo "" + + # Generate the swagger document and merge all files into docs/apis.swagger.json + protoc --proto_path=. --proto_path=./third_party \ + --openapiv2_out=. --openapiv2_opt=logtostderr=true --openapiv2_opt=allow_merge=true --openapiv2_opt=merge_file_name=eshop_gw/docs/apis.json \ + $specifiedProtoFiles + + checkResult $? + + sponge micro swagger --file=eshop_gw/docs/apis.swagger.json + checkResult $? + + moduleName=$(cat eshop_gw/docs/gen.info | head -1 | cut -d , -f 1) + serverName=$(cat eshop_gw/docs/gen.info | head -1 | cut -d , -f 2) + suitedMonoRepo=$(cat eshop_gw/docs/gen.info | head -1 | cut -d , -f 3) + + protoc --proto_path=. --proto_path=./third_party \ + --go-gin_out=. --go-gin_opt=paths=source_relative --go-gin_opt=plugin=service \ + --go-gin_opt=moduleName=${moduleName} --go-gin_opt=serverName=${serverName} --go-gin_opt=suitedMonoRepo=${suitedMonoRepo} \ + $specifiedProtoFiles + + checkResult $? + + sponge merge rpc-gw-pb --dir=eshop_gw + checkResult $? + + tipMsg="${highBright}Tip:${markEnd} execute the command ${colorCyan}make run${markEnd} and then visit ${colorCyan}http://localhost:8080/apis/swagger/index.html${markEnd} in your browser." 
+ + + + if [ "$suitedMonoRepo" == "true" ]; then + sponge patch adapt-mono-repo --dir=eshop_gw + fi +} + +# generate pb.go by all proto files +generateByAllProto + +# generate pb.go by specified proto files +generateBySpecifiedProto + +# delete unused packages in pb.go +handlePbGoFiles $protoBasePath + +# delete json tag omitempty +sponge patch del-omitempty --dir=$protoBasePath --suffix-name=pb.go > /dev/null + +# modify duplicate numbers and error codes +sponge patch modify-dup-num --dir=eshop_gw/internal/ecode +sponge patch modify-dup-err-code --dir=eshop_gw/internal/ecode + +echo -e "${colorGreen}generated code done.${markEnd}" +echo "" +echo -e $tipMsg +echo "" diff --git a/6_micro-cluster/example-2-mono-repo/eshop_gw/scripts/run-nohup.sh b/6_micro-cluster/example-2-mono-repo/eshop_gw/scripts/run-nohup.sh new file mode 100644 index 0000000..f5c6b05 --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/eshop_gw/scripts/run-nohup.sh @@ -0,0 +1,61 @@ +#!/bin/bash + +# chkconfig: - 85 15 +# description: eshop_gw + +serverName="eshop_gw" +cmdStr="cmd/${serverName}/${serverName}" + + +function checkResult() { + result=$1 + if [ ${result} -ne 0 ]; then + exit ${result} + fi +} + +stopService(){ + NAME=$1 + + ID=`ps -ef | grep "$NAME" | grep -v "$0" | grep -v "grep" | awk '{print $2}'` + if [ -n "$ID" ]; then + for id in $ID + do + kill -9 $id + echo "Stopped ${NAME} service successfully, process ID=${ID}" + done + fi +} + +startService() { + NAME=$1 + + if [ -f "${NAME}" ] ;then + rm "${NAME}" + fi + sleep 0.2 + go build -o ${cmdStr} cmd/${NAME}/main.go + checkResult $? + + nohup ${cmdStr} > ${NAME}.log 2>&1 & + sleep 1 + + ID=`ps -ef | grep "$NAME" | grep -v "$0" | grep -v "grep" | awk '{print $2}'` + if [ -n "$ID" ]; then + echo "Start the ${NAME} service successfully, process ID=${ID}" + else + echo "Failed to start ${NAME} service" + return 1 + fi + return 0 +} + + +stopService ${serverName} +if [ "$1"x != "stop"x ] ;then + sleep 1 + startService ${serverName} + checkResult $? +else + echo "Service ${serverName} has stopped" +fi diff --git a/6_micro-cluster/example-2-mono-repo/eshop_gw/scripts/run.sh b/6_micro-cluster/example-2-mono-repo/eshop_gw/scripts/run.sh new file mode 100644 index 0000000..c9fe133 --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/eshop_gw/scripts/run.sh @@ -0,0 +1,29 @@ +#!/bin/bash + +serverName="eshop_gw" + +binaryFile="cmd/${serverName}/${serverName}" + +osType=$(uname -s) +if [ "${osType%%_*}"x = "MINGW64"x ];then + binaryFile="${binaryFile}.exe" +fi + +if [ -f "${binaryFile}" ] ;then + rm "${binaryFile}" +fi + +function checkResult() { + result=$1 + if [ ${result} -ne 0 ]; then + exit ${result} + fi +} + +sleep 0.2 + +go build -o ${binaryFile} cmd/${serverName}/main.go +checkResult $? + +# running server +./${binaryFile} diff --git a/6_micro-cluster/example-2-mono-repo/eshop_gw/scripts/swag-docs.sh b/6_micro-cluster/example-2-mono-repo/eshop_gw/scripts/swag-docs.sh new file mode 100644 index 0000000..d26b4de --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/eshop_gw/scripts/swag-docs.sh @@ -0,0 +1,44 @@ +#!/bin/bash + +bash scripts/patch-mono.sh + +HOST_ADDR=$1 + +function checkResult() { + result=$1 + if [ ${result} -ne 0 ]; then + exit ${result} + fi +} + +echo "go mod tidy" +go mod tidy +checkResult $? +gofmt -s -w . 
+ +# change host addr +if [ "X${HOST_ADDR}" = "X" ];then + HOST_ADDR=$(cat cmd/eshop_gw/main.go | grep "@host" | awk '{print $3}') + HOST_ADDR=$(echo ${HOST_ADDR} | cut -d ':' -f 1) +else + sed -i "s/@host .*:8080/@host ${HOST_ADDR}:8080/g" cmd/eshop_gw/main.go +fi + +# generate api docs +swag init -g cmd/eshop_gw/main.go +checkResult $? + +# modify duplicate numbers and error codes +sponge patch modify-dup-num --dir=eshop_gw/internal/ecode +sponge patch modify-dup-err-code --dir=eshop_gw/internal/ecode + +colorGreen='\033[1;32m' +colorCyan='\033[1;36m' +highBright='\033[1m' +markEnd='\033[0m' + +echo "" +echo -e "${highBright}Tip:${markEnd} execute the command ${colorCyan}make run${markEnd} and then visit ${colorCyan}http://${HOST_ADDR}:8080/swagger/index.html${markEnd} in your browser." +echo "" +echo -e "${colorGreen}generated api docs done.${markEnd}" +echo "" diff --git a/a_micro-grpc-http-protobuf/go.mod b/6_micro-cluster/example-2-mono-repo/go.mod similarity index 87% rename from a_micro-grpc-http-protobuf/go.mod rename to 6_micro-cluster/example-2-mono-repo/go.mod index 409c750..3e2e7d5 100644 --- a/a_micro-grpc-http-protobuf/go.mod +++ b/6_micro-cluster/example-2-mono-repo/go.mod @@ -1,16 +1,16 @@ -module user +module eshop -go 1.20 +go 1.21 require ( github.com/gin-gonic/gin v1.9.1 github.com/grpc-ecosystem/go-grpc-middleware v1.3.0 github.com/jinzhu/copier v0.3.5 - github.com/stretchr/testify v1.8.4 - github.com/zhufuyi/sponge v1.8.0 + github.com/stretchr/testify v1.9.0 + github.com/zhufuyi/sponge v1.10.1 go.uber.org/zap v1.24.0 google.golang.org/grpc v1.61.0 - google.golang.org/protobuf v1.32.0 + google.golang.org/protobuf v1.34.2 ) require ( @@ -24,7 +24,12 @@ require ( github.com/PuerkitoBio/purell v1.1.1 // indirect github.com/PuerkitoBio/urlesc v0.0.0-20170810143723-de5bf2ad4578 // indirect github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751 // indirect - github.com/aliyun/alibaba-cloud-sdk-go v1.61.1704 // indirect + github.com/alibabacloud-go/debug v0.0.0-20190504072949-9472017b5c68 // indirect + github.com/alibabacloud-go/tea v1.1.17 // indirect + github.com/alibabacloud-go/tea-utils v1.4.4 // indirect + github.com/aliyun/alibaba-cloud-sdk-go v1.61.1800 // indirect + github.com/aliyun/alibabacloud-dkms-gcs-go-sdk v0.2.2 // indirect + github.com/aliyun/alibabacloud-dkms-transfer-go-sdk v0.1.7 // indirect github.com/armon/go-metrics v0.3.10 // indirect github.com/beorn7/perks v1.0.1 // indirect github.com/bojand/ghz v0.117.0 // indirect @@ -62,7 +67,7 @@ require ( github.com/gogo/protobuf v1.3.2 // indirect github.com/golang-jwt/jwt/v5 v5.0.0 // indirect github.com/golang/mock v1.6.0 // indirect - github.com/golang/protobuf v1.5.3 // indirect + github.com/golang/protobuf v1.5.4 // indirect github.com/google/pprof v0.0.0-20211214055906-6f57359322fd // indirect github.com/google/uuid v1.4.0 // indirect github.com/grpc-ecosystem/go-grpc-prometheus v1.2.0 // indirect @@ -74,7 +79,7 @@ require ( github.com/hashicorp/golang-lru v0.5.4 // indirect github.com/hashicorp/hcl v1.0.0 // indirect github.com/hashicorp/serf v0.9.7 // indirect - github.com/huandu/xstrings v1.3.3 // indirect + github.com/huandu/xstrings v1.4.0 // indirect github.com/imdario/mergo v0.3.11 // indirect github.com/jhump/protoreflect v1.15.1 // indirect github.com/jinzhu/configor v1.2.1 // indirect @@ -95,7 +100,7 @@ require ( github.com/mitchellh/reflectwalk v1.0.1 // indirect github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect github.com/modern-go/reflect2 v1.0.2 // 
indirect - github.com/nacos-group/nacos-sdk-go/v2 v2.1.0 // indirect + github.com/nacos-group/nacos-sdk-go/v2 v2.2.7 // indirect github.com/natefinch/lumberjack v2.0.0+incompatible // indirect github.com/pelletier/go-toml v1.9.5 // indirect github.com/pelletier/go-toml/v2 v2.0.8 // indirect @@ -138,14 +143,14 @@ require ( go.uber.org/goleak v1.2.1 // indirect go.uber.org/multierr v1.9.0 // indirect golang.org/x/arch v0.3.0 // indirect - golang.org/x/crypto v0.21.0 // indirect - golang.org/x/net v0.21.0 // indirect + golang.org/x/crypto v0.26.0 // indirect + golang.org/x/net v0.25.0 // indirect golang.org/x/oauth2 v0.14.0 // indirect - golang.org/x/sync v0.5.0 // indirect - golang.org/x/sys v0.18.0 // indirect - golang.org/x/text v0.14.0 // indirect - golang.org/x/time v0.0.0-20220722155302-e5dcc9cfc0b9 // indirect - golang.org/x/tools v0.10.0 // indirect + golang.org/x/sync v0.8.0 // indirect + golang.org/x/sys v0.23.0 // indirect + golang.org/x/text v0.17.0 // indirect + golang.org/x/time v0.1.0 // indirect + golang.org/x/tools v0.21.1-0.20240508182429-e35e4ccd0d2d // indirect google.golang.org/appengine v1.6.8 // indirect google.golang.org/genproto v0.0.0-20231106174013-bbf56f31fb17 // indirect google.golang.org/genproto/googleapis/api v0.0.0-20231106174013-bbf56f31fb17 // indirect diff --git a/a_micro-grpc-http-protobuf/go.sum b/6_micro-cluster/example-2-mono-repo/go.sum similarity index 95% rename from a_micro-grpc-http-protobuf/go.sum rename to 6_micro-cluster/example-2-mono-repo/go.sum index d0bb5d8..ad737f8 100644 --- a/a_micro-grpc-http-protobuf/go.sum +++ b/6_micro-cluster/example-2-mono-repo/go.sum @@ -64,8 +64,19 @@ github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuy github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= github.com/alecthomas/units v0.0.0-20190717042225-c3de453c63f4/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= github.com/alecthomas/units v0.0.0-20190924025748-f65c72e2690d/go.mod h1:rBZYJk541a8SKzHPHnH3zbiI+7dagKZ0cgpgrD7Fyho= -github.com/aliyun/alibaba-cloud-sdk-go v1.61.1704 h1:PpfENOj/vPfhhy9N2OFRjpue0hjM5XqAp2thFmkXXIk= -github.com/aliyun/alibaba-cloud-sdk-go v1.61.1704/go.mod h1:RcDobYh8k5VP6TNybz9m++gL3ijVI5wueVr0EM10VsU= +github.com/alibabacloud-go/debug v0.0.0-20190504072949-9472017b5c68 h1:NqugFkGxx1TXSh/pBcU00Y6bljgDPaFdh5MUSeJ7e50= +github.com/alibabacloud-go/debug v0.0.0-20190504072949-9472017b5c68/go.mod h1:6pb/Qy8c+lqua8cFpEy7g39NRRqOWc3rOwAy8m5Y2BY= +github.com/alibabacloud-go/tea v1.1.0/go.mod h1:IkGyUSX4Ba1V+k4pCtJUc6jDpZLFph9QMy2VUPTwukg= +github.com/alibabacloud-go/tea v1.1.17 h1:05R5DnaJXe9sCNIe8KUgWHC/z6w/VZIwczgUwzRnul8= +github.com/alibabacloud-go/tea v1.1.17/go.mod h1:nXxjm6CIFkBhwW4FQkNrolwbfon8Svy6cujmKFUq98A= +github.com/alibabacloud-go/tea-utils v1.4.4 h1:lxCDvNCdTo9FaXKKq45+4vGETQUKNOW/qKTcX9Sk53o= +github.com/alibabacloud-go/tea-utils v1.4.4/go.mod h1:KNcT0oXlZZxOXINnZBs6YvgOd5aYp9U67G+E3R8fcQw= +github.com/aliyun/alibaba-cloud-sdk-go v1.61.1800 h1:ie/8RxBOfKZWcrbYSJi2Z8uX8TcOlSMwPlEJh83OeOw= +github.com/aliyun/alibaba-cloud-sdk-go v1.61.1800/go.mod h1:RcDobYh8k5VP6TNybz9m++gL3ijVI5wueVr0EM10VsU= +github.com/aliyun/alibabacloud-dkms-gcs-go-sdk v0.2.2 h1:rWkH6D2XlXb/Y+tNAQROxBzp3a0p92ni+pXcaHBe/WI= +github.com/aliyun/alibabacloud-dkms-gcs-go-sdk v0.2.2/go.mod h1:GDtq+Kw+v0fO+j5BrrWiUHbBq7L+hfpzpPfXKOZMFE0= +github.com/aliyun/alibabacloud-dkms-transfer-go-sdk v0.1.7 h1:olLiPI2iM8Hqq6vKnSxpM3awCrm9/BeOgHpzQkOYnI4= 
+github.com/aliyun/alibabacloud-dkms-transfer-go-sdk v0.1.7/go.mod h1:oDg1j4kFxnhgftaiLJABkGeSvuEvSF5Lo6UmRAMruX4= github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY= github.com/armon/circbuf v0.0.0-20150827004946-bbbad097214e/go.mod h1:3U/XgcO3hCbHZ8TKRvWD2dDTCfh9M9ya+I9JpbB7O8o= github.com/armon/go-metrics v0.0.0-20180917152333-f0300d1749da/go.mod h1:Q73ZrmVTwzkszR9V5SSuryQ31EELlFMUz1kKyl939pY= @@ -108,12 +119,9 @@ github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDk github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc= github.com/cncf/udpa/go v0.0.0-20200629203442-efcf912fb354/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= github.com/cncf/udpa/go v0.0.0-20201120205902-5459f2c99403/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= -github.com/cncf/udpa/go v0.0.0-20210930031921-04548b0d99d4/go.mod h1:6pvJx4me5XPnfI9Z40ddWsdw2W/uZgQLFXToKeRcDiI= github.com/cncf/udpa/go v0.0.0-20220112060539-c52dc94e7fbe h1:QQ3GSy+MqSHxm/d8nCtnAiZdYFd45cYZPs8vOOIYKfk= github.com/cncf/udpa/go v0.0.0-20220112060539-c52dc94e7fbe/go.mod h1:6pvJx4me5XPnfI9Z40ddWsdw2W/uZgQLFXToKeRcDiI= github.com/cncf/xds/go v0.0.0-20210922020428-25de7278fc84/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= -github.com/cncf/xds/go v0.0.0-20211001041855-01bcc9b48dfe/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= -github.com/cncf/xds/go v0.0.0-20211011173535-cb28da3451f1/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= github.com/cncf/xds/go v0.0.0-20231109132714-523115ebc101 h1:7To3pQ+pZo0i3dsWEbinPNFs5gPSBOsJtx3wTT94VBY= github.com/cncf/xds/go v0.0.0-20231109132714-523115ebc101/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= github.com/coreos/go-semver v0.3.0 h1:wkHLiw0WNATZnSG7epLsujiMCgPAc9xhjJ4tgnAxmfM= @@ -134,7 +142,6 @@ github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1m github.com/envoyproxy/go-control-plane v0.9.7/go.mod h1:cwu0lG7PUMfa9snN8LXBig5ynNVH9qI8YYLbd1fK2po= github.com/envoyproxy/go-control-plane v0.9.9-0.20201210154907-fd9021fe5dad/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk= github.com/envoyproxy/go-control-plane v0.9.9-0.20210217033140-668b12f5399d/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk= -github.com/envoyproxy/go-control-plane v0.10.2-0.20220325020618-49ff273808a1/go.mod h1:KJwIaB5Mv44NWtYuAOFCVOjcI94vtpEz2JU/D2v6IjE= github.com/envoyproxy/go-control-plane v0.11.1 h1:wSUXTlLfiAQRWs2F+p+EKOY9rUyis1MyGqJ2DIk5HpM= github.com/envoyproxy/go-control-plane v0.11.1/go.mod h1:uhMcXKCQMEJHiAb0w+YGefQLaTEw+YhGluxZkrTmD0g= github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= @@ -147,6 +154,7 @@ github.com/fatih/color v1.13.0/go.mod h1:kLAiJbzzSOZDVNGyDpeOxJ47H46qBXwg5ILebYF github.com/felixge/fgprof v0.9.3 h1:VvyZxILNuCiUCSXtPtYmmtGvb65nqXh2QFWc0Wpf2/g= github.com/felixge/fgprof v0.9.3/go.mod h1:RdbpDgzqYVh/T9fPELJyV7EYJuHB55UTEULNun8eiPw= github.com/frankban/quicktest v1.14.3 h1:FJKSZTDHjyhriyC81FLQ0LY93eSai0ZyR/ZIkd3ZUKE= +github.com/frankban/quicktest v1.14.3/go.mod h1:mgiwOwqx65TmIk1wJ6Q7wvnVMocbUorkibMOrVTHZps= github.com/fsnotify/fsnotify v1.5.4 h1:jRbGcIw6P2Meqdwuo0H1p6JVLbL5DHKAKlYndzMwVZI= github.com/fsnotify/fsnotify v1.5.4/go.mod h1:OVB6XrOHzAwXMpEM7uPOzcehqUV2UqJxmVXmkdnm1bU= github.com/gabriel-vasile/mimetype v1.4.2 h1:w5qFW6JKBz9Y393Y4q372O9A7cUSequkh1Q7OhCmWKU= @@ -192,6 +200,7 @@ github.com/go-openapi/swag v0.19.15 
h1:D2NRCBzS9/pEY3gP9Nl8aDqGUcPFrwG2p+CNFrLyr github.com/go-openapi/swag v0.19.15/go.mod h1:QYRuS/SOXUCsnplDa677K7+DxSOj6IPNl/eQntq43wQ= github.com/go-playground/assert/v2 v2.0.1/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4= github.com/go-playground/assert/v2 v2.2.0 h1:JvknZsQTYeFEAhQwI4qEt9cyV5ONwRHC+lYKSsYSR8s= +github.com/go-playground/assert/v2 v2.2.0/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4= github.com/go-playground/locales v0.12.1/go.mod h1:IUMDtCfWo/w/mtMfIE/IG2K+Ey3ygWanZIBtBW0W2TM= github.com/go-playground/locales v0.14.0/go.mod h1:sawfccIbzZTqEDETgFXqTho0QybSa7l++s0DH+LDiLs= github.com/go-playground/locales v0.14.1 h1:EWaQ/wswjilfKLTECiXz7Rh+3BjFhfDFKv/oXslEjJA= @@ -243,8 +252,8 @@ github.com/golang/protobuf v1.4.2/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw github.com/golang/protobuf v1.4.3/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk= github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= -github.com/golang/protobuf v1.5.3 h1:KhyjKVUg7Usr/dYsdSqoFveMYd5ko72D+zANwlG1mmg= -github.com/golang/protobuf v1.5.3/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= +github.com/golang/protobuf v1.5.4 h1:i7eJL8qZTpSEXOPTxNKhASYpMn+8e5Q6AdndVa1dWek= +github.com/golang/protobuf v1.5.4/go.mod h1:lnTiLA8Wa4RWRcIUkrtSVa5nRhsEGBg48fD6rSs7xps= github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= github.com/google/btree v1.0.0 h1:0udJVsspx3VBr5FwtLhQQtuAsVc79tTq0ocGIPAU6qo= github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= @@ -261,6 +270,7 @@ github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/ github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI= +github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs= github.com/google/martian/v3 v3.0.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0= @@ -295,8 +305,9 @@ github.com/hashicorp/consul/api v1.12.0 h1:k3y1FYv6nuKyNTqj6w9gXOx5r5CfLj/k/euUe github.com/hashicorp/consul/api v1.12.0/go.mod h1:6pVBMo0ebnYdt2S3H87XhekM/HHrUoTD2XXb/VrZVy0= github.com/hashicorp/consul/sdk v0.8.0 h1:OJtKBtEjboEZvG6AOUdh4Z1Zbyu0WcxQ0qatRrZHTVU= github.com/hashicorp/consul/sdk v0.8.0/go.mod h1:GBvyrGALthsZObzUGsfgHZQDXjg4lOjagTIwIR1vPms= -github.com/hashicorp/errwrap v1.0.0 h1:hLrqtEDnRye3+sgx6z4qVLNuviH3MR5aQ0ykNJa/UYA= github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= +github.com/hashicorp/errwrap v1.1.0 h1:OxrOeh75EUXMY8TBjag2fzXGZ40LB6IKw45YeGUDY2I= +github.com/hashicorp/errwrap v1.1.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= github.com/hashicorp/go-cleanhttp v0.5.0/go.mod h1:JpRdi6/HCYpAwUzNwuwqhbovhLtngrth3wmdIIUrZ80= github.com/hashicorp/go-cleanhttp v0.5.1/go.mod h1:JpRdi6/HCYpAwUzNwuwqhbovhLtngrth3wmdIIUrZ80= github.com/hashicorp/go-cleanhttp v0.5.2 h1:035FKYIWjmULyFRBKPs8TBQoi0x6d9G4xc9neXJWAZQ= @@ -312,6 +323,7 @@ github.com/hashicorp/go-msgpack v0.5.3/go.mod h1:ahLV/dePpqEmjfWmKiqvPkv/twdG7iP 
github.com/hashicorp/go-multierror v1.0.0/go.mod h1:dHtQlpGsu+cZNNAkkCN/P3hoUDHhCYQXV3UM06sGGrk= github.com/hashicorp/go-multierror v1.1.0/go.mod h1:spPvp8C1qA32ftKqdAHm4hHTbPw+vmowP0z+KUhOZdA= github.com/hashicorp/go-multierror v1.1.1 h1:H5DkEtf6CXdFp0N0Em5UCwQpXMWke8IA0+lD48awMYo= +github.com/hashicorp/go-multierror v1.1.1/go.mod h1:iw975J/qwKPdAO1clOe2L8331t/9/fmwbPZ6JB6eMoM= github.com/hashicorp/go-retryablehttp v0.5.3/go.mod h1:9B5zBasrRhHXnJnui7y6sL7es7NDiJgTc6Er0maI1Xs= github.com/hashicorp/go-rootcerts v1.0.2 h1:jzhAVGtqPKbwpyCPELlgNWhE1znq+qwJtW5Oi2viEzc= github.com/hashicorp/go-rootcerts v1.0.2/go.mod h1:pqUvnprVnM5bf7AOirdbb01K4ccR319Vf4pU3K5EGc8= @@ -319,8 +331,9 @@ github.com/hashicorp/go-sockaddr v1.0.0 h1:GeH6tui99pF4NJgfnhp+L6+FfobzVW3Ah46sL github.com/hashicorp/go-sockaddr v1.0.0/go.mod h1:7Xibr9yA9JjQq1JpNB2Vw7kxv8xerXegt+ozgdvDeDU= github.com/hashicorp/go-syslog v1.0.0/go.mod h1:qPfqrKkXGihmCqbJM2mZgkZGvKG1dFdvsLplgctolz4= github.com/hashicorp/go-uuid v1.0.0/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= -github.com/hashicorp/go-uuid v1.0.1 h1:fv1ep09latC32wFoVwnqcnKJGnMSdBanPczbHAYm1BE= github.com/hashicorp/go-uuid v1.0.1/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= +github.com/hashicorp/go-uuid v1.0.3 h1:2gKiV6YVmrJ1i2CKKa9obLvRieoRGviZFL26PcT/Co8= +github.com/hashicorp/go-uuid v1.0.3/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= github.com/hashicorp/golang-lru v0.5.4 h1:YDjusn29QI/Das2iO9M0BHnIbxPeyuCHsjMW+lJfyTc= @@ -334,8 +347,9 @@ github.com/hashicorp/memberlist v0.3.0/go.mod h1:MS2lj3INKhZjWNqd3N0m3J+Jxf3DAOn github.com/hashicorp/serf v0.9.6/go.mod h1:TXZNMjZQijwlDvp+r0b63xZ45H7JmCmgg4gpTwn9UV4= github.com/hashicorp/serf v0.9.7 h1:hkdgbqizGQHuU5IPqYM1JdSMV8nKfpuOnZYXssk9muY= github.com/hashicorp/serf v0.9.7/go.mod h1:TXZNMjZQijwlDvp+r0b63xZ45H7JmCmgg4gpTwn9UV4= -github.com/huandu/xstrings v1.3.3 h1:/Gcsuc1x8JVbJ9/rlye4xZnVAbEkGauT8lbebqcQws4= github.com/huandu/xstrings v1.3.3/go.mod h1:y5/lhBue+AyNmUVz9RLU9xbLR0o4KIIExikq4ovT0aE= +github.com/huandu/xstrings v1.4.0 h1:D17IlohoQq4UcpqD7fDk80P7l+lwAmlFaBHgOipl2FU= +github.com/huandu/xstrings v1.4.0/go.mod h1:y5/lhBue+AyNmUVz9RLU9xbLR0o4KIIExikq4ovT0aE= github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= github.com/ianlancetaylor/demangle v0.0.0-20200824232613-28f6c0f3b639/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= github.com/ianlancetaylor/demangle v0.0.0-20210905161508-09a460cdf81d/go.mod h1:aYm2/VgdVmcIU8iMfdMvDMsRAQjcfZSKFby6HOFvi/w= @@ -438,8 +452,8 @@ github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9G github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk= github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U= github.com/mwitkow/go-conntrack v0.0.0-20190716064945-2f068394615f/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U= -github.com/nacos-group/nacos-sdk-go/v2 v2.1.0 h1:PxRwOzHhnK6eGGvioEGkn8s6XRXmUVuXu91i2yQcdDs= -github.com/nacos-group/nacos-sdk-go/v2 v2.1.0/go.mod h1:ys/1adWeKXXzbNWfRNbaFlX/t6HVLWdpsNDvmoWTw0g= +github.com/nacos-group/nacos-sdk-go/v2 v2.2.7 h1:wCC1f3/VzIR1WD30YKeJGZAOchYCK/35mLC8qWt6Q6o= +github.com/nacos-group/nacos-sdk-go/v2 v2.2.7/go.mod 
h1:VYlyDPlQchPC31PmfBustu81vsOkdpCuO5k0dRdQcFc= github.com/natefinch/lumberjack v2.0.0+incompatible h1:4QJd3OLAMgj7ph+yZTuX13Ld4UpgHp07nNdFX7mqFfM= github.com/natefinch/lumberjack v2.0.0+incompatible/go.mod h1:Wi9p2TTF5DG5oU+6YfsmYQpsTIOm0B1VNzQg9Mw6nPk= github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno= @@ -476,7 +490,6 @@ github.com/prometheus/client_golang v1.7.1/go.mod h1:PY5Wy2awLA44sXw4AOSfFBetzPP github.com/prometheus/client_golang v1.11.0/go.mod h1:Z6t4BnS23TR94PD6BsDNk8yVqroYurpAkEiz0P2BEV0= github.com/prometheus/client_golang v1.11.1/go.mod h1:Z6t4BnS23TR94PD6BsDNk8yVqroYurpAkEiz0P2BEV0= github.com/prometheus/client_golang v1.12.1/go.mod h1:3Z9XVyYiZYEO+YQWt3RD2R3jrbd179Rt297l4aS6nDY= -github.com/prometheus/client_golang v1.12.2/go.mod h1:3Z9XVyYiZYEO+YQWt3RD2R3jrbd179Rt297l4aS6nDY= github.com/prometheus/client_golang v1.13.0 h1:b71QUfeo5M8gq2+evJdTPfZhYMAU0uKPkyPJ7TPsloU= github.com/prometheus/client_golang v1.13.0/go.mod h1:vTeo+zgvILHsnnj/39Ou/1fPN5nJFOEMgftOUOmlvYQ= github.com/prometheus/client_model v0.0.0-20180712105110-5c3871d89910/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo= @@ -537,8 +550,9 @@ github.com/spf13/viper v1.12.0/go.mod h1:b6COn30jlNxbm/V2IqWiNWkJ+vZNiMNksliPCiu github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= -github.com/stretchr/objx v0.5.0 h1:1zr/of2m5FGMsad5YfcqgdqdWrIhu+EBEJRhR1U7z/c= github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo= +github.com/stretchr/objx v0.5.2 h1:xuMeJ0Sdp5ZMRXx/aWO6RZxdr3beISkG5/G/aIRr3pY= +github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/8L+MA= github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= @@ -550,8 +564,9 @@ github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= github.com/stretchr/testify v1.8.2/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= github.com/stretchr/testify v1.8.3/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo= -github.com/stretchr/testify v1.8.4 h1:CcVxjf3Q8PM0mHUKJCdn+eZZtm5yQwehR5yeSVQQcUk= github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo= +github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg= +github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= github.com/subosito/gotenv v1.3.0 h1:mjC+YW8QpAdXibNi+vNWgzmgBH4+5l5dCXv8cNysBLI= github.com/subosito/gotenv v1.3.0/go.mod h1:YzJjq/33h7nrwdY+iHMhEOEEbW0ovIz0tB6t6PwAXzs= github.com/swaggo/files v0.0.0-20220610200504-28940afbdbfe/go.mod h1:lKJPbtWzJ9JhsTN1k1gZgleJWY/cqq0psdoMmaThG3w= @@ -585,8 +600,8 @@ github.com/yuin/goldmark v1.4.0/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1 github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= github.com/yusufpapurcu/wmi v1.2.3 h1:E1ctvB7uKFMOJw3fdOW32DwGE9I7t++CRUEMKvFoFiw= github.com/yusufpapurcu/wmi v1.2.3/go.mod h1:SBZ9tNy3G9/m5Oi98Zks0QjeHVDvuK0qfxQmPyzfmi0= -github.com/zhufuyi/sponge 
v1.8.0 h1:mGmskfT0q5Oqv/CDNnruZ9617SH2s33KpQQpjHwIHIQ= -github.com/zhufuyi/sponge v1.8.0/go.mod h1:Y+E7rQaGEkN+E1NLE6THZUvcnRbGLFBzsm3RctvJxJA= +github.com/zhufuyi/sponge v1.10.1 h1:feB75axQtMJ5EkC9M6WgUC+VIqZlB+K6zmNul9xlB0c= +github.com/zhufuyi/sponge v1.10.1/go.mod h1:g6oDmwPTUrCL9+RJbSbjQEtmc0yhJ1vUD5rCdF1QNG4= go.etcd.io/etcd/api/v3 v3.5.4 h1:OHVyt3TopwtUQ2GKdd5wu3PmmipR4FTwCqoEjSyRdIc= go.etcd.io/etcd/api/v3 v3.5.4/go.mod h1:5GB2vv4A4AOn3yk7MftYGHkUfGtDHnEraIjym4dYz5A= go.etcd.io/etcd/client/pkg/v3 v3.5.4 h1:lrneYvz923dvC14R54XcA7FXoZ3mlGZAgmwhfm7HqOg= @@ -615,11 +630,9 @@ go.opentelemetry.io/otel/sdk v1.24.0 h1:YMPPDNymmQN3ZgczicBY3B6sf9n62Dlj9pWD3ucg go.opentelemetry.io/otel/sdk v1.24.0/go.mod h1:KVrIYw6tEubO9E96HQpcmpTKDVn9gdv35HoYiQWGDFg= go.opentelemetry.io/otel/trace v1.24.0 h1:CsKnnL4dUAr/0llH9FKuc698G04IrpWV0MQA/Y1YELI= go.opentelemetry.io/otel/trace v1.24.0/go.mod h1:HPc3Xr/cOApsBI154IU0OI0HJexz+aw5uPdbs3UCjNU= -go.opentelemetry.io/proto/otlp v0.7.0/go.mod h1:PqfVotwruBrMGOCsRd/89rSnXhoiJIqeYNgFYFoEGnI= go.uber.org/atomic v1.4.0/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE= go.uber.org/atomic v1.7.0 h1:ADUqmZGgLDDfbSL9ZmPxKTybcoEYHgpYfELNoN+7hsw= go.uber.org/atomic v1.7.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc= -go.uber.org/goleak v1.1.11/go.mod h1:cwTWslyiVhfpKIDGSZEM2HlOvcqm+tG4zioyIeLoqMQ= go.uber.org/goleak v1.2.1 h1:NBol2c7O1ZokfZ0LEU9K6Whx/KnwvepVetCUhtKja4A= go.uber.org/goleak v1.2.1/go.mod h1:qlT2yGI9QafXHhZZLxlSuNsMw3FFLxBr+tBRlmO1xH4= go.uber.org/multierr v1.1.0/go.mod h1:wR5kodmAFQ0UK8QlbwjlSNy0Z68gJhDJUG5sjR94q/0= @@ -628,7 +641,6 @@ go.uber.org/multierr v1.9.0 h1:7fIwc/ZtS0q++VgcfqFDxSBZVv/Xo49/SYnDFupUwlI= go.uber.org/multierr v1.9.0/go.mod h1:X2jQV1h+kxSjClGpnseKVIxpmcjrj7MNnI0bnlfKTVQ= go.uber.org/zap v1.10.0/go.mod h1:vwi/ZaCAaUcBkycHslxD9B2zi4UTXhF60s6SWpuDF0Q= go.uber.org/zap v1.17.0/go.mod h1:MXVU+bhUf/A7Xi2HNOnopQOrmycQ5Ih87HtOu4q5SSo= -go.uber.org/zap v1.21.0/go.mod h1:wjWOCqI0f2ZZrJF/UufIOkiC8ii6tm1iqIsLo76RfJw= go.uber.org/zap v1.24.0 h1:FiJd5l1UOLj0wCgbSE0rwwXHzEdAZS6hiiSnxJN/D60= go.uber.org/zap v1.24.0/go.mod h1:2kMP+WWQ8aoFoedH3T2sq6iJ2yDWpHbP0f6MQbS9Gkg= golang.org/x/arch v0.0.0-20210923205945-b76863e36670/go.mod h1:5om86z9Hs0C8fWVUuoMHwpExlXzs5Tkyp9hOrfG7pp8= @@ -640,14 +652,15 @@ golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8U golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20190923035154-9ee001bba392/go.mod h1:/lpIB1dKB+9EgE3H3cr1v9wB50oz8l4C4h62xy7jSTY= golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20200510223506-06a226fb4e37/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto v0.0.0-20210421170649-83a5a9bb288b/go.mod h1:T9bdIzuCu7OtxOm1hfPfRQxPLYneinmdGuTeoZ9dtd4= golang.org/x/crypto v0.0.0-20210711020723-a769d52b0f97/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= golang.org/x/crypto v0.0.0-20211108221036-ceb1ce70b4fa/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= golang.org/x/crypto v0.3.0/go.mod h1:hebNnKkNXi2UzZN1eVRvBB7co0a+JxK6XbPiWVs/3J4= -golang.org/x/crypto v0.21.0 h1:X31++rzVUdKhX5sWmSOFZxx8UW/ldWx55cbf08iNAMA= -golang.org/x/crypto 
v0.21.0/go.mod h1:0BP7YvVV9gBbVKyeTG0Gyn+gZm94bibOW5BjDEYAOMs= +golang.org/x/crypto v0.26.0 h1:RrRspgV4mU+YwB4FYnuBoKsUapNIL5cohGAmSH3azsw= +golang.org/x/crypto v0.26.0/go.mod h1:GY7jblb9wI+FOo5y8/S2oY4zWP07AkOJ4+jxCqdqn54= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8= @@ -684,7 +697,8 @@ golang.org/x/mod v0.4.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.4.1/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= -golang.org/x/mod v0.11.0 h1:bUO06HqtnRcc/7l71XBe4WcqTZ+3AH1J59zWDDwLKgU= +golang.org/x/mod v0.17.0 h1:zY54UmvipHiNd+pm+m0x9KhZ9hl1/7QNMyxXbc6ICqA= +golang.org/x/mod v0.17.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c= golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= @@ -729,8 +743,8 @@ golang.org/x/net v0.0.0-20220225172249-27dd8689420f/go.mod h1:CfG3xpIq0wQ8r1q4Su golang.org/x/net v0.0.0-20220425223048-2871e0cb64e4/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= golang.org/x/net v0.2.0/go.mod h1:KqCZLdyyvdV855qA2rE3GC2aiw5xGR5TEjj8smXukLY= -golang.org/x/net v0.21.0 h1:AQyQV4dYCvJ7vGmJyKki9+PBdyvhkSd8EIx/qb0AYv4= -golang.org/x/net v0.21.0/go.mod h1:bIjVDfnllIU7BJ2DNgfnXvpSvtn8VRwhlsaeUTyUS44= +golang.org/x/net v0.25.0 h1:d/OCCoBEUq33pjydKrGQhw7IlUPI2Oylr+8qLx49kac= +golang.org/x/net v0.25.0/go.mod h1:JkAGAh7GEvH74S6FOH42FLoXpXbE/aqXSrIQjXgsiwM= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= @@ -756,8 +770,8 @@ golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJ golang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.5.0 h1:60k92dhOjHxJkrqnwsfl8KuaHbn/5dl0lUPUklKo3qE= -golang.org/x/sync v0.5.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= +golang.org/x/sync v0.8.0 h1:3NFvSEYkUoMifnESzZl15y791HH1qU2xm6eCJU5ZPXQ= +golang.org/x/sync v0.8.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= golang.org/x/sys v0.0.0-20180823144017-11551d06cbcc/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= 
@@ -832,8 +846,8 @@ golang.org/x/sys v0.2.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.11.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.18.0 h1:DBdB3niSjOA/O0blCZBqDefyWNYveAYMNF1Wum0DYQ4= -golang.org/x/sys v0.18.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/sys v0.23.0 h1:YfKFowiIMvtgl1UERQoTPPToxltDeZfbj4H7dVUCwmM= +golang.org/x/sys v0.23.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= golang.org/x/term v0.2.0/go.mod h1:TVmDHMZPmdnySmBfhjOoOdhjzdE1h4u1VwSiw2l1Nuc= @@ -848,13 +862,13 @@ golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= golang.org/x/text v0.3.8/go.mod h1:E6s5w1FMmriuDzIBO73fBruAKo1PCIq6d2Q6DHfQ8WQ= golang.org/x/text v0.4.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= -golang.org/x/text v0.14.0 h1:ScX5w1eTa3QqT8oi6+ziP7dTV1S2+ALU0bI+0zXKWiQ= -golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= +golang.org/x/text v0.17.0 h1:XtiM5bkSOt+ewxlOE/aE/AKEHibwj/6gvWMl9Rsh0Qc= +golang.org/x/text v0.17.0/go.mod h1:BuEKDfySbSR4drPmRPG/7iBdf8hvFMuRexcpahXilzY= golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= -golang.org/x/time v0.0.0-20220722155302-e5dcc9cfc0b9 h1:ftMN5LMiBFjbzleLqtoBZk7KdJwhuybIU+FckUHgoyQ= -golang.org/x/time v0.0.0-20220722155302-e5dcc9cfc0b9/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/time v0.1.0 h1:xYY+Bajn2a7VBmTM5GikTmnK8ZuX8YgnQCqZpbBNtmA= +golang.org/x/time v0.1.0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY= @@ -908,11 +922,10 @@ golang.org/x/tools v0.0.0-20210108195828-e2f9c7f1fc8e/go.mod h1:emZCQorbCU4vsT4f golang.org/x/tools v0.1.0/go.mod h1:xkSsbof2nBLbhDlRMhhhyNLN/zl3eTqcnHD5viDpcZ0= golang.org/x/tools v0.1.1/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= golang.org/x/tools v0.1.2/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= -golang.org/x/tools v0.1.5/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= golang.org/x/tools v0.1.7/go.mod h1:LGqMHiF4EqQNHR1JncWGqT5BVaXmza+X+BDGol+dOxo= golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= -golang.org/x/tools v0.10.0 h1:tvDr/iQoUqNdohiYm0LmmKcBk+q86lb9EprIUFhHHGg= -golang.org/x/tools v0.10.0/go.mod h1:UJwyiVBsOA2uwvK/e5OY3GTpDUJriEd+/YlqAwLPmyM= +golang.org/x/tools v0.21.1-0.20240508182429-e35e4ccd0d2d h1:vU5i/LfpvrRCpgM/VPfJLg5KjxD3E+hfT1SH+d9zLwg= +golang.org/x/tools v0.21.1-0.20240508182429-e35e4ccd0d2d/go.mod 
h1:aiJjzUbINMkxbQROHiO6hDPo2LHcIPhhQsa9DLh0yGk= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= @@ -1007,9 +1020,7 @@ google.golang.org/grpc v1.33.1/go.mod h1:fr5YgcSWrqhRRxogOsw7RzIpsmvOZ6IcH4kBYTp google.golang.org/grpc v1.33.2/go.mod h1:JMHMWHQWaTccqQQlmk3MJZS+GWXOdAesneDmEnv2fbc= google.golang.org/grpc v1.34.0/go.mod h1:WotjhfgOW/POjDeRt8vscBtXq+2VjORFy659qA51WJ8= google.golang.org/grpc v1.35.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= -google.golang.org/grpc v1.36.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= google.golang.org/grpc v1.38.0/go.mod h1:NREThFqKR1f3iQ6oBuvc5LadQuXVGo9rkm5ZGrQdJfM= -google.golang.org/grpc v1.48.0/go.mod h1:vN9eftEi1UMyUsIF80+uQXhHjbXYbm0uXoFCACuMGWk= google.golang.org/grpc v1.61.0 h1:TOvOcuXn30kRao+gfcvsebNEa5iZIiLkisYEkf7R7o0= google.golang.org/grpc v1.61.0/go.mod h1:VUbo7IFqmF1QtCAstipjG0GIoq49KvMe9+h1jFLBNJs= google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= @@ -1024,10 +1035,9 @@ google.golang.org/protobuf v1.24.0/go.mod h1:r/3tXBNzIEhYS9I1OUVjXDlt8tc493IdKGj google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c= google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= -google.golang.org/protobuf v1.27.1/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= google.golang.org/protobuf v1.28.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= -google.golang.org/protobuf v1.32.0 h1:pPC6BG5ex8PDFnkbrGU3EixyhKcQ2aDuBS36lqK/C7I= -google.golang.org/protobuf v1.32.0/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos= +google.golang.org/protobuf v1.34.2 h1:6xV6lTsCfpGD21XK49h7MhtcApnLqkfYgPcdHftf6hg= +google.golang.org/protobuf v1.34.2/go.mod h1:qYOHts0dSfpeUzUFpOMr/WGzszTmLH+DiWniOlNbLDw= gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= diff --git a/6_micro-cluster/example-2-mono-repo/inventory/.gitignore b/6_micro-cluster/example-2-mono-repo/inventory/.gitignore new file mode 100644 index 0000000..c98f925 --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/inventory/.gitignore @@ -0,0 +1,26 @@ +# Binaries for programs and plugins +*.exe +*.exe~ +*.dll +*.so +*.dylib +*.log + +# Test binary, built with `go test -c` +*.test + +# Output of the go coverage tool, specifically when used with LiteIDE +*.out + +# Dependency directories (remove the comment below to include it) +vendor/ +dist/ + +# idea +.idea +*.iml +*.ipr +*.iws + +cmd/inventory/inventory + diff --git a/6_micro-cluster/example-2-mono-repo/inventory/.golangci.yml b/6_micro-cluster/example-2-mono-repo/inventory/.golangci.yml new file mode 100644 index 0000000..d17ff22 --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/inventory/.golangci.yml @@ -0,0 +1,342 @@ +# This file configures eshop. + +run: + # timeout for analysis, e.g. 
30s, 5m, default is 1m + timeout: 10m + # default concurrency is available CPU number + concurrency: 4 + # include test files or not, default is true + tests: false + # which dirs to skip: issues from them won't be reported; + # can use regexp here: generated.*, regexp is applied on full path; + # default value is empty list, but default dirs are skipped independently + # from this option's value (see skip-dirs-use-default). + skip-dirs: + - docs + - api + # which files to skip: they will be analyzed, but issues from them + # won't be reported. Default value is empty list, but there is + # no need to include all autogenerated files, we confidently recognize + # autogenerated files. If it's not please let us know. + skip-files: + - _test.go + + # exit code when at least one issue was found, default is 1 + issues-exit-code: 1 + + # list of build tags, all linters use it. Default is empty list. + build-tags: + - mytag + + # default is true. Enables skipping of directories: + # vendor$, third_party$, testdata$, examples$, Godeps$, builtin$ + skip-dirs-use-default: true + + +linters: + # please, do not use `enable-all`: it's deprecated and will be removed soon. + # inverted configuration with `enable-all` and `disable` is not scalable during updates of golangci-lint + disable-all: true + enable: + - revive + - goimports + - gofmt + - unused + #- depguard + - dogsled + - errcheck + #- gochecknoinits + - goconst + - gocyclo + - gosimple + - govet + - lll + - misspell + - typecheck + - unconvert + - whitespace + - staticcheck + #- bodyclose + #- dupl + #- goprintffuncname + #- gosec + #- unparam + #- ineffassign + + +linters-settings: + revive: + rules: + - name: argument-limit + arguments: [ 8 ] + - name: atomic + - name: bare-return + - name: blank-imports + - name: bool-literal-in-expr + - name: call-to-gc + - name: confusing-naming + - name: confusing-results + - name: constant-logical-expr + - name: context-as-argument + - name: context-keys-type + - name: deep-exit + - name: defer + - name: dot-imports + - name: duplicated-imports + - name: early-return + - name: empty-block + #- name: empty-lines + - name: error-naming + - name: error-return + - name: error-strings + - name: errorf + - name: function-result-limit + arguments: [ 3 ] + - name: identical-branches + - name: if-return + - name: import-shadowing + - name: increment-decrement + - name: indent-error-flow + - name: modifies-parameter + - name: modifies-value-receiver + - name: package-comments + - name: range + - name: range-val-address + - name: range-val-in-closure + - name: receiver-naming + - name: redefines-builtin-id + - name: string-of-int + - name: struct-tag + - name: superfluous-else + - name: time-naming + - name: unconditional-recursion + - name: unexported-naming + - name: unnecessary-stmt + - name: unreachable-code + - name: unused-parameter + - name: var-declaration + - name: var-naming + - name: waitgroup-by-value + + dogsled: + # checks assignments with too many blank identifiers; default is 2 + max-blank-identifiers: 2 + + dupl: + # tokens count to trigger issue, 150 by default + threshold: 100 + + errcheck: + # report about not checking of errors in type assertions: `a := b.(MyStruct)`; + # default is false: such cases aren't reported by default. + check-type-assertions: false + + # report about assignment of errors to blank identifier: `num, _ := strconv.Atoi(numStr)`; + # default is false: such cases aren't reported by default. 
+ check-blank: false + + # [deprecated] comma-separated list of pairs of the form pkg:regex + # the regex is used to ignore names within pkg. (default "fmt:.*"). + # see https://github.com/kisielk/errcheck#the-deprecated-method for details + ignore: fmt:.*,io/ioutil:^Read.* + + # path to a file containing a list of functions to exclude from checking + # see https://github.com/kisielk/errcheck#excluding-functions for details + # exclude: /path/to/file.txt + funlen: + lines: 60 + statements: 40 + + gocognit: + # minimal code complexity to report, 30 by default (but we recommend 10-20) + min-complexity: 10 + + goconst: + # minimal length of string constant, 3 by default + min-len: 4 + # minimal occurrences count to trigger, 3 by default + min-occurrences: 4 + + gocyclo: + # minimal code complexity to report, 30 by default (but we recommend 10-20) + min-complexity: 20 + + godox: + # report any comments starting with keywords, this is useful for TODO or FIXME comments that + # might be left in the code accidentally and should be resolved before merging + keywords: # default keywords are TODO, BUG, and FIXME, these can be overwritten by this setting + - NOTE + - OPTIMIZE # marks code that should be optimized before merging + - HACK # marks hack-arounds that should be removed before merging + + gofmt: + # simplify code: gofmt with `-s` option, true by default + simplify: true + + goimports: + # put imports beginning with prefix after 3rd-party packages; + # it's a comma-separated list of prefixes + local-prefixes: eshop + + gomnd: + settings: + mnd: + # the list of enabled checks, see https://github.com/tommy-muehle/go-mnd/#checks for description. + checks: argument,case,condition,operation,return,assign + + govet: + # report about shadowed variables + check-shadowing: true + + # settings per analyzer + settings: + printf: # analyzer name, run `go tool vet help` to see all analyzers + funcs: # run `go tool vet help printf` to see available settings for `printf` analyzer + - (github.com/golangci/golangci-lint/pkg/logutils.Log).Infof + - (github.com/golangci/golangci-lint/pkg/logutils.Log).Warnf + - (github.com/golangci/golangci-lint/pkg/logutils.Log).Errorf + - (github.com/golangci/golangci-lint/pkg/logutils.Log).Fatalf + + # enable or disable analyzers by name + enable: + - atomicalign + enable-all: false + disable: + - shadow + disable-all: false + + depguard: + list-type: blacklist + include-go-root: false + #packages: + # - github.com/user/name + #packages-with-error-message: + # specify an error message to output when a blacklisted package is used + # - github.com/user/name: "logging is allowed only by logutils.Log" + + lll: + # max line length, lines longer will be reported. Default is 120. + # '\t' is counted as 1 character by default, and can be changed with the tab-width option + line-length: 200 + # tab width in spaces. Default to 1. + tab-width: 1 + + maligned: + # print struct with more effective memory layout or not, false by default + suggest-new: true + + misspell: + # Correct spellings using locale preferences for US or UK. + # Default is to use a neutral variety of English. + # Setting locale to US will correct the British spelling of 'colour' to 'color'. + locale: US + ignore-words: + - someword + + nakedret: + # make an issue if func has more lines of code than this setting and it has naked returns; default is 30 + max-func-lines: 30 + + prealloc: + # XXX: we don't recommend using this linter before doing performance profiling. 
+ # For most programs usage of prealloc will be a premature optimization. + + # Report preallocation suggestions only on simple loops that have no returns/breaks/continues/gotos in them. + # True by default. + simple: true + range-loops: true # Report preallocation suggestions on range loops, true by default + for-loops: false # Report preallocation suggestions on for loops, false by default + + #rowserrcheck: + # packages: + # - github.com/user/name + + unparam: + # Inspect exported functions, default is false. Set to true if no external program/library imports your code. + # XXX: if you enable this setting, unparam will report a lot of false-positives in text editors: + # if it's called for subdir of a project it can't find external interfaces. All text editor integrations + # with golangci-lint call it on a directory with the changed file. + check-exported: false + + unused: + # treat code as a program (not a library) and report unused exported identifiers; default is false. + # XXX: if you enable this setting, unused will report a lot of false-positives in text editors: + # if it's called for subdir of a project it can't find funcs usages. All text editor integrations + # with golangci-lint call it on a directory with the changed file. + check-exported: false + + whitespace: + multi-if: false # Enforces newlines (or comments) after every multi-line if statement + multi-func: false # Enforces newlines (or comments) after every multi-line function signature + + wsl: + # If true append is only allowed to be cuddled if appending value is + # matching variables, fields or types on line above. Default is true. + strict-append: true + # Allow calls and assignments to be cuddled as long as the lines have any + # matching variables, fields or types. Default is true. + allow-assign-and-call: true + # Allow multiline assignments to be cuddled. Default is true. + allow-multiline-assign: true + # Allow declarations (var) to be cuddled. + allow-cuddle-declarations: false + # Allow trailing comments in ending of blocks + allow-trailing-comment: false + # Force newlines in end of case at this limit (0 = never). + force-case-trailing-whitespace: 0 + +issues: + # List of regexps of issue texts to exclude, empty list by default. + # But independently from this option we use default exclude patterns, + # it can be disabled by `exclude-use-default: false`. To list all + # excluded by default patterns execute `golangci-lint run --help` + exclude: + - abcdef + + # Excluding configuration per-path, per-linter, per-text and per-source + exclude-rules: + # Exclude some linters from running on tests files. + - path: _test\.go + linters: + - gocyclo + - errcheck + - dupl + - gosec + + # Exclude known linters from partially hard-vendored code, + # which is impossible to exclude via "nolint" comments. + - path: internal/hmac/ + text: "weak cryptographic primitive" + linters: + - gosec + + # Exclude lll issues for long lines with go:generate + - linters: + - lll + source: "^//go:generate " + + # Independently from option `exclude` we use default exclude patterns, + # it can be disabled by this option. To list all + # excluded by default patterns execute `golangci-lint run --help`. + # Default value for this option is true. + exclude-use-default: false + + # Maximum issues count per one linter. Set to 0 to disable. Default is 50. + max-issues-per-linter: 0 + + # Maximum count of issues with the same text. Set to 0 to disable. Default is 3. 
+ max-same-issues: 0 + + # Show only new issues: if there are unstaged changes or untracked files, + # only those changes are analyzed, else only changes in HEAD~ are analyzed. + # It's a super-useful option for integration of golangci-lint into existing + # large codebase. It's not practical to fix all existing issues at the moment + # of integration: much better don't allow issues in new code. + # Default is false. + new: false + + # Show only new issues created after git revision `REV` + new-from-rev: "" + +service: + golangci-lint-version: 1.48.0 # use the fixed version to not introduce new linters unexpectedly diff --git a/6_micro-cluster/example-2-mono-repo/inventory/Jenkinsfile b/6_micro-cluster/example-2-mono-repo/inventory/Jenkinsfile new file mode 100644 index 0000000..cc76915 --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/inventory/Jenkinsfile @@ -0,0 +1,200 @@ +pipeline { + agent any + + stages { + stage("Check Build Branch") { + steps { + echo "Checking build branch in progress ......" + script { + if (env.GIT_BRANCH ==~ /^v([0-9])+\.([0-9])+\.([0-9])+.*/) { + echo "building production environment, tag=${env.GIT_BRANCH}" + } else if (env.GIT_BRANCH ==~ /^test-([0-9])+\.([0-9])+\.([0-9])+.*/) { + echo "building test environment, tag=${env.GIT_BRANCH}" + } else if (env.GIT_BRANCH ==~ /(origin\/develop)/) { + echo "building development environment, /origin/develop" + } else { + echo "The build branch ${env.GIT_BRANCH} is not legal, allowing to build the development environment branch (/origin/develop), the test environment branch (e.g. test-1.0.0), and the production environment branch (e.g. v1.0.0)" + sh 'exit 1' + } + } + echo "Check build branch complete." + } + } + + stage("Check Code") { + steps { + echo "Checking code in progress ......" + sh 'make ci-lint' + echo "Check code complete." + } + } + + stage("Unit Testing") { + steps { + echo "Unit testing in progress ......" + sh 'make test' + echo "Unit testing complete." + } + } + + stage("Compile Code") { + steps { + echo "Compiling code in progress ......" + sh 'make build' + echo "compile code complete." + } + } + + stage("Build Image") { + steps { + echo "building image in progress ......" + script { + registryHost="" + tagName="" + if (env.GIT_BRANCH ==~ /^v([0-9])+\.([0-9])+\.([0-9])+.*/) { + if (env.PROD_REPO_HOST == null) { + echo "The value of environment variable PROD_REPO_HOST is empty, please set the value of PROD_REPO_HOST in [Jenkins Management] --> [System Settings] --> [Environment Variables]." + sh 'exit 1' + } + echo "Use the production environment image repository ${env.PROD_REPO_HOST}" + registryHost=env.PROD_REPO_HOST + tagName=env.GIT_BRANCH + } + else if (env.GIT_BRANCH ==~ /^test-([0-9])+\.([0-9])+\.([0-9])+.*/) { + if (env.TEST_REPO_HOST == null) { + echo "The value of environment variable TEST_REPO_HOST is empty, please set the value of TEST_REPO_HOST in [Jenkins Management] --> [System Settings] --> [Environment Variables]." + sh 'exit 1' + } + echo "Use the test environment image repository ${env.TEST_REPO_HOST}" + registryHost=env.TEST_REPO_HOST + tagName=env.GIT_BRANCH + } + else { + if (env.DEV_REPO_HOST == null) { + echo "The value of environment variable DEV_REPO_HOST is empty, please set the value of DEV_REPO_HOST in [Jenkins Management] --> [System Settings] --> [Environment Variables]." 
+ sh 'exit 1' + } + echo "Using the development environment ${env.DEV_REPO_HOST}" + registryHost=env.DEV_REPO_HOST + } + sh "make image-build REPO_HOST=$registryHost TAG=$tagName" + } + echo "Build image complete" + } + } + + stage("Push Image") { + steps { + echo "pushing image in progress ......" + script { + registryHost="" + tagName="" + if (env.GIT_BRANCH ==~ /^v([0-9])+\.([0-9])+\.([0-9])+.*/) { + if (env.PROD_REPO_HOST == null) { + echo "The value of environment variable PROD_REPO_HOST is empty, please set the value of PROD_REPO_HOST in [Jenkins Management] --> [System Settings] --> [Environment Variables]." + sh 'exit 1' + } + echo "Use the production environment image repository ${env.PROD_REPO_HOST}" + registryHost=env.PROD_REPO_HOST + tagName=env.GIT_BRANCH + } + else if (env.GIT_BRANCH ==~ /^test-([0-9])+\.([0-9])+\.([0-9])+.*/) { + if (env.TEST_REPO_HOST == null) { + echo "The value of environment variable TEST_REPO_HOST is empty, please set the value of TEST_REPO_HOST in [Jenkins Management] --> [System Settings] --> [Environment Variables]." + sh 'exit 1' + } + echo "Use the test environment image repository ${env.TEST_REPO_HOST}" + registryHost=env.TEST_REPO_HOST + tagName=env.GIT_BRANCH + } + else { + if (env.DEV_REPO_HOST == null) { + echo "The value of environment variable DEV_REPO_HOST is empty, please set the value of DEV_REPO_HOST in [Jenkins Management] --> [System Settings] --> [Environment Variables]." + sh 'exit 1' + } + echo "Using the development environment ${env.DEV_REPO_HOST}" + registryHost=env.DEV_REPO_HOST + } + sh "make image-push REPO_HOST=$registryHost TAG=$tagName" + } + echo "push image complete, clear image complete." + } + } + + stage("Deploy to k8s") { + when { expression { return env.GIT_BRANCH ==~ /(origin\/staging|origin\/develop)/ } } + steps { + echo "Deploying to k8s in progress ......" + sh 'make deploy-k8s' + echo "Deploy to k8s complete." + } + } + } + + post { + always { + echo 'One way or another, I have finished' + echo sh(returnStdout: true, script: 'env') + deleteDir() /* clean up our workspace */ + } + success { + SendDingding("success") + //SendEmail("success") + echo 'structure success' + } + failure { + SendDingding("failure") + //SendEmail("failure") + echo 'structure failure' + } + } +} + +// Notifications using dingding +void SendDingding(res) +{ + // Fill in the corresponding cell phone number and specify a person to be notified in the pinned group + tel_num="xxxxxxxxxxx" + dingding_url="https://oapi.dingtalk.com/robot/send\\?access_token\\=your dingding robot token" + + branchName="" + if (env.GIT_BRANCH ==~ /^v([0-9])+\.([0-9])+\.([0-9])+.*/) { + branchName="${env.SERVER_PLATFORM} production environment, tag=${env.GIT_BRANCH}, ${env.JOB_NAME}" + } + else if (env.GIT_BRANCH ==~ /^test-([0-9])+\.([0-9])+\.([0-9])+.*/){ + branchName="${env.SERVER_PLATFORM} test environment, tag=${env.GIT_BRANCH}, ${env.JOB_NAME}" + } + else { + branchName="${env.SERVER_PLATFORM} develop environment, branch=${env.GIT_BRANCH}, ${env.JOB_NAME}" + } + + json_msg="" + if( res == "success" ) { + json_msg='{\\"msgtype\\":\\"text\\",\\"text\\":{\\"content\\":\\"@' + tel_num +' [OK] ' + "${branchName} ${env.BUILD_NUMBER}th " + 'build success. \\"},\\"at\\":{\\"atMobiles\\":[\\"' + tel_num + '\\"],\\"isAtAll\\":false}}' + } + else { + json_msg='{\\"msgtype\\":\\"text\\",\\"text\\":{\\"content\\":\\"@' + tel_num +' [cry] ' + "${branchName} ${env.BUILD_NUMBER}th " + 'build failed, please deal with it promptly! 
\\"},\\"at\\":{\\"atMobiles\\":[\\"' + tel_num + '\\"],\\"isAtAll\\":false}}' + } + + post_header="Content-Type:application/json;charset=utf-8" + sh_cmd="curl -X POST " + dingding_url + " -H " + "\'" + post_header + "\'" + " -d " + "\"" + json_msg + "\"" + sh sh_cmd +} + +// Notifications using email +void SendEmail(res) +{ + emailAddr="xxx@xxx.com" + if( res == "success" ) + { + mail to: emailAddr, + subject: "Build Success: ${currentBuild.fullDisplayName}", + body: "\nJob name: ${env.JOB_NAME} ${env.BUILD_NUMBER}th build. \n\n For more information, please see: ${env.BUILD_URL}" + } + else + { + mail to: emailAddr, + subject: "Build Failed: ${currentBuild.fullDisplayName}", + body: "\nJob name: ${env.JOB_NAME} ${env.BUILD_NUMBER}th build. \n\n For more information, please see: ${env.BUILD_URL}" + } +} diff --git a/6_micro-cluster/example-2-mono-repo/inventory/Makefile b/6_micro-cluster/example-2-mono-repo/inventory/Makefile new file mode 100644 index 0000000..72d552d --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/inventory/Makefile @@ -0,0 +1,183 @@ +SHELL := /bin/bash + +PROJECT_NAME := "eshop" +PKG := "$(PROJECT_NAME)" +PKG_LIST := $(shell go list ${PKG}/... | grep -v /vendor/ | grep -v /api/) + + + + +.PHONY: ci-lint +# Check code formatting, naming conventions, security, maintainability, etc. the rules in the .golangci.yml file +ci-lint: + @gofmt -s -w . + golangci-lint run ./... + + +.PHONY: test +# Test *_test.go files, the parameter -count=1 means that caching is disabled +test: + go test -count=1 -short ${PKG_LIST} + + +.PHONY: cover +# Generate test coverage +cover: + go test -short -coverprofile=cover.out -covermode=atomic ${PKG_LIST} + go tool cover -html=cover.out + + +.PHONY: graph +# Generate interactive visual function dependency graphs +graph: + @echo "generating graph ......" + @cp -f cmd/inventory/main.go . + go-callvis -skipbrowser -format=svg -nostd -file=inventory eshop + @rm -f main.go inventory.gv + + + +.PHONY: proto +# Generate *.go and template code by proto files, the default is all the proto files in the api directory. you can specify the proto file, multiple files are separated by commas, e.g. make proto FILES=api/user/v1/user.proto +proto: + @bash scripts/protoc.sh $(FILES) + go mod tidy + @gofmt -s -w . + + +.PHONY: proto-doc +# Generate doc from *.proto files +proto-doc: + @bash scripts/proto-doc.sh + + +.PHONY: build +# Build inventory for linux amd64 binary +build: + @echo "building 'inventory', linux binary file will output to 'cmd/inventory'" + @cd cmd/inventory && CGO_ENABLED=0 GOOS=linux GOARCH=amd64 go build + + + +.PHONY: run +# Build and run service +run: + @bash scripts/run.sh + + +.PHONY: run-nohup +# Run service with nohup in local, if you want to stop the server, pass the parameter stop, e.g. make run-nohup CMD=stop +run-nohup: + @bash scripts/run-nohup.sh $(CMD) + + +.PHONY: run-docker +# Run service in local docker, if you want to update the service, run the make run-docker command again +run-docker: image-build-local + @bash scripts/deploy-docker.sh + + +.PHONY: binary-package +# Packaged binary files +binary-package: build + @bash scripts/binary-package.sh + + +.PHONY: deploy-binary +# Deploy binary to remote linux server, e.g. 
make deploy-binary USER=root PWD=123456 IP=192.168.1.10 +deploy-binary: binary-package + @expect scripts/deploy-binary.sh $(USER) $(PWD) $(IP) + + +.PHONY: image-build-local +# Build image for local docker, tag=latest, use binary files to build +image-build-local: build + @bash scripts/image-build-local.sh + + +.PHONY: image-build +# Build image for remote repositories, use binary files to build, e.g. make image-build REPO_HOST=addr TAG=latest +image-build: + @bash scripts/image-build.sh $(REPO_HOST) $(TAG) + + +.PHONY: image-build2 +# Build image for remote repositories, phase II build, e.g. make image-build2 REPO_HOST=addr TAG=latest +image-build2: + @bash scripts/image-build2.sh $(REPO_HOST) $(TAG) + + +.PHONY: image-push +# Push docker image to remote repositories, e.g. make image-push REPO_HOST=addr TAG=latest +image-push: + @bash scripts/image-push.sh $(REPO_HOST) $(TAG) + + +.PHONY: deploy-k8s +# Deploy service to k8s +deploy-k8s: + @bash scripts/deploy-k8s.sh + + +.PHONY: image-build-rpc-test +# Build grpc test image for remote repositories, e.g. make image-build-rpc-test REPO_HOST=addr TAG=latest +image-build-rpc-test: + @bash scripts/image-rpc-test.sh $(REPO_HOST) $(TAG) + + +.PHONY: patch +# Patch some dependent code, e.g. make patch TYPE=types-pb , make patch TYPE=init-, your_db_driver is mysql, mongodb, postgresql, tidb, sqlite, for example: make patch TYPE=init-mysql +patch: + @bash scripts/patch.sh $(TYPE) + + +.PHONY: copy-proto +# Copy proto file from the grpc server directory, multiple directories or proto files separated by commas. default is to copy all proto files, e.g. make copy-proto SERVER=yourServerDir, copy specified proto files, e.g. make copy-proto SERVER=yourServerDir PROTO_FILE=yourProtoFile1,yourProtoFile2 +copy-proto: + @sponge patch copy-proto --server-dir=$(SERVER) --proto-file=$(PROTO_FILE) + + +.PHONY: modify-proto-pkg-name +# Modify the 'package' and 'go_package' names of all proto files in the 'api' directory +modify-proto-pkg-name: + @sponge patch modify-proto-package --dir=api --server-dir=. + + +.PHONY: update-config +# Update internal/config code base on yaml file +update-config: + @sponge config --server-dir=. 
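+
+# Illustrative workflow (comments only, not a target): these steps chain the targets
+# defined above for a typical local iteration; the proto file path is an assumed
+# example for this service's api directory.
+#
+#   make proto FILES=api/inventory/v1/inventory.proto   # regenerate pb files and template code
+#   make ci-lint test                                    # lint the code and run unit tests
+#   make run                                             # build and start the service locally
+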
+ + +.PHONY: clean +# Clean binary file, cover.out, template file +clean: + @rm -vrf cmd/inventory/inventory* + @rm -vrf cover.out + @rm -vrf main.go inventory.gv + @rm -vrf internal/ecode/*.go.gen* + @rm -vrf internal/routers/*.go.gen* + @rm -vrf internal/handler/*.go.gen* + @rm -vrf internal/service/*.go.gen* + @rm -rf inventory-binary.tar.gz + @echo "clean finished" + + +# Show help +help: + @echo '' + @echo 'Usage:' + @echo ' make ' + @echo '' + @echo 'Targets:' + @awk '/^[a-zA-Z\-_0-9]+:/ { \ + helpMessage = match(lastLine, /^# (.*)/); \ + if (helpMessage) { \ + helpCommand = substr($$1, 0, index($$1, ":")-1); \ + helpMessage = substr(lastLine, RSTART + 2, RLENGTH); \ + printf "\033[1;36m %-22s\033[0m %s\n", helpCommand,helpMessage; \ + } \ + } \ + { lastLine = $$0 }' $(MAKEFILE_LIST) + +.DEFAULT_GOAL := all diff --git a/6_micro-cluster/example-2-mono-repo/inventory/README.md b/6_micro-cluster/example-2-mono-repo/inventory/README.md new file mode 100644 index 0000000..7c22382 --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/inventory/README.md @@ -0,0 +1,9 @@ +## inventory + +| Feature | Value | +| :----------------: | :-----------: | +| Server name | `inventory` | +| Server type | `grpc-pb` | +| Go module name | `eshop` | +| Repository type | `mono-repo` | + diff --git a/6_micro-cluster/example-2-mono-repo/inventory/cmd/inventory/initial/close.go b/6_micro-cluster/example-2-mono-repo/inventory/cmd/inventory/initial/close.go new file mode 100644 index 0000000..3613914 --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/inventory/cmd/inventory/initial/close.go @@ -0,0 +1,44 @@ +package initial + +import ( + "context" + "time" + + "github.com/zhufuyi/sponge/pkg/app" + "github.com/zhufuyi/sponge/pkg/tracer" + + "eshop/inventory/internal/config" + //"eshop/inventory/internal/model" +) + +// Close releasing resources after service exit +func Close(servers []app.IServer) []app.Close { + var closes []app.Close + + // close server + for _, s := range servers { + closes = append(closes, s.Stop) + } + + // close database + //closes = append(closes, func() error { + // return model.CloseDB() + //}) + + // close redis + //if config.Get().App.CacheType == "redis" { + // closes = append(closes, func() error { + // return model.CloseRedis() + // }) + //} + + // close tracing + if config.Get().App.EnableTrace { + closes = append(closes, func() error { + ctx, _ := context.WithTimeout(context.Background(), 2*time.Second) //nolint + return tracer.Close(ctx) + }) + } + + return closes +} diff --git a/6_micro-cluster/example-2-mono-repo/inventory/cmd/inventory/initial/createService.go b/6_micro-cluster/example-2-mono-repo/inventory/cmd/inventory/initial/createService.go new file mode 100644 index 0000000..35e1940 --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/inventory/cmd/inventory/initial/createService.go @@ -0,0 +1,97 @@ +package initial + +import ( + "fmt" + "strconv" + + "github.com/zhufuyi/sponge/pkg/app" + "github.com/zhufuyi/sponge/pkg/logger" + "github.com/zhufuyi/sponge/pkg/servicerd/registry" + "github.com/zhufuyi/sponge/pkg/servicerd/registry/consul" + "github.com/zhufuyi/sponge/pkg/servicerd/registry/etcd" + "github.com/zhufuyi/sponge/pkg/servicerd/registry/nacos" + + "eshop/inventory/internal/config" + "eshop/inventory/internal/server" +) + +// CreateServices create grpc or http service +func CreateServices() []app.IServer { + var cfg = config.Get() + var servers []app.IServer + + // creating grpc service + grpcAddr := ":" + strconv.Itoa(cfg.Grpc.Port) + grpcRegistry, 
grpcInstance := registerService("grpc", cfg.App.Host, cfg.Grpc.Port) + grpcServer := server.NewGRPCServer(grpcAddr, + server.WithGrpcRegistry(grpcRegistry, grpcInstance), + ) + servers = append(servers, grpcServer) + + return servers +} + +func registerService(scheme string, host string, port int) (registry.Registry, *registry.ServiceInstance) { + var ( + instanceEndpoint = fmt.Sprintf("%s://%s:%d", scheme, host, port) + cfg = config.Get() + + iRegistry registry.Registry + instance *registry.ServiceInstance + err error + + id = cfg.App.Name + "_" + scheme + "_" + host + logField logger.Field + ) + + switch cfg.App.RegistryDiscoveryType { + // registering service with consul + case "consul": + iRegistry, instance, err = consul.NewRegistry( + cfg.Consul.Addr, + id, + cfg.App.Name, + []string{instanceEndpoint}, + ) + if err != nil { + panic(err) + } + logField = logger.Any("consulAddress", cfg.Consul.Addr) + + // registering service with etcd + case "etcd": + iRegistry, instance, err = etcd.NewRegistry( + cfg.Etcd.Addrs, + id, + cfg.App.Name, + []string{instanceEndpoint}, + ) + if err != nil { + panic(err) + } + logField = logger.Any("etcdAddress", cfg.Etcd.Addrs) + + // registering service with nacos + case "nacos": + iRegistry, instance, err = nacos.NewRegistry( + cfg.NacosRd.IPAddr, + cfg.NacosRd.Port, + cfg.NacosRd.NamespaceID, + id, + cfg.App.Name, + []string{instanceEndpoint}, + ) + if err != nil { + panic(err) + } + logField = logger.String("nacosAddress", fmt.Sprintf("%v:%d", cfg.NacosRd.IPAddr, cfg.NacosRd.Port)) + } + + if instance != nil { + msg := fmt.Sprintf("register service address to %s", cfg.App.RegistryDiscoveryType) + logger.Info(msg, logField, logger.String("id", id), logger.String("name", cfg.App.Name), logger.String("endpoint", instanceEndpoint)) + return iRegistry, instance + } + + return nil, nil +} diff --git a/6_micro-cluster/example-2-mono-repo/inventory/cmd/inventory/initial/initApp.go b/6_micro-cluster/example-2-mono-repo/inventory/cmd/inventory/initial/initApp.go new file mode 100644 index 0000000..7db321f --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/inventory/cmd/inventory/initial/initApp.go @@ -0,0 +1,132 @@ +// Package initial is the package that starts the service to initialize the service, including +// the initialization configuration, service configuration, connecting to the database, and +// resource release needed when shutting down the service. 
+package initial + +import ( + "flag" + "fmt" + "strconv" + + "github.com/jinzhu/copier" + + "github.com/zhufuyi/sponge/pkg/conf" + "github.com/zhufuyi/sponge/pkg/logger" + "github.com/zhufuyi/sponge/pkg/nacoscli" + "github.com/zhufuyi/sponge/pkg/stat" + "github.com/zhufuyi/sponge/pkg/tracer" + + "eshop/inventory/configs" + "eshop/inventory/internal/config" + //"eshop/inventory/internal/model" +) + +var ( + version string + configFile string + enableConfigCenter bool +) + +// InitApp initial app configuration +func InitApp() { + initConfig() + cfg := config.Get() + + // initializing log + _, err := logger.Init( + logger.WithLevel(cfg.Logger.Level), + logger.WithFormat(cfg.Logger.Format), + logger.WithSave( + cfg.Logger.IsSave, + //logger.WithFileName(cfg.Logger.LogFileConfig.Filename), + //logger.WithFileMaxSize(cfg.Logger.LogFileConfig.MaxSize), + //logger.WithFileMaxBackups(cfg.Logger.LogFileConfig.MaxBackups), + //logger.WithFileMaxAge(cfg.Logger.LogFileConfig.MaxAge), + //logger.WithFileIsCompression(cfg.Logger.LogFileConfig.IsCompression), + ), + ) + if err != nil { + panic(err) + } + logger.Debug(config.Show()) + logger.Info("[logger] was initialized") + + // initializing tracing + if cfg.App.EnableTrace { + tracer.InitWithConfig( + cfg.App.Name, + cfg.App.Env, + cfg.App.Version, + cfg.Jaeger.AgentHost, + strconv.Itoa(cfg.Jaeger.AgentPort), + cfg.App.TracingSamplingRate, + ) + logger.Info("[tracer] was initialized") + } + + // initializing the print system and process resources + if cfg.App.EnableStat { + stat.Init( + stat.WithLog(logger.Get()), + stat.WithAlarm(), // invalid if it is windows, the default threshold for cpu and memory is 0.8, you can modify them + stat.WithPrintField(logger.String("service_name", cfg.App.Name), logger.String("host", cfg.App.Host)), + ) + logger.Info("[resource statistics] was initialized") + } + + // initializing database + //model.InitDB() + //logger.Infof("[%s] was initialized", cfg.Database.Driver) + //model.InitCache(cfg.App.CacheType) + //if cfg.App.CacheType != "" { + // logger.Infof("[%s] was initialized", cfg.App.CacheType) + //} +} + +func initConfig() { + flag.StringVar(&version, "version", "", "service Version Number") + flag.BoolVar(&enableConfigCenter, "enable-cc", false, "whether to get from the configuration center, "+ + "if true, the '-c' parameter indicates the configuration center") + flag.StringVar(&configFile, "c", "", "configuration file") + flag.Parse() + + if enableConfigCenter { + // get the configuration from the configuration center (first get the nacos configuration, + // then read the service configuration according to the nacos configuration center) + if configFile == "" { + configFile = configs.Path("inventory_cc.yml") + } + nacosConfig, err := config.NewCenter(configFile) + if err != nil { + panic(err) + } + appConfig := &config.Config{} + params := &nacoscli.Params{} + _ = copier.Copy(params, &nacosConfig.Nacos) + format, data, err := nacoscli.GetConfig(params) + if err != nil { + panic(fmt.Sprintf("connect to configuration center err, %v", err)) + } + err = conf.ParseConfigData(data, format, appConfig) + if err != nil { + panic(fmt.Sprintf("parse configuration data err, %v", err)) + } + if appConfig.App.Name == "" { + panic("read the config from center error, config data is empty") + } + config.Set(appConfig) + } else { + // get configuration from local configuration file + if configFile == "" { + configFile = configs.Path("inventory.yml") + } + err := config.Init(configFile) + if err != nil { + panic("init config 
error: " + err.Error()) + } + } + + if version != "" { + config.Get().App.Version = version + } +} diff --git a/6_micro-cluster/example-2-mono-repo/inventory/cmd/inventory/main.go b/6_micro-cluster/example-2-mono-repo/inventory/cmd/inventory/main.go new file mode 100644 index 0000000..538e2ff --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/inventory/cmd/inventory/main.go @@ -0,0 +1,17 @@ +// Package main is the grpc server of the application. +package main + +import ( + "github.com/zhufuyi/sponge/pkg/app" + + "eshop/inventory/cmd/inventory/initial" +) + +func main() { + initial.InitApp() + services := initial.CreateServices() + closes := initial.Close(services) + + a := app.New(services, closes) + a.Run() +} diff --git a/6_micro-cluster/example-2-mono-repo/inventory/configs/inventory.yml b/6_micro-cluster/example-2-mono-repo/inventory/configs/inventory.yml new file mode 100644 index 0000000..7b66f4d --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/inventory/configs/inventory.yml @@ -0,0 +1,117 @@ +# Generate the go struct command: sponge config --server-dir=./serverDir + +# app settings +app: + name: "inventory" # server name + env: "dev" # runtime environment, dev: development environment, prod: production environment, test: test environment + version: "v0.0.0" + host: "127.0.0.1" # domain or ip, for service registration + enableStat: true # whether to turn on printing statistics, true:enable, false:disable + enableMetrics: true # whether to turn on indicator collection, true:enable, false:disable + enableHTTPProfile: false # whether to turn on performance analysis, true:enable, false:disable + enableLimit: false # whether to turn on rate limiting (adaptive), true:on, false:off + enableCircuitBreaker: false # whether to turn on circuit breaker(adaptive), true:on, false:off + enableTrace: false # whether to turn on trace, true:enable, false:disable, if true jaeger configuration must be set + tracingSamplingRate: 1.0 # tracing sampling rate, between 0 and 1, 0 means no sampling, 1 means sampling all links + registryDiscoveryType: "" # registry and discovery types: consul, etcd, nacos, if empty, registration and discovery are not used + cacheType: "" # cache type, if empty, the cache is not used, support for "memory" and "redis", if set to redis, must set redis configuration + + +# grpc server settings +grpc: + port: 28282 # listen port + httpPort: 28283 # profile and metrics ports + enableToken: false # whether to enable server-side token authentication, default appID=grpc, appKey=123456 + # serverSecure parameter setting + # if type="", it means no secure connection, no need to fill in any parameters + # if type="one-way", it means server-side certification, only the fields 'certFile' and 'keyFile' should be filled in + # if type="two-way", it means both client and server side certification, fill in all fields + serverSecure: + type: "" # secures type, "", "one-way", "two-way" + caFile: "" # ca certificate file, valid only in "two-way", absolute path + certFile: "" # server side cert file, absolute path + keyFile: "" # server side key file, absolute path + + +# grpc client-side settings, support for setting up multiple grpc clients. 
+grpcClient: + - name: "your_grpc_service_name" # grpc service name, used for service discovery + host: "127.0.0.1" # grpc service address, used for direct connection + port: 8282 # grpc service port + timeout: 0 # request timeout, unit(second), if 0 means not set, if greater than 0 means set timeout, valid only for unary grpc type + registryDiscoveryType: "" # registration and discovery types: consul, etcd, nacos, if empty, connecting to server using host and port + enableLoadBalance: true # whether to turn on the load balancer + # clientSecure parameter setting + # if type="", it means no secure connection, no need to fill in any parameters + # if type="one-way", it means server-side certification, only the fields 'serverName' and 'certFile' should be filled in + # if type="two-way", it means both client and server side certification, fill in all fields + clientSecure: + type: "" # secures type, "", "one-way", "two-way" + serverName: "" # server name, e.g. *.foo.com + caFile: "" # client side ca file, valid only in "two-way", absolute path + certFile: "" # client side cert file, absolute path, if secureType="one-way", fill in server side cert file here + keyFile: "" # client side key file, valid only in "two-way", absolute path + clientToken: + enable: false # whether to enable token authentication + appID: "" # app id + appKey: "" # app key + + + +# logger settings +logger: + level: "info" # output log levels debug, info, warn, error, default is debug + format: "console" # output format, console or json, default is console + isSave: false # false:output to terminal, true:output to file, default is false + #logFileConfig: # Effective when isSave=true + #filename: "out.log" # File name (default is out.log) + #maxSize: 20 # Maximum file size (MB, default is 10MB) + #maxBackups: 50 # Maximum number of old files to retain (default is 100) + #maxAge: 15 # Maximum number of days to retain old files (default is 30 days) + #isCompression: true # Whether to compress/archive old files (default is false) + + +# set database configuration. reference-db-config-url +database: + driver: "mysql" # database driver + # mysql settings + mysql: + # dsn format, :@(:)/?[k=v& ......] + dsn: "root:123456@(192.168.3.37:3306)/account?parseTime=true&loc=Local&charset=utf8,utf8mb4" + enableLog: true # whether to turn on printing of all logs + maxIdleConns: 10 # set the maximum number of connections in the idle connection pool + maxOpenConns: 100 # set the maximum number of open database connections + connMaxLifetime: 30 # sets the maximum time for which the connection can be reused, in minutes + + + +# redis settings +redis: + # dsn format, [user]:@127.0.0.1:6379/[db], the default user is default, redis version 6.0 and above only supports user. 
+ dsn: "default:123456@192.168.3.37:6379/0" + dialTimeout: 10 # connection timeout, unit(second) + readTimeout: 2 # read timeout, unit(second) + writeTimeout: 2 # write timeout, unit(second) + + +# jaeger settings +jaeger: + agentHost: "192.168.3.37" + agentPort: 6831 + + +# consul settings +consul: + addr: "192.168.3.37:8500" + + +# etcd settings +etcd: + addrs: ["192.168.3.37:2379"] + + +# nacos settings, used in service registration discovery +nacosRd: + ipAddr: "192.168.3.37" + port: 8848 + namespaceID: "3454d2b5-2455-4d0e-bf6d-e033b086bb4c" # namespace id diff --git a/6_micro-cluster/example-2-mono-repo/inventory/configs/inventory_cc.yml b/6_micro-cluster/example-2-mono-repo/inventory/configs/inventory_cc.yml new file mode 100644 index 0000000..f77e630 --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/inventory/configs/inventory_cc.yml @@ -0,0 +1,13 @@ +# Generate the go struct command: sponge config --server-dir=./serverDir +# App config from nacos + +# nacos settings +nacos: + ipAddr: "192.168.3.37" # server address + port: 8848 # listening port + scheme: "http" # http or grpc + contextPath: "/nacos" # path + namespaceID: "3454d2b5-2455-4d0e-bf6d-e033b086bb4c" # namespace id + group: "dev" # group name: dev, prod, test + dataID: "inventory.yml" # config file id + format: "yaml" # configuration file type: json,yaml,toml diff --git a/6_micro-cluster/example-2-mono-repo/inventory/configs/location.go b/6_micro-cluster/example-2-mono-repo/inventory/configs/location.go new file mode 100644 index 0000000..6b610a6 --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/inventory/configs/location.go @@ -0,0 +1,23 @@ +// Package configs used to locate config file. +package configs + +import ( + "path/filepath" + "runtime" +) + +var basePath string + +func init() { + _, currentFile, _, _ := runtime.Caller(0) //nolint + basePath = filepath.Dir(currentFile) +} + +// Path return absolute path +func Path(rel string) string { + if filepath.IsAbs(rel) { + return rel + } + + return filepath.Join(basePath, rel) +} diff --git a/6_micro-cluster/example-2-mono-repo/inventory/deployments/binary/README.md b/6_micro-cluster/example-2-mono-repo/inventory/deployments/binary/README.md new file mode 100644 index 0000000..6bad1b1 --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/inventory/deployments/binary/README.md @@ -0,0 +1,26 @@ + +copy the configuration file to the configs directory and binary file before starting the service. + +``` +├── configs +│ └── inventory.yml +├── inventory +├── deploy.sh +└── run.sh +``` + +### Running and stopping service manually + +Running service: + +> ./run.sh + +Stopping the service: + +> ./run.sh stop + +
+ +### Automated deployment service + +> ./deploy.sh diff --git a/6_micro-cluster/example-2-mono-repo/inventory/deployments/binary/deploy.sh b/6_micro-cluster/example-2-mono-repo/inventory/deployments/binary/deploy.sh new file mode 100644 index 0000000..0fd3878 --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/inventory/deployments/binary/deploy.sh @@ -0,0 +1,33 @@ +#!/bin/bash + +serviceName="inventory" + +function checkResult() { + result=$1 + if [ ${result} -ne 0 ]; then + exit ${result} + fi +} + +# determine if the startup service script run.sh exists +runFile="~/app/${serviceName}/run.sh" +if [ ! -f "$runFile" ]; then + # if it does not exist, copy the entire directory + mkdir -p ~/app + cp -rf /tmp/${serviceName}-binary ~/app/ + checkResult $? + rm -rf /tmp/${serviceName}-binary* +else + # replace only the binary file if it exists + cp -f ${serviceName}-binary/${serviceName} ~/app/${serviceName}-binary/${serviceName} + checkResult $? + rm -rf /tmp/${serviceName}-binary* +fi + +# running service +cd ~/app/${serviceName}-binary +chmod +x run.sh +./run.sh +checkResult $? + +echo "server directory is ~/app/${serviceName}-binary" diff --git a/6_micro-cluster/example-2-mono-repo/inventory/deployments/binary/run.sh b/6_micro-cluster/example-2-mono-repo/inventory/deployments/binary/run.sh new file mode 100644 index 0000000..7d402ac --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/inventory/deployments/binary/run.sh @@ -0,0 +1,46 @@ +#!/bin/bash + +serviceName="inventory" +cmdStr="./${serviceName} -c configs/${serviceName}.yml" + +chmod +x ./${serviceName} + +stopService(){ + NAME=$1 + + ID=`ps -ef | grep "$NAME" | grep -v "$0" | grep -v "grep" | awk '{print $2}'` + if [ -n "$ID" ]; then + for id in $ID + do + kill -9 $id + echo "Stopped ${NAME} service successfully, process ID=${ID}" + done + fi +} + +startService() { + NAME=$1 + + nohup ${cmdStr} > ${serviceName}.log 2>&1 & + sleep 1 + + ID=`ps -ef | grep "$NAME" | grep -v "$0" | grep -v "grep" | awk '{print $2}'` + if [ -n "$ID" ]; then + echo "Start the ${NAME} service ...... process ID=${ID}" + else + echo "Failed to start ${NAME} service" + return 1 + fi + return 0 +} + + +stopService ${serviceName} +if [ "$1"x != "stop"x ] ;then + sleep 1 + startService ${serviceName} + exit $? + echo "" +else + echo "Service ${serviceName} has stopped" +fi diff --git a/6_micro-cluster/example-2-mono-repo/inventory/deployments/docker-compose/README.md b/6_micro-cluster/example-2-mono-repo/inventory/deployments/docker-compose/README.md new file mode 100644 index 0000000..9776a55 --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/inventory/deployments/docker-compose/README.md @@ -0,0 +1,12 @@ + +copy the configuration file to the configs directory before starting the service. 
+ +``` +├── configs +│ └── inventory.yml +└── docker-compose.yml +``` + +running service: + +> docker-compose up -d diff --git a/6_micro-cluster/example-2-mono-repo/inventory/deployments/docker-compose/docker-compose.yml b/6_micro-cluster/example-2-mono-repo/inventory/deployments/docker-compose/docker-compose.yml new file mode 100644 index 0000000..3686ace --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/inventory/deployments/docker-compose/docker-compose.yml @@ -0,0 +1,21 @@ +version: "3.7" + +services: + inventory: + image: eshop/inventory:latest + container_name: inventory + restart: always + command: ["./inventory", "-c", "/app/configs/inventory.yml"] + volumes: + - $PWD/configs:/app/configs + + ports: + - "8282:8282" # grpc port + - "8283:8283" # grpc metrics or pprof port + healthcheck: + test: ["CMD", "grpc_health_probe", "-addr=localhost:8282"] # grpc health check, note: the image must contain the grpc_health_probe command + + interval: 10s # interval time + timeout: 5s # timeout time + retries: 3 # number of retries + start_period: 10s # how long after start-up does the check begin diff --git a/6_micro-cluster/example-2-mono-repo/inventory/deployments/kubernetes/README.md b/6_micro-cluster/example-2-mono-repo/inventory/deployments/kubernetes/README.md new file mode 100644 index 0000000..6cea145 --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/inventory/deployments/kubernetes/README.md @@ -0,0 +1,32 @@ +Before deploying the service to k8s, create a Secret that pulls image permissions for k8s in a docker host that is already logged into the image repository, with the following command. + +```bash +kubectl create secret generic docker-auth-secret \ + --from-file=.dockerconfigjson=/root/.docker/config.json \ + --type=kubernetes.io/dockerconfigjson +``` + +
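+
+Note: the Secret is namespaced and `imagePullSecrets` can only reference a Secret in the same namespace as the Pod, so make sure it exists in this project's namespace. A minimal sketch, assuming the `eshop` namespace defined below has already been created:
+
+```bash
+# create the secret directly in the eshop namespace
+kubectl create secret generic docker-auth-secret \
+  --from-file=.dockerconfigjson=/root/.docker/config.json \
+  --type=kubernetes.io/dockerconfigjson \
+  -n eshop
+
+# confirm the secret exists
+kubectl get secret docker-auth-secret -n eshop
+```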
+
+run the server:
+
+```bash
+cd deployments
+
+kubectl apply -f ./*namespace.yml
+
+kubectl apply -f ./
+```
+
+view the start-up status:
+
+> kubectl get all -n eshop
+
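+
+You can also wait for the rollout to complete and tail the logs (the deployment name `inventory-dm` and namespace `eshop` come from the manifests in this directory):
+
+```bash
+kubectl rollout status deployment/inventory-dm -n eshop
+kubectl logs -f deployment/inventory-dm -n eshop
+```
+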
+ +simple test of http port + +```bash +# mapping to the http port of the service on the local port +kubectl port-forward --address=0.0.0.0 service/ 8080:8080 -n +``` diff --git a/6_micro-cluster/example-2-mono-repo/inventory/deployments/kubernetes/eshop-namespace.yml b/6_micro-cluster/example-2-mono-repo/inventory/deployments/kubernetes/eshop-namespace.yml new file mode 100644 index 0000000..eba474f --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/inventory/deployments/kubernetes/eshop-namespace.yml @@ -0,0 +1,4 @@ +apiVersion: v1 +kind: Namespace +metadata: + name: eshop diff --git a/6_micro-cluster/example-2-mono-repo/inventory/deployments/kubernetes/inventory-configmap.yml b/6_micro-cluster/example-2-mono-repo/inventory/deployments/kubernetes/inventory-configmap.yml new file mode 100644 index 0000000..9210d90 --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/inventory/deployments/kubernetes/inventory-configmap.yml @@ -0,0 +1,124 @@ +kind: ConfigMap +apiVersion: v1 +metadata: + name: inventory-config + namespace: eshop +data: + inventory.yml: |- + # Generate the go struct command: sponge config --server-dir=./serverDir + + # app settings + app: + name: "inventory" # server name + env: "dev" # runtime environment, dev: development environment, prod: production environment, test: test environment + version: "v0.0.0" + host: "127.0.0.1" # domain or ip, for service registration + enableStat: true # whether to turn on printing statistics, true:enable, false:disable + enableMetrics: true # whether to turn on indicator collection, true:enable, false:disable + enableHTTPProfile: false # whether to turn on performance analysis, true:enable, false:disable + enableLimit: false # whether to turn on rate limiting (adaptive), true:on, false:off + enableCircuitBreaker: false # whether to turn on circuit breaker(adaptive), true:on, false:off + enableTrace: false # whether to turn on trace, true:enable, false:disable, if true jaeger configuration must be set + tracingSamplingRate: 1.0 # tracing sampling rate, between 0 and 1, 0 means no sampling, 1 means sampling all links + registryDiscoveryType: "" # registry and discovery types: consul, etcd, nacos, if empty, registration and discovery are not used + cacheType: "" # cache type, if empty, the cache is not used, support for "memory" and "redis", if set to redis, must set redis configuration + + + # grpc server settings + grpc: + port: 8282 # listen port + httpPort: 8283 # profile and metrics ports + enableToken: false # whether to enable server-side token authentication, default appID=grpc, appKey=123456 + # serverSecure parameter setting + # if type="", it means no secure connection, no need to fill in any parameters + # if type="one-way", it means server-side certification, only the fields 'certFile' and 'keyFile' should be filled in + # if type="two-way", it means both client and server side certification, fill in all fields + serverSecure: + type: "" # secures type, "", "one-way", "two-way" + caFile: "" # ca certificate file, valid only in "two-way", absolute path + certFile: "" # server side cert file, absolute path + keyFile: "" # server side key file, absolute path + + + # grpc client-side settings, support for setting up multiple grpc clients. 
+ grpcClient: + - name: "your_grpc_service_name" # grpc service name, used for service discovery + host: "127.0.0.1" # grpc service address, used for direct connection + port: 8282 # grpc service port + timeout: 0 # request timeout, unit(second), if 0 means not set, if greater than 0 means set timeout, valid only for unary grpc type + registryDiscoveryType: "" # registration and discovery types: consul, etcd, nacos, if empty, connecting to server using host and port + enableLoadBalance: true # whether to turn on the load balancer + # clientSecure parameter setting + # if type="", it means no secure connection, no need to fill in any parameters + # if type="one-way", it means server-side certification, only the fields 'serverName' and 'certFile' should be filled in + # if type="two-way", it means both client and server side certification, fill in all fields + clientSecure: + type: "" # secures type, "", "one-way", "two-way" + serverName: "" # server name, e.g. *.foo.com + caFile: "" # client side ca file, valid only in "two-way", absolute path + certFile: "" # client side cert file, absolute path, if secureType="one-way", fill in server side cert file here + keyFile: "" # client side key file, valid only in "two-way", absolute path + clientToken: + enable: false # whether to enable token authentication + appID: "" # app id + appKey: "" # app key + + + + # logger settings + logger: + level: "info" # output log levels debug, info, warn, error, default is debug + format: "console" # output format, console or json, default is console + isSave: false # false:output to terminal, true:output to file, default is false + #logFileConfig: # Effective when isSave=true + #filename: "out.log" # File name (default is out.log) + #maxSize: 20 # Maximum file size (MB, default is 10MB) + #maxBackups: 50 # Maximum number of old files to retain (default is 100) + #maxAge: 15 # Maximum number of days to retain old files (default is 30 days) + #isCompression: true # Whether to compress/archive old files (default is false) + + + # set database configuration. reference-db-config-url + database: + driver: "mysql" # database driver + # mysql settings + mysql: + # dsn format, :@(:)/?[k=v& ......] + dsn: "root:123456@(192.168.3.37:3306)/account?parseTime=true&loc=Local&charset=utf8,utf8mb4" + enableLog: true # whether to turn on printing of all logs + maxIdleConns: 10 # set the maximum number of connections in the idle connection pool + maxOpenConns: 100 # set the maximum number of open database connections + connMaxLifetime: 30 # sets the maximum time for which the connection can be reused, in minutes + + + + # redis settings + redis: + # dsn format, [user]:@127.0.0.1:6379/[db], the default user is default, redis version 6.0 and above only supports user. 
+ dsn: "default:123456@192.168.3.37:6379/0" + dialTimeout: 10 # connection timeout, unit(second) + readTimeout: 2 # read timeout, unit(second) + writeTimeout: 2 # write timeout, unit(second) + + + # jaeger settings + jaeger: + agentHost: "192.168.3.37" + agentPort: 6831 + + + # consul settings + consul: + addr: "192.168.3.37:8500" + + + # etcd settings + etcd: + addrs: ["192.168.3.37:2379"] + + + # nacos settings, used in service registration discovery + nacosRd: + ipAddr: "192.168.3.37" + port: 8848 + namespaceID: "3454d2b5-2455-4d0e-bf6d-e033b086bb4c" # namespace id diff --git a/6_micro-cluster/example-2-mono-repo/inventory/deployments/kubernetes/inventory-deployment.yml b/6_micro-cluster/example-2-mono-repo/inventory/deployments/kubernetes/inventory-deployment.yml new file mode 100644 index 0000000..9f361b1 --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/inventory/deployments/kubernetes/inventory-deployment.yml @@ -0,0 +1,63 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: inventory-dm + namespace: eshop +spec: + replicas: 1 + selector: + matchLabels: + app: inventory + template: + metadata: + name: inventory-pod + labels: + app: inventory + spec: + containers: + - name: inventory + image: /eshop/inventory:latest + # If using a local image, use Never, default is Always + #imagePullPolicy: Never + command: ["./inventory", "-c", "/app/configs/inventory.yml"] + resources: + requests: + cpu: 10m + memory: 10Mi + limits: + cpu: 1000m + memory: 1000Mi + volumeMounts: + - name: inventory-vl + mountPath: /app/configs/ + readOnly: true + + ports: + - name: grpc-port + containerPort: 8282 + - name: metrics-port + containerPort: 8283 + readinessProbe: + exec: + command: ["/bin/grpc_health_probe", "-addr=:8282"] + initialDelaySeconds: 10 + timeoutSeconds: 2 + periodSeconds: 10 + successThreshold: 1 + failureThreshold: 3 + livenessProbe: + exec: + command: ["/bin/grpc_health_probe", "-addr=:8282"] + + initialDelaySeconds: 10 + timeoutSeconds: 2 + periodSeconds: 10 + successThreshold: 1 + failureThreshold: 3 + # todo for private repositories, you need to create a secret (here docker-auth-secret) to store the account and password to log into docker + imagePullSecrets: + - name: docker-auth-secret + volumes: + - name: inventory-vl + configMap: + name: inventory-config diff --git a/6_micro-cluster/example-2-mono-repo/inventory/deployments/kubernetes/inventory-svc.yml b/6_micro-cluster/example-2-mono-repo/inventory/deployments/kubernetes/inventory-svc.yml new file mode 100644 index 0000000..72f7e2f --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/inventory/deployments/kubernetes/inventory-svc.yml @@ -0,0 +1,17 @@ +apiVersion: v1 +kind: Service +metadata: + name: inventory-svc + namespace: eshop +spec: + selector: + app: inventory + type: ClusterIP + ports: + - name: inventory-svc-grpc-port + port: 8282 + targetPort: 8282 + - name: inventory-svc-grpc-metrics-port + port: 8283 + targetPort: 8283 + diff --git a/6_micro-cluster/example-2-mono-repo/inventory/docs/gen.info b/6_micro-cluster/example-2-mono-repo/inventory/docs/gen.info new file mode 100644 index 0000000..c1a77b1 --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/inventory/docs/gen.info @@ -0,0 +1 @@ +eshop,inventory,true \ No newline at end of file diff --git a/6_micro-cluster/example-2-mono-repo/inventory/internal/config/inventory.go b/6_micro-cluster/example-2-mono-repo/inventory/internal/config/inventory.go new file mode 100644 index 0000000..2991648 --- /dev/null +++ 
b/6_micro-cluster/example-2-mono-repo/inventory/internal/config/inventory.go @@ -0,0 +1,173 @@ +// code generated by https://eshop + +package config + +import ( + "github.com/zhufuyi/sponge/pkg/conf" +) + +var config *Config + +func Init(configFile string, fs ...func()) error { + config = &Config{} + return conf.Parse(configFile, config, fs...) +} + +func Show(hiddenFields ...string) string { + return conf.Show(config, hiddenFields...) +} + +func Get() *Config { + if config == nil { + panic("config is nil, please call config.Init() first") + } + return config +} + +func Set(conf *Config) { + config = conf +} + +type Config struct { + App App `yaml:"app" json:"app"` + Consul Consul `yaml:"consul" json:"consul"` + Database Database `yaml:"database" json:"database"` + Etcd Etcd `yaml:"etcd" json:"etcd"` + Grpc Grpc `yaml:"grpc" json:"grpc"` + GrpcClient []GrpcClient `yaml:"grpcClient" json:"grpcClient"` + HTTP HTTP `yaml:"http" json:"http"` + Jaeger Jaeger `yaml:"jaeger" json:"jaeger"` + Logger Logger `yaml:"logger" json:"logger"` + NacosRd NacosRd `yaml:"nacosRd" json:"nacosRd"` + Redis Redis `yaml:"redis" json:"redis"` +} + +type Consul struct { + Addr string `yaml:"addr" json:"addr"` +} + +type Etcd struct { + Addrs []string `yaml:"addrs" json:"addrs"` +} + +type Jaeger struct { + AgentHost string `yaml:"agentHost" json:"agentHost"` + AgentPort int `yaml:"agentPort" json:"agentPort"` +} + +type ClientToken struct { + AppID string `yaml:"appID" json:"appID"` + AppKey string `yaml:"appKey" json:"appKey"` + Enable bool `yaml:"enable" json:"enable"` +} + +type ClientSecure struct { + CaFile string `yaml:"caFile" json:"caFile"` + CertFile string `yaml:"certFile" json:"certFile"` + KeyFile string `yaml:"keyFile" json:"keyFile"` + ServerName string `yaml:"serverName" json:"serverName"` + Type string `yaml:"type" json:"type"` +} + +type ServerSecure struct { + CaFile string `yaml:"caFile" json:"caFile"` + CertFile string `yaml:"certFile" json:"certFile"` + KeyFile string `yaml:"keyFile" json:"keyFile"` + Type string `yaml:"type" json:"type"` +} + +type App struct { + CacheType string `yaml:"cacheType" json:"cacheType"` + EnableCircuitBreaker bool `yaml:"enableCircuitBreaker" json:"enableCircuitBreaker"` + EnableHTTPProfile bool `yaml:"enableHTTPProfile" json:"enableHTTPProfile"` + EnableLimit bool `yaml:"enableLimit" json:"enableLimit"` + EnableMetrics bool `yaml:"enableMetrics" json:"enableMetrics"` + EnableStat bool `yaml:"enableStat" json:"enableStat"` + EnableTrace bool `yaml:"enableTrace" json:"enableTrace"` + Env string `yaml:"env" json:"env"` + Host string `yaml:"host" json:"host"` + Name string `yaml:"name" json:"name"` + RegistryDiscoveryType string `yaml:"registryDiscoveryType" json:"registryDiscoveryType"` + TracingSamplingRate float64 `yaml:"tracingSamplingRate" json:"tracingSamplingRate"` + Version string `yaml:"version" json:"version"` +} + +type GrpcClient struct { + ClientSecure ClientSecure `yaml:"clientSecure" json:"clientSecure"` + ClientToken ClientToken `yaml:"clientToken" json:"clientToken"` + EnableLoadBalance bool `yaml:"enableLoadBalance" json:"enableLoadBalance"` + Host string `yaml:"host" json:"host"` + Name string `yaml:"name" json:"name"` + Port int `yaml:"port" json:"port"` + RegistryDiscoveryType string `yaml:"registryDiscoveryType" json:"registryDiscoveryType"` + Timeout int `yaml:"timeout" json:"timeout"` +} + +type Sqlite struct { + ConnMaxLifetime int `yaml:"connMaxLifetime" json:"connMaxLifetime"` + DBFile string `yaml:"dbFile" json:"dbFile"` + EnableLog bool 
`yaml:"enableLog" json:"enableLog"` + MaxIdleConns int `yaml:"maxIdleConns" json:"maxIdleConns"` + MaxOpenConns int `yaml:"maxOpenConns" json:"maxOpenConns"` +} + +type Mysql struct { + ConnMaxLifetime int `yaml:"connMaxLifetime" json:"connMaxLifetime"` + Dsn string `yaml:"dsn" json:"dsn"` + EnableLog bool `yaml:"enableLog" json:"enableLog"` + MastersDsn []string `yaml:"mastersDsn" json:"mastersDsn"` + MaxIdleConns int `yaml:"maxIdleConns" json:"maxIdleConns"` + MaxOpenConns int `yaml:"maxOpenConns" json:"maxOpenConns"` + SlavesDsn []string `yaml:"slavesDsn" json:"slavesDsn"` +} + +type Postgresql struct { + ConnMaxLifetime int `yaml:"connMaxLifetime" json:"connMaxLifetime"` + Dsn string `yaml:"dsn" json:"dsn"` + EnableLog bool `yaml:"enableLog" json:"enableLog"` + MaxIdleConns int `yaml:"maxIdleConns" json:"maxIdleConns"` + MaxOpenConns int `yaml:"maxOpenConns" json:"maxOpenConns"` +} + +type Redis struct { + DialTimeout int `yaml:"dialTimeout" json:"dialTimeout"` + Dsn string `yaml:"dsn" json:"dsn"` + ReadTimeout int `yaml:"readTimeout" json:"readTimeout"` + WriteTimeout int `yaml:"writeTimeout" json:"writeTimeout"` +} + +type Database struct { + Driver string `yaml:"driver" json:"driver"` + Mongodb Mongodb `yaml:"mongodb" json:"mongodb"` + Mysql Mysql `yaml:"mysql" json:"mysql"` + Postgresql Mysql `yaml:"postgresql" json:"postgresql"` + Sqlite Sqlite `yaml:"sqlite" json:"sqlite"` +} + +type Mongodb struct { + Dsn string `yaml:"dsn" json:"dsn"` +} + +type Grpc struct { + EnableToken bool `yaml:"enableToken" json:"enableToken"` + HTTPPort int `yaml:"httpPort" json:"httpPort"` + Port int `yaml:"port" json:"port"` + ServerSecure ServerSecure `yaml:"serverSecure" json:"serverSecure"` +} + +type Logger struct { + Format string `yaml:"format" json:"format"` + IsSave bool `yaml:"isSave" json:"isSave"` + Level string `yaml:"level" json:"level"` +} + +type NacosRd struct { + IPAddr string `yaml:"ipAddr" json:"ipAddr"` + NamespaceID string `yaml:"namespaceID" json:"namespaceID"` + Port int `yaml:"port" json:"port"` +} + +type HTTP struct { + Port int `yaml:"port" json:"port"` + Timeout int `yaml:"timeout" json:"timeout"` +} diff --git a/6_micro-cluster/example-2-mono-repo/inventory/internal/config/inventory_cc.go b/6_micro-cluster/example-2-mono-repo/inventory/internal/config/inventory_cc.go new file mode 100644 index 0000000..326e1bc --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/inventory/internal/config/inventory_cc.go @@ -0,0 +1,28 @@ +// code generated by https://eshop + +package config + +import ( + "github.com/zhufuyi/sponge/pkg/conf" +) + +func NewCenter(configFile string) (*Center, error) { + nacosConf := &Center{} + err := conf.Parse(configFile, nacosConf) + return nacosConf, err +} + +type Center struct { + Nacos Nacos `yaml:"nacos" json:"nacos"` +} + +type Nacos struct { + ContextPath string `yaml:"contextPath" json:"contextPath"` + DataID string `yaml:"dataID" json:"dataID"` + Format string `yaml:"format" json:"format"` + Group string `yaml:"group" json:"group"` + IPAddr string `yaml:"ipAddr" json:"ipAddr"` + NamespaceID string `yaml:"namespaceID" json:"namespaceID"` + Port int `yaml:"port" json:"port"` + Scheme string `yaml:"scheme" json:"scheme"` +} diff --git a/6_micro-cluster/example-2-mono-repo/inventory/internal/config/inventory_test.go b/6_micro-cluster/example-2-mono-repo/inventory/internal/config/inventory_test.go new file mode 100644 index 0000000..025e5ba --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/inventory/internal/config/inventory_test.go @@ -0,0 
+1,45 @@ +package config + +import ( + "testing" + + "github.com/stretchr/testify/assert" + + "github.com/zhufuyi/sponge/pkg/gofile" + + "eshop/inventory/configs" +) + +func TestInit(t *testing.T) { + configFile := configs.Path("inventory.yml") + err := Init(configFile) + if gofile.IsExists(configFile) { + assert.NoError(t, err) + } else { + assert.Error(t, err) + } + + c := Get() + assert.NotNil(t, c) + + str := Show() + assert.NotEmpty(t, str) + t.Log(str) + + // set nil + Set(nil) + defer func() { + recover() + }() + Get() +} + +func TestInitNacos(t *testing.T) { + configFile := configs.Path("inventory_cc.yml") + _, err := NewCenter(configFile) + if gofile.IsExists(configFile) { + assert.NoError(t, err) + } else { + assert.Error(t, err) + } +} diff --git a/6_micro-cluster/example-2-mono-repo/inventory/internal/ecode/inventory_rpc.go b/6_micro-cluster/example-2-mono-repo/inventory/internal/ecode/inventory_rpc.go new file mode 100644 index 0000000..cb3c802 --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/inventory/internal/ecode/inventory_rpc.go @@ -0,0 +1,19 @@ +// Code generated by https://github.com/zhufuyi/sponge + +package ecode + +import ( + "github.com/zhufuyi/sponge/pkg/errcode" +) + +// inventory business-level rpc error codes. +// the _inventoryNO value range is 1~100, if the same error code is used, it will cause panic. +var ( + _inventoryNO = 51 + _inventoryName = "inventory" + _inventoryBaseCode = errcode.RCode(_inventoryNO) + + StatusGetByIDInventory = errcode.NewRPCStatus(_inventoryBaseCode+1, "failed to GetByID "+_inventoryName) + + // error codes are globally unique, adding 1 to the previous error code +) diff --git a/6_micro-cluster/example-2-mono-repo/inventory/internal/ecode/systemCode_rpc.go b/6_micro-cluster/example-2-mono-repo/inventory/internal/ecode/systemCode_rpc.go new file mode 100644 index 0000000..8a88afd --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/inventory/internal/ecode/systemCode_rpc.go @@ -0,0 +1,46 @@ +package ecode + +import ( + "github.com/zhufuyi/sponge/pkg/errcode" +) + +// rpc system level error code, with status prefix, error code range 30000~40000 +var ( + StatusSuccess = errcode.StatusSuccess + + StatusCanceled = errcode.StatusCanceled + StatusUnknown = errcode.StatusUnknown + StatusInvalidParams = errcode.StatusInvalidParams + StatusDeadlineExceeded = errcode.StatusDeadlineExceeded + StatusNotFound = errcode.StatusNotFound + StatusAlreadyExists = errcode.StatusAlreadyExists + StatusPermissionDenied = errcode.StatusPermissionDenied + StatusResourceExhausted = errcode.StatusResourceExhausted + StatusFailedPrecondition = errcode.StatusFailedPrecondition + StatusAborted = errcode.StatusAborted + StatusOutOfRange = errcode.StatusOutOfRange + StatusUnimplemented = errcode.StatusUnimplemented + StatusInternalServerError = errcode.StatusInternalServerError + StatusServiceUnavailable = errcode.StatusServiceUnavailable + StatusDataLoss = errcode.StatusDataLoss + StatusUnauthorized = errcode.StatusUnauthorized + + StatusTimeout = errcode.StatusTimeout + StatusTooManyRequests = errcode.StatusTooManyRequests + StatusForbidden = errcode.StatusForbidden + StatusLimitExceed = errcode.StatusLimitExceed + StatusMethodNotAllowed = errcode.StatusMethodNotAllowed + StatusAccessDenied = errcode.StatusAccessDenied + StatusConflict = errcode.StatusConflict +) + +// Any kev-value +func Any(key string, val interface{}) errcode.Detail { + return errcode.Any(key, val) +} + +// StatusSkipResponse is only use for grpc-gateway +var StatusSkipResponse = 
errcode.SkipResponse + +// GetStatusCode get status code from error returned by RPC invoke +var GetStatusCode = errcode.GetStatusCode diff --git a/6_micro-cluster/example-2-mono-repo/inventory/internal/server/grpc.go b/6_micro-cluster/example-2-mono-repo/inventory/internal/server/grpc.go new file mode 100644 index 0000000..d4cf140 --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/inventory/internal/server/grpc.go @@ -0,0 +1,334 @@ +// Package server is a package that holds the http or grpc service. +package server + +import ( + "context" + "fmt" + "net" + "net/http" + "time" + + grpc_middleware "github.com/grpc-ecosystem/go-grpc-middleware" + "google.golang.org/grpc" + "google.golang.org/grpc/codes" + "google.golang.org/grpc/status" + + "github.com/zhufuyi/sponge/pkg/app" + "github.com/zhufuyi/sponge/pkg/errcode" + "github.com/zhufuyi/sponge/pkg/grpc/gtls" + "github.com/zhufuyi/sponge/pkg/grpc/interceptor" + "github.com/zhufuyi/sponge/pkg/grpc/metrics" + "github.com/zhufuyi/sponge/pkg/logger" + "github.com/zhufuyi/sponge/pkg/prof" + "github.com/zhufuyi/sponge/pkg/servicerd/registry" + + "eshop/inventory/internal/config" + "eshop/inventory/internal/ecode" + "eshop/inventory/internal/service" +) + +var _ app.IServer = (*grpcServer)(nil) + +var ( + defaultTokenAppID = "grpc" + defaultTokenAppKey = "mko09ijn" +) + +type grpcServer struct { + addr string + server *grpc.Server + listen net.Listener + + mux *http.ServeMux + httpServer *http.Server + registerMetricsMuxAndMethodFunc func() error + + iRegistry registry.Registry + instance *registry.ServiceInstance +} + +// Start grpc service +func (s *grpcServer) Start() error { + // registration Services + if s.iRegistry != nil { + ctx, _ := context.WithTimeout(context.Background(), 5*time.Second) //nolint + if err := s.iRegistry.Register(ctx, s.instance); err != nil { + return err + } + } + + if s.registerMetricsMuxAndMethodFunc != nil { + if err := s.registerMetricsMuxAndMethodFunc(); err != nil { + return err + } + } + + // if either pprof or metrics is enabled, the http service will be started + if s.mux != nil { + addr := fmt.Sprintf(":%d", config.Get().Grpc.HTTPPort) + s.httpServer = &http.Server{ + Addr: addr, + Handler: s.mux, + } + go func() { + fmt.Printf("http address of pprof and metrics %s\n", addr) + if err := s.httpServer.ListenAndServe(); err != nil && err != http.ErrServerClosed { + panic("listen and serve error: " + err.Error()) + } + }() + } + + if err := s.server.Serve(s.listen); err != nil { // block + return err + } + + return nil +} + +// Stop grpc service +func (s *grpcServer) Stop() error { + if s.iRegistry != nil { + ctx, cancel := context.WithTimeout(context.Background(), 2*time.Second) + go func() { + _ = s.iRegistry.Deregister(ctx, s.instance) + cancel() + }() + <-ctx.Done() + } + + s.server.GracefulStop() + + if s.httpServer != nil { + ctx, _ := context.WithTimeout(context.Background(), 3*time.Second) //nolint + if err := s.httpServer.Shutdown(ctx); err != nil { + return err + } + } + + return nil +} + +// String comment +func (s *grpcServer) String() string { + return "grpc service address " + s.addr +} + +// secure option +func (s *grpcServer) secureServerOption() grpc.ServerOption { + switch config.Get().Grpc.ServerSecure.Type { + case "one-way": // server side certification + credentials, err := gtls.GetServerTLSCredentials( + config.Get().Grpc.ServerSecure.CertFile, + config.Get().Grpc.ServerSecure.KeyFile, + ) + if err != nil { + panic(err) + } + logger.Info("grpc security type: sever-side certification") + 
return grpc.Creds(credentials) + + case "two-way": // both client and server side certification + credentials, err := gtls.GetServerTLSCredentialsByCA( + config.Get().Grpc.ServerSecure.CaFile, + config.Get().Grpc.ServerSecure.CertFile, + config.Get().Grpc.ServerSecure.KeyFile, + ) + if err != nil { + panic(err) + } + logger.Info("grpc security type: both client-side and server-side certification") + return grpc.Creds(credentials) + } + + logger.Info("grpc security type: insecure") + return nil +} + +// setting up unary server interceptors +func (s *grpcServer) unaryServerOptions() grpc.ServerOption { + unaryServerInterceptors := []grpc.UnaryServerInterceptor{ + interceptor.UnaryServerRecovery(), + interceptor.UnaryServerRequestID(), + } + + // logger interceptor, to print simple messages, replace interceptor.UnaryServerLog with interceptor.UnaryServerSimpleLog + unaryServerInterceptors = append(unaryServerInterceptors, interceptor.UnaryServerLog( + logger.Get(), + interceptor.WithReplaceGRPCLogger(), + )) + + // token interceptor + if config.Get().Grpc.EnableToken { + checkToken := func(appID string, appKey string) error { + // todo the defaultTokenAppID and defaultTokenAppKey are usually retrieved from the cache or database + if appID != defaultTokenAppID || appKey != defaultTokenAppKey { + return status.Errorf(codes.Unauthenticated, "app id or app key checksum failure") + } + return nil + } + unaryServerInterceptors = append(unaryServerInterceptors, interceptor.UnaryServerToken(checkToken)) + } + + // jwt token interceptor + //unaryServerInterceptors = append(unaryServerInterceptors, interceptor.UnaryServerJwtAuth( + // // choose a verification method as needed + //interceptor.WithStandardVerify(standardVerifyFn), // standard verify (default), you can set standardVerifyFn to nil if you don't need it + //interceptor.WithCustomVerify(customVerifyFn), // custom verify + // // specify the grpc API to ignore token verification(full path) + //interceptor.WithAuthIgnoreMethods("/api.user.v1.User/Register", "/api.user.v1.User/Login"), + //)) + + // metrics interceptor + if config.Get().App.EnableMetrics { + unaryServerInterceptors = append(unaryServerInterceptors, interceptor.UnaryServerMetrics()) + s.registerMetricsMuxAndMethodFunc = s.registerMetricsMuxAndMethod() + } + + // limit interceptor + if config.Get().App.EnableLimit { + unaryServerInterceptors = append(unaryServerInterceptors, interceptor.UnaryServerRateLimit()) + } + + // circuit breaker interceptor + if config.Get().App.EnableCircuitBreaker { + unaryServerInterceptors = append(unaryServerInterceptors, interceptor.UnaryServerCircuitBreaker( + // set rpc code for circuit breaker, default already includes codes.Internal and codes.Unavailable + interceptor.WithValidCode(ecode.StatusInternalServerError.Code()), + interceptor.WithValidCode(ecode.StatusServiceUnavailable.Code()), + )) + } + + // trace interceptor + if config.Get().App.EnableTrace { + unaryServerInterceptors = append(unaryServerInterceptors, interceptor.UnaryServerTracing()) + } + + return grpc_middleware.WithUnaryServerChain(unaryServerInterceptors...) 
+} + +// setting up stream server interceptors +func (s *grpcServer) streamServerOptions() grpc.ServerOption { + streamServerInterceptors := []grpc.StreamServerInterceptor{ + interceptor.StreamServerRecovery(), + //interceptor.StreamServerRequestID(), + } + + // logger interceptor, to print simple messages, replace interceptor.StreamServerLog with interceptor.StreamServerSimpleLog + streamServerInterceptors = append(streamServerInterceptors, interceptor.StreamServerLog( + logger.Get(), + interceptor.WithReplaceGRPCLogger(), + )) + + // token interceptor + if config.Get().Grpc.EnableToken { + checkToken := func(appID string, appKey string) error { + // todo the defaultTokenAppID and defaultTokenAppKey are usually retrieved from the cache or database + if appID != defaultTokenAppID || appKey != defaultTokenAppKey { + return status.Errorf(codes.Unauthenticated, "app id or app key checksum failure") + } + return nil + } + streamServerInterceptors = append(streamServerInterceptors, interceptor.StreamServerToken(checkToken)) + } + + // jwt token interceptor + //streamServerInterceptors = append(streamServerInterceptors, interceptor.StreamServerJwtAuth( + // // choose a verification method as needed + //interceptor.WithStandardVerify(standardVerifyFn), // standard verify (default), you can set standardVerifyFn to nil if you don't need it + //interceptor.WithCustomVerify(customVerifyFn), // custom verify + // // specify the grpc API to ignore token verification(full path) + // interceptor.WithAuthIgnoreMethods("/api.user.v1.User/Register", "/api.user.v1.User/Login"), + //)) + + // metrics interceptor + if config.Get().App.EnableMetrics { + streamServerInterceptors = append(streamServerInterceptors, interceptor.StreamServerMetrics()) + } + + // limit interceptor + if config.Get().App.EnableLimit { + streamServerInterceptors = append(streamServerInterceptors, interceptor.StreamServerRateLimit()) + } + + // circuit breaker interceptor + if config.Get().App.EnableCircuitBreaker { + streamServerInterceptors = append(streamServerInterceptors, interceptor.StreamServerCircuitBreaker( + // set rpc code for circuit breaker, default already includes codes.Internal and codes.Unavailable + interceptor.WithValidCode(ecode.StatusInternalServerError.Code()), + interceptor.WithValidCode(ecode.StatusServiceUnavailable.Code()), + )) + } + + // trace interceptor + if config.Get().App.EnableTrace { + streamServerInterceptors = append(streamServerInterceptors, interceptor.StreamServerTracing()) + } + + return grpc_middleware.WithStreamServerChain(streamServerInterceptors...) 
+} + +func (s *grpcServer) getOptions() []grpc.ServerOption { + var options []grpc.ServerOption + + secureOption := s.secureServerOption() + if secureOption != nil { + options = append(options, secureOption) + } + + options = append(options, s.unaryServerOptions()) + options = append(options, s.streamServerOptions()) + + return options +} + +func (s *grpcServer) registerMetricsMuxAndMethod() func() error { + return func() error { + if s.mux == nil { + s.mux = http.NewServeMux() + } + metrics.Register(s.mux, s.server) + return nil + } +} + +func (s *grpcServer) registerProfMux() { + if s.mux == nil { + s.mux = http.NewServeMux() + } + prof.Register(s.mux, prof.WithIOWaitTime()) +} + +func (s *grpcServer) addHTTPRouter() { + if s.mux == nil { + s.mux = http.NewServeMux() + } + s.mux.HandleFunc("/codes", errcode.ListGRPCErrCodes) // error codes router + + cfgStr := config.Show() + s.mux.HandleFunc("/config", errcode.ShowConfig([]byte(cfgStr))) // config router +} + +// NewGRPCServer creates a new grpc server +func NewGRPCServer(addr string, opts ...GrpcOption) app.IServer { + var err error + o := defaultGrpcOptions() + o.apply(opts...) + s := &grpcServer{ + addr: addr, + iRegistry: o.iRegistry, + instance: o.instance, + } + s.addHTTPRouter() + if config.Get().App.EnableHTTPProfile { + s.registerProfMux() + } + + s.listen, err = net.Listen("tcp", addr) + if err != nil { + panic(err) + } + + s.server = grpc.NewServer(s.getOptions()...) + service.RegisterAllService(s.server) // register for all services + return s +} diff --git a/6_micro-cluster/example-2-mono-repo/inventory/internal/server/grpc_option.go b/6_micro-cluster/example-2-mono-repo/inventory/internal/server/grpc_option.go new file mode 100644 index 0000000..02c37d7 --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/inventory/internal/server/grpc_option.go @@ -0,0 +1,34 @@ +package server + +import ( + "github.com/zhufuyi/sponge/pkg/servicerd/registry" +) + +// GrpcOption grpc settings +type GrpcOption func(*grpcOptions) + +type grpcOptions struct { + instance *registry.ServiceInstance + iRegistry registry.Registry +} + +func defaultGrpcOptions() *grpcOptions { + return &grpcOptions{ + instance: nil, + iRegistry: nil, + } +} + +func (o *grpcOptions) apply(opts ...GrpcOption) { + for _, opt := range opts { + opt(o) + } +} + +// WithGrpcRegistry registration services +func WithGrpcRegistry(iRegistry registry.Registry, instance *registry.ServiceInstance) GrpcOption { + return func(o *grpcOptions) { + o.iRegistry = iRegistry + o.instance = instance + } +} diff --git a/6_micro-cluster/example-2-mono-repo/inventory/internal/server/grpc_test.go b/6_micro-cluster/example-2-mono-repo/inventory/internal/server/grpc_test.go new file mode 100644 index 0000000..f09ff3c --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/inventory/internal/server/grpc_test.go @@ -0,0 +1,130 @@ +package server + +import ( + "context" + "fmt" + "net" + "testing" + "time" + + "github.com/stretchr/testify/assert" + "google.golang.org/grpc" + + "github.com/zhufuyi/sponge/pkg/grpc/gtls/certfile" + "github.com/zhufuyi/sponge/pkg/servicerd/registry" + "github.com/zhufuyi/sponge/pkg/utils" + + "eshop/inventory/configs" + "eshop/inventory/internal/config" +) + +func TestGRPCServer(t *testing.T) { + err := config.Init(configs.Path("inventory.yml")) + if err != nil { + t.Fatal(err) + } + + config.Get().App.EnableMetrics = true + config.Get().App.EnableTrace = true + config.Get().App.EnableHTTPProfile = true + config.Get().App.EnableLimit = true + 
config.Get().App.EnableCircuitBreaker = true + config.Get().Grpc.EnableToken = true + + port, _ := utils.GetAvailablePort() + addr := fmt.Sprintf(":%d", port) + instance := registry.NewServiceInstance("foo", "bar", []string{"grpc://127.0.0.1:8282"}) + + utils.SafeRunWithTimeout(time.Second*2, func(cancel context.CancelFunc) { + server := NewGRPCServer(addr, + WithGrpcRegistry(nil, instance), + ) + assert.NotNil(t, server) + cancel() + }) +} + +func TestGRPCServerMock(t *testing.T) { + err := config.Init(configs.Path("inventory.yml")) + if err != nil { + t.Fatal(err) + } + config.Get().App.EnableMetrics = true + config.Get().App.EnableTrace = true + config.Get().App.EnableHTTPProfile = true + config.Get().App.EnableLimit = true + config.Get().App.EnableCircuitBreaker = true + config.Get().Grpc.EnableToken = true + + port, _ := utils.GetAvailablePort() + addr := fmt.Sprintf(":%d", port) + instance := registry.NewServiceInstance("foo", "bar", []string{"grpc://127.0.0.1:8282"}) + + o := defaultGrpcOptions() + o.apply(WithGrpcRegistry(&gRegistry{}, instance)) + + s := &grpcServer{ + addr: addr, + iRegistry: o.iRegistry, + instance: o.instance, + } + + s.listen, err = net.Listen("tcp", addr) + if err != nil { + t.Fatal(err) + } + s.server = grpc.NewServer(s.unaryServerOptions(), s.streamServerOptions()) + + go func() { + time.Sleep(time.Second * 3) + s.server.Stop() + }() + + str := s.String() + assert.NotEmpty(t, str) + err = s.Start() + assert.NoError(t, err) + err = s.Stop() + assert.NoError(t, err) +} + +type gRegistry struct{} + +func (g gRegistry) Register(ctx context.Context, service *registry.ServiceInstance) error { + return nil +} + +func (g gRegistry) Deregister(ctx context.Context, service *registry.ServiceInstance) error { + return nil +} + +func Test_grpcServer_getOptions(t *testing.T) { + err := config.Init(configs.Path("inventory.yml")) + if err != nil { + t.Fatal(err) + } + s := &grpcServer{} + + defer func() { + recover() + }() + + config.Get().Grpc.ServerSecure.Type = "" + opt := s.secureServerOption() + assert.Equal(t, nil, opt) + + config.Get().Grpc.ServerSecure.Type = "one-way" + config.Get().Grpc.ServerSecure.CertFile = certfile.Path("one-way/server.crt") + config.Get().Grpc.ServerSecure.KeyFile = certfile.Path("one-way/server.key") + opt = s.secureServerOption() + assert.NotNil(t, opt) + + config.Get().Grpc.ServerSecure.Type = "two-way" + config.Get().Grpc.ServerSecure.CaFile = certfile.Path("two-way/ca.pem") + config.Get().Grpc.ServerSecure.CertFile = certfile.Path("two-way/server/server.pem") + config.Get().Grpc.ServerSecure.KeyFile = certfile.Path("two-way/server/server.key") + opt = s.secureServerOption() + assert.NotNil(t, opt) + + fmt.Println(certfile.Path("one-way/server.crt"), certfile.Path("one-way/server.key")) +} diff --git a/6_micro-cluster/example-2-mono-repo/inventory/internal/service/inventory.go b/6_micro-cluster/example-2-mono-repo/inventory/internal/service/inventory.go new file mode 100644 index 0000000..ad56f80 --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/inventory/internal/service/inventory.go @@ -0,0 +1,60 @@ +// Code generated by https://github.com/zhufuyi/sponge + +package service + +import ( + "context" + + "google.golang.org/grpc" + + "github.com/zhufuyi/sponge/pkg/grpc/interceptor" + "github.com/zhufuyi/sponge/pkg/logger" + + inventoryV1 "eshop/api/inventory/v1" + "eshop/inventory/internal/ecode" +) + +func init() { + registerFns = append(registerFns, func(server *grpc.Server) { + inventoryV1.RegisterInventoryServer(server, 
NewInventoryServer()) + }) +} + +var _ inventoryV1.InventoryServer = (*inventory)(nil) + +type inventory struct { + inventoryV1.UnimplementedInventoryServer + + // example: + // iDao dao.InventoryDao +} + +// NewInventoryServer create a server +func NewInventoryServer() inventoryV1.InventoryServer { + return &inventory{ + // example: + // iDao: dao.NewInventoryDao( + // model.GetDB(), + // cache.NewInventoryCache(model.GetCacheType()), + // ), + } +} + +// GetByID get inventory by id +func (s *inventory) GetByID(ctx context.Context, req *inventoryV1.GetByIDRequest) (*inventoryV1.GetByIDReply, error) { + err := req.Validate() + if err != nil { + logger.Warn("req.Validate error", logger.Err(err), logger.Any("req", req), interceptor.ServerCtxRequestIDField(ctx)) + return nil, ecode.StatusInvalidParams.Err() + } + + // fill in the business logic code here + + return &inventoryV1.GetByIDReply{ + InventoryDetail: &inventoryV1.InventoryDetail{ + Id: 1, + Num: 999, + SoldNum: 111, + }, + }, nil +} diff --git a/6_micro-cluster/example-2-mono-repo/inventory/internal/service/inventory_client_test.go b/6_micro-cluster/example-2-mono-repo/inventory/internal/service/inventory_client_test.go new file mode 100644 index 0000000..5e88a54 --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/inventory/internal/service/inventory_client_test.go @@ -0,0 +1,112 @@ +// Code generated by https://github.com/zhufuyi/sponge +// Test_service_inventory_methods is used to test the inventory api +// Test_service_inventory_benchmark is used to performance test the inventory api + +package service + +import ( + "context" + "encoding/json" + "fmt" + "testing" + "time" + + "github.com/zhufuyi/sponge/pkg/grpc/benchmark" + + inventoryV1 "eshop/api/inventory/v1" + "eshop/inventory/configs" + "eshop/inventory/internal/config" +) + +// Test service inventory api via grpc client +func Test_service_inventory_methods(t *testing.T) { + conn := getRPCClientConnForTest() + cli := inventoryV1.NewInventoryClient(conn) + ctx, _ := context.WithTimeout(context.Background(), time.Second*30) + + tests := []struct { + name string + fn func() (interface{}, error) + wantErr bool + }{ + + { + name: "GetByID", + fn: func() (interface{}, error) { + // todo type in the parameters before testing + req := &inventoryV1.GetByIDRequest{ + Id: 0, + } + + return cli.GetByID(ctx, req) + }, + wantErr: false, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got, err := tt.fn() + if (err != nil) != tt.wantErr { + t.Errorf("test '%s' error = %v, wantErr %v", tt.name, err, tt.wantErr) + return + } + data, _ := json.MarshalIndent(got, "", " ") + fmt.Println(string(data)) + }) + } +} + +// performance test service inventory api, copy the report to +// the browser to view when the pressure test is finished. +func Test_service_inventory_benchmark(t *testing.T) { + err := config.Init(configs.Path("inventory.yml")) + if err != nil { + panic(err) + } + + grpcClientCfg := getGRPCClientCfg() + host := fmt.Sprintf("%s:%d", grpcClientCfg.Host, grpcClientCfg.Port) + protoFile := configs.Path("../api/inventory/v1/inventory.proto") + // If third-party dependencies are missing during the press test, + // copy them to the project's third_party directory. 
+ dependentProtoFilePath := []string{ + configs.Path("../third_party"), // third_party directory + configs.Path(".."), // Previous level of third_party + } + + tests := []struct { + name string + fn func() error + wantErr bool + }{ + + { + name: "GetByID", + fn: func() error { + // todo type in the parameters before benchmark testing + message := &inventoryV1.GetByIDRequest{ + Id: 0, + } + total := 1000 // total number of requests + + b, err := benchmark.New(host, protoFile, "GetByID", message, dependentProtoFilePath, total) + if err != nil { + return err + } + return b.Run() + }, + wantErr: false, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + err := tt.fn() + if (err != nil) != tt.wantErr { + t.Errorf("test '%s' error = %v, wantErr %v", tt.name, err, tt.wantErr) + return + } + }) + } +} diff --git a/b_sponge-dtm-msg/internal/service/service.go b/6_micro-cluster/example-2-mono-repo/inventory/internal/service/service.go similarity index 100% rename from b_sponge-dtm-msg/internal/service/service.go rename to 6_micro-cluster/example-2-mono-repo/inventory/internal/service/service.go diff --git a/6_micro-cluster/example-2-mono-repo/inventory/internal/service/service_test.go b/6_micro-cluster/example-2-mono-repo/inventory/internal/service/service_test.go new file mode 100644 index 0000000..af1c304 --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/inventory/internal/service/service_test.go @@ -0,0 +1,173 @@ +package service + +import ( + "context" + "io" + "strconv" + "testing" + "time" + + "google.golang.org/grpc" + + "github.com/zhufuyi/sponge/pkg/consulcli" + "github.com/zhufuyi/sponge/pkg/etcdcli" + "github.com/zhufuyi/sponge/pkg/grpc/grpccli" + "github.com/zhufuyi/sponge/pkg/logger" + "github.com/zhufuyi/sponge/pkg/nacoscli" + "github.com/zhufuyi/sponge/pkg/servicerd/registry" + "github.com/zhufuyi/sponge/pkg/servicerd/registry/consul" + "github.com/zhufuyi/sponge/pkg/servicerd/registry/etcd" + "github.com/zhufuyi/sponge/pkg/servicerd/registry/nacos" + "github.com/zhufuyi/sponge/pkg/utils" + + "eshop/inventory/configs" + "eshop/inventory/internal/config" +) + +var ioEOF = io.EOF + +func TestRegisterAllService(t *testing.T) { + utils.SafeRunWithTimeout(time.Second*2, func(cancel context.CancelFunc) { + server := grpc.NewServer() + RegisterAllService(server) + cancel() + }) +} + +// The default is to connect to the local grpc server, if you want to connect to a remote grpc server, +// pass in the parameter grpcClient. +func getRPCClientConnForTest(grpcClient ...config.GrpcClient) *grpc.ClientConn { + err := config.Init(configs.Path("inventory.yml")) + if err != nil { + panic(err) + } + grpcClientCfg := getGRPCClientCfg(grpcClient...) 
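+	// the dial options assembled below come from the resolved grpcClient configuration:
+	// timeout, load balancing, TLS, token auth, request id, logging and, when
+	// registryDiscoveryType is set, service discovery via consul, etcd or nacos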
+ + var cliOptions []grpccli.Option + + if grpcClientCfg.Timeout > 0 { + cliOptions = append(cliOptions, grpccli.WithTimeout(time.Second*time.Duration(grpcClientCfg.Timeout))) + } + + // load balance + if grpcClientCfg.EnableLoadBalance { + cliOptions = append(cliOptions, grpccli.WithEnableLoadBalance()) + } + + // secure + cliOptions = append(cliOptions, grpccli.WithSecure( + grpcClientCfg.ClientSecure.Type, + grpcClientCfg.ClientSecure.ServerName, + grpcClientCfg.ClientSecure.CaFile, + grpcClientCfg.ClientSecure.CertFile, + grpcClientCfg.ClientSecure.KeyFile, + )) + + // token + cliOptions = append(cliOptions, grpccli.WithToken( + grpcClientCfg.ClientToken.Enable, + grpcClientCfg.ClientToken.AppID, + grpcClientCfg.ClientToken.AppKey, + )) + + cliOptions = append(cliOptions, + grpccli.WithEnableRequestID(), + grpccli.WithEnableLog(logger.Get()), + ) + + var ( + endpoint string + isUseDiscover bool + iDiscovery registry.Discovery + ) + + switch grpcClientCfg.RegistryDiscoveryType { + case "consul": + endpoint = "discovery:///" + grpcClientCfg.Name // Connecting to grpc services by service name + cli, err := consulcli.Init(config.Get().Consul.Addr, consulcli.WithWaitTime(time.Second*2)) + if err != nil { + panic(err) + } + iDiscovery = consul.New(cli) + isUseDiscover = true + + case "etcd": + endpoint = "discovery:///" + grpcClientCfg.Name // Connecting to grpc services by service name + cli, err := etcdcli.Init(config.Get().Etcd.Addrs, etcdcli.WithDialTimeout(time.Second*2)) + if err != nil { + panic(err) + } + iDiscovery = etcd.New(cli) + isUseDiscover = true + case "nacos": + // example: endpoint = "discovery:///serverName.scheme" + endpoint = "discovery:///" + grpcClientCfg.Name + ".grpc" + cli, err := nacoscli.NewNamingClient( + config.Get().NacosRd.IPAddr, + config.Get().NacosRd.Port, + config.Get().NacosRd.NamespaceID) + if err != nil { + panic(err) + } + iDiscovery = nacos.New(cli) + isUseDiscover = true + + default: + endpoint = grpcClientCfg.Host + ":" + strconv.Itoa(grpcClientCfg.Port) + iDiscovery = nil + isUseDiscover = false + } + + if iDiscovery != nil { + cliOptions = append(cliOptions, grpccli.WithDiscovery(iDiscovery)) + } + + msg := "dialing grpc server" + if isUseDiscover { + msg += " with discovery from " + grpcClientCfg.RegistryDiscoveryType + } + logger.Info(msg, logger.String("name", grpcClientCfg.Name), logger.String("endpoint", endpoint)) + + conn, err := grpccli.Dial(context.Background(), endpoint, cliOptions...) 
+ if err != nil { + panic(err) + } + + return conn +} + +func getGRPCClientCfg(grpcClient ...config.GrpcClient) config.GrpcClient { + var grpcClientCfg config.GrpcClient + + // custom config + if len(grpcClient) > 0 { + // parameter config, highest priority + grpcClientCfg = grpcClient[0] + } else { + // grpcClient config in the yaml file, second priority + if len(config.Get().GrpcClient) > 0 { + for _, v := range config.Get().GrpcClient { + if v.Name == config.Get().App.Name { // match the current app name + grpcClientCfg = v + break + } + } + } + } + + // if there is no custom configuration, use the default configuration + if grpcClientCfg.Name == "" { + grpcClientCfg = config.GrpcClient{ + Host: config.Get().App.Host, + Port: config.Get().Grpc.Port, + // If RegistryDiscoveryType is not empty, service discovery is used, and Host and Port values are invalid + RegistryDiscoveryType: config.Get().App.RegistryDiscoveryType, // supports consul, etcd and nacos + Name: config.Get().App.Name, + } + if grpcClientCfg.RegistryDiscoveryType != "" { + grpcClientCfg.EnableLoadBalance = true + } + } + + return grpcClientCfg +} diff --git a/6_micro-cluster/example-2-mono-repo/inventory/scripts/binary-package.sh b/6_micro-cluster/example-2-mono-repo/inventory/scripts/binary-package.sh new file mode 100644 index 0000000..0e81397 --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/inventory/scripts/binary-package.sh @@ -0,0 +1,24 @@ +#!/bin/bash + +serviceName="inventory" + +mkdir -p ${serviceName}-binary/configs + +cp -f deployments/binary/run.sh ${serviceName}-binary +chmod +x ${serviceName}-binary/run.sh + +cp -f deployments/binary/deploy.sh ${serviceName}-binary +chmod +x ${serviceName}-binary/deploy.sh + +cp -f cmd/${serviceName}/${serviceName} ${serviceName}-binary +cp -f configs/${serviceName}.yml ${serviceName}-binary/configs +cp -f configs/${serviceName}_cc.yml ${serviceName}-binary/configs + +# compressing binary file +#upx -9 ${serviceName} + +tar zcvf ${serviceName}-binary.tar.gz ${serviceName}-binary +rm -rf ${serviceName}-binary + +echo "" +echo "package binary successfully, output file = ${serviceName}-binary.tar.gz" diff --git a/6_micro-cluster/example-2-mono-repo/inventory/scripts/build/Dockerfile b/6_micro-cluster/example-2-mono-repo/inventory/scripts/build/Dockerfile new file mode 100644 index 0000000..17637f5 --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/inventory/scripts/build/Dockerfile @@ -0,0 +1,26 @@ +FROM alpine:latest +MAINTAINER zhufuyi "g.zhufuyi@gmail.com" + +# set the time zone to Shanghai +RUN apk add tzdata \ + && cp /usr/share/zoneinfo/Asia/Shanghai /etc/localtime \ + && echo "Asia/Shanghai" > /etc/timezone \ + && apk del tzdata + +# add grpc_health_probe for health check of grpc services +COPY grpc_health_probe /bin/grpc_health_probe +RUN chmod +x /bin/grpc_health_probe + +COPY configs/ /app/configs/ +COPY inventory /app/inventory +RUN chmod +x /app/inventory + +# grpc and http port +EXPOSE 8282 8283 + + +WORKDIR /app + +CMD ["./inventory", "-c", "configs/inventory.yml"] +# if you use the Configuration Center, inventory.yml is changed to the Configuration Center configuration. 
+#CMD ["./inventory", "-c", "configs/inventory.yml", "-enable-cc"] diff --git a/6_micro-cluster/example-2-mono-repo/inventory/scripts/build/Dockerfile_build b/6_micro-cluster/example-2-mono-repo/inventory/scripts/build/Dockerfile_build new file mode 100644 index 0000000..b00fa66 --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/inventory/scripts/build/Dockerfile_build @@ -0,0 +1,47 @@ +# Need to package the code first `tar zcf inventory.tar.gz $(ls)` and move it to the same directory as Dokerfile + +# Compile the go code, you can specify the golang version +FROM golang:1.21-alpine as build +COPY . /go/src/inventory +WORKDIR /go/src/inventory +RUN tar zxf inventory.tar.gz +RUN go env -w GOPROXY=https://goproxy.cn,direct +RUN go mod download +RUN CGO_ENABLED=0 GOOS=linux GOARCH=amd64 go build -o /inventory cmd/inventory/main.go + +# install grpc-health-probe, for health check of grpc service +RUN go install github.com/grpc-ecosystem/grpc-health-probe@v0.4.12 +RUN cd $GOPATH/pkg/mod/github.com/grpc-ecosystem/grpc-health-probe@v0.4.12 \ + && go mod download \ + && CGO_ENABLED=0 GOOS=linux GOARCH=amd64 go build -ldflags "all=-s -w" -o /grpc_health_probe + +# compressing binary files +#cd / +#upx -9 inventory +#upx -9 grpc_health_probe + + +# building images with binary +FROM alpine:latest +MAINTAINER zhufuyi "g.zhufuyi@gmail.com" + +# set the time zone to Shanghai +RUN apk add tzdata \ + && cp /usr/share/zoneinfo/Asia/Shanghai /etc/localtime \ + && echo "Asia/Shanghai" > /etc/timezone \ + && apk del tzdata + +# add grpc_health_probe for health check of grpc services +COPY --from=build /grpc_health_probe /bin/grpc_health_probe +COPY --from=build /inventory /app/inventory +COPY --from=build /go/src/inventory/configs/inventory.yml /app/configs/inventory.yml + +# grpc and http port +EXPOSE 8282 8283 + + +WORKDIR /app + +CMD ["./inventory", "-c", "configs/inventory.yml"] +# if you use the Configuration Center, inventory.yml is changed to the Configuration Center configuration. +#CMD ["./inventory", "-c", "configs/inventory.yml", "-enable-cc"] diff --git a/6_micro-cluster/example-2-mono-repo/inventory/scripts/build/Dockerfile_test b/6_micro-cluster/example-2-mono-repo/inventory/scripts/build/Dockerfile_test new file mode 100644 index 0000000..d5b71f8 --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/inventory/scripts/build/Dockerfile_test @@ -0,0 +1,16 @@ +# Need to package the code first `tar zcf inventory.tar.gz $(ls)` and move it to the same directory as Dokerfile +# rpc server source code, used to test rpc methods +FROM golang:1.21-alpine +MAINTAINER zhufuyi "g.zhufuyi@gmail.com" + +# go test dependency packages +RUN apk add bash alpine-sdk build-base gcc + +COPY . /go/src/inventory +WORKDIR /go/src/inventory +RUN tar zxf inventory.tar.gz +RUN go env -w GOPROXY=https://goproxy.cn,direct +RUN go mod download +RUN rm -f inventory.tar.gz + +CMD ["sleep","86400"] diff --git a/6_micro-cluster/example-2-mono-repo/inventory/scripts/build/README.md b/6_micro-cluster/example-2-mono-repo/inventory/scripts/build/README.md new file mode 100644 index 0000000..ba0f3e8 --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/inventory/scripts/build/README.md @@ -0,0 +1,4 @@ + +- `Dockerfile`: build the image by directly copying the compiled binaries, fast build speed. +- `Dockerfile_build`: two-stage build of the image, slower build speed, you can specify the golang version. +- `Dockerfile_test`: container for testing rpc services. 
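+
+A minimal local usage sketch, assuming the service binary has already been compiled to `cmd/inventory/inventory` and the commands are run from the `inventory` directory: build the image from `Dockerfile` via the helper script, then run it with the grpc and metrics ports exposed.
+
+```bash
+# builds eshop/inventory:latest from the pre-compiled binary using Dockerfile
+bash scripts/image-build-local.sh
+
+# 8282 is the grpc port, 8283 serves the metrics/pprof http endpoints
+docker run -d --name inventory -p 8282:8282 -p 8283:8283 eshop/inventory:latest
+```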
diff --git a/6_micro-cluster/example-2-mono-repo/inventory/scripts/deploy-binary.sh b/6_micro-cluster/example-2-mono-repo/inventory/scripts/deploy-binary.sh new file mode 100644 index 0000000..c87a2ac --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/inventory/scripts/deploy-binary.sh @@ -0,0 +1,35 @@ +#!/usr/bin/expect + +set serviceName "inventory" + +# parameters +set username [lindex $argv 0] +set password [lindex $argv 1] +set hostname [lindex $argv 2] + +set timeout 30 + +spawn scp -r ./${serviceName}-binary.tar.gz ${username}@${hostname}:/tmp/ +#expect "*yes/no*" +#send "yes\r" +expect "*password:*" +send "${password}\r" +expect eof + +spawn ssh ${username}@${hostname} +#expect "*yes/no*" +#send "yes\r" +expect "*password:*" +send "${password}\r" + +# execute a command or script +expect "*${username}@*" +send "cd /tmp && tar zxvf ${serviceName}-binary.tar.gz\r" +expect "*${username}@*" +send "bash /tmp/${serviceName}-binary/deploy.sh\r" + +# logging out of a session +expect "*${username}@*" +send "exit\r" + +expect eof diff --git a/6_micro-cluster/example-2-mono-repo/inventory/scripts/deploy-docker.sh b/6_micro-cluster/example-2-mono-repo/inventory/scripts/deploy-docker.sh new file mode 100644 index 0000000..2b809ab --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/inventory/scripts/deploy-docker.sh @@ -0,0 +1,32 @@ +#!/bin/bash + +dockerComposeFilePath="deployments/docker-compose" + +function checkResult() { + result=$1 + if [ ${result} -ne 0 ]; then + exit ${result} + fi +} + +mkdir -p ${dockerComposeFilePath}/configs +if [ ! -f "${dockerComposeFilePath}/configs/inventory.yml" ];then + cp configs/inventory.yml ${dockerComposeFilePath}/configs +fi + +# shellcheck disable=SC2164 +cd ${dockerComposeFilePath} + +docker-compose down +checkResult $? + +docker-compose up -d +checkResult $? + +colorCyan='\033[1;36m' +highBright='\033[1m' +markEnd='\033[0m' + +echo "" +echo -e "run service successfully, if you want to stop the service, go into the ${highBright}${dockerComposeFilePath}${markEnd} directory and execute the command ${colorCyan}docker-compose down${markEnd}." +echo "" diff --git a/6_micro-cluster/example-2-mono-repo/inventory/scripts/deploy-k8s.sh b/6_micro-cluster/example-2-mono-repo/inventory/scripts/deploy-k8s.sh new file mode 100644 index 0000000..c80463a --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/inventory/scripts/deploy-k8s.sh @@ -0,0 +1,31 @@ +#!/bin/bash + +SERVER_NAME="inventory" +DEPLOY_FILE="deployments/kubernetes/${SERVER_NAME}-deployment.yml" + +function checkResult() { + result=$1 + if [ ${result} -ne 0 ]; then + exit ${result} + fi +} + +# Determining whether a file exists +if [ ! -f "${DEPLOY_FILE}" ];then + echo "Deployment file file ${DEPLOY_FILE} does not exist" + checkResult 1 +fi + +# Check if you are authorised to operate k8s +echo "kubectl version" +kubectl version +checkResult $? + +echo "kubectl delete -f ${DEPLOY_FILE} --ignore-not-found" +kubectl delete -f ${DEPLOY_FILE} --ignore-not-found +checkResult $? 
+ +sleep 1 + +echo "kubectl apply -f ${DEPLOY_FILE}" +kubectl apply -f ${DEPLOY_FILE} diff --git a/6_micro-cluster/example-2-mono-repo/inventory/scripts/image-build-local.sh b/6_micro-cluster/example-2-mono-repo/inventory/scripts/image-build-local.sh new file mode 100644 index 0000000..2ba405b --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/inventory/scripts/image-build-local.sh @@ -0,0 +1,51 @@ +#!/bin/bash + +# build the image for local docker, using the binaries, if you want to reduce the size of the image, +# use upx to compress the binaries before building the image. + +serverName="inventory" +# image name of the service, prohibit uppercase letters in names. +IMAGE_NAME="eshop/inventory" +# Dockerfile file directory +DOCKERFILE_PATH="scripts/build" +DOCKERFILE="${DOCKERFILE_PATH}/Dockerfile" + +mv -f cmd/${serverName}/${serverName} ${DOCKERFILE_PATH}/${serverName} + +# install grpc-health-probe, for health check of grpc service +rootDockerFilePath=$(pwd)/${DOCKERFILE_PATH} +go install github.com/grpc-ecosystem/grpc-health-probe@v0.4.12 +cd $GOPATH/pkg/mod/github.com/grpc-ecosystem/grpc-health-probe@v0.4.12 \ + && go mod download \ + && CGO_ENABLED=0 GOOS=linux GOARCH=amd64 go build -ldflags "all=-s -w" -o "${rootDockerFilePath}/grpc_health_probe" +cd - + +# compressing binary file +#cd ${DOCKERFILE_PATH} +#upx -9 ${serverName} +#upx -9 grpc_health_probe +#cd - + +mkdir -p ${DOCKERFILE_PATH}/configs && cp -f configs/${serverName}.yml ${DOCKERFILE_PATH}/configs/ +echo "docker build -f ${DOCKERFILE} -t ${IMAGE_NAME}:latest ${DOCKERFILE_PATH}" +docker build -f ${DOCKERFILE} -t ${IMAGE_NAME}:latest ${DOCKERFILE_PATH} + +if [ -f "${DOCKERFILE_PATH}/grpc_health_probe" ]; then + rm -f ${DOCKERFILE_PATH}/grpc_health_probe +fi + + +if [ -f "${DOCKERFILE_PATH}/${serverName}" ]; then + rm -f ${DOCKERFILE_PATH}/${serverName} +fi + +if [ -d "${DOCKERFILE_PATH}/configs" ]; then + rm -rf ${DOCKERFILE_PATH}/configs +fi + +# delete none image +noneImages=$(docker images | grep "" | awk '{print $3}') +if [ "X${noneImages}" != "X" ]; then + docker rmi ${noneImages} > /dev/null +fi +exit 0 diff --git a/6_micro-cluster/example-2-mono-repo/inventory/scripts/image-build.sh b/6_micro-cluster/example-2-mono-repo/inventory/scripts/image-build.sh new file mode 100644 index 0000000..b3c40ca --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/inventory/scripts/image-build.sh @@ -0,0 +1,71 @@ +#!/bin/bash + +# build the docker image using the binaries, if you want to reduce the size of the image, +# use upx to compress the binaries before building the image. + +serverName="inventory" +# image name of the service, prohibit uppercase letters in names. 
+IMAGE_NAME="eshop/inventory" +# Dockerfile file directory +DOCKERFILE_PATH="scripts/build" +DOCKERFILE="${DOCKERFILE_PATH}/Dockerfile" + +# image repo address, REPO_HOST="ip or domain", passed in via the first parameter +REPO_HOST=$1 +if [ "X${REPO_HOST}" = "X" ];then + echo "param 'repo host' cannot be empty, example: ./image-build.sh hub.docker.com v1.0.0" + exit 1 +fi +# the version tag, which defaults to latest if empty, is passed in via the second parameter +TAG=$2 +if [ "X${TAG}" = "X" ];then + TAG="latest" +fi +# image name and tag +IMAGE_NAME_TAG="${REPO_HOST}/${IMAGE_NAME}:${TAG}" + +# binary executable files +BIN_FILE="cmd/${serverName}/${serverName}" +# configuration file directory +CONFIG_PATH="configs" + +CGO_ENABLED=0 GOOS=linux GOARCH=amd64 go build -o ${BIN_FILE} cmd/${serverName}/*.go +mv -f ${BIN_FILE} ${DOCKERFILE_PATH} +mkdir -p ${DOCKERFILE_PATH}/${CONFIG_PATH} && cp -f ${CONFIG_PATH}/${serverName}.yml ${DOCKERFILE_PATH}/${CONFIG_PATH} + +# install grpc-health-probe, for health check of grpc service +rootDockerFilePath=$(pwd)/${DOCKERFILE_PATH} +go install github.com/grpc-ecosystem/grpc-health-probe@v0.4.12 +cd $GOPATH/pkg/mod/github.com/grpc-ecosystem/grpc-health-probe@v0.4.12 \ + && go mod download \ + && CGO_ENABLED=0 GOOS=linux GOARCH=amd64 go build -ldflags "all=-s -w" -o "${rootDockerFilePath}/grpc_health_probe" +cd - + +# compressing binary file +#cd ${DOCKERFILE_PATH} +#upx -9 ${serverName} +#upx -9 grpc_health_probe +#cd - + +echo "docker build -f ${DOCKERFILE} -t ${IMAGE_NAME_TAG} ${DOCKERFILE_PATH}" +docker build -f ${DOCKERFILE} -t ${IMAGE_NAME_TAG} ${DOCKERFILE_PATH} + +if [ -f "${DOCKERFILE_PATH}/grpc_health_probe" ]; then + rm -f ${DOCKERFILE_PATH}/grpc_health_probe +fi + + +if [ -f "${DOCKERFILE_PATH}/${serverName}" ]; then + rm -f ${DOCKERFILE_PATH}/${serverName} +fi + +if [ -d "${DOCKERFILE_PATH}/configs" ]; then + rm -rf ${DOCKERFILE_PATH}/configs +fi + +# delete none image +noneImages=$(docker images | grep "" | awk '{print $3}') +if [ "X${noneImages}" != "X" ]; then + docker rmi ${noneImages} > /dev/null +fi +exit 0 diff --git a/6_micro-cluster/example-2-mono-repo/inventory/scripts/image-build2.sh b/6_micro-cluster/example-2-mono-repo/inventory/scripts/image-build2.sh new file mode 100644 index 0000000..d225f08 --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/inventory/scripts/image-build2.sh @@ -0,0 +1,38 @@ +#!/bin/bash + +# two-stage build docker image + +serverName="inventory" +# image name of the service, prohibit uppercase letters in names. 
+IMAGE_NAME="eshop/inventory" +# Dockerfile file directory +DOCKERFILE_PATH="scripts/build" +DOCKERFILE="${DOCKERFILE_PATH}/Dockerfile_build" + +# image repo address, REPO_HOST="ip or domain", passed in via the first parameter +REPO_HOST=$1 +if [ "X${REPO_HOST}" = "X" ];then + echo "param 'repo host' cannot be empty, example: ./image-build.sh hub.docker.com v1.0.0" + exit 1 +fi +# the version tag, which defaults to latest if empty, is passed in via the second parameter +TAG=$2 +if [ "X${TAG}" = "X" ];then + TAG="latest" +fi +# image name and tag +IMAGE_NAME_TAG="${REPO_HOST}/${IMAGE_NAME}:${TAG}" + +PROJECT_FILES=$(ls) +tar zcf ${serverName}.tar.gz ${PROJECT_FILES} +mv -f ${serverName}.tar.gz ${DOCKERFILE_PATH} +echo "docker build --force-rm -f ${DOCKERFILE} -t ${IMAGE_NAME_TAG} ${DOCKERFILE_PATH}" +docker build --force-rm -f ${DOCKERFILE} -t ${IMAGE_NAME_TAG} ${DOCKERFILE_PATH} +rm -rf ${DOCKERFILE_PATH}/${serverName}.tar.gz +# delete none image +noneImages=$(docker images | grep "" | awk '{print $3}') +if [ "X${noneImages}" != "X" ]; then + docker rmi ${noneImages} > /dev/null +fi +exit 0 + diff --git a/6_micro-cluster/example-2-mono-repo/inventory/scripts/image-push.sh b/6_micro-cluster/example-2-mono-repo/inventory/scripts/image-push.sh new file mode 100644 index 0000000..c1a3364 --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/inventory/scripts/image-push.sh @@ -0,0 +1,53 @@ +#!/bin/bash + +# image name, prohibit uppercase letters in names. +IMAGE_NAME="eshop/inventory" + +# image repo address, passed in via the first parameter +REPO_HOST=$1 +if [ "X${REPO_HOST}" = "X" ];then + echo "param 'repo host' cannot be empty, example: ./image-push.sh hub.docker.com v1.0.0" + exit 1 +fi + +# version tag, passed in via the second parameter, if empty, defaults to latest +TAG=$2 +if [ "X${TAG}" = "X" ];then + TAG="latest" +fi +# image name and tag +IMAGE_NAME_TAG="${REPO_HOST}/${IMAGE_NAME}:${TAG}" + +function checkResult() { + result=$1 + if [ ${result} -ne 0 ]; then + exit ${result} + fi +} + +# image repository host, https://index.docker.io/v1 is the official docker image repository +IMAGE_REPO_HOST="https://index.docker.io/v1" +# check if you are authorized to log into docker +function checkLogin() { + loginStatus=$(cat /root/.docker/config.json | grep "${IMAGE_REPO_HOST}") + if [ "X${loginStatus}" = "X" ];then + echo "docker is not logged into the image repository" + checkResult 1 + fi +} + +checkLogin + +# push image to image repository +echo "docker push ${IMAGE_NAME_TAG}" +docker push ${IMAGE_NAME_TAG} +checkResult $? +echo "docker push image success." + +sleep 1 + +# delete image +echo "docker rmi -f ${IMAGE_NAME_TAG}" +docker rmi -f ${IMAGE_NAME_TAG} +checkResult $? +echo "docker remove image success." diff --git a/6_micro-cluster/example-2-mono-repo/inventory/scripts/image-rpc-test.sh b/6_micro-cluster/example-2-mono-repo/inventory/scripts/image-rpc-test.sh new file mode 100644 index 0000000..66a7655 --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/inventory/scripts/image-rpc-test.sh @@ -0,0 +1,33 @@ +#!/bin/bash + +# build rpc service test image + +serverName="inventory" +# image name of the service, prohibit uppercase letters in names. 
+IMAGE_NAME="eshop/inventory.rpc-test" +# Dockerfile file directory +DOCKERFILE_PATH="scripts/build" +DOCKERFILE="${DOCKERFILE_PATH}/Dockerfile_test" + +# image repo address, REPO_HOST="ip or domain", passed in via the first parameter +REPO_HOST=$1 +if [ "X${REPO_HOST}" = "X" ];then + echo "param 'repo host' cannot be empty, example: ./image-rpc-test.sh hub.docker.com v1.0.0" + exit 1 +fi +# the version tag, which defaults to latest if empty, is passed in via the second parameter +TAG=$2 +if [ "X${TAG}" = "X" ];then + TAG="latest" +fi +# image name and tag +IMAGE_NAME_TAG="${REPO_HOST}/${IMAGE_NAME}:${TAG}" + +PROJECT_FILES=$(ls) +tar zcf ${serverName}.tar.gz ${PROJECT_FILES} +mv -f ${serverName}.tar.gz ${DOCKERFILE_PATH} + +echo "docker build -f ${DOCKERFILE} -t ${IMAGE_NAME_TAG} ${DOCKERFILE_PATH}" +docker build --force-rm -f ${DOCKERFILE} -t ${IMAGE_NAME_TAG} ${DOCKERFILE_PATH} + +rm -rf ${DOCKERFILE_PATH}/${serverName}.tar.gz diff --git a/6_micro-cluster/example-2-mono-repo/inventory/scripts/patch-mono.sh b/6_micro-cluster/example-2-mono-repo/inventory/scripts/patch-mono.sh new file mode 100644 index 0000000..7d00974 --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/inventory/scripts/patch-mono.sh @@ -0,0 +1,36 @@ +#!/bin/bash + +goModFile="go.mod" +thirdPartyProtoDir="third_party" +genServerType="grpc-pb" + +function checkResult() { + result=$1 + if [ ${result} -ne 0 ]; then + exit ${result} + fi +} + +if [ ! -f "../$goModFile" ]; then + sponge patch copy-go-mod -f + checkResult $? + mv -f go.mod .. + mv -f go.sum .. +fi + +if [ "$genServerType"x != "http"x ]; then + if [ ! -d "../$thirdPartyProtoDir" ]; then + sponge patch copy-third-party-proto + checkResult $? + mv -f $thirdPartyProtoDir .. + fi +fi + +if [ "$genServerType"x = "grpc"x ]; then + if [ ! -d "../api/types" ]; then + sponge patch gen-types-pb --out=. + checkResult $? + mv -f api/types ../api + rmdir api + fi +fi diff --git a/6_micro-cluster/example-2-mono-repo/inventory/scripts/patch.sh b/6_micro-cluster/example-2-mono-repo/inventory/scripts/patch.sh new file mode 100644 index 0000000..f06f10a --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/inventory/scripts/patch.sh @@ -0,0 +1,81 @@ +#!/bin/bash + +patchType=$1 +typesPb="types-pb" +initMysql="init-mysql" +initMongodb="init-mongodb" +initTidb="init-tidb" +initPostgresql="init-postgresql" +initSqlite="init-sqlite" + +function checkResult() { + result=$1 + if [ ${result} -ne 0 ]; then + exit ${result} + fi +} + +function importPkg() { + go mod tidy +} + +function generateTypesPbCode() { + + if [ ! -d "../api/types" ]; then + sponge patch gen-types-pb --out=./ + checkResult $? + mv -f api/types ../api + rmdir api + fi + checkResult $? +} + +function generateInitMysqlCode() { + sponge patch gen-db-init --db-driver=mysql --out=./ + checkResult $? + importPkg +} + +function generateInitMongodbCode() { + sponge patch gen-db-init --db-driver=mongodb --out=./ + checkResult $? + importPkg +} + +function generateInitTidbCode() { + sponge patch gen-db-init --db-driver=tidb --out=./ + checkResult $? + importPkg +} + +function generateInitPostgresqlCode() { + sponge patch gen-db-init --db-driver=postgresql --out=./ + checkResult $? + importPkg +} + +function generateInitSqliteCode() { + sponge patch gen-db-init --db-driver=sqlite --out=./ + checkResult $? 
+ importPkg +} + +if [ "$patchType" = "$typesPb" ]; then + generateTypesPbCode +elif [ "$patchType" = "$initMysql" ]; then + generateInitMysqlCode +elif [ "$patchType" = "$initMongodb" ]; then + generateInitMongodbCode +elif [ "$patchType" = "$initTidb" ]; then + generateInitTidbCode +elif [ "$patchType" = "$initPostgresql" ]; then + generateInitPostgresqlCode +elif [ "$patchType" = "$initSqlite" ]; then + generateInitSqliteCode +else + echo "invalid patch type: '$patchType'" + echo "supported types: $initMysql, $initMongodb, $initTidb, $initPostgresql, $initSqlite, $typesPb" + echo "e.g. make patch TYPE=init-mysql" + echo "" + exit 1 +fi diff --git a/6_micro-cluster/example-2-mono-repo/inventory/scripts/proto-doc.sh b/6_micro-cluster/example-2-mono-repo/inventory/scripts/proto-doc.sh new file mode 100644 index 0000000..b2f094e --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/inventory/scripts/proto-doc.sh @@ -0,0 +1,46 @@ +#!/bin/bash + +# the directory where the proto files are located +bash scripts/patch-mono.sh +cd .. + +protoBasePath="api" +allProtoFiles="" + +function checkResult() { + result=$1 + if [ ${result} -ne 0 ]; then + exit ${result} + fi +} + +function listFiles(){ + cd $1 + items=$(ls) + + for item in $items; do + if [ -d "$item" ]; then + listFiles $item + else + if [ "${item#*.}"x = "proto"x ];then + file=$(pwd)/${item} + protoFile="${protoBasePath}${file#*${protoBasePath}}" + allProtoFiles="${allProtoFiles} ${protoFile}" + fi + fi + done + cd .. +} + +# get all proto file paths +listFiles $protoBasePath + +protoc --proto_path=. --proto_path=./third_party \ + --doc_out=. --doc_opt=html,apis.html \ + $allProtoFiles + +checkResult $? + +mv -f apis.html inventory/docs/apis.html + +echo "generate proto doc file successfully, view in inventory/docs/apis.html" diff --git a/6_micro-cluster/example-2-mono-repo/inventory/scripts/protoc.sh b/6_micro-cluster/example-2-mono-repo/inventory/scripts/protoc.sh new file mode 100644 index 0000000..1a0ad03 --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/inventory/scripts/protoc.sh @@ -0,0 +1,211 @@ +#!/bin/bash + +bash scripts/patch-mono.sh +cd .. + +protoBasePath="api" +allProtoFiles="" + +specifiedProtoFilePath=$1 +specifiedProtoFilePaths="" + +colorGray='\033[1;30m' +colorGreen='\033[1;32m' +colorMagenta='\033[1;35m' +colorCyan='\033[1;36m' +highBright='\033[1m' +markEnd='\033[0m' + +tipMsg="" + +function checkResult() { + result=$1 + if [ ${result} -ne 0 ]; then + exit ${result} + fi +} + +# get specified proto files, if empty, return 0 else return 1 +function getSpecifiedProtoFiles() { + if [ "$specifiedProtoFilePath"x = x ];then + return 0 + fi + + specifiedProtoFilePaths=${specifiedProtoFilePath//,/ } + + for v in $specifiedProtoFilePaths; do + if [ ! 
-f "$v" ];then + echo "Error: not found specified proto file $v" + echo "example: make proto FILES=api/user/v1/user.proto,api/types/types.proto" + checkResult 1 + fi + done + + return 1 +} + +# add the import of useless packages from the generated *.pb.go code here +function deleteUnusedPkg() { + file=$1 + osType=$(uname -s) + if [ "${osType}"x = "Darwin"x ];then + sed -i '' 's#_ \"github.com/envoyproxy/protoc-gen-validate/validate\"##g' ${file} + sed -i '' 's#_ \"github.com/grpc-ecosystem/grpc-gateway/v2/protoc-gen-openapiv2/options\"##g' ${file} + sed -i '' 's#_ \"github.com/srikrsna/protoc-gen-gotag/tagger\"##g' ${file} + sed -i '' 's#_ \"google.golang.org/genproto/googleapis/api/annotations\"##g' ${file} + else + sed -i "s#_ \"github.com/envoyproxy/protoc-gen-validate/validate\"##g" ${file} + sed -i "s#_ \"github.com/grpc-ecosystem/grpc-gateway/v2/protoc-gen-openapiv2/options\"##g" ${file} + sed -i "s#_ \"github.com/srikrsna/protoc-gen-gotag/tagger\"##g" ${file} + sed -i "s#_ \"google.golang.org/genproto/googleapis/api/annotations\"##g" ${file} + fi + checkResult $? +} + +function listProtoFiles(){ + cd $1 + items=$(ls) + + for item in $items; do + if [ -d "$item" ]; then + listProtoFiles $item + else + if [ "${item#*.}"x = "proto"x ];then + file=$(pwd)/${item} + protoFile="${protoBasePath}${file#*${protoBasePath}}" + allProtoFiles="${allProtoFiles} ${protoFile}" + fi + fi + done + cd .. +} + +function handlePbGoFiles(){ + cd $1 + items=$(ls) + + for item in $items; do + if [ -d "$item" ]; then + handlePbGoFiles $item + else + if [ "${item#*.}"x = "pb.go"x ];then + deleteUnusedPkg $item + fi + fi + done + cd .. +} + +function generateByAllProto(){ + getSpecifiedProtoFiles + if [ $? -eq 0 ]; then + listProtoFiles $protoBasePath + else + allProtoFiles=$specifiedProtoFilePaths + fi + + if [ "$allProtoFiles"x = x ];then + echo "Error: not found proto file in path $protoBasePath" + exit 1 + fi + echo -e "generate *pb.go by proto files: ${colorGray}$allProtoFiles${markEnd}" + echo "" + + # generate files *_pb.go + protoc --proto_path=. --proto_path=./third_party \ + --go_out=. --go_opt=paths=source_relative \ + $allProtoFiles + + checkResult $? + + # generate files *_grpc_pb.go + protoc --proto_path=. --proto_path=./third_party \ + --go-grpc_out=. --go-grpc_opt=paths=source_relative \ + $allProtoFiles + + checkResult $? + + + # generate the file *_pb.validate.go + protoc --proto_path=. --proto_path=./third_party \ + --validate_out=lang=go:. --validate_opt=paths=source_relative \ + $allProtoFiles + + checkResult $? + + # embed the tag field into *_pb.go + protoc --proto_path=. --proto_path=./third_party \ + --gotag_out=:. --gotag_opt=paths=source_relative \ + $allProtoFiles + + checkResult $? +} + +function generateBySpecifiedProto(){ + # get the proto file of the inventory server + allProtoFiles="" + listProtoFiles ${protoBasePath}/inventory + cd .. + specifiedProtoFiles="" + getSpecifiedProtoFiles + if [ $? 
-eq 0 ]; then + specifiedProtoFiles=$allProtoFiles + else + for v1 in $specifiedProtoFilePaths; do + for v2 in $allProtoFiles; do + if [ "$v1"x = "$v2"x ];then + specifiedProtoFiles="$specifiedProtoFiles $v1" + fi + done + done + fi + + if [ "$specifiedProtoFiles"x = x ];then + return + fi + echo -e "generate template code by proto files: ${colorMagenta}$specifiedProtoFiles${markEnd}" + echo "" + + moduleName=$(cat inventory/docs/gen.info | head -1 | cut -d , -f 1) + serverName=$(cat inventory/docs/gen.info | head -1 | cut -d , -f 2) + suitedMonoRepo=$(cat inventory/docs/gen.info | head -1 | cut -d , -f 3) + + protoc --proto_path=. --proto_path=./third_party \ + --go-rpc-tmpl_out=. --go-rpc-tmpl_opt=paths=source_relative \ + --go-rpc-tmpl_opt=moduleName=${moduleName} --go-rpc-tmpl_opt=serverName=${serverName} --go-rpc-tmpl_opt=suitedMonoRepo=${suitedMonoRepo} \ + $specifiedProtoFiles + + checkResult $? + + sponge merge rpc-pb --dir=inventory + checkResult $? + + tipMsg="${highBright}Tip:${markEnd} execute the command ${colorCyan}make run${markEnd} and then test grpc api in the file ${colorCyan}internal/service/xxx_client_test.go${markEnd}." + + + + if [ "$suitedMonoRepo" == "true" ]; then + sponge patch adapt-mono-repo --dir=inventory + fi +} + +# generate pb.go by all proto files +generateByAllProto + +# generate pb.go by specified proto files +generateBySpecifiedProto + +# delete unused packages in pb.go +handlePbGoFiles $protoBasePath + +# delete json tag omitempty +sponge patch del-omitempty --dir=$protoBasePath --suffix-name=pb.go > /dev/null + +# modify duplicate numbers and error codes +sponge patch modify-dup-num --dir=inventory/internal/ecode +sponge patch modify-dup-err-code --dir=inventory/internal/ecode + +echo -e "${colorGreen}generated code done.${markEnd}" +echo "" +echo -e $tipMsg +echo "" diff --git a/6_micro-cluster/example-2-mono-repo/inventory/scripts/run-nohup.sh b/6_micro-cluster/example-2-mono-repo/inventory/scripts/run-nohup.sh new file mode 100644 index 0000000..7afe343 --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/inventory/scripts/run-nohup.sh @@ -0,0 +1,61 @@ +#!/bin/bash + +# chkconfig: - 85 15 +# description: inventory + +serverName="inventory" +cmdStr="cmd/${serverName}/${serverName}" + + +function checkResult() { + result=$1 + if [ ${result} -ne 0 ]; then + exit ${result} + fi +} + +stopService(){ + NAME=$1 + + ID=`ps -ef | grep "$NAME" | grep -v "$0" | grep -v "grep" | awk '{print $2}'` + if [ -n "$ID" ]; then + for id in $ID + do + kill -9 $id + echo "Stopped ${NAME} service successfully, process ID=${ID}" + done + fi +} + +startService() { + NAME=$1 + + if [ -f "${NAME}" ] ;then + rm "${NAME}" + fi + sleep 0.2 + go build -o ${cmdStr} cmd/${NAME}/main.go + checkResult $? + + nohup ${cmdStr} > ${NAME}.log 2>&1 & + sleep 1 + + ID=`ps -ef | grep "$NAME" | grep -v "$0" | grep -v "grep" | awk '{print $2}'` + if [ -n "$ID" ]; then + echo "Start the ${NAME} service successfully, process ID=${ID}" + else + echo "Failed to start ${NAME} service" + return 1 + fi + return 0 +} + + +stopService ${serverName} +if [ "$1"x != "stop"x ] ;then + sleep 1 + startService ${serverName} + checkResult $? 
+else + echo "Service ${serverName} has stopped" +fi diff --git a/6_micro-cluster/example-2-mono-repo/inventory/scripts/run.sh b/6_micro-cluster/example-2-mono-repo/inventory/scripts/run.sh new file mode 100644 index 0000000..cd90b8e --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/inventory/scripts/run.sh @@ -0,0 +1,29 @@ +#!/bin/bash + +serverName="inventory" + +binaryFile="cmd/${serverName}/${serverName}" + +osType=$(uname -s) +if [ "${osType%%_*}"x = "MINGW64"x ];then + binaryFile="${binaryFile}.exe" +fi + +if [ -f "${binaryFile}" ] ;then + rm "${binaryFile}" +fi + +function checkResult() { + result=$1 + if [ ${result} -ne 0 ]; then + exit ${result} + fi +} + +sleep 0.2 + +go build -o ${binaryFile} cmd/${serverName}/main.go +checkResult $? + +# running server +./${binaryFile} diff --git a/6_micro-cluster/example-2-mono-repo/product.proto b/6_micro-cluster/example-2-mono-repo/product.proto new file mode 100644 index 0000000..826fceb --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/product.proto @@ -0,0 +1,28 @@ +syntax = "proto3"; + +package api.product.v1; + +import "validate/validate.proto"; + +option go_package = "eshop/api/product/v1;v1"; + +service Product { + // get product by id + rpc GetByID(GetByIDRequest) returns (GetByIDReply) {} +} + +message GetByIDRequest { + uint64 id = 1 [(validate.rules).uint64.gte = 1]; +} + +message ProductDetail { + uint64 id = 1; + string name = 2; + float price = 3; + string description = 4; +} + +message GetByIDReply { + ProductDetail productDetail = 1; + uint64 inventoryID = 2; +} diff --git a/6_micro-cluster/example-2-mono-repo/product/.gitignore b/6_micro-cluster/example-2-mono-repo/product/.gitignore new file mode 100644 index 0000000..7ceab4a --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/product/.gitignore @@ -0,0 +1,26 @@ +# Binaries for programs and plugins +*.exe +*.exe~ +*.dll +*.so +*.dylib +*.log + +# Test binary, built with `go test -c` +*.test + +# Output of the go coverage tool, specifically when used with LiteIDE +*.out + +# Dependency directories (remove the comment below to include it) +vendor/ +dist/ + +# idea +.idea +*.iml +*.ipr +*.iws + +cmd/product/product + diff --git a/6_micro-cluster/example-2-mono-repo/product/.golangci.yml b/6_micro-cluster/example-2-mono-repo/product/.golangci.yml new file mode 100644 index 0000000..d17ff22 --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/product/.golangci.yml @@ -0,0 +1,342 @@ +# This file configures eshop. + +run: + # timeout for analysis, e.g. 30s, 5m, default is 1m + timeout: 10m + # default concurrency is available CPU number + concurrency: 4 + # include test files or not, default is true + tests: false + # which dirs to skip: issues from them won't be reported; + # can use regexp here: generated.*, regexp is applied on full path; + # default value is empty list, but default dirs are skipped independently + # from this option's value (see skip-dirs-use-default). + skip-dirs: + - docs + - api + # which files to skip: they will be analyzed, but issues from them + # won't be reported. Default value is empty list, but there is + # no need to include all autogenerated files, we confidently recognize + # autogenerated files. If it's not please let us know. + skip-files: + - _test.go + + # exit code when at least one issue was found, default is 1 + issues-exit-code: 1 + + # list of build tags, all linters use it. Default is empty list. + build-tags: + - mytag + + # default is true. 
Enables skipping of directories: + # vendor$, third_party$, testdata$, examples$, Godeps$, builtin$ + skip-dirs-use-default: true + + +linters: + # please, do not use `enable-all`: it's deprecated and will be removed soon. + # inverted configuration with `enable-all` and `disable` is not scalable during updates of golangci-lint + disable-all: true + enable: + - revive + - goimports + - gofmt + - unused + #- depguard + - dogsled + - errcheck + #- gochecknoinits + - goconst + - gocyclo + - gosimple + - govet + - lll + - misspell + - typecheck + - unconvert + - whitespace + - staticcheck + #- bodyclose + #- dupl + #- goprintffuncname + #- gosec + #- unparam + #- ineffassign + + +linters-settings: + revive: + rules: + - name: argument-limit + arguments: [ 8 ] + - name: atomic + - name: bare-return + - name: blank-imports + - name: bool-literal-in-expr + - name: call-to-gc + - name: confusing-naming + - name: confusing-results + - name: constant-logical-expr + - name: context-as-argument + - name: context-keys-type + - name: deep-exit + - name: defer + - name: dot-imports + - name: duplicated-imports + - name: early-return + - name: empty-block + #- name: empty-lines + - name: error-naming + - name: error-return + - name: error-strings + - name: errorf + - name: function-result-limit + arguments: [ 3 ] + - name: identical-branches + - name: if-return + - name: import-shadowing + - name: increment-decrement + - name: indent-error-flow + - name: modifies-parameter + - name: modifies-value-receiver + - name: package-comments + - name: range + - name: range-val-address + - name: range-val-in-closure + - name: receiver-naming + - name: redefines-builtin-id + - name: string-of-int + - name: struct-tag + - name: superfluous-else + - name: time-naming + - name: unconditional-recursion + - name: unexported-naming + - name: unnecessary-stmt + - name: unreachable-code + - name: unused-parameter + - name: var-declaration + - name: var-naming + - name: waitgroup-by-value + + dogsled: + # checks assignments with too many blank identifiers; default is 2 + max-blank-identifiers: 2 + + dupl: + # tokens count to trigger issue, 150 by default + threshold: 100 + + errcheck: + # report about not checking of errors in type assertions: `a := b.(MyStruct)`; + # default is false: such cases aren't reported by default. + check-type-assertions: false + + # report about assignment of errors to blank identifier: `num, _ := strconv.Atoi(numStr)`; + # default is false: such cases aren't reported by default. + check-blank: false + + # [deprecated] comma-separated list of pairs of the form pkg:regex + # the regex is used to ignore names within pkg. (default "fmt:.*"). 
+ # see https://github.com/kisielk/errcheck#the-deprecated-method for details + ignore: fmt:.*,io/ioutil:^Read.* + + # path to a file containing a list of functions to exclude from checking + # see https://github.com/kisielk/errcheck#excluding-functions for details + # exclude: /path/to/file.txt + funlen: + lines: 60 + statements: 40 + + gocognit: + # minimal code complexity to report, 30 by default (but we recommend 10-20) + min-complexity: 10 + + goconst: + # minimal length of string constant, 3 by default + min-len: 4 + # minimal occurrences count to trigger, 3 by default + min-occurrences: 4 + + gocyclo: + # minimal code complexity to report, 30 by default (but we recommend 10-20) + min-complexity: 20 + + godox: + # report any comments starting with keywords, this is useful for TODO or FIXME comments that + # might be left in the code accidentally and should be resolved before merging + keywords: # default keywords are TODO, BUG, and FIXME, these can be overwritten by this setting + - NOTE + - OPTIMIZE # marks code that should be optimized before merging + - HACK # marks hack-arounds that should be removed before merging + + gofmt: + # simplify code: gofmt with `-s` option, true by default + simplify: true + + goimports: + # put imports beginning with prefix after 3rd-party packages; + # it's a comma-separated list of prefixes + local-prefixes: eshop + + gomnd: + settings: + mnd: + # the list of enabled checks, see https://github.com/tommy-muehle/go-mnd/#checks for description. + checks: argument,case,condition,operation,return,assign + + govet: + # report about shadowed variables + check-shadowing: true + + # settings per analyzer + settings: + printf: # analyzer name, run `go tool vet help` to see all analyzers + funcs: # run `go tool vet help printf` to see available settings for `printf` analyzer + - (github.com/golangci/golangci-lint/pkg/logutils.Log).Infof + - (github.com/golangci/golangci-lint/pkg/logutils.Log).Warnf + - (github.com/golangci/golangci-lint/pkg/logutils.Log).Errorf + - (github.com/golangci/golangci-lint/pkg/logutils.Log).Fatalf + + # enable or disable analyzers by name + enable: + - atomicalign + enable-all: false + disable: + - shadow + disable-all: false + + depguard: + list-type: blacklist + include-go-root: false + #packages: + # - github.com/user/name + #packages-with-error-message: + # specify an error message to output when a blacklisted package is used + # - github.com/user/name: "logging is allowed only by logutils.Log" + + lll: + # max line length, lines longer will be reported. Default is 120. + # '\t' is counted as 1 character by default, and can be changed with the tab-width option + line-length: 200 + # tab width in spaces. Default to 1. + tab-width: 1 + + maligned: + # print struct with more effective memory layout or not, false by default + suggest-new: true + + misspell: + # Correct spellings using locale preferences for US or UK. + # Default is to use a neutral variety of English. + # Setting locale to US will correct the British spelling of 'colour' to 'color'. + locale: US + ignore-words: + - someword + + nakedret: + # make an issue if func has more lines of code than this setting and it has naked returns; default is 30 + max-func-lines: 30 + + prealloc: + # XXX: we don't recommend using this linter before doing performance profiling. + # For most programs usage of prealloc will be a premature optimization. + + # Report preallocation suggestions only on simple loops that have no returns/breaks/continues/gotos in them. + # True by default. 
+ simple: true + range-loops: true # Report preallocation suggestions on range loops, true by default + for-loops: false # Report preallocation suggestions on for loops, false by default + + #rowserrcheck: + # packages: + # - github.com/user/name + + unparam: + # Inspect exported functions, default is false. Set to true if no external program/library imports your code. + # XXX: if you enable this setting, unparam will report a lot of false-positives in text editors: + # if it's called for subdir of a project it can't find external interfaces. All text editor integrations + # with golangci-lint call it on a directory with the changed file. + check-exported: false + + unused: + # treat code as a program (not a library) and report unused exported identifiers; default is false. + # XXX: if you enable this setting, unused will report a lot of false-positives in text editors: + # if it's called for subdir of a project it can't find funcs usages. All text editor integrations + # with golangci-lint call it on a directory with the changed file. + check-exported: false + + whitespace: + multi-if: false # Enforces newlines (or comments) after every multi-line if statement + multi-func: false # Enforces newlines (or comments) after every multi-line function signature + + wsl: + # If true append is only allowed to be cuddled if appending value is + # matching variables, fields or types on line above. Default is true. + strict-append: true + # Allow calls and assignments to be cuddled as long as the lines have any + # matching variables, fields or types. Default is true. + allow-assign-and-call: true + # Allow multiline assignments to be cuddled. Default is true. + allow-multiline-assign: true + # Allow declarations (var) to be cuddled. + allow-cuddle-declarations: false + # Allow trailing comments in ending of blocks + allow-trailing-comment: false + # Force newlines in end of case at this limit (0 = never). + force-case-trailing-whitespace: 0 + +issues: + # List of regexps of issue texts to exclude, empty list by default. + # But independently from this option we use default exclude patterns, + # it can be disabled by `exclude-use-default: false`. To list all + # excluded by default patterns execute `golangci-lint run --help` + exclude: + - abcdef + + # Excluding configuration per-path, per-linter, per-text and per-source + exclude-rules: + # Exclude some linters from running on tests files. + - path: _test\.go + linters: + - gocyclo + - errcheck + - dupl + - gosec + + # Exclude known linters from partially hard-vendored code, + # which is impossible to exclude via "nolint" comments. + - path: internal/hmac/ + text: "weak cryptographic primitive" + linters: + - gosec + + # Exclude lll issues for long lines with go:generate + - linters: + - lll + source: "^//go:generate " + + # Independently from option `exclude` we use default exclude patterns, + # it can be disabled by this option. To list all + # excluded by default patterns execute `golangci-lint run --help`. + # Default value for this option is true. + exclude-use-default: false + + # Maximum issues count per one linter. Set to 0 to disable. Default is 50. + max-issues-per-linter: 0 + + # Maximum count of issues with the same text. Set to 0 to disable. Default is 3. + max-same-issues: 0 + + # Show only new issues: if there are unstaged changes or untracked files, + # only those changes are analyzed, else only changes in HEAD~ are analyzed. + # It's a super-useful option for integration of golangci-lint into existing + # large codebase. 
It's not practical to fix all existing issues at the moment + # of integration: much better don't allow issues in new code. + # Default is false. + new: false + + # Show only new issues created after git revision `REV` + new-from-rev: "" + +service: + golangci-lint-version: 1.48.0 # use the fixed version to not introduce new linters unexpectedly diff --git a/6_micro-cluster/example-2-mono-repo/product/Jenkinsfile b/6_micro-cluster/example-2-mono-repo/product/Jenkinsfile new file mode 100644 index 0000000..cc76915 --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/product/Jenkinsfile @@ -0,0 +1,200 @@ +pipeline { + agent any + + stages { + stage("Check Build Branch") { + steps { + echo "Checking build branch in progress ......" + script { + if (env.GIT_BRANCH ==~ /^v([0-9])+\.([0-9])+\.([0-9])+.*/) { + echo "building production environment, tag=${env.GIT_BRANCH}" + } else if (env.GIT_BRANCH ==~ /^test-([0-9])+\.([0-9])+\.([0-9])+.*/) { + echo "building test environment, tag=${env.GIT_BRANCH}" + } else if (env.GIT_BRANCH ==~ /(origin\/develop)/) { + echo "building development environment, /origin/develop" + } else { + echo "The build branch ${env.GIT_BRANCH} is not legal, allowing to build the development environment branch (/origin/develop), the test environment branch (e.g. test-1.0.0), and the production environment branch (e.g. v1.0.0)" + sh 'exit 1' + } + } + echo "Check build branch complete." + } + } + + stage("Check Code") { + steps { + echo "Checking code in progress ......" + sh 'make ci-lint' + echo "Check code complete." + } + } + + stage("Unit Testing") { + steps { + echo "Unit testing in progress ......" + sh 'make test' + echo "Unit testing complete." + } + } + + stage("Compile Code") { + steps { + echo "Compiling code in progress ......" + sh 'make build' + echo "compile code complete." + } + } + + stage("Build Image") { + steps { + echo "building image in progress ......" + script { + registryHost="" + tagName="" + if (env.GIT_BRANCH ==~ /^v([0-9])+\.([0-9])+\.([0-9])+.*/) { + if (env.PROD_REPO_HOST == null) { + echo "The value of environment variable PROD_REPO_HOST is empty, please set the value of PROD_REPO_HOST in [Jenkins Management] --> [System Settings] --> [Environment Variables]." + sh 'exit 1' + } + echo "Use the production environment image repository ${env.PROD_REPO_HOST}" + registryHost=env.PROD_REPO_HOST + tagName=env.GIT_BRANCH + } + else if (env.GIT_BRANCH ==~ /^test-([0-9])+\.([0-9])+\.([0-9])+.*/) { + if (env.TEST_REPO_HOST == null) { + echo "The value of environment variable TEST_REPO_HOST is empty, please set the value of TEST_REPO_HOST in [Jenkins Management] --> [System Settings] --> [Environment Variables]." + sh 'exit 1' + } + echo "Use the test environment image repository ${env.TEST_REPO_HOST}" + registryHost=env.TEST_REPO_HOST + tagName=env.GIT_BRANCH + } + else { + if (env.DEV_REPO_HOST == null) { + echo "The value of environment variable DEV_REPO_HOST is empty, please set the value of DEV_REPO_HOST in [Jenkins Management] --> [System Settings] --> [Environment Variables]." + sh 'exit 1' + } + echo "Using the development environment ${env.DEV_REPO_HOST}" + registryHost=env.DEV_REPO_HOST + } + sh "make image-build REPO_HOST=$registryHost TAG=$tagName" + } + echo "Build image complete" + } + } + + stage("Push Image") { + steps { + echo "pushing image in progress ......" 
+ script { + registryHost="" + tagName="" + if (env.GIT_BRANCH ==~ /^v([0-9])+\.([0-9])+\.([0-9])+.*/) { + if (env.PROD_REPO_HOST == null) { + echo "The value of environment variable PROD_REPO_HOST is empty, please set the value of PROD_REPO_HOST in [Jenkins Management] --> [System Settings] --> [Environment Variables]." + sh 'exit 1' + } + echo "Use the production environment image repository ${env.PROD_REPO_HOST}" + registryHost=env.PROD_REPO_HOST + tagName=env.GIT_BRANCH + } + else if (env.GIT_BRANCH ==~ /^test-([0-9])+\.([0-9])+\.([0-9])+.*/) { + if (env.TEST_REPO_HOST == null) { + echo "The value of environment variable TEST_REPO_HOST is empty, please set the value of TEST_REPO_HOST in [Jenkins Management] --> [System Settings] --> [Environment Variables]." + sh 'exit 1' + } + echo "Use the test environment image repository ${env.TEST_REPO_HOST}" + registryHost=env.TEST_REPO_HOST + tagName=env.GIT_BRANCH + } + else { + if (env.DEV_REPO_HOST == null) { + echo "The value of environment variable DEV_REPO_HOST is empty, please set the value of DEV_REPO_HOST in [Jenkins Management] --> [System Settings] --> [Environment Variables]." + sh 'exit 1' + } + echo "Using the development environment ${env.DEV_REPO_HOST}" + registryHost=env.DEV_REPO_HOST + } + sh "make image-push REPO_HOST=$registryHost TAG=$tagName" + } + echo "push image complete, clear image complete." + } + } + + stage("Deploy to k8s") { + when { expression { return env.GIT_BRANCH ==~ /(origin\/staging|origin\/develop)/ } } + steps { + echo "Deploying to k8s in progress ......" + sh 'make deploy-k8s' + echo "Deploy to k8s complete." + } + } + } + + post { + always { + echo 'One way or another, I have finished' + echo sh(returnStdout: true, script: 'env') + deleteDir() /* clean up our workspace */ + } + success { + SendDingding("success") + //SendEmail("success") + echo 'structure success' + } + failure { + SendDingding("failure") + //SendEmail("failure") + echo 'structure failure' + } + } +} + +// Notifications using dingding +void SendDingding(res) +{ + // Fill in the corresponding cell phone number and specify a person to be notified in the pinned group + tel_num="xxxxxxxxxxx" + dingding_url="https://oapi.dingtalk.com/robot/send\\?access_token\\=your dingding robot token" + + branchName="" + if (env.GIT_BRANCH ==~ /^v([0-9])+\.([0-9])+\.([0-9])+.*/) { + branchName="${env.SERVER_PLATFORM} production environment, tag=${env.GIT_BRANCH}, ${env.JOB_NAME}" + } + else if (env.GIT_BRANCH ==~ /^test-([0-9])+\.([0-9])+\.([0-9])+.*/){ + branchName="${env.SERVER_PLATFORM} test environment, tag=${env.GIT_BRANCH}, ${env.JOB_NAME}" + } + else { + branchName="${env.SERVER_PLATFORM} develop environment, branch=${env.GIT_BRANCH}, ${env.JOB_NAME}" + } + + json_msg="" + if( res == "success" ) { + json_msg='{\\"msgtype\\":\\"text\\",\\"text\\":{\\"content\\":\\"@' + tel_num +' [OK] ' + "${branchName} ${env.BUILD_NUMBER}th " + 'build success. \\"},\\"at\\":{\\"atMobiles\\":[\\"' + tel_num + '\\"],\\"isAtAll\\":false}}' + } + else { + json_msg='{\\"msgtype\\":\\"text\\",\\"text\\":{\\"content\\":\\"@' + tel_num +' [cry] ' + "${branchName} ${env.BUILD_NUMBER}th " + 'build failed, please deal with it promptly! 
\\"},\\"at\\":{\\"atMobiles\\":[\\"' + tel_num + '\\"],\\"isAtAll\\":false}}' + } + + post_header="Content-Type:application/json;charset=utf-8" + sh_cmd="curl -X POST " + dingding_url + " -H " + "\'" + post_header + "\'" + " -d " + "\"" + json_msg + "\"" + sh sh_cmd +} + +// Notifications using email +void SendEmail(res) +{ + emailAddr="xxx@xxx.com" + if( res == "success" ) + { + mail to: emailAddr, + subject: "Build Success: ${currentBuild.fullDisplayName}", + body: "\nJob name: ${env.JOB_NAME} ${env.BUILD_NUMBER}th build. \n\n For more information, please see: ${env.BUILD_URL}" + } + else + { + mail to: emailAddr, + subject: "Build Failed: ${currentBuild.fullDisplayName}", + body: "\nJob name: ${env.JOB_NAME} ${env.BUILD_NUMBER}th build. \n\n For more information, please see: ${env.BUILD_URL}" + } +} diff --git a/6_micro-cluster/example-2-mono-repo/product/Makefile b/6_micro-cluster/example-2-mono-repo/product/Makefile new file mode 100644 index 0000000..c49acd7 --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/product/Makefile @@ -0,0 +1,183 @@ +SHELL := /bin/bash + +PROJECT_NAME := "eshop" +PKG := "$(PROJECT_NAME)" +PKG_LIST := $(shell go list ${PKG}/... | grep -v /vendor/ | grep -v /api/) + + + + +.PHONY: ci-lint +# Check code formatting, naming conventions, security, maintainability, etc. the rules in the .golangci.yml file +ci-lint: + @gofmt -s -w . + golangci-lint run ./... + + +.PHONY: test +# Test *_test.go files, the parameter -count=1 means that caching is disabled +test: + go test -count=1 -short ${PKG_LIST} + + +.PHONY: cover +# Generate test coverage +cover: + go test -short -coverprofile=cover.out -covermode=atomic ${PKG_LIST} + go tool cover -html=cover.out + + +.PHONY: graph +# Generate interactive visual function dependency graphs +graph: + @echo "generating graph ......" + @cp -f cmd/product/main.go . + go-callvis -skipbrowser -format=svg -nostd -file=product eshop + @rm -f main.go product.gv + + + +.PHONY: proto +# Generate *.go and template code by proto files, the default is all the proto files in the api directory. you can specify the proto file, multiple files are separated by commas, e.g. make proto FILES=api/user/v1/user.proto +proto: + @bash scripts/protoc.sh $(FILES) + go mod tidy + @gofmt -s -w . + + +.PHONY: proto-doc +# Generate doc from *.proto files +proto-doc: + @bash scripts/proto-doc.sh + + +.PHONY: build +# Build product for linux amd64 binary +build: + @echo "building 'product', linux binary file will output to 'cmd/product'" + @cd cmd/product && CGO_ENABLED=0 GOOS=linux GOARCH=amd64 go build + + + +.PHONY: run +# Build and run service +run: + @bash scripts/run.sh + + +.PHONY: run-nohup +# Run service with nohup in local, if you want to stop the server, pass the parameter stop, e.g. make run-nohup CMD=stop +run-nohup: + @bash scripts/run-nohup.sh $(CMD) + + +.PHONY: run-docker +# Run service in local docker, if you want to update the service, run the make run-docker command again +run-docker: image-build-local + @bash scripts/deploy-docker.sh + + +.PHONY: binary-package +# Packaged binary files +binary-package: build + @bash scripts/binary-package.sh + + +.PHONY: deploy-binary +# Deploy binary to remote linux server, e.g. 
make deploy-binary USER=root PWD=123456 IP=192.168.1.10 +deploy-binary: binary-package + @expect scripts/deploy-binary.sh $(USER) $(PWD) $(IP) + + +.PHONY: image-build-local +# Build image for local docker, tag=latest, use binary files to build +image-build-local: build + @bash scripts/image-build-local.sh + + +.PHONY: image-build +# Build image for remote repositories, use binary files to build, e.g. make image-build REPO_HOST=addr TAG=latest +image-build: + @bash scripts/image-build.sh $(REPO_HOST) $(TAG) + + +.PHONY: image-build2 +# Build image for remote repositories, phase II build, e.g. make image-build2 REPO_HOST=addr TAG=latest +image-build2: + @bash scripts/image-build2.sh $(REPO_HOST) $(TAG) + + +.PHONY: image-push +# Push docker image to remote repositories, e.g. make image-push REPO_HOST=addr TAG=latest +image-push: + @bash scripts/image-push.sh $(REPO_HOST) $(TAG) + + +.PHONY: deploy-k8s +# Deploy service to k8s +deploy-k8s: + @bash scripts/deploy-k8s.sh + + +.PHONY: image-build-rpc-test +# Build grpc test image for remote repositories, e.g. make image-build-rpc-test REPO_HOST=addr TAG=latest +image-build-rpc-test: + @bash scripts/image-rpc-test.sh $(REPO_HOST) $(TAG) + + +.PHONY: patch +# Patch some dependent code, e.g. make patch TYPE=types-pb , make patch TYPE=init-, your_db_driver is mysql, mongodb, postgresql, tidb, sqlite, for example: make patch TYPE=init-mysql +patch: + @bash scripts/patch.sh $(TYPE) + + +.PHONY: copy-proto +# Copy proto file from the grpc server directory, multiple directories or proto files separated by commas. default is to copy all proto files, e.g. make copy-proto SERVER=yourServerDir, copy specified proto files, e.g. make copy-proto SERVER=yourServerDir PROTO_FILE=yourProtoFile1,yourProtoFile2 +copy-proto: + @sponge patch copy-proto --server-dir=$(SERVER) --proto-file=$(PROTO_FILE) + + +.PHONY: modify-proto-pkg-name +# Modify the 'package' and 'go_package' names of all proto files in the 'api' directory +modify-proto-pkg-name: + @sponge patch modify-proto-package --dir=api --server-dir=. + + +.PHONY: update-config +# Update internal/config code base on yaml file +update-config: + @sponge config --server-dir=. 
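+
+# Example end-to-end workflow (illustrative; REPO_HOST and TAG below are placeholder values):
+#   make proto                                    # regenerate *.pb.go and template code from the proto files
+#   make run                                      # build and run the service locally
+#   make image-build REPO_HOST=hub.docker.com TAG=v1.0.0
+#   make image-push REPO_HOST=hub.docker.com TAG=v1.0.0
+#   make deploy-k8s                               # deploy the service to k8s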
+ + +.PHONY: clean +# Clean binary file, cover.out, template file +clean: + @rm -vrf cmd/product/product* + @rm -vrf cover.out + @rm -vrf main.go product.gv + @rm -vrf internal/ecode/*.go.gen* + @rm -vrf internal/routers/*.go.gen* + @rm -vrf internal/handler/*.go.gen* + @rm -vrf internal/service/*.go.gen* + @rm -rf product-binary.tar.gz + @echo "clean finished" + + +# Show help +help: + @echo '' + @echo 'Usage:' + @echo ' make ' + @echo '' + @echo 'Targets:' + @awk '/^[a-zA-Z\-_0-9]+:/ { \ + helpMessage = match(lastLine, /^# (.*)/); \ + if (helpMessage) { \ + helpCommand = substr($$1, 0, index($$1, ":")-1); \ + helpMessage = substr(lastLine, RSTART + 2, RLENGTH); \ + printf "\033[1;36m %-22s\033[0m %s\n", helpCommand,helpMessage; \ + } \ + } \ + { lastLine = $$0 }' $(MAKEFILE_LIST) + +.DEFAULT_GOAL := all diff --git a/6_micro-cluster/example-2-mono-repo/product/README.md b/6_micro-cluster/example-2-mono-repo/product/README.md new file mode 100644 index 0000000..713700f --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/product/README.md @@ -0,0 +1,9 @@ +## product + +| Feature | Value | +| :----------------: | :-----------: | +| Server name | `product` | +| Server type | `grpc-pb` | +| Go module name | `eshop` | +| Repository type | `mono-repo` | + diff --git a/6_micro-cluster/example-2-mono-repo/product/cmd/product/initial/close.go b/6_micro-cluster/example-2-mono-repo/product/cmd/product/initial/close.go new file mode 100644 index 0000000..777644b --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/product/cmd/product/initial/close.go @@ -0,0 +1,44 @@ +package initial + +import ( + "context" + "time" + + "github.com/zhufuyi/sponge/pkg/app" + "github.com/zhufuyi/sponge/pkg/tracer" + + "eshop/product/internal/config" + //"eshop/product/internal/model" +) + +// Close releasing resources after service exit +func Close(servers []app.IServer) []app.Close { + var closes []app.Close + + // close server + for _, s := range servers { + closes = append(closes, s.Stop) + } + + // close database + //closes = append(closes, func() error { + // return model.CloseDB() + //}) + + // close redis + //if config.Get().App.CacheType == "redis" { + // closes = append(closes, func() error { + // return model.CloseRedis() + // }) + //} + + // close tracing + if config.Get().App.EnableTrace { + closes = append(closes, func() error { + ctx, _ := context.WithTimeout(context.Background(), 2*time.Second) //nolint + return tracer.Close(ctx) + }) + } + + return closes +} diff --git a/6_micro-cluster/example-2-mono-repo/product/cmd/product/initial/createService.go b/6_micro-cluster/example-2-mono-repo/product/cmd/product/initial/createService.go new file mode 100644 index 0000000..2715c78 --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/product/cmd/product/initial/createService.go @@ -0,0 +1,97 @@ +package initial + +import ( + "fmt" + "strconv" + + "github.com/zhufuyi/sponge/pkg/app" + "github.com/zhufuyi/sponge/pkg/logger" + "github.com/zhufuyi/sponge/pkg/servicerd/registry" + "github.com/zhufuyi/sponge/pkg/servicerd/registry/consul" + "github.com/zhufuyi/sponge/pkg/servicerd/registry/etcd" + "github.com/zhufuyi/sponge/pkg/servicerd/registry/nacos" + + "eshop/product/internal/config" + "eshop/product/internal/server" +) + +// CreateServices create grpc or http service +func CreateServices() []app.IServer { + var cfg = config.Get() + var servers []app.IServer + + // creating grpc service + grpcAddr := ":" + strconv.Itoa(cfg.Grpc.Port) + grpcRegistry, grpcInstance := registerService("grpc", cfg.App.Host, 
cfg.Grpc.Port) + grpcServer := server.NewGRPCServer(grpcAddr, + server.WithGrpcRegistry(grpcRegistry, grpcInstance), + ) + servers = append(servers, grpcServer) + + return servers +} + +func registerService(scheme string, host string, port int) (registry.Registry, *registry.ServiceInstance) { + var ( + instanceEndpoint = fmt.Sprintf("%s://%s:%d", scheme, host, port) + cfg = config.Get() + + iRegistry registry.Registry + instance *registry.ServiceInstance + err error + + id = cfg.App.Name + "_" + scheme + "_" + host + logField logger.Field + ) + + switch cfg.App.RegistryDiscoveryType { + // registering service with consul + case "consul": + iRegistry, instance, err = consul.NewRegistry( + cfg.Consul.Addr, + id, + cfg.App.Name, + []string{instanceEndpoint}, + ) + if err != nil { + panic(err) + } + logField = logger.Any("consulAddress", cfg.Consul.Addr) + + // registering service with etcd + case "etcd": + iRegistry, instance, err = etcd.NewRegistry( + cfg.Etcd.Addrs, + id, + cfg.App.Name, + []string{instanceEndpoint}, + ) + if err != nil { + panic(err) + } + logField = logger.Any("etcdAddress", cfg.Etcd.Addrs) + + // registering service with nacos + case "nacos": + iRegistry, instance, err = nacos.NewRegistry( + cfg.NacosRd.IPAddr, + cfg.NacosRd.Port, + cfg.NacosRd.NamespaceID, + id, + cfg.App.Name, + []string{instanceEndpoint}, + ) + if err != nil { + panic(err) + } + logField = logger.String("nacosAddress", fmt.Sprintf("%v:%d", cfg.NacosRd.IPAddr, cfg.NacosRd.Port)) + } + + if instance != nil { + msg := fmt.Sprintf("register service address to %s", cfg.App.RegistryDiscoveryType) + logger.Info(msg, logField, logger.String("id", id), logger.String("name", cfg.App.Name), logger.String("endpoint", instanceEndpoint)) + return iRegistry, instance + } + + return nil, nil +} diff --git a/6_micro-cluster/example-2-mono-repo/product/cmd/product/initial/initApp.go b/6_micro-cluster/example-2-mono-repo/product/cmd/product/initial/initApp.go new file mode 100644 index 0000000..bc6059c --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/product/cmd/product/initial/initApp.go @@ -0,0 +1,132 @@ +// Package initial is the package that starts the service to initialize the service, including +// the initialization configuration, service configuration, connecting to the database, and +// resource release needed when shutting down the service. 
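+//
+// Typical startup commands (illustrative; the flags are defined in initConfig below):
+//
+//	./product -c configs/product.yml                  # read the local configuration file
+//	./product -enable-cc -c configs/product_cc.yml    # load the configuration from the nacos configuration center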
+package initial + +import ( + "flag" + "fmt" + "strconv" + + "github.com/jinzhu/copier" + + "github.com/zhufuyi/sponge/pkg/conf" + "github.com/zhufuyi/sponge/pkg/logger" + "github.com/zhufuyi/sponge/pkg/nacoscli" + "github.com/zhufuyi/sponge/pkg/stat" + "github.com/zhufuyi/sponge/pkg/tracer" + + "eshop/product/configs" + "eshop/product/internal/config" + //"eshop/product/internal/model" +) + +var ( + version string + configFile string + enableConfigCenter bool +) + +// InitApp initial app configuration +func InitApp() { + initConfig() + cfg := config.Get() + + // initializing log + _, err := logger.Init( + logger.WithLevel(cfg.Logger.Level), + logger.WithFormat(cfg.Logger.Format), + logger.WithSave( + cfg.Logger.IsSave, + //logger.WithFileName(cfg.Logger.LogFileConfig.Filename), + //logger.WithFileMaxSize(cfg.Logger.LogFileConfig.MaxSize), + //logger.WithFileMaxBackups(cfg.Logger.LogFileConfig.MaxBackups), + //logger.WithFileMaxAge(cfg.Logger.LogFileConfig.MaxAge), + //logger.WithFileIsCompression(cfg.Logger.LogFileConfig.IsCompression), + ), + ) + if err != nil { + panic(err) + } + logger.Debug(config.Show()) + logger.Info("[logger] was initialized") + + // initializing tracing + if cfg.App.EnableTrace { + tracer.InitWithConfig( + cfg.App.Name, + cfg.App.Env, + cfg.App.Version, + cfg.Jaeger.AgentHost, + strconv.Itoa(cfg.Jaeger.AgentPort), + cfg.App.TracingSamplingRate, + ) + logger.Info("[tracer] was initialized") + } + + // initializing the print system and process resources + if cfg.App.EnableStat { + stat.Init( + stat.WithLog(logger.Get()), + stat.WithAlarm(), // invalid if it is windows, the default threshold for cpu and memory is 0.8, you can modify them + stat.WithPrintField(logger.String("service_name", cfg.App.Name), logger.String("host", cfg.App.Host)), + ) + logger.Info("[resource statistics] was initialized") + } + + // initializing database + //model.InitDB() + //logger.Infof("[%s] was initialized", cfg.Database.Driver) + //model.InitCache(cfg.App.CacheType) + //if cfg.App.CacheType != "" { + // logger.Infof("[%s] was initialized", cfg.App.CacheType) + //} +} + +func initConfig() { + flag.StringVar(&version, "version", "", "service Version Number") + flag.BoolVar(&enableConfigCenter, "enable-cc", false, "whether to get from the configuration center, "+ + "if true, the '-c' parameter indicates the configuration center") + flag.StringVar(&configFile, "c", "", "configuration file") + flag.Parse() + + if enableConfigCenter { + // get the configuration from the configuration center (first get the nacos configuration, + // then read the service configuration according to the nacos configuration center) + if configFile == "" { + configFile = configs.Path("product_cc.yml") + } + nacosConfig, err := config.NewCenter(configFile) + if err != nil { + panic(err) + } + appConfig := &config.Config{} + params := &nacoscli.Params{} + _ = copier.Copy(params, &nacosConfig.Nacos) + format, data, err := nacoscli.GetConfig(params) + if err != nil { + panic(fmt.Sprintf("connect to configuration center err, %v", err)) + } + err = conf.ParseConfigData(data, format, appConfig) + if err != nil { + panic(fmt.Sprintf("parse configuration data err, %v", err)) + } + if appConfig.App.Name == "" { + panic("read the config from center error, config data is empty") + } + config.Set(appConfig) + } else { + // get configuration from local configuration file + if configFile == "" { + configFile = configs.Path("product.yml") + } + err := config.Init(configFile) + if err != nil { + panic("init config error: " + 
err.Error()) + } + } + + if version != "" { + config.Get().App.Version = version + } +} diff --git a/6_micro-cluster/example-2-mono-repo/product/cmd/product/main.go b/6_micro-cluster/example-2-mono-repo/product/cmd/product/main.go new file mode 100644 index 0000000..cdbddc4 --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/product/cmd/product/main.go @@ -0,0 +1,17 @@ +// Package main is the grpc server of the application. +package main + +import ( + "github.com/zhufuyi/sponge/pkg/app" + + "eshop/product/cmd/product/initial" +) + +func main() { + initial.InitApp() + services := initial.CreateServices() + closes := initial.Close(services) + + a := app.New(services, closes) + a.Run() +} diff --git a/6_micro-cluster/example-2-mono-repo/product/configs/location.go b/6_micro-cluster/example-2-mono-repo/product/configs/location.go new file mode 100644 index 0000000..6b610a6 --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/product/configs/location.go @@ -0,0 +1,23 @@ +// Package configs used to locate config file. +package configs + +import ( + "path/filepath" + "runtime" +) + +var basePath string + +func init() { + _, currentFile, _, _ := runtime.Caller(0) //nolint + basePath = filepath.Dir(currentFile) +} + +// Path return absolute path +func Path(rel string) string { + if filepath.IsAbs(rel) { + return rel + } + + return filepath.Join(basePath, rel) +} diff --git a/6_micro-cluster/example-2-mono-repo/product/configs/product.yml b/6_micro-cluster/example-2-mono-repo/product/configs/product.yml new file mode 100644 index 0000000..976bd12 --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/product/configs/product.yml @@ -0,0 +1,117 @@ +# Generate the go struct command: sponge config --server-dir=./serverDir + +# app settings +app: + name: "product" # server name + env: "dev" # runtime environment, dev: development environment, prod: production environment, test: test environment + version: "v0.0.0" + host: "127.0.0.1" # domain or ip, for service registration + enableStat: true # whether to turn on printing statistics, true:enable, false:disable + enableMetrics: true # whether to turn on indicator collection, true:enable, false:disable + enableHTTPProfile: false # whether to turn on performance analysis, true:enable, false:disable + enableLimit: false # whether to turn on rate limiting (adaptive), true:on, false:off + enableCircuitBreaker: false # whether to turn on circuit breaker(adaptive), true:on, false:off + enableTrace: false # whether to turn on trace, true:enable, false:disable, if true jaeger configuration must be set + tracingSamplingRate: 1.0 # tracing sampling rate, between 0 and 1, 0 means no sampling, 1 means sampling all links + registryDiscoveryType: "" # registry and discovery types: consul, etcd, nacos, if empty, registration and discovery are not used + cacheType: "" # cache type, if empty, the cache is not used, support for "memory" and "redis", if set to redis, must set redis configuration + + +# grpc server settings +grpc: + port: 38282 # listen port + httpPort: 38283 # profile and metrics ports + enableToken: false # whether to enable server-side token authentication, default appID=grpc, appKey=123456 + # serverSecure parameter setting + # if type="", it means no secure connection, no need to fill in any parameters + # if type="one-way", it means server-side certification, only the fields 'certFile' and 'keyFile' should be filled in + # if type="two-way", it means both client and server side certification, fill in all fields + serverSecure: + type: "" # 
secures type, "", "one-way", "two-way" + caFile: "" # ca certificate file, valid only in "two-way", absolute path + certFile: "" # server side cert file, absolute path + keyFile: "" # server side key file, absolute path + + +# grpc client-side settings, support for setting up multiple grpc clients. +grpcClient: + - name: "your_grpc_service_name" # grpc service name, used for service discovery + host: "127.0.0.1" # grpc service address, used for direct connection + port: 8282 # grpc service port + timeout: 0 # request timeout, unit(second), if 0 means not set, if greater than 0 means set timeout, valid only for unary grpc type + registryDiscoveryType: "" # registration and discovery types: consul, etcd, nacos, if empty, connecting to server using host and port + enableLoadBalance: true # whether to turn on the load balancer + # clientSecure parameter setting + # if type="", it means no secure connection, no need to fill in any parameters + # if type="one-way", it means server-side certification, only the fields 'serverName' and 'certFile' should be filled in + # if type="two-way", it means both client and server side certification, fill in all fields + clientSecure: + type: "" # secures type, "", "one-way", "two-way" + serverName: "" # server name, e.g. *.foo.com + caFile: "" # client side ca file, valid only in "two-way", absolute path + certFile: "" # client side cert file, absolute path, if secureType="one-way", fill in server side cert file here + keyFile: "" # client side key file, valid only in "two-way", absolute path + clientToken: + enable: false # whether to enable token authentication + appID: "" # app id + appKey: "" # app key + + + +# logger settings +logger: + level: "info" # output log levels debug, info, warn, error, default is debug + format: "console" # output format, console or json, default is console + isSave: false # false:output to terminal, true:output to file, default is false + #logFileConfig: # Effective when isSave=true + #filename: "out.log" # File name (default is out.log) + #maxSize: 20 # Maximum file size (MB, default is 10MB) + #maxBackups: 50 # Maximum number of old files to retain (default is 100) + #maxAge: 15 # Maximum number of days to retain old files (default is 30 days) + #isCompression: true # Whether to compress/archive old files (default is false) + + +# set database configuration. reference-db-config-url +database: + driver: "mysql" # database driver + # mysql settings + mysql: + # dsn format, :@(:)/?[k=v& ......] + dsn: "root:123456@(192.168.3.37:3306)/account?parseTime=true&loc=Local&charset=utf8,utf8mb4" + enableLog: true # whether to turn on printing of all logs + maxIdleConns: 10 # set the maximum number of connections in the idle connection pool + maxOpenConns: 100 # set the maximum number of open database connections + connMaxLifetime: 30 # sets the maximum time for which the connection can be reused, in minutes + + + +# redis settings +redis: + # dsn format, [user]:@127.0.0.1:6379/[db], the default user is default, redis version 6.0 and above only supports user. 
+ dsn: "default:123456@192.168.3.37:6379/0" + dialTimeout: 10 # connection timeout, unit(second) + readTimeout: 2 # read timeout, unit(second) + writeTimeout: 2 # write timeout, unit(second) + + +# jaeger settings +jaeger: + agentHost: "192.168.3.37" + agentPort: 6831 + + +# consul settings +consul: + addr: "192.168.3.37:8500" + + +# etcd settings +etcd: + addrs: ["192.168.3.37:2379"] + + +# nacos settings, used in service registration discovery +nacosRd: + ipAddr: "192.168.3.37" + port: 8848 + namespaceID: "3454d2b5-2455-4d0e-bf6d-e033b086bb4c" # namespace id diff --git a/6_micro-cluster/example-2-mono-repo/product/configs/product_cc.yml b/6_micro-cluster/example-2-mono-repo/product/configs/product_cc.yml new file mode 100644 index 0000000..619f5c9 --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/product/configs/product_cc.yml @@ -0,0 +1,13 @@ +# Generate the go struct command: sponge config --server-dir=./serverDir +# App config from nacos + +# nacos settings +nacos: + ipAddr: "192.168.3.37" # server address + port: 8848 # listening port + scheme: "http" # http or grpc + contextPath: "/nacos" # path + namespaceID: "3454d2b5-2455-4d0e-bf6d-e033b086bb4c" # namespace id + group: "dev" # group name: dev, prod, test + dataID: "product.yml" # config file id + format: "yaml" # configuration file type: json,yaml,toml diff --git a/6_micro-cluster/example-2-mono-repo/product/deployments/binary/README.md b/6_micro-cluster/example-2-mono-repo/product/deployments/binary/README.md new file mode 100644 index 0000000..f10bb3a --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/product/deployments/binary/README.md @@ -0,0 +1,26 @@ + +copy the configuration file to the configs directory and binary file before starting the service. + +``` +├── configs +│ └── product.yml +├── product +├── deploy.sh +└── run.sh +``` + +### Running and stopping service manually + +Running service: + +> ./run.sh + +Stopping the service: + +> ./run.sh stop + +
+ +### Automated deployment service + +> ./deploy.sh diff --git a/6_micro-cluster/example-2-mono-repo/product/deployments/binary/deploy.sh b/6_micro-cluster/example-2-mono-repo/product/deployments/binary/deploy.sh new file mode 100644 index 0000000..209de3a --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/product/deployments/binary/deploy.sh @@ -0,0 +1,33 @@ +#!/bin/bash + +serviceName="product" + +function checkResult() { + result=$1 + if [ ${result} -ne 0 ]; then + exit ${result} + fi +} + +# determine if the startup service script run.sh exists +runFile="~/app/${serviceName}/run.sh" +if [ ! -f "$runFile" ]; then + # if it does not exist, copy the entire directory + mkdir -p ~/app + cp -rf /tmp/${serviceName}-binary ~/app/ + checkResult $? + rm -rf /tmp/${serviceName}-binary* +else + # replace only the binary file if it exists + cp -f ${serviceName}-binary/${serviceName} ~/app/${serviceName}-binary/${serviceName} + checkResult $? + rm -rf /tmp/${serviceName}-binary* +fi + +# running service +cd ~/app/${serviceName}-binary +chmod +x run.sh +./run.sh +checkResult $? + +echo "server directory is ~/app/${serviceName}-binary" diff --git a/6_micro-cluster/example-2-mono-repo/product/deployments/binary/run.sh b/6_micro-cluster/example-2-mono-repo/product/deployments/binary/run.sh new file mode 100644 index 0000000..e846df8 --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/product/deployments/binary/run.sh @@ -0,0 +1,46 @@ +#!/bin/bash + +serviceName="product" +cmdStr="./${serviceName} -c configs/${serviceName}.yml" + +chmod +x ./${serviceName} + +stopService(){ + NAME=$1 + + ID=`ps -ef | grep "$NAME" | grep -v "$0" | grep -v "grep" | awk '{print $2}'` + if [ -n "$ID" ]; then + for id in $ID + do + kill -9 $id + echo "Stopped ${NAME} service successfully, process ID=${ID}" + done + fi +} + +startService() { + NAME=$1 + + nohup ${cmdStr} > ${serviceName}.log 2>&1 & + sleep 1 + + ID=`ps -ef | grep "$NAME" | grep -v "$0" | grep -v "grep" | awk '{print $2}'` + if [ -n "$ID" ]; then + echo "Start the ${NAME} service ...... process ID=${ID}" + else + echo "Failed to start ${NAME} service" + return 1 + fi + return 0 +} + + +stopService ${serviceName} +if [ "$1"x != "stop"x ] ;then + sleep 1 + startService ${serviceName} + exit $? + echo "" +else + echo "Service ${serviceName} has stopped" +fi diff --git a/6_micro-cluster/example-2-mono-repo/product/deployments/docker-compose/README.md b/6_micro-cluster/example-2-mono-repo/product/deployments/docker-compose/README.md new file mode 100644 index 0000000..e2cccb1 --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/product/deployments/docker-compose/README.md @@ -0,0 +1,12 @@ + +copy the configuration file to the configs directory before starting the service. 
+
+```
+├── configs
+│   └── product.yml
+└── docker-compose.yml
+```
+
+Running the service:
+
+> docker-compose up -d
diff --git a/6_micro-cluster/example-2-mono-repo/product/deployments/docker-compose/docker-compose.yml b/6_micro-cluster/example-2-mono-repo/product/deployments/docker-compose/docker-compose.yml
new file mode 100644
index 0000000..69d4175
--- /dev/null
+++ b/6_micro-cluster/example-2-mono-repo/product/deployments/docker-compose/docker-compose.yml
@@ -0,0 +1,21 @@
+version: "3.7"
+
+services:
+  product:
+    image: eshop/product:latest
+    container_name: product
+    restart: always
+    command: ["./product", "-c", "/app/configs/product.yml"]
+    volumes:
+      - $PWD/configs:/app/configs
+
+    ports:
+      - "8282:8282"   # grpc port
+      - "8283:8283"   # grpc metrics or pprof port
+    healthcheck:
+      test: ["CMD", "grpc_health_probe", "-addr=localhost:8282"]   # grpc health check, note: the image must contain the grpc_health_probe command
+
+      interval: 10s       # check interval
+      timeout: 5s         # check timeout
+      retries: 3          # number of retries
+      start_period: 10s   # how long after start-up the first check begins
diff --git a/6_micro-cluster/example-2-mono-repo/product/deployments/kubernetes/README.md b/6_micro-cluster/example-2-mono-repo/product/deployments/kubernetes/README.md
new file mode 100644
index 0000000..0569984
--- /dev/null
+++ b/6_micro-cluster/example-2-mono-repo/product/deployments/kubernetes/README.md
@@ -0,0 +1,32 @@
+Before deploying the service to k8s, create a Secret that gives k8s permission to pull images. Run the following command on a docker host that is already logged in to the image registry:
+
+```bash
+kubectl create secret generic docker-auth-secret \
+  --from-file=.dockerconfigjson=/root/.docker/config.json \
+  --type=kubernetes.io/dockerconfigjson
+```
+
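+One caveat worth checking before applying the manifests: imagePullSecrets are namespace-scoped, so the secret must exist in the same namespace as the Deployment (eshop in this example; product-deployment.yml references it by the name docker-auth-secret). A quick check (sketch):
+
+```bash
+# the Deployment pulls its image via docker-auth-secret, so the secret must exist in the eshop namespace;
+# if it is missing there, recreate it with -n eshop
+kubectl get secret docker-auth-secret -n eshop
+```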
+
+Run the server:
+
+```bash
+cd deployments
+
+kubectl apply -f ./*namespace.yml
+
+kubectl apply -f ./
+```
+
+View the start-up status:
+
+> kubectl get all -n eshop
+
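+Because this service speaks gRPC on 8282 and exposes metrics/pprof over HTTP on 8283 (see product-svc.yml and the grpc settings in the configmap), a quick reachability check is to port-forward the Service and query the metrics port. This is only a sketch; the /metrics path is an assumption based on the Prometheus metrics registration in internal/server/grpc.go:
+
+```bash
+# forward the metrics/pprof port of the Service defined in product-svc.yml
+kubectl port-forward --address=0.0.0.0 service/product-svc 8283:8283 -n eshop
+
+# in another terminal (path assumed)
+curl http://127.0.0.1:8283/metrics
+```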
+ +simple test of http port + +```bash +# mapping to the http port of the service on the local port +kubectl port-forward --address=0.0.0.0 service/ 8080:8080 -n +``` diff --git a/6_micro-cluster/example-2-mono-repo/product/deployments/kubernetes/eshop-namespace.yml b/6_micro-cluster/example-2-mono-repo/product/deployments/kubernetes/eshop-namespace.yml new file mode 100644 index 0000000..eba474f --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/product/deployments/kubernetes/eshop-namespace.yml @@ -0,0 +1,4 @@ +apiVersion: v1 +kind: Namespace +metadata: + name: eshop diff --git a/6_micro-cluster/example-2-mono-repo/product/deployments/kubernetes/product-configmap.yml b/6_micro-cluster/example-2-mono-repo/product/deployments/kubernetes/product-configmap.yml new file mode 100644 index 0000000..1a26193 --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/product/deployments/kubernetes/product-configmap.yml @@ -0,0 +1,124 @@ +kind: ConfigMap +apiVersion: v1 +metadata: + name: product-config + namespace: eshop +data: + product.yml: |- + # Generate the go struct command: sponge config --server-dir=./serverDir + + # app settings + app: + name: "product" # server name + env: "dev" # runtime environment, dev: development environment, prod: production environment, test: test environment + version: "v0.0.0" + host: "127.0.0.1" # domain or ip, for service registration + enableStat: true # whether to turn on printing statistics, true:enable, false:disable + enableMetrics: true # whether to turn on indicator collection, true:enable, false:disable + enableHTTPProfile: false # whether to turn on performance analysis, true:enable, false:disable + enableLimit: false # whether to turn on rate limiting (adaptive), true:on, false:off + enableCircuitBreaker: false # whether to turn on circuit breaker(adaptive), true:on, false:off + enableTrace: false # whether to turn on trace, true:enable, false:disable, if true jaeger configuration must be set + tracingSamplingRate: 1.0 # tracing sampling rate, between 0 and 1, 0 means no sampling, 1 means sampling all links + registryDiscoveryType: "" # registry and discovery types: consul, etcd, nacos, if empty, registration and discovery are not used + cacheType: "" # cache type, if empty, the cache is not used, support for "memory" and "redis", if set to redis, must set redis configuration + + + # grpc server settings + grpc: + port: 8282 # listen port + httpPort: 8283 # profile and metrics ports + enableToken: false # whether to enable server-side token authentication, default appID=grpc, appKey=123456 + # serverSecure parameter setting + # if type="", it means no secure connection, no need to fill in any parameters + # if type="one-way", it means server-side certification, only the fields 'certFile' and 'keyFile' should be filled in + # if type="two-way", it means both client and server side certification, fill in all fields + serverSecure: + type: "" # secures type, "", "one-way", "two-way" + caFile: "" # ca certificate file, valid only in "two-way", absolute path + certFile: "" # server side cert file, absolute path + keyFile: "" # server side key file, absolute path + + + # grpc client-side settings, support for setting up multiple grpc clients. 
+ grpcClient: + - name: "your_grpc_service_name" # grpc service name, used for service discovery + host: "127.0.0.1" # grpc service address, used for direct connection + port: 8282 # grpc service port + timeout: 0 # request timeout, unit(second), if 0 means not set, if greater than 0 means set timeout, valid only for unary grpc type + registryDiscoveryType: "" # registration and discovery types: consul, etcd, nacos, if empty, connecting to server using host and port + enableLoadBalance: true # whether to turn on the load balancer + # clientSecure parameter setting + # if type="", it means no secure connection, no need to fill in any parameters + # if type="one-way", it means server-side certification, only the fields 'serverName' and 'certFile' should be filled in + # if type="two-way", it means both client and server side certification, fill in all fields + clientSecure: + type: "" # secures type, "", "one-way", "two-way" + serverName: "" # server name, e.g. *.foo.com + caFile: "" # client side ca file, valid only in "two-way", absolute path + certFile: "" # client side cert file, absolute path, if secureType="one-way", fill in server side cert file here + keyFile: "" # client side key file, valid only in "two-way", absolute path + clientToken: + enable: false # whether to enable token authentication + appID: "" # app id + appKey: "" # app key + + + + # logger settings + logger: + level: "info" # output log levels debug, info, warn, error, default is debug + format: "console" # output format, console or json, default is console + isSave: false # false:output to terminal, true:output to file, default is false + #logFileConfig: # Effective when isSave=true + #filename: "out.log" # File name (default is out.log) + #maxSize: 20 # Maximum file size (MB, default is 10MB) + #maxBackups: 50 # Maximum number of old files to retain (default is 100) + #maxAge: 15 # Maximum number of days to retain old files (default is 30 days) + #isCompression: true # Whether to compress/archive old files (default is false) + + + # set database configuration. reference-db-config-url + database: + driver: "mysql" # database driver + # mysql settings + mysql: + # dsn format, :@(:)/?[k=v& ......] + dsn: "root:123456@(192.168.3.37:3306)/account?parseTime=true&loc=Local&charset=utf8,utf8mb4" + enableLog: true # whether to turn on printing of all logs + maxIdleConns: 10 # set the maximum number of connections in the idle connection pool + maxOpenConns: 100 # set the maximum number of open database connections + connMaxLifetime: 30 # sets the maximum time for which the connection can be reused, in minutes + + + + # redis settings + redis: + # dsn format, [user]:@127.0.0.1:6379/[db], the default user is default, redis version 6.0 and above only supports user. 
+ dsn: "default:123456@192.168.3.37:6379/0" + dialTimeout: 10 # connection timeout, unit(second) + readTimeout: 2 # read timeout, unit(second) + writeTimeout: 2 # write timeout, unit(second) + + + # jaeger settings + jaeger: + agentHost: "192.168.3.37" + agentPort: 6831 + + + # consul settings + consul: + addr: "192.168.3.37:8500" + + + # etcd settings + etcd: + addrs: ["192.168.3.37:2379"] + + + # nacos settings, used in service registration discovery + nacosRd: + ipAddr: "192.168.3.37" + port: 8848 + namespaceID: "3454d2b5-2455-4d0e-bf6d-e033b086bb4c" # namespace id diff --git a/6_micro-cluster/example-2-mono-repo/product/deployments/kubernetes/product-deployment.yml b/6_micro-cluster/example-2-mono-repo/product/deployments/kubernetes/product-deployment.yml new file mode 100644 index 0000000..731a15f --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/product/deployments/kubernetes/product-deployment.yml @@ -0,0 +1,63 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: product-dm + namespace: eshop +spec: + replicas: 1 + selector: + matchLabels: + app: product + template: + metadata: + name: product-pod + labels: + app: product + spec: + containers: + - name: product + image: /eshop/product:latest + # If using a local image, use Never, default is Always + #imagePullPolicy: Never + command: ["./product", "-c", "/app/configs/product.yml"] + resources: + requests: + cpu: 10m + memory: 10Mi + limits: + cpu: 1000m + memory: 1000Mi + volumeMounts: + - name: product-vl + mountPath: /app/configs/ + readOnly: true + + ports: + - name: grpc-port + containerPort: 8282 + - name: metrics-port + containerPort: 8283 + readinessProbe: + exec: + command: ["/bin/grpc_health_probe", "-addr=:8282"] + initialDelaySeconds: 10 + timeoutSeconds: 2 + periodSeconds: 10 + successThreshold: 1 + failureThreshold: 3 + livenessProbe: + exec: + command: ["/bin/grpc_health_probe", "-addr=:8282"] + + initialDelaySeconds: 10 + timeoutSeconds: 2 + periodSeconds: 10 + successThreshold: 1 + failureThreshold: 3 + # todo for private repositories, you need to create a secret (here docker-auth-secret) to store the account and password to log into docker + imagePullSecrets: + - name: docker-auth-secret + volumes: + - name: product-vl + configMap: + name: product-config diff --git a/6_micro-cluster/example-2-mono-repo/product/deployments/kubernetes/product-svc.yml b/6_micro-cluster/example-2-mono-repo/product/deployments/kubernetes/product-svc.yml new file mode 100644 index 0000000..ab5b761 --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/product/deployments/kubernetes/product-svc.yml @@ -0,0 +1,17 @@ +apiVersion: v1 +kind: Service +metadata: + name: product-svc + namespace: eshop +spec: + selector: + app: product + type: ClusterIP + ports: + - name: product-svc-grpc-port + port: 8282 + targetPort: 8282 + - name: product-svc-grpc-metrics-port + port: 8283 + targetPort: 8283 + diff --git a/6_micro-cluster/example-2-mono-repo/product/docs/gen.info b/6_micro-cluster/example-2-mono-repo/product/docs/gen.info new file mode 100644 index 0000000..2edeb4b --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/product/docs/gen.info @@ -0,0 +1 @@ +eshop,product,true \ No newline at end of file diff --git a/6_micro-cluster/example-2-mono-repo/product/internal/config/product.go b/6_micro-cluster/example-2-mono-repo/product/internal/config/product.go new file mode 100644 index 0000000..2991648 --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/product/internal/config/product.go @@ -0,0 +1,173 @@ +// code generated by 
https://eshop + +package config + +import ( + "github.com/zhufuyi/sponge/pkg/conf" +) + +var config *Config + +func Init(configFile string, fs ...func()) error { + config = &Config{} + return conf.Parse(configFile, config, fs...) +} + +func Show(hiddenFields ...string) string { + return conf.Show(config, hiddenFields...) +} + +func Get() *Config { + if config == nil { + panic("config is nil, please call config.Init() first") + } + return config +} + +func Set(conf *Config) { + config = conf +} + +type Config struct { + App App `yaml:"app" json:"app"` + Consul Consul `yaml:"consul" json:"consul"` + Database Database `yaml:"database" json:"database"` + Etcd Etcd `yaml:"etcd" json:"etcd"` + Grpc Grpc `yaml:"grpc" json:"grpc"` + GrpcClient []GrpcClient `yaml:"grpcClient" json:"grpcClient"` + HTTP HTTP `yaml:"http" json:"http"` + Jaeger Jaeger `yaml:"jaeger" json:"jaeger"` + Logger Logger `yaml:"logger" json:"logger"` + NacosRd NacosRd `yaml:"nacosRd" json:"nacosRd"` + Redis Redis `yaml:"redis" json:"redis"` +} + +type Consul struct { + Addr string `yaml:"addr" json:"addr"` +} + +type Etcd struct { + Addrs []string `yaml:"addrs" json:"addrs"` +} + +type Jaeger struct { + AgentHost string `yaml:"agentHost" json:"agentHost"` + AgentPort int `yaml:"agentPort" json:"agentPort"` +} + +type ClientToken struct { + AppID string `yaml:"appID" json:"appID"` + AppKey string `yaml:"appKey" json:"appKey"` + Enable bool `yaml:"enable" json:"enable"` +} + +type ClientSecure struct { + CaFile string `yaml:"caFile" json:"caFile"` + CertFile string `yaml:"certFile" json:"certFile"` + KeyFile string `yaml:"keyFile" json:"keyFile"` + ServerName string `yaml:"serverName" json:"serverName"` + Type string `yaml:"type" json:"type"` +} + +type ServerSecure struct { + CaFile string `yaml:"caFile" json:"caFile"` + CertFile string `yaml:"certFile" json:"certFile"` + KeyFile string `yaml:"keyFile" json:"keyFile"` + Type string `yaml:"type" json:"type"` +} + +type App struct { + CacheType string `yaml:"cacheType" json:"cacheType"` + EnableCircuitBreaker bool `yaml:"enableCircuitBreaker" json:"enableCircuitBreaker"` + EnableHTTPProfile bool `yaml:"enableHTTPProfile" json:"enableHTTPProfile"` + EnableLimit bool `yaml:"enableLimit" json:"enableLimit"` + EnableMetrics bool `yaml:"enableMetrics" json:"enableMetrics"` + EnableStat bool `yaml:"enableStat" json:"enableStat"` + EnableTrace bool `yaml:"enableTrace" json:"enableTrace"` + Env string `yaml:"env" json:"env"` + Host string `yaml:"host" json:"host"` + Name string `yaml:"name" json:"name"` + RegistryDiscoveryType string `yaml:"registryDiscoveryType" json:"registryDiscoveryType"` + TracingSamplingRate float64 `yaml:"tracingSamplingRate" json:"tracingSamplingRate"` + Version string `yaml:"version" json:"version"` +} + +type GrpcClient struct { + ClientSecure ClientSecure `yaml:"clientSecure" json:"clientSecure"` + ClientToken ClientToken `yaml:"clientToken" json:"clientToken"` + EnableLoadBalance bool `yaml:"enableLoadBalance" json:"enableLoadBalance"` + Host string `yaml:"host" json:"host"` + Name string `yaml:"name" json:"name"` + Port int `yaml:"port" json:"port"` + RegistryDiscoveryType string `yaml:"registryDiscoveryType" json:"registryDiscoveryType"` + Timeout int `yaml:"timeout" json:"timeout"` +} + +type Sqlite struct { + ConnMaxLifetime int `yaml:"connMaxLifetime" json:"connMaxLifetime"` + DBFile string `yaml:"dbFile" json:"dbFile"` + EnableLog bool `yaml:"enableLog" json:"enableLog"` + MaxIdleConns int `yaml:"maxIdleConns" json:"maxIdleConns"` + MaxOpenConns int 
`yaml:"maxOpenConns" json:"maxOpenConns"` +} + +type Mysql struct { + ConnMaxLifetime int `yaml:"connMaxLifetime" json:"connMaxLifetime"` + Dsn string `yaml:"dsn" json:"dsn"` + EnableLog bool `yaml:"enableLog" json:"enableLog"` + MastersDsn []string `yaml:"mastersDsn" json:"mastersDsn"` + MaxIdleConns int `yaml:"maxIdleConns" json:"maxIdleConns"` + MaxOpenConns int `yaml:"maxOpenConns" json:"maxOpenConns"` + SlavesDsn []string `yaml:"slavesDsn" json:"slavesDsn"` +} + +type Postgresql struct { + ConnMaxLifetime int `yaml:"connMaxLifetime" json:"connMaxLifetime"` + Dsn string `yaml:"dsn" json:"dsn"` + EnableLog bool `yaml:"enableLog" json:"enableLog"` + MaxIdleConns int `yaml:"maxIdleConns" json:"maxIdleConns"` + MaxOpenConns int `yaml:"maxOpenConns" json:"maxOpenConns"` +} + +type Redis struct { + DialTimeout int `yaml:"dialTimeout" json:"dialTimeout"` + Dsn string `yaml:"dsn" json:"dsn"` + ReadTimeout int `yaml:"readTimeout" json:"readTimeout"` + WriteTimeout int `yaml:"writeTimeout" json:"writeTimeout"` +} + +type Database struct { + Driver string `yaml:"driver" json:"driver"` + Mongodb Mongodb `yaml:"mongodb" json:"mongodb"` + Mysql Mysql `yaml:"mysql" json:"mysql"` + Postgresql Mysql `yaml:"postgresql" json:"postgresql"` + Sqlite Sqlite `yaml:"sqlite" json:"sqlite"` +} + +type Mongodb struct { + Dsn string `yaml:"dsn" json:"dsn"` +} + +type Grpc struct { + EnableToken bool `yaml:"enableToken" json:"enableToken"` + HTTPPort int `yaml:"httpPort" json:"httpPort"` + Port int `yaml:"port" json:"port"` + ServerSecure ServerSecure `yaml:"serverSecure" json:"serverSecure"` +} + +type Logger struct { + Format string `yaml:"format" json:"format"` + IsSave bool `yaml:"isSave" json:"isSave"` + Level string `yaml:"level" json:"level"` +} + +type NacosRd struct { + IPAddr string `yaml:"ipAddr" json:"ipAddr"` + NamespaceID string `yaml:"namespaceID" json:"namespaceID"` + Port int `yaml:"port" json:"port"` +} + +type HTTP struct { + Port int `yaml:"port" json:"port"` + Timeout int `yaml:"timeout" json:"timeout"` +} diff --git a/6_micro-cluster/example-2-mono-repo/product/internal/config/product_cc.go b/6_micro-cluster/example-2-mono-repo/product/internal/config/product_cc.go new file mode 100644 index 0000000..326e1bc --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/product/internal/config/product_cc.go @@ -0,0 +1,28 @@ +// code generated by https://eshop + +package config + +import ( + "github.com/zhufuyi/sponge/pkg/conf" +) + +func NewCenter(configFile string) (*Center, error) { + nacosConf := &Center{} + err := conf.Parse(configFile, nacosConf) + return nacosConf, err +} + +type Center struct { + Nacos Nacos `yaml:"nacos" json:"nacos"` +} + +type Nacos struct { + ContextPath string `yaml:"contextPath" json:"contextPath"` + DataID string `yaml:"dataID" json:"dataID"` + Format string `yaml:"format" json:"format"` + Group string `yaml:"group" json:"group"` + IPAddr string `yaml:"ipAddr" json:"ipAddr"` + NamespaceID string `yaml:"namespaceID" json:"namespaceID"` + Port int `yaml:"port" json:"port"` + Scheme string `yaml:"scheme" json:"scheme"` +} diff --git a/6_micro-cluster/example-2-mono-repo/product/internal/config/product_test.go b/6_micro-cluster/example-2-mono-repo/product/internal/config/product_test.go new file mode 100644 index 0000000..91135fe --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/product/internal/config/product_test.go @@ -0,0 +1,45 @@ +package config + +import ( + "testing" + + "github.com/stretchr/testify/assert" + + "github.com/zhufuyi/sponge/pkg/gofile" + + 
"eshop/product/configs" +) + +func TestInit(t *testing.T) { + configFile := configs.Path("product.yml") + err := Init(configFile) + if gofile.IsExists(configFile) { + assert.NoError(t, err) + } else { + assert.Error(t, err) + } + + c := Get() + assert.NotNil(t, c) + + str := Show() + assert.NotEmpty(t, str) + t.Log(str) + + // set nil + Set(nil) + defer func() { + recover() + }() + Get() +} + +func TestInitNacos(t *testing.T) { + configFile := configs.Path("product_cc.yml") + _, err := NewCenter(configFile) + if gofile.IsExists(configFile) { + assert.NoError(t, err) + } else { + assert.Error(t, err) + } +} diff --git a/6_micro-cluster/example-2-mono-repo/product/internal/ecode/product_rpc.go b/6_micro-cluster/example-2-mono-repo/product/internal/ecode/product_rpc.go new file mode 100644 index 0000000..2ff868d --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/product/internal/ecode/product_rpc.go @@ -0,0 +1,19 @@ +// Code generated by https://github.com/zhufuyi/sponge + +package ecode + +import ( + "github.com/zhufuyi/sponge/pkg/errcode" +) + +// product business-level rpc error codes. +// the _productNO value range is 1~100, if the same error code is used, it will cause panic. +var ( + _productNO = 79 + _productName = "product" + _productBaseCode = errcode.RCode(_productNO) + + StatusGetByIDProduct = errcode.NewRPCStatus(_productBaseCode+1, "failed to GetByID "+_productName) + + // error codes are globally unique, adding 1 to the previous error code +) diff --git a/6_micro-cluster/example-2-mono-repo/product/internal/ecode/systemCode_rpc.go b/6_micro-cluster/example-2-mono-repo/product/internal/ecode/systemCode_rpc.go new file mode 100644 index 0000000..8a88afd --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/product/internal/ecode/systemCode_rpc.go @@ -0,0 +1,46 @@ +package ecode + +import ( + "github.com/zhufuyi/sponge/pkg/errcode" +) + +// rpc system level error code, with status prefix, error code range 30000~40000 +var ( + StatusSuccess = errcode.StatusSuccess + + StatusCanceled = errcode.StatusCanceled + StatusUnknown = errcode.StatusUnknown + StatusInvalidParams = errcode.StatusInvalidParams + StatusDeadlineExceeded = errcode.StatusDeadlineExceeded + StatusNotFound = errcode.StatusNotFound + StatusAlreadyExists = errcode.StatusAlreadyExists + StatusPermissionDenied = errcode.StatusPermissionDenied + StatusResourceExhausted = errcode.StatusResourceExhausted + StatusFailedPrecondition = errcode.StatusFailedPrecondition + StatusAborted = errcode.StatusAborted + StatusOutOfRange = errcode.StatusOutOfRange + StatusUnimplemented = errcode.StatusUnimplemented + StatusInternalServerError = errcode.StatusInternalServerError + StatusServiceUnavailable = errcode.StatusServiceUnavailable + StatusDataLoss = errcode.StatusDataLoss + StatusUnauthorized = errcode.StatusUnauthorized + + StatusTimeout = errcode.StatusTimeout + StatusTooManyRequests = errcode.StatusTooManyRequests + StatusForbidden = errcode.StatusForbidden + StatusLimitExceed = errcode.StatusLimitExceed + StatusMethodNotAllowed = errcode.StatusMethodNotAllowed + StatusAccessDenied = errcode.StatusAccessDenied + StatusConflict = errcode.StatusConflict +) + +// Any kev-value +func Any(key string, val interface{}) errcode.Detail { + return errcode.Any(key, val) +} + +// StatusSkipResponse is only use for grpc-gateway +var StatusSkipResponse = errcode.SkipResponse + +// GetStatusCode get status code from error returned by RPC invoke +var GetStatusCode = errcode.GetStatusCode diff --git 
a/6_micro-cluster/example-2-mono-repo/product/internal/server/grpc.go b/6_micro-cluster/example-2-mono-repo/product/internal/server/grpc.go new file mode 100644 index 0000000..912082f --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/product/internal/server/grpc.go @@ -0,0 +1,334 @@ +// Package server is a package that holds the http or grpc service. +package server + +import ( + "context" + "fmt" + "net" + "net/http" + "time" + + grpc_middleware "github.com/grpc-ecosystem/go-grpc-middleware" + "google.golang.org/grpc" + "google.golang.org/grpc/codes" + "google.golang.org/grpc/status" + + "github.com/zhufuyi/sponge/pkg/app" + "github.com/zhufuyi/sponge/pkg/errcode" + "github.com/zhufuyi/sponge/pkg/grpc/gtls" + "github.com/zhufuyi/sponge/pkg/grpc/interceptor" + "github.com/zhufuyi/sponge/pkg/grpc/metrics" + "github.com/zhufuyi/sponge/pkg/logger" + "github.com/zhufuyi/sponge/pkg/prof" + "github.com/zhufuyi/sponge/pkg/servicerd/registry" + + "eshop/product/internal/config" + "eshop/product/internal/ecode" + "eshop/product/internal/service" +) + +var _ app.IServer = (*grpcServer)(nil) + +var ( + defaultTokenAppID = "grpc" + defaultTokenAppKey = "mko09ijn" +) + +type grpcServer struct { + addr string + server *grpc.Server + listen net.Listener + + mux *http.ServeMux + httpServer *http.Server + registerMetricsMuxAndMethodFunc func() error + + iRegistry registry.Registry + instance *registry.ServiceInstance +} + +// Start grpc service +func (s *grpcServer) Start() error { + // registration Services + if s.iRegistry != nil { + ctx, _ := context.WithTimeout(context.Background(), 5*time.Second) //nolint + if err := s.iRegistry.Register(ctx, s.instance); err != nil { + return err + } + } + + if s.registerMetricsMuxAndMethodFunc != nil { + if err := s.registerMetricsMuxAndMethodFunc(); err != nil { + return err + } + } + + // if either pprof or metrics is enabled, the http service will be started + if s.mux != nil { + addr := fmt.Sprintf(":%d", config.Get().Grpc.HTTPPort) + s.httpServer = &http.Server{ + Addr: addr, + Handler: s.mux, + } + go func() { + fmt.Printf("http address of pprof and metrics %s\n", addr) + if err := s.httpServer.ListenAndServe(); err != nil && err != http.ErrServerClosed { + panic("listen and serve error: " + err.Error()) + } + }() + } + + if err := s.server.Serve(s.listen); err != nil { // block + return err + } + + return nil +} + +// Stop grpc service +func (s *grpcServer) Stop() error { + if s.iRegistry != nil { + ctx, cancel := context.WithTimeout(context.Background(), 2*time.Second) + go func() { + _ = s.iRegistry.Deregister(ctx, s.instance) + cancel() + }() + <-ctx.Done() + } + + s.server.GracefulStop() + + if s.httpServer != nil { + ctx, _ := context.WithTimeout(context.Background(), 3*time.Second) //nolint + if err := s.httpServer.Shutdown(ctx); err != nil { + return err + } + } + + return nil +} + +// String comment +func (s *grpcServer) String() string { + return "grpc service address " + s.addr +} + +// secure option +func (s *grpcServer) secureServerOption() grpc.ServerOption { + switch config.Get().Grpc.ServerSecure.Type { + case "one-way": // server side certification + credentials, err := gtls.GetServerTLSCredentials( + config.Get().Grpc.ServerSecure.CertFile, + config.Get().Grpc.ServerSecure.KeyFile, + ) + if err != nil { + panic(err) + } + logger.Info("grpc security type: sever-side certification") + return grpc.Creds(credentials) + + case "two-way": // both client and server side certification + credentials, err := gtls.GetServerTLSCredentialsByCA( + 
config.Get().Grpc.ServerSecure.CaFile, + config.Get().Grpc.ServerSecure.CertFile, + config.Get().Grpc.ServerSecure.KeyFile, + ) + if err != nil { + panic(err) + } + logger.Info("grpc security type: both client-side and server-side certification") + return grpc.Creds(credentials) + } + + logger.Info("grpc security type: insecure") + return nil +} + +// setting up unary server interceptors +func (s *grpcServer) unaryServerOptions() grpc.ServerOption { + unaryServerInterceptors := []grpc.UnaryServerInterceptor{ + interceptor.UnaryServerRecovery(), + interceptor.UnaryServerRequestID(), + } + + // logger interceptor, to print simple messages, replace interceptor.UnaryServerLog with interceptor.UnaryServerSimpleLog + unaryServerInterceptors = append(unaryServerInterceptors, interceptor.UnaryServerLog( + logger.Get(), + interceptor.WithReplaceGRPCLogger(), + )) + + // token interceptor + if config.Get().Grpc.EnableToken { + checkToken := func(appID string, appKey string) error { + // todo the defaultTokenAppID and defaultTokenAppKey are usually retrieved from the cache or database + if appID != defaultTokenAppID || appKey != defaultTokenAppKey { + return status.Errorf(codes.Unauthenticated, "app id or app key checksum failure") + } + return nil + } + unaryServerInterceptors = append(unaryServerInterceptors, interceptor.UnaryServerToken(checkToken)) + } + + // jwt token interceptor + //unaryServerInterceptors = append(unaryServerInterceptors, interceptor.UnaryServerJwtAuth( + // // choose a verification method as needed + //interceptor.WithStandardVerify(standardVerifyFn), // standard verify (default), you can set standardVerifyFn to nil if you don't need it + //interceptor.WithCustomVerify(customVerifyFn), // custom verify + // // specify the grpc API to ignore token verification(full path) + //interceptor.WithAuthIgnoreMethods("/api.user.v1.User/Register", "/api.user.v1.User/Login"), + //)) + + // metrics interceptor + if config.Get().App.EnableMetrics { + unaryServerInterceptors = append(unaryServerInterceptors, interceptor.UnaryServerMetrics()) + s.registerMetricsMuxAndMethodFunc = s.registerMetricsMuxAndMethod() + } + + // limit interceptor + if config.Get().App.EnableLimit { + unaryServerInterceptors = append(unaryServerInterceptors, interceptor.UnaryServerRateLimit()) + } + + // circuit breaker interceptor + if config.Get().App.EnableCircuitBreaker { + unaryServerInterceptors = append(unaryServerInterceptors, interceptor.UnaryServerCircuitBreaker( + // set rpc code for circuit breaker, default already includes codes.Internal and codes.Unavailable + interceptor.WithValidCode(ecode.StatusInternalServerError.Code()), + interceptor.WithValidCode(ecode.StatusServiceUnavailable.Code()), + )) + } + + // trace interceptor + if config.Get().App.EnableTrace { + unaryServerInterceptors = append(unaryServerInterceptors, interceptor.UnaryServerTracing()) + } + + return grpc_middleware.WithUnaryServerChain(unaryServerInterceptors...) 
+} + +// setting up stream server interceptors +func (s *grpcServer) streamServerOptions() grpc.ServerOption { + streamServerInterceptors := []grpc.StreamServerInterceptor{ + interceptor.StreamServerRecovery(), + //interceptor.StreamServerRequestID(), + } + + // logger interceptor, to print simple messages, replace interceptor.StreamServerLog with interceptor.StreamServerSimpleLog + streamServerInterceptors = append(streamServerInterceptors, interceptor.StreamServerLog( + logger.Get(), + interceptor.WithReplaceGRPCLogger(), + )) + + // token interceptor + if config.Get().Grpc.EnableToken { + checkToken := func(appID string, appKey string) error { + // todo the defaultTokenAppID and defaultTokenAppKey are usually retrieved from the cache or database + if appID != defaultTokenAppID || appKey != defaultTokenAppKey { + return status.Errorf(codes.Unauthenticated, "app id or app key checksum failure") + } + return nil + } + streamServerInterceptors = append(streamServerInterceptors, interceptor.StreamServerToken(checkToken)) + } + + // jwt token interceptor + //streamServerInterceptors = append(streamServerInterceptors, interceptor.StreamServerJwtAuth( + // // choose a verification method as needed + //interceptor.WithStandardVerify(standardVerifyFn), // standard verify (default), you can set standardVerifyFn to nil if you don't need it + //interceptor.WithCustomVerify(customVerifyFn), // custom verify + // // specify the grpc API to ignore token verification(full path) + // interceptor.WithAuthIgnoreMethods("/api.user.v1.User/Register", "/api.user.v1.User/Login"), + //)) + + // metrics interceptor + if config.Get().App.EnableMetrics { + streamServerInterceptors = append(streamServerInterceptors, interceptor.StreamServerMetrics()) + } + + // limit interceptor + if config.Get().App.EnableLimit { + streamServerInterceptors = append(streamServerInterceptors, interceptor.StreamServerRateLimit()) + } + + // circuit breaker interceptor + if config.Get().App.EnableCircuitBreaker { + streamServerInterceptors = append(streamServerInterceptors, interceptor.StreamServerCircuitBreaker( + // set rpc code for circuit breaker, default already includes codes.Internal and codes.Unavailable + interceptor.WithValidCode(ecode.StatusInternalServerError.Code()), + interceptor.WithValidCode(ecode.StatusServiceUnavailable.Code()), + )) + } + + // trace interceptor + if config.Get().App.EnableTrace { + streamServerInterceptors = append(streamServerInterceptors, interceptor.StreamServerTracing()) + } + + return grpc_middleware.WithStreamServerChain(streamServerInterceptors...) 
+} + +func (s *grpcServer) getOptions() []grpc.ServerOption { + var options []grpc.ServerOption + + secureOption := s.secureServerOption() + if secureOption != nil { + options = append(options, secureOption) + } + + options = append(options, s.unaryServerOptions()) + options = append(options, s.streamServerOptions()) + + return options +} + +func (s *grpcServer) registerMetricsMuxAndMethod() func() error { + return func() error { + if s.mux == nil { + s.mux = http.NewServeMux() + } + metrics.Register(s.mux, s.server) + return nil + } +} + +func (s *grpcServer) registerProfMux() { + if s.mux == nil { + s.mux = http.NewServeMux() + } + prof.Register(s.mux, prof.WithIOWaitTime()) +} + +func (s *grpcServer) addHTTPRouter() { + if s.mux == nil { + s.mux = http.NewServeMux() + } + s.mux.HandleFunc("/codes", errcode.ListGRPCErrCodes) // error codes router + + cfgStr := config.Show() + s.mux.HandleFunc("/config", errcode.ShowConfig([]byte(cfgStr))) // config router +} + +// NewGRPCServer creates a new grpc server +func NewGRPCServer(addr string, opts ...GrpcOption) app.IServer { + var err error + o := defaultGrpcOptions() + o.apply(opts...) + s := &grpcServer{ + addr: addr, + iRegistry: o.iRegistry, + instance: o.instance, + } + s.addHTTPRouter() + if config.Get().App.EnableHTTPProfile { + s.registerProfMux() + } + + s.listen, err = net.Listen("tcp", addr) + if err != nil { + panic(err) + } + + s.server = grpc.NewServer(s.getOptions()...) + service.RegisterAllService(s.server) // register for all services + return s +} diff --git a/6_micro-cluster/example-2-mono-repo/product/internal/server/grpc_option.go b/6_micro-cluster/example-2-mono-repo/product/internal/server/grpc_option.go new file mode 100644 index 0000000..02c37d7 --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/product/internal/server/grpc_option.go @@ -0,0 +1,34 @@ +package server + +import ( + "github.com/zhufuyi/sponge/pkg/servicerd/registry" +) + +// GrpcOption grpc settings +type GrpcOption func(*grpcOptions) + +type grpcOptions struct { + instance *registry.ServiceInstance + iRegistry registry.Registry +} + +func defaultGrpcOptions() *grpcOptions { + return &grpcOptions{ + instance: nil, + iRegistry: nil, + } +} + +func (o *grpcOptions) apply(opts ...GrpcOption) { + for _, opt := range opts { + opt(o) + } +} + +// WithGrpcRegistry registration services +func WithGrpcRegistry(iRegistry registry.Registry, instance *registry.ServiceInstance) GrpcOption { + return func(o *grpcOptions) { + o.iRegistry = iRegistry + o.instance = instance + } +} diff --git a/6_micro-cluster/example-2-mono-repo/product/internal/server/grpc_test.go b/6_micro-cluster/example-2-mono-repo/product/internal/server/grpc_test.go new file mode 100644 index 0000000..8b15552 --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/product/internal/server/grpc_test.go @@ -0,0 +1,130 @@ +package server + +import ( + "context" + "fmt" + "net" + "testing" + "time" + + "github.com/stretchr/testify/assert" + "google.golang.org/grpc" + + "github.com/zhufuyi/sponge/pkg/grpc/gtls/certfile" + "github.com/zhufuyi/sponge/pkg/servicerd/registry" + "github.com/zhufuyi/sponge/pkg/utils" + + "eshop/product/configs" + "eshop/product/internal/config" +) + +func TestGRPCServer(t *testing.T) { + err := config.Init(configs.Path("product.yml")) + if err != nil { + t.Fatal(err) + } + + config.Get().App.EnableMetrics = true + config.Get().App.EnableTrace = true + config.Get().App.EnableHTTPProfile = true + config.Get().App.EnableLimit = true + config.Get().App.EnableCircuitBreaker = 
true + config.Get().Grpc.EnableToken = true + + port, _ := utils.GetAvailablePort() + addr := fmt.Sprintf(":%d", port) + instance := registry.NewServiceInstance("foo", "bar", []string{"grpc://127.0.0.1:8282"}) + + utils.SafeRunWithTimeout(time.Second*2, func(cancel context.CancelFunc) { + server := NewGRPCServer(addr, + WithGrpcRegistry(nil, instance), + ) + assert.NotNil(t, server) + cancel() + }) +} + +func TestGRPCServerMock(t *testing.T) { + err := config.Init(configs.Path("product.yml")) + if err != nil { + t.Fatal(err) + } + config.Get().App.EnableMetrics = true + config.Get().App.EnableTrace = true + config.Get().App.EnableHTTPProfile = true + config.Get().App.EnableLimit = true + config.Get().App.EnableCircuitBreaker = true + config.Get().Grpc.EnableToken = true + + port, _ := utils.GetAvailablePort() + addr := fmt.Sprintf(":%d", port) + instance := registry.NewServiceInstance("foo", "bar", []string{"grpc://127.0.0.1:8282"}) + + o := defaultGrpcOptions() + o.apply(WithGrpcRegistry(&gRegistry{}, instance)) + + s := &grpcServer{ + addr: addr, + iRegistry: o.iRegistry, + instance: o.instance, + } + + s.listen, err = net.Listen("tcp", addr) + if err != nil { + t.Fatal(err) + } + s.server = grpc.NewServer(s.unaryServerOptions(), s.streamServerOptions()) + + go func() { + time.Sleep(time.Second * 3) + s.server.Stop() + }() + + str := s.String() + assert.NotEmpty(t, str) + err = s.Start() + assert.NoError(t, err) + err = s.Stop() + assert.NoError(t, err) +} + +type gRegistry struct{} + +func (g gRegistry) Register(ctx context.Context, service *registry.ServiceInstance) error { + return nil +} + +func (g gRegistry) Deregister(ctx context.Context, service *registry.ServiceInstance) error { + return nil +} + +func Test_grpcServer_getOptions(t *testing.T) { + err := config.Init(configs.Path("product.yml")) + if err != nil { + t.Fatal(err) + } + s := &grpcServer{} + + defer func() { + recover() + }() + + config.Get().Grpc.ServerSecure.Type = "" + opt := s.secureServerOption() + assert.Equal(t, nil, opt) + + config.Get().Grpc.ServerSecure.Type = "one-way" + config.Get().Grpc.ServerSecure.CertFile = certfile.Path("one-way/server.crt") + config.Get().Grpc.ServerSecure.KeyFile = certfile.Path("one-way/server.key") + opt = s.secureServerOption() + assert.NotNil(t, opt) + + config.Get().Grpc.ServerSecure.Type = "two-way" + config.Get().Grpc.ServerSecure.CaFile = certfile.Path("two-way/ca.pem") + config.Get().Grpc.ServerSecure.CertFile = certfile.Path("two-way/server/server.pem") + config.Get().Grpc.ServerSecure.KeyFile = certfile.Path("two-way/server/server.key") + opt = s.secureServerOption() + assert.NotNil(t, opt) + + fmt.Println(certfile.Path("one-way/server.crt"), certfile.Path("one-way/server.key")) +} diff --git a/6_micro-cluster/example-2-mono-repo/product/internal/service/product.go b/6_micro-cluster/example-2-mono-repo/product/internal/service/product.go new file mode 100644 index 0000000..c1bff94 --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/product/internal/service/product.go @@ -0,0 +1,62 @@ +// Code generated by https://github.com/zhufuyi/sponge + +package service + +import ( + "context" + + "google.golang.org/grpc" + + "github.com/zhufuyi/sponge/pkg/grpc/interceptor" + "github.com/zhufuyi/sponge/pkg/logger" + + productV1 "eshop/api/product/v1" + "eshop/product/internal/ecode" +) + +func init() { + registerFns = append(registerFns, func(server *grpc.Server) { + productV1.RegisterProductServer(server, NewProductServer()) + }) +} + +var _ productV1.ProductServer = 
(*product)(nil) + +type product struct { + productV1.UnimplementedProductServer + + // example: + // iDao dao.ProductDao +} + +// NewProductServer create a server +func NewProductServer() productV1.ProductServer { + return &product{ + // example: + // iDao: dao.NewProductDao( + // model.GetDB(), + // cache.NewProductCache(model.GetCacheType()), + // ), + } +} + +// GetByID get product by id +func (s *product) GetByID(ctx context.Context, req *productV1.GetByIDRequest) (*productV1.GetByIDReply, error) { + err := req.Validate() + if err != nil { + logger.Warn("req.Validate error", logger.Err(err), logger.Any("req", req), interceptor.ServerCtxRequestIDField(ctx)) + return nil, ecode.StatusInvalidParams.Err() + } + + // fill in the business logic code here + + return &productV1.GetByIDReply{ + ProductDetail: &productV1.ProductDetail{ + Id: 1, + Name: "Data cable", + Price: 10, + Description: "Android type C data cable", + }, + InventoryID: 1, + }, nil +} diff --git a/6_micro-cluster/example-2-mono-repo/product/internal/service/product_client_test.go b/6_micro-cluster/example-2-mono-repo/product/internal/service/product_client_test.go new file mode 100644 index 0000000..16d6c5d --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/product/internal/service/product_client_test.go @@ -0,0 +1,112 @@ +// Code generated by https://github.com/zhufuyi/sponge +// Test_service_product_methods is used to test the product api +// Test_service_product_benchmark is used to performance test the product api + +package service + +import ( + "context" + "encoding/json" + "fmt" + "testing" + "time" + + "github.com/zhufuyi/sponge/pkg/grpc/benchmark" + + productV1 "eshop/api/product/v1" + "eshop/product/configs" + "eshop/product/internal/config" +) + +// Test service product api via grpc client +func Test_service_product_methods(t *testing.T) { + conn := getRPCClientConnForTest() + cli := productV1.NewProductClient(conn) + ctx, _ := context.WithTimeout(context.Background(), time.Second*30) + + tests := []struct { + name string + fn func() (interface{}, error) + wantErr bool + }{ + + { + name: "GetByID", + fn: func() (interface{}, error) { + // todo type in the parameters before testing + req := &productV1.GetByIDRequest{ + Id: 0, + } + + return cli.GetByID(ctx, req) + }, + wantErr: false, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got, err := tt.fn() + if (err != nil) != tt.wantErr { + t.Errorf("test '%s' error = %v, wantErr %v", tt.name, err, tt.wantErr) + return + } + data, _ := json.MarshalIndent(got, "", " ") + fmt.Println(string(data)) + }) + } +} + +// performance test service product api, copy the report to +// the browser to view when the pressure test is finished. +func Test_service_product_benchmark(t *testing.T) { + err := config.Init(configs.Path("product.yml")) + if err != nil { + panic(err) + } + + grpcClientCfg := getGRPCClientCfg() + host := fmt.Sprintf("%s:%d", grpcClientCfg.Host, grpcClientCfg.Port) + protoFile := configs.Path("../api/product/v1/product.proto") + // If third-party dependencies are missing during the press test, + // copy them to the project's third_party directory. 
+ dependentProtoFilePath := []string{ + configs.Path("../third_party"), // third_party directory + configs.Path(".."), // Previous level of third_party + } + + tests := []struct { + name string + fn func() error + wantErr bool + }{ + + { + name: "GetByID", + fn: func() error { + // todo type in the parameters before benchmark testing + message := &productV1.GetByIDRequest{ + Id: 0, + } + total := 1000 // total number of requests + + b, err := benchmark.New(host, protoFile, "GetByID", message, dependentProtoFilePath, total) + if err != nil { + return err + } + return b.Run() + }, + wantErr: false, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + err := tt.fn() + if (err != nil) != tt.wantErr { + t.Errorf("test '%s' error = %v, wantErr %v", tt.name, err, tt.wantErr) + return + } + }) + } +} diff --git a/6_micro-cluster/example-2-mono-repo/product/internal/service/service.go b/6_micro-cluster/example-2-mono-repo/product/internal/service/service.go new file mode 100644 index 0000000..83c3ce5 --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/product/internal/service/service.go @@ -0,0 +1,22 @@ +// Package service A grpc server-side or client-side package that handles business logic. +package service + +import ( + "google.golang.org/grpc" + "google.golang.org/grpc/health" + healthPB "google.golang.org/grpc/health/grpc_health_v1" +) + +var ( + // registerFns collection of registration methods + registerFns []func(server *grpc.Server) +) + +// RegisterAllService register all services to the service +func RegisterAllService(server *grpc.Server) { + healthPB.RegisterHealthServer(server, health.NewServer()) // Register for Health Screening + + for _, fn := range registerFns { + fn(server) + } +} diff --git a/6_micro-cluster/example-2-mono-repo/product/internal/service/service_test.go b/6_micro-cluster/example-2-mono-repo/product/internal/service/service_test.go new file mode 100644 index 0000000..06fe1bf --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/product/internal/service/service_test.go @@ -0,0 +1,173 @@ +package service + +import ( + "context" + "io" + "strconv" + "testing" + "time" + + "google.golang.org/grpc" + + "github.com/zhufuyi/sponge/pkg/consulcli" + "github.com/zhufuyi/sponge/pkg/etcdcli" + "github.com/zhufuyi/sponge/pkg/grpc/grpccli" + "github.com/zhufuyi/sponge/pkg/logger" + "github.com/zhufuyi/sponge/pkg/nacoscli" + "github.com/zhufuyi/sponge/pkg/servicerd/registry" + "github.com/zhufuyi/sponge/pkg/servicerd/registry/consul" + "github.com/zhufuyi/sponge/pkg/servicerd/registry/etcd" + "github.com/zhufuyi/sponge/pkg/servicerd/registry/nacos" + "github.com/zhufuyi/sponge/pkg/utils" + + "eshop/product/configs" + "eshop/product/internal/config" +) + +var ioEOF = io.EOF + +func TestRegisterAllService(t *testing.T) { + utils.SafeRunWithTimeout(time.Second*2, func(cancel context.CancelFunc) { + server := grpc.NewServer() + RegisterAllService(server) + cancel() + }) +} + +// The default is to connect to the local grpc server, if you want to connect to a remote grpc server, +// pass in the parameter grpcClient. +func getRPCClientConnForTest(grpcClient ...config.GrpcClient) *grpc.ClientConn { + err := config.Init(configs.Path("product.yml")) + if err != nil { + panic(err) + } + grpcClientCfg := getGRPCClientCfg(grpcClient...) 
+ + var cliOptions []grpccli.Option + + if grpcClientCfg.Timeout > 0 { + cliOptions = append(cliOptions, grpccli.WithTimeout(time.Second*time.Duration(grpcClientCfg.Timeout))) + } + + // load balance + if grpcClientCfg.EnableLoadBalance { + cliOptions = append(cliOptions, grpccli.WithEnableLoadBalance()) + } + + // secure + cliOptions = append(cliOptions, grpccli.WithSecure( + grpcClientCfg.ClientSecure.Type, + grpcClientCfg.ClientSecure.ServerName, + grpcClientCfg.ClientSecure.CaFile, + grpcClientCfg.ClientSecure.CertFile, + grpcClientCfg.ClientSecure.KeyFile, + )) + + // token + cliOptions = append(cliOptions, grpccli.WithToken( + grpcClientCfg.ClientToken.Enable, + grpcClientCfg.ClientToken.AppID, + grpcClientCfg.ClientToken.AppKey, + )) + + cliOptions = append(cliOptions, + grpccli.WithEnableRequestID(), + grpccli.WithEnableLog(logger.Get()), + ) + + var ( + endpoint string + isUseDiscover bool + iDiscovery registry.Discovery + ) + + switch grpcClientCfg.RegistryDiscoveryType { + case "consul": + endpoint = "discovery:///" + grpcClientCfg.Name // Connecting to grpc services by service name + cli, err := consulcli.Init(config.Get().Consul.Addr, consulcli.WithWaitTime(time.Second*2)) + if err != nil { + panic(err) + } + iDiscovery = consul.New(cli) + isUseDiscover = true + + case "etcd": + endpoint = "discovery:///" + grpcClientCfg.Name // Connecting to grpc services by service name + cli, err := etcdcli.Init(config.Get().Etcd.Addrs, etcdcli.WithDialTimeout(time.Second*2)) + if err != nil { + panic(err) + } + iDiscovery = etcd.New(cli) + isUseDiscover = true + case "nacos": + // example: endpoint = "discovery:///serverName.scheme" + endpoint = "discovery:///" + grpcClientCfg.Name + ".grpc" + cli, err := nacoscli.NewNamingClient( + config.Get().NacosRd.IPAddr, + config.Get().NacosRd.Port, + config.Get().NacosRd.NamespaceID) + if err != nil { + panic(err) + } + iDiscovery = nacos.New(cli) + isUseDiscover = true + + default: + endpoint = grpcClientCfg.Host + ":" + strconv.Itoa(grpcClientCfg.Port) + iDiscovery = nil + isUseDiscover = false + } + + if iDiscovery != nil { + cliOptions = append(cliOptions, grpccli.WithDiscovery(iDiscovery)) + } + + msg := "dialing grpc server" + if isUseDiscover { + msg += " with discovery from " + grpcClientCfg.RegistryDiscoveryType + } + logger.Info(msg, logger.String("name", grpcClientCfg.Name), logger.String("endpoint", endpoint)) + + conn, err := grpccli.Dial(context.Background(), endpoint, cliOptions...) 
+ if err != nil { + panic(err) + } + + return conn +} + +func getGRPCClientCfg(grpcClient ...config.GrpcClient) config.GrpcClient { + var grpcClientCfg config.GrpcClient + + // custom config + if len(grpcClient) > 0 { + // parameter config, highest priority + grpcClientCfg = grpcClient[0] + } else { + // grpcClient config in the yaml file, second priority + if len(config.Get().GrpcClient) > 0 { + for _, v := range config.Get().GrpcClient { + if v.Name == config.Get().App.Name { // match the current app name + grpcClientCfg = v + break + } + } + } + } + + // if there is no custom configuration, use the default configuration + if grpcClientCfg.Name == "" { + grpcClientCfg = config.GrpcClient{ + Host: config.Get().App.Host, + Port: config.Get().Grpc.Port, + // If RegistryDiscoveryType is not empty, service discovery is used, and Host and Port values are invalid + RegistryDiscoveryType: config.Get().App.RegistryDiscoveryType, // supports consul, etcd and nacos + Name: config.Get().App.Name, + } + if grpcClientCfg.RegistryDiscoveryType != "" { + grpcClientCfg.EnableLoadBalance = true + } + } + + return grpcClientCfg +} diff --git a/6_micro-cluster/example-2-mono-repo/product/scripts/binary-package.sh b/6_micro-cluster/example-2-mono-repo/product/scripts/binary-package.sh new file mode 100644 index 0000000..80b757a --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/product/scripts/binary-package.sh @@ -0,0 +1,24 @@ +#!/bin/bash + +serviceName="product" + +mkdir -p ${serviceName}-binary/configs + +cp -f deployments/binary/run.sh ${serviceName}-binary +chmod +x ${serviceName}-binary/run.sh + +cp -f deployments/binary/deploy.sh ${serviceName}-binary +chmod +x ${serviceName}-binary/deploy.sh + +cp -f cmd/${serviceName}/${serviceName} ${serviceName}-binary +cp -f configs/${serviceName}.yml ${serviceName}-binary/configs +cp -f configs/${serviceName}_cc.yml ${serviceName}-binary/configs + +# compressing binary file +#upx -9 ${serviceName} + +tar zcvf ${serviceName}-binary.tar.gz ${serviceName}-binary +rm -rf ${serviceName}-binary + +echo "" +echo "package binary successfully, output file = ${serviceName}-binary.tar.gz" diff --git a/6_micro-cluster/example-2-mono-repo/product/scripts/build/Dockerfile b/6_micro-cluster/example-2-mono-repo/product/scripts/build/Dockerfile new file mode 100644 index 0000000..745b292 --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/product/scripts/build/Dockerfile @@ -0,0 +1,26 @@ +FROM alpine:latest +MAINTAINER zhufuyi "g.zhufuyi@gmail.com" + +# set the time zone to Shanghai +RUN apk add tzdata \ + && cp /usr/share/zoneinfo/Asia/Shanghai /etc/localtime \ + && echo "Asia/Shanghai" > /etc/timezone \ + && apk del tzdata + +# add grpc_health_probe for health check of grpc services +COPY grpc_health_probe /bin/grpc_health_probe +RUN chmod +x /bin/grpc_health_probe + +COPY configs/ /app/configs/ +COPY product /app/product +RUN chmod +x /app/product + +# grpc and http port +EXPOSE 8282 8283 + + +WORKDIR /app + +CMD ["./product", "-c", "configs/product.yml"] +# if you use the Configuration Center, product.yml is changed to the Configuration Center configuration. 
+#CMD ["./product", "-c", "configs/product.yml", "-enable-cc"] diff --git a/6_micro-cluster/example-2-mono-repo/product/scripts/build/Dockerfile_build b/6_micro-cluster/example-2-mono-repo/product/scripts/build/Dockerfile_build new file mode 100644 index 0000000..678c75f --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/product/scripts/build/Dockerfile_build @@ -0,0 +1,47 @@ +# Need to package the code first `tar zcf product.tar.gz $(ls)` and move it to the same directory as Dokerfile + +# Compile the go code, you can specify the golang version +FROM golang:1.21-alpine as build +COPY . /go/src/product +WORKDIR /go/src/product +RUN tar zxf product.tar.gz +RUN go env -w GOPROXY=https://goproxy.cn,direct +RUN go mod download +RUN CGO_ENABLED=0 GOOS=linux GOARCH=amd64 go build -o /product cmd/product/main.go + +# install grpc-health-probe, for health check of grpc service +RUN go install github.com/grpc-ecosystem/grpc-health-probe@v0.4.12 +RUN cd $GOPATH/pkg/mod/github.com/grpc-ecosystem/grpc-health-probe@v0.4.12 \ + && go mod download \ + && CGO_ENABLED=0 GOOS=linux GOARCH=amd64 go build -ldflags "all=-s -w" -o /grpc_health_probe + +# compressing binary files +#cd / +#upx -9 product +#upx -9 grpc_health_probe + + +# building images with binary +FROM alpine:latest +MAINTAINER zhufuyi "g.zhufuyi@gmail.com" + +# set the time zone to Shanghai +RUN apk add tzdata \ + && cp /usr/share/zoneinfo/Asia/Shanghai /etc/localtime \ + && echo "Asia/Shanghai" > /etc/timezone \ + && apk del tzdata + +# add grpc_health_probe for health check of grpc services +COPY --from=build /grpc_health_probe /bin/grpc_health_probe +COPY --from=build /product /app/product +COPY --from=build /go/src/product/configs/product.yml /app/configs/product.yml + +# grpc and http port +EXPOSE 8282 8283 + + +WORKDIR /app + +CMD ["./product", "-c", "configs/product.yml"] +# if you use the Configuration Center, product.yml is changed to the Configuration Center configuration. +#CMD ["./product", "-c", "configs/product.yml", "-enable-cc"] diff --git a/6_micro-cluster/example-2-mono-repo/product/scripts/build/Dockerfile_test b/6_micro-cluster/example-2-mono-repo/product/scripts/build/Dockerfile_test new file mode 100644 index 0000000..03c9cb8 --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/product/scripts/build/Dockerfile_test @@ -0,0 +1,16 @@ +# Need to package the code first `tar zcf product.tar.gz $(ls)` and move it to the same directory as Dokerfile +# rpc server source code, used to test rpc methods +FROM golang:1.21-alpine +MAINTAINER zhufuyi "g.zhufuyi@gmail.com" + +# go test dependency packages +RUN apk add bash alpine-sdk build-base gcc + +COPY . /go/src/product +WORKDIR /go/src/product +RUN tar zxf product.tar.gz +RUN go env -w GOPROXY=https://goproxy.cn,direct +RUN go mod download +RUN rm -f product.tar.gz + +CMD ["sleep","86400"] diff --git a/6_micro-cluster/example-2-mono-repo/product/scripts/build/README.md b/6_micro-cluster/example-2-mono-repo/product/scripts/build/README.md new file mode 100644 index 0000000..ba0f3e8 --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/product/scripts/build/README.md @@ -0,0 +1,4 @@ + +- `Dockerfile`: build the image by directly copying the compiled binaries, fast build speed. +- `Dockerfile_build`: two-stage build of the image, slower build speed, you can specify the golang version. +- `Dockerfile_test`: container for testing rpc services. 
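+
+As a concrete illustration of the two-stage variant, the sketch below follows the note at the top of Dockerfile_build (package the source as product.tar.gz and place it next to that Dockerfile); the directory you run it from and the image tag are assumptions, not part of the generated scripts:
+
+```bash
+# package the source as described at the top of Dockerfile_build and move it next to that Dockerfile
+tar zcf product.tar.gz $(ls)
+mv product.tar.gz scripts/build/
+
+# build the image with the two-stage Dockerfile; the tag is illustrative
+docker build -f scripts/build/Dockerfile_build -t eshop/product:latest scripts/build/
+```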
diff --git a/6_micro-cluster/example-2-mono-repo/product/scripts/deploy-binary.sh b/6_micro-cluster/example-2-mono-repo/product/scripts/deploy-binary.sh new file mode 100644 index 0000000..16cf9bb --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/product/scripts/deploy-binary.sh @@ -0,0 +1,35 @@ +#!/usr/bin/expect + +set serviceName "product" + +# parameters +set username [lindex $argv 0] +set password [lindex $argv 1] +set hostname [lindex $argv 2] + +set timeout 30 + +spawn scp -r ./${serviceName}-binary.tar.gz ${username}@${hostname}:/tmp/ +#expect "*yes/no*" +#send "yes\r" +expect "*password:*" +send "${password}\r" +expect eof + +spawn ssh ${username}@${hostname} +#expect "*yes/no*" +#send "yes\r" +expect "*password:*" +send "${password}\r" + +# execute a command or script +expect "*${username}@*" +send "cd /tmp && tar zxvf ${serviceName}-binary.tar.gz\r" +expect "*${username}@*" +send "bash /tmp/${serviceName}-binary/deploy.sh\r" + +# logging out of a session +expect "*${username}@*" +send "exit\r" + +expect eof diff --git a/6_micro-cluster/example-2-mono-repo/product/scripts/deploy-docker.sh b/6_micro-cluster/example-2-mono-repo/product/scripts/deploy-docker.sh new file mode 100644 index 0000000..81eaa5a --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/product/scripts/deploy-docker.sh @@ -0,0 +1,32 @@ +#!/bin/bash + +dockerComposeFilePath="deployments/docker-compose" + +function checkResult() { + result=$1 + if [ ${result} -ne 0 ]; then + exit ${result} + fi +} + +mkdir -p ${dockerComposeFilePath}/configs +if [ ! -f "${dockerComposeFilePath}/configs/product.yml" ];then + cp configs/product.yml ${dockerComposeFilePath}/configs +fi + +# shellcheck disable=SC2164 +cd ${dockerComposeFilePath} + +docker-compose down +checkResult $? + +docker-compose up -d +checkResult $? + +colorCyan='\033[1;36m' +highBright='\033[1m' +markEnd='\033[0m' + +echo "" +echo -e "run service successfully, if you want to stop the service, go into the ${highBright}${dockerComposeFilePath}${markEnd} directory and execute the command ${colorCyan}docker-compose down${markEnd}." +echo "" diff --git a/6_micro-cluster/example-2-mono-repo/product/scripts/deploy-k8s.sh b/6_micro-cluster/example-2-mono-repo/product/scripts/deploy-k8s.sh new file mode 100644 index 0000000..3704efa --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/product/scripts/deploy-k8s.sh @@ -0,0 +1,31 @@ +#!/bin/bash + +SERVER_NAME="product" +DEPLOY_FILE="deployments/kubernetes/${SERVER_NAME}-deployment.yml" + +function checkResult() { + result=$1 + if [ ${result} -ne 0 ]; then + exit ${result} + fi +} + +# Determining whether a file exists +if [ ! -f "${DEPLOY_FILE}" ];then + echo "Deployment file file ${DEPLOY_FILE} does not exist" + checkResult 1 +fi + +# Check if you are authorised to operate k8s +echo "kubectl version" +kubectl version +checkResult $? + +echo "kubectl delete -f ${DEPLOY_FILE} --ignore-not-found" +kubectl delete -f ${DEPLOY_FILE} --ignore-not-found +checkResult $? 
+ +sleep 1 + +echo "kubectl apply -f ${DEPLOY_FILE}" +kubectl apply -f ${DEPLOY_FILE} diff --git a/6_micro-cluster/example-2-mono-repo/product/scripts/image-build-local.sh b/6_micro-cluster/example-2-mono-repo/product/scripts/image-build-local.sh new file mode 100644 index 0000000..fd62f19 --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/product/scripts/image-build-local.sh @@ -0,0 +1,51 @@ +#!/bin/bash + +# build the image for local docker, using the binaries, if you want to reduce the size of the image, +# use upx to compress the binaries before building the image. + +serverName="product" +# image name of the service, prohibit uppercase letters in names. +IMAGE_NAME="eshop/product" +# Dockerfile file directory +DOCKERFILE_PATH="scripts/build" +DOCKERFILE="${DOCKERFILE_PATH}/Dockerfile" + +mv -f cmd/${serverName}/${serverName} ${DOCKERFILE_PATH}/${serverName} + +# install grpc-health-probe, for health check of grpc service +rootDockerFilePath=$(pwd)/${DOCKERFILE_PATH} +go install github.com/grpc-ecosystem/grpc-health-probe@v0.4.12 +cd $GOPATH/pkg/mod/github.com/grpc-ecosystem/grpc-health-probe@v0.4.12 \ + && go mod download \ + && CGO_ENABLED=0 GOOS=linux GOARCH=amd64 go build -ldflags "all=-s -w" -o "${rootDockerFilePath}/grpc_health_probe" +cd - + +# compressing binary file +#cd ${DOCKERFILE_PATH} +#upx -9 ${serverName} +#upx -9 grpc_health_probe +#cd - + +mkdir -p ${DOCKERFILE_PATH}/configs && cp -f configs/${serverName}.yml ${DOCKERFILE_PATH}/configs/ +echo "docker build -f ${DOCKERFILE} -t ${IMAGE_NAME}:latest ${DOCKERFILE_PATH}" +docker build -f ${DOCKERFILE} -t ${IMAGE_NAME}:latest ${DOCKERFILE_PATH} + +if [ -f "${DOCKERFILE_PATH}/grpc_health_probe" ]; then + rm -f ${DOCKERFILE_PATH}/grpc_health_probe +fi + + +if [ -f "${DOCKERFILE_PATH}/${serverName}" ]; then + rm -f ${DOCKERFILE_PATH}/${serverName} +fi + +if [ -d "${DOCKERFILE_PATH}/configs" ]; then + rm -rf ${DOCKERFILE_PATH}/configs +fi + +# delete none image +noneImages=$(docker images | grep "<none>" | awk '{print $3}') +if [ "X${noneImages}" != "X" ]; then + docker rmi ${noneImages} > /dev/null +fi +exit 0 diff --git a/6_micro-cluster/example-2-mono-repo/product/scripts/image-build.sh b/6_micro-cluster/example-2-mono-repo/product/scripts/image-build.sh new file mode 100644 index 0000000..f3004a6 --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/product/scripts/image-build.sh @@ -0,0 +1,71 @@ +#!/bin/bash + +# build the docker image using the binaries, if you want to reduce the size of the image, +# use upx to compress the binaries before building the image. + +serverName="product" +# image name of the service, prohibit uppercase letters in names.
+IMAGE_NAME="eshop/product" +# Dockerfile file directory +DOCKERFILE_PATH="scripts/build" +DOCKERFILE="${DOCKERFILE_PATH}/Dockerfile" + +# image repo address, REPO_HOST="ip or domain", passed in via the first parameter +REPO_HOST=$1 +if [ "X${REPO_HOST}" = "X" ];then + echo "param 'repo host' cannot be empty, example: ./image-build.sh hub.docker.com v1.0.0" + exit 1 +fi +# the version tag, which defaults to latest if empty, is passed in via the second parameter +TAG=$2 +if [ "X${TAG}" = "X" ];then + TAG="latest" +fi +# image name and tag +IMAGE_NAME_TAG="${REPO_HOST}/${IMAGE_NAME}:${TAG}" + +# binary executable files +BIN_FILE="cmd/${serverName}/${serverName}" +# configuration file directory +CONFIG_PATH="configs" + +CGO_ENABLED=0 GOOS=linux GOARCH=amd64 go build -o ${BIN_FILE} cmd/${serverName}/*.go +mv -f ${BIN_FILE} ${DOCKERFILE_PATH} +mkdir -p ${DOCKERFILE_PATH}/${CONFIG_PATH} && cp -f ${CONFIG_PATH}/${serverName}.yml ${DOCKERFILE_PATH}/${CONFIG_PATH} + +# install grpc-health-probe, for health check of grpc service +rootDockerFilePath=$(pwd)/${DOCKERFILE_PATH} +go install github.com/grpc-ecosystem/grpc-health-probe@v0.4.12 +cd $GOPATH/pkg/mod/github.com/grpc-ecosystem/grpc-health-probe@v0.4.12 \ + && go mod download \ + && CGO_ENABLED=0 GOOS=linux GOARCH=amd64 go build -ldflags "all=-s -w" -o "${rootDockerFilePath}/grpc_health_probe" +cd - + +# compressing binary file +#cd ${DOCKERFILE_PATH} +#upx -9 ${serverName} +#upx -9 grpc_health_probe +#cd - + +echo "docker build -f ${DOCKERFILE} -t ${IMAGE_NAME_TAG} ${DOCKERFILE_PATH}" +docker build -f ${DOCKERFILE} -t ${IMAGE_NAME_TAG} ${DOCKERFILE_PATH} + +if [ -f "${DOCKERFILE_PATH}/grpc_health_probe" ]; then + rm -f ${DOCKERFILE_PATH}/grpc_health_probe +fi + + +if [ -f "${DOCKERFILE_PATH}/${serverName}" ]; then + rm -f ${DOCKERFILE_PATH}/${serverName} +fi + +if [ -d "${DOCKERFILE_PATH}/configs" ]; then + rm -rf ${DOCKERFILE_PATH}/configs +fi + +# delete none image +noneImages=$(docker images | grep "" | awk '{print $3}') +if [ "X${noneImages}" != "X" ]; then + docker rmi ${noneImages} > /dev/null +fi +exit 0 diff --git a/6_micro-cluster/example-2-mono-repo/product/scripts/image-build2.sh b/6_micro-cluster/example-2-mono-repo/product/scripts/image-build2.sh new file mode 100644 index 0000000..6350c24 --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/product/scripts/image-build2.sh @@ -0,0 +1,38 @@ +#!/bin/bash + +# two-stage build docker image + +serverName="product" +# image name of the service, prohibit uppercase letters in names. 
+IMAGE_NAME="eshop/product" +# Dockerfile file directory +DOCKERFILE_PATH="scripts/build" +DOCKERFILE="${DOCKERFILE_PATH}/Dockerfile_build" + +# image repo address, REPO_HOST="ip or domain", passed in via the first parameter +REPO_HOST=$1 +if [ "X${REPO_HOST}" = "X" ];then + echo "param 'repo host' cannot be empty, example: ./image-build.sh hub.docker.com v1.0.0" + exit 1 +fi +# the version tag, which defaults to latest if empty, is passed in via the second parameter +TAG=$2 +if [ "X${TAG}" = "X" ];then + TAG="latest" +fi +# image name and tag +IMAGE_NAME_TAG="${REPO_HOST}/${IMAGE_NAME}:${TAG}" + +PROJECT_FILES=$(ls) +tar zcf ${serverName}.tar.gz ${PROJECT_FILES} +mv -f ${serverName}.tar.gz ${DOCKERFILE_PATH} +echo "docker build --force-rm -f ${DOCKERFILE} -t ${IMAGE_NAME_TAG} ${DOCKERFILE_PATH}" +docker build --force-rm -f ${DOCKERFILE} -t ${IMAGE_NAME_TAG} ${DOCKERFILE_PATH} +rm -rf ${DOCKERFILE_PATH}/${serverName}.tar.gz +# delete none image +noneImages=$(docker images | grep "" | awk '{print $3}') +if [ "X${noneImages}" != "X" ]; then + docker rmi ${noneImages} > /dev/null +fi +exit 0 + diff --git a/6_micro-cluster/example-2-mono-repo/product/scripts/image-push.sh b/6_micro-cluster/example-2-mono-repo/product/scripts/image-push.sh new file mode 100644 index 0000000..2c73dea --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/product/scripts/image-push.sh @@ -0,0 +1,53 @@ +#!/bin/bash + +# image name, prohibit uppercase letters in names. +IMAGE_NAME="eshop/product" + +# image repo address, passed in via the first parameter +REPO_HOST=$1 +if [ "X${REPO_HOST}" = "X" ];then + echo "param 'repo host' cannot be empty, example: ./image-push.sh hub.docker.com v1.0.0" + exit 1 +fi + +# version tag, passed in via the second parameter, if empty, defaults to latest +TAG=$2 +if [ "X${TAG}" = "X" ];then + TAG="latest" +fi +# image name and tag +IMAGE_NAME_TAG="${REPO_HOST}/${IMAGE_NAME}:${TAG}" + +function checkResult() { + result=$1 + if [ ${result} -ne 0 ]; then + exit ${result} + fi +} + +# image repository host, https://index.docker.io/v1 is the official docker image repository +IMAGE_REPO_HOST="https://index.docker.io/v1" +# check if you are authorized to log into docker +function checkLogin() { + loginStatus=$(cat /root/.docker/config.json | grep "${IMAGE_REPO_HOST}") + if [ "X${loginStatus}" = "X" ];then + echo "docker is not logged into the image repository" + checkResult 1 + fi +} + +checkLogin + +# push image to image repository +echo "docker push ${IMAGE_NAME_TAG}" +docker push ${IMAGE_NAME_TAG} +checkResult $? +echo "docker push image success." + +sleep 1 + +# delete image +echo "docker rmi -f ${IMAGE_NAME_TAG}" +docker rmi -f ${IMAGE_NAME_TAG} +checkResult $? +echo "docker remove image success." diff --git a/6_micro-cluster/example-2-mono-repo/product/scripts/image-rpc-test.sh b/6_micro-cluster/example-2-mono-repo/product/scripts/image-rpc-test.sh new file mode 100644 index 0000000..5aff190 --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/product/scripts/image-rpc-test.sh @@ -0,0 +1,33 @@ +#!/bin/bash + +# build rpc service test image + +serverName="product" +# image name of the service, prohibit uppercase letters in names. 
+IMAGE_NAME="eshop/product.rpc-test" +# Dockerfile file directory +DOCKERFILE_PATH="scripts/build" +DOCKERFILE="${DOCKERFILE_PATH}/Dockerfile_test" + +# image repo address, REPO_HOST="ip or domain", passed in via the first parameter +REPO_HOST=$1 +if [ "X${REPO_HOST}" = "X" ];then + echo "param 'repo host' cannot be empty, example: ./image-rpc-test.sh hub.docker.com v1.0.0" + exit 1 +fi +# the version tag, which defaults to latest if empty, is passed in via the second parameter +TAG=$2 +if [ "X${TAG}" = "X" ];then + TAG="latest" +fi +# image name and tag +IMAGE_NAME_TAG="${REPO_HOST}/${IMAGE_NAME}:${TAG}" + +PROJECT_FILES=$(ls) +tar zcf ${serverName}.tar.gz ${PROJECT_FILES} +mv -f ${serverName}.tar.gz ${DOCKERFILE_PATH} + +echo "docker build -f ${DOCKERFILE} -t ${IMAGE_NAME_TAG} ${DOCKERFILE_PATH}" +docker build --force-rm -f ${DOCKERFILE} -t ${IMAGE_NAME_TAG} ${DOCKERFILE_PATH} + +rm -rf ${DOCKERFILE_PATH}/${serverName}.tar.gz diff --git a/6_micro-cluster/example-2-mono-repo/product/scripts/patch-mono.sh b/6_micro-cluster/example-2-mono-repo/product/scripts/patch-mono.sh new file mode 100644 index 0000000..7d00974 --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/product/scripts/patch-mono.sh @@ -0,0 +1,36 @@ +#!/bin/bash + +goModFile="go.mod" +thirdPartyProtoDir="third_party" +genServerType="grpc-pb" + +function checkResult() { + result=$1 + if [ ${result} -ne 0 ]; then + exit ${result} + fi +} + +if [ ! -f "../$goModFile" ]; then + sponge patch copy-go-mod -f + checkResult $? + mv -f go.mod .. + mv -f go.sum .. +fi + +if [ "$genServerType"x != "http"x ]; then + if [ ! -d "../$thirdPartyProtoDir" ]; then + sponge patch copy-third-party-proto + checkResult $? + mv -f $thirdPartyProtoDir .. + fi +fi + +if [ "$genServerType"x = "grpc"x ]; then + if [ ! -d "../api/types" ]; then + sponge patch gen-types-pb --out=. + checkResult $? + mv -f api/types ../api + rmdir api + fi +fi diff --git a/6_micro-cluster/example-2-mono-repo/product/scripts/patch.sh b/6_micro-cluster/example-2-mono-repo/product/scripts/patch.sh new file mode 100644 index 0000000..f06f10a --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/product/scripts/patch.sh @@ -0,0 +1,81 @@ +#!/bin/bash + +patchType=$1 +typesPb="types-pb" +initMysql="init-mysql" +initMongodb="init-mongodb" +initTidb="init-tidb" +initPostgresql="init-postgresql" +initSqlite="init-sqlite" + +function checkResult() { + result=$1 + if [ ${result} -ne 0 ]; then + exit ${result} + fi +} + +function importPkg() { + go mod tidy +} + +function generateTypesPbCode() { + + if [ ! -d "../api/types" ]; then + sponge patch gen-types-pb --out=./ + checkResult $? + mv -f api/types ../api + rmdir api + fi + checkResult $? +} + +function generateInitMysqlCode() { + sponge patch gen-db-init --db-driver=mysql --out=./ + checkResult $? + importPkg +} + +function generateInitMongodbCode() { + sponge patch gen-db-init --db-driver=mongodb --out=./ + checkResult $? + importPkg +} + +function generateInitTidbCode() { + sponge patch gen-db-init --db-driver=tidb --out=./ + checkResult $? + importPkg +} + +function generateInitPostgresqlCode() { + sponge patch gen-db-init --db-driver=postgresql --out=./ + checkResult $? + importPkg +} + +function generateInitSqliteCode() { + sponge patch gen-db-init --db-driver=sqlite --out=./ + checkResult $? 
+ importPkg +} + +if [ "$patchType" = "$typesPb" ]; then + generateTypesPbCode +elif [ "$patchType" = "$initMysql" ]; then + generateInitMysqlCode +elif [ "$patchType" = "$initMongodb" ]; then + generateInitMongodbCode +elif [ "$patchType" = "$initTidb" ]; then + generateInitTidbCode +elif [ "$patchType" = "$initPostgresql" ]; then + generateInitPostgresqlCode +elif [ "$patchType" = "$initSqlite" ]; then + generateInitSqliteCode +else + echo "invalid patch type: '$patchType'" + echo "supported types: $initMysql, $initMongodb, $initTidb, $initPostgresql, $initSqlite, $typesPb" + echo "e.g. make patch TYPE=init-mysql" + echo "" + exit 1 +fi diff --git a/6_micro-cluster/example-2-mono-repo/product/scripts/proto-doc.sh b/6_micro-cluster/example-2-mono-repo/product/scripts/proto-doc.sh new file mode 100644 index 0000000..cb4b0fe --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/product/scripts/proto-doc.sh @@ -0,0 +1,46 @@ +#!/bin/bash + +# the directory where the proto files are located +bash scripts/patch-mono.sh +cd .. + +protoBasePath="api" +allProtoFiles="" + +function checkResult() { + result=$1 + if [ ${result} -ne 0 ]; then + exit ${result} + fi +} + +function listFiles(){ + cd $1 + items=$(ls) + + for item in $items; do + if [ -d "$item" ]; then + listFiles $item + else + if [ "${item#*.}"x = "proto"x ];then + file=$(pwd)/${item} + protoFile="${protoBasePath}${file#*${protoBasePath}}" + allProtoFiles="${allProtoFiles} ${protoFile}" + fi + fi + done + cd .. +} + +# get all proto file paths +listFiles $protoBasePath + +protoc --proto_path=. --proto_path=./third_party \ + --doc_out=. --doc_opt=html,apis.html \ + $allProtoFiles + +checkResult $? + +mv -f apis.html product/docs/apis.html + +echo "generate proto doc file successfully, view in product/docs/apis.html" diff --git a/6_micro-cluster/example-2-mono-repo/product/scripts/protoc.sh b/6_micro-cluster/example-2-mono-repo/product/scripts/protoc.sh new file mode 100644 index 0000000..c41faa9 --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/product/scripts/protoc.sh @@ -0,0 +1,211 @@ +#!/bin/bash + +bash scripts/patch-mono.sh +cd .. + +protoBasePath="api" +allProtoFiles="" + +specifiedProtoFilePath=$1 +specifiedProtoFilePaths="" + +colorGray='\033[1;30m' +colorGreen='\033[1;32m' +colorMagenta='\033[1;35m' +colorCyan='\033[1;36m' +highBright='\033[1m' +markEnd='\033[0m' + +tipMsg="" + +function checkResult() { + result=$1 + if [ ${result} -ne 0 ]; then + exit ${result} + fi +} + +# get specified proto files, if empty, return 0 else return 1 +function getSpecifiedProtoFiles() { + if [ "$specifiedProtoFilePath"x = x ];then + return 0 + fi + + specifiedProtoFilePaths=${specifiedProtoFilePath//,/ } + + for v in $specifiedProtoFilePaths; do + if [ ! 
-f "$v" ];then + echo "Error: not found specified proto file $v" + echo "example: make proto FILES=api/user/v1/user.proto,api/types/types.proto" + checkResult 1 + fi + done + + return 1 +} + +# add the import of useless packages from the generated *.pb.go code here +function deleteUnusedPkg() { + file=$1 + osType=$(uname -s) + if [ "${osType}"x = "Darwin"x ];then + sed -i '' 's#_ \"github.com/envoyproxy/protoc-gen-validate/validate\"##g' ${file} + sed -i '' 's#_ \"github.com/grpc-ecosystem/grpc-gateway/v2/protoc-gen-openapiv2/options\"##g' ${file} + sed -i '' 's#_ \"github.com/srikrsna/protoc-gen-gotag/tagger\"##g' ${file} + sed -i '' 's#_ \"google.golang.org/genproto/googleapis/api/annotations\"##g' ${file} + else + sed -i "s#_ \"github.com/envoyproxy/protoc-gen-validate/validate\"##g" ${file} + sed -i "s#_ \"github.com/grpc-ecosystem/grpc-gateway/v2/protoc-gen-openapiv2/options\"##g" ${file} + sed -i "s#_ \"github.com/srikrsna/protoc-gen-gotag/tagger\"##g" ${file} + sed -i "s#_ \"google.golang.org/genproto/googleapis/api/annotations\"##g" ${file} + fi + checkResult $? +} + +function listProtoFiles(){ + cd $1 + items=$(ls) + + for item in $items; do + if [ -d "$item" ]; then + listProtoFiles $item + else + if [ "${item#*.}"x = "proto"x ];then + file=$(pwd)/${item} + protoFile="${protoBasePath}${file#*${protoBasePath}}" + allProtoFiles="${allProtoFiles} ${protoFile}" + fi + fi + done + cd .. +} + +function handlePbGoFiles(){ + cd $1 + items=$(ls) + + for item in $items; do + if [ -d "$item" ]; then + handlePbGoFiles $item + else + if [ "${item#*.}"x = "pb.go"x ];then + deleteUnusedPkg $item + fi + fi + done + cd .. +} + +function generateByAllProto(){ + getSpecifiedProtoFiles + if [ $? -eq 0 ]; then + listProtoFiles $protoBasePath + else + allProtoFiles=$specifiedProtoFilePaths + fi + + if [ "$allProtoFiles"x = x ];then + echo "Error: not found proto file in path $protoBasePath" + exit 1 + fi + echo -e "generate *pb.go by proto files: ${colorGray}$allProtoFiles${markEnd}" + echo "" + + # generate files *_pb.go + protoc --proto_path=. --proto_path=./third_party \ + --go_out=. --go_opt=paths=source_relative \ + $allProtoFiles + + checkResult $? + + # generate files *_grpc_pb.go + protoc --proto_path=. --proto_path=./third_party \ + --go-grpc_out=. --go-grpc_opt=paths=source_relative \ + $allProtoFiles + + checkResult $? + + + # generate the file *_pb.validate.go + protoc --proto_path=. --proto_path=./third_party \ + --validate_out=lang=go:. --validate_opt=paths=source_relative \ + $allProtoFiles + + checkResult $? + + # embed the tag field into *_pb.go + protoc --proto_path=. --proto_path=./third_party \ + --gotag_out=:. --gotag_opt=paths=source_relative \ + $allProtoFiles + + checkResult $? +} + +function generateBySpecifiedProto(){ + # get the proto file of the product server + allProtoFiles="" + listProtoFiles ${protoBasePath}/product + cd .. + specifiedProtoFiles="" + getSpecifiedProtoFiles + if [ $? 
-eq 0 ]; then + specifiedProtoFiles=$allProtoFiles + else + for v1 in $specifiedProtoFilePaths; do + for v2 in $allProtoFiles; do + if [ "$v1"x = "$v2"x ];then + specifiedProtoFiles="$specifiedProtoFiles $v1" + fi + done + done + fi + + if [ "$specifiedProtoFiles"x = x ];then + return + fi + echo -e "generate template code by proto files: ${colorMagenta}$specifiedProtoFiles${markEnd}" + echo "" + + moduleName=$(cat product/docs/gen.info | head -1 | cut -d , -f 1) + serverName=$(cat product/docs/gen.info | head -1 | cut -d , -f 2) + suitedMonoRepo=$(cat product/docs/gen.info | head -1 | cut -d , -f 3) + + protoc --proto_path=. --proto_path=./third_party \ + --go-rpc-tmpl_out=. --go-rpc-tmpl_opt=paths=source_relative \ + --go-rpc-tmpl_opt=moduleName=${moduleName} --go-rpc-tmpl_opt=serverName=${serverName} --go-rpc-tmpl_opt=suitedMonoRepo=${suitedMonoRepo} \ + $specifiedProtoFiles + + checkResult $? + + sponge merge rpc-pb --dir=product + checkResult $? + + tipMsg="${highBright}Tip:${markEnd} execute the command ${colorCyan}make run${markEnd} and then test grpc api in the file ${colorCyan}internal/service/xxx_client_test.go${markEnd}." + + + + if [ "$suitedMonoRepo" == "true" ]; then + sponge patch adapt-mono-repo --dir=product + fi +} + +# generate pb.go by all proto files +generateByAllProto + +# generate pb.go by specified proto files +generateBySpecifiedProto + +# delete unused packages in pb.go +handlePbGoFiles $protoBasePath + +# delete json tag omitempty +sponge patch del-omitempty --dir=$protoBasePath --suffix-name=pb.go > /dev/null + +# modify duplicate numbers and error codes +sponge patch modify-dup-num --dir=product/internal/ecode +sponge patch modify-dup-err-code --dir=product/internal/ecode + +echo -e "${colorGreen}generated code done.${markEnd}" +echo "" +echo -e $tipMsg +echo "" diff --git a/6_micro-cluster/example-2-mono-repo/product/scripts/run-nohup.sh b/6_micro-cluster/example-2-mono-repo/product/scripts/run-nohup.sh new file mode 100644 index 0000000..b1adc8d --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/product/scripts/run-nohup.sh @@ -0,0 +1,61 @@ +#!/bin/bash + +# chkconfig: - 85 15 +# description: product + +serverName="product" +cmdStr="cmd/${serverName}/${serverName}" + + +function checkResult() { + result=$1 + if [ ${result} -ne 0 ]; then + exit ${result} + fi +} + +stopService(){ + NAME=$1 + + ID=`ps -ef | grep "$NAME" | grep -v "$0" | grep -v "grep" | awk '{print $2}'` + if [ -n "$ID" ]; then + for id in $ID + do + kill -9 $id + echo "Stopped ${NAME} service successfully, process ID=${ID}" + done + fi +} + +startService() { + NAME=$1 + + if [ -f "${NAME}" ] ;then + rm "${NAME}" + fi + sleep 0.2 + go build -o ${cmdStr} cmd/${NAME}/main.go + checkResult $? + + nohup ${cmdStr} > ${NAME}.log 2>&1 & + sleep 1 + + ID=`ps -ef | grep "$NAME" | grep -v "$0" | grep -v "grep" | awk '{print $2}'` + if [ -n "$ID" ]; then + echo "Start the ${NAME} service successfully, process ID=${ID}" + else + echo "Failed to start ${NAME} service" + return 1 + fi + return 0 +} + + +stopService ${serverName} +if [ "$1"x != "stop"x ] ;then + sleep 1 + startService ${serverName} + checkResult $? 
+else + echo "Service ${serverName} has stopped" +fi diff --git a/6_micro-cluster/example-2-mono-repo/product/scripts/run.sh b/6_micro-cluster/example-2-mono-repo/product/scripts/run.sh new file mode 100644 index 0000000..c1a3e99 --- /dev/null +++ b/6_micro-cluster/example-2-mono-repo/product/scripts/run.sh @@ -0,0 +1,29 @@ +#!/bin/bash + +serverName="product" + +binaryFile="cmd/${serverName}/${serverName}" + +osType=$(uname -s) +if [ "${osType%%_*}"x = "MINGW64"x ];then + binaryFile="${binaryFile}.exe" +fi + +if [ -f "${binaryFile}" ] ;then + rm "${binaryFile}" +fi + +function checkResult() { + result=$1 + if [ ${result} -ne 0 ]; then + exit ${result} + fi +} + +sleep 0.2 + +go build -o ${binaryFile} cmd/${serverName}/main.go +checkResult $? + +# running server +./${binaryFile} diff --git a/a_micro-grpc-http-protobuf/third_party/gogo/protobuf/gogoproto/gogo.proto b/6_micro-cluster/example-2-mono-repo/third_party/gogo/protobuf/gogoproto/gogo.proto similarity index 100% rename from a_micro-grpc-http-protobuf/third_party/gogo/protobuf/gogoproto/gogo.proto rename to 6_micro-cluster/example-2-mono-repo/third_party/gogo/protobuf/gogoproto/gogo.proto diff --git a/a_micro-grpc-http-protobuf/third_party/google/api/BUILD.bazel b/6_micro-cluster/example-2-mono-repo/third_party/google/api/BUILD.bazel similarity index 100% rename from a_micro-grpc-http-protobuf/third_party/google/api/BUILD.bazel rename to 6_micro-cluster/example-2-mono-repo/third_party/google/api/BUILD.bazel diff --git a/a_micro-grpc-http-protobuf/third_party/google/api/README.md b/6_micro-cluster/example-2-mono-repo/third_party/google/api/README.md similarity index 100% rename from a_micro-grpc-http-protobuf/third_party/google/api/README.md rename to 6_micro-cluster/example-2-mono-repo/third_party/google/api/README.md diff --git a/a_micro-grpc-http-protobuf/third_party/google/api/annotations.proto b/6_micro-cluster/example-2-mono-repo/third_party/google/api/annotations.proto similarity index 100% rename from a_micro-grpc-http-protobuf/third_party/google/api/annotations.proto rename to 6_micro-cluster/example-2-mono-repo/third_party/google/api/annotations.proto diff --git a/a_micro-grpc-http-protobuf/third_party/google/api/auth.proto b/6_micro-cluster/example-2-mono-repo/third_party/google/api/auth.proto similarity index 100% rename from a_micro-grpc-http-protobuf/third_party/google/api/auth.proto rename to 6_micro-cluster/example-2-mono-repo/third_party/google/api/auth.proto diff --git a/a_micro-grpc-http-protobuf/third_party/google/api/backend.proto b/6_micro-cluster/example-2-mono-repo/third_party/google/api/backend.proto similarity index 100% rename from a_micro-grpc-http-protobuf/third_party/google/api/backend.proto rename to 6_micro-cluster/example-2-mono-repo/third_party/google/api/backend.proto diff --git a/a_micro-grpc-http-protobuf/third_party/google/api/billing.proto b/6_micro-cluster/example-2-mono-repo/third_party/google/api/billing.proto similarity index 100% rename from a_micro-grpc-http-protobuf/third_party/google/api/billing.proto rename to 6_micro-cluster/example-2-mono-repo/third_party/google/api/billing.proto diff --git a/a_micro-grpc-http-protobuf/third_party/google/api/client.proto b/6_micro-cluster/example-2-mono-repo/third_party/google/api/client.proto similarity index 100% rename from a_micro-grpc-http-protobuf/third_party/google/api/client.proto rename to 6_micro-cluster/example-2-mono-repo/third_party/google/api/client.proto diff --git a/a_micro-grpc-http-protobuf/third_party/google/api/config_change.proto 
b/6_micro-cluster/example-2-mono-repo/third_party/google/api/config_change.proto similarity index 100% rename from a_micro-grpc-http-protobuf/third_party/google/api/config_change.proto rename to 6_micro-cluster/example-2-mono-repo/third_party/google/api/config_change.proto diff --git a/a_micro-grpc-http-protobuf/third_party/google/api/consumer.proto b/6_micro-cluster/example-2-mono-repo/third_party/google/api/consumer.proto similarity index 100% rename from a_micro-grpc-http-protobuf/third_party/google/api/consumer.proto rename to 6_micro-cluster/example-2-mono-repo/third_party/google/api/consumer.proto diff --git a/a_micro-grpc-http-protobuf/third_party/google/api/context.proto b/6_micro-cluster/example-2-mono-repo/third_party/google/api/context.proto similarity index 100% rename from a_micro-grpc-http-protobuf/third_party/google/api/context.proto rename to 6_micro-cluster/example-2-mono-repo/third_party/google/api/context.proto diff --git a/a_micro-grpc-http-protobuf/third_party/google/api/control.proto b/6_micro-cluster/example-2-mono-repo/third_party/google/api/control.proto similarity index 100% rename from a_micro-grpc-http-protobuf/third_party/google/api/control.proto rename to 6_micro-cluster/example-2-mono-repo/third_party/google/api/control.proto diff --git a/a_micro-grpc-http-protobuf/third_party/google/api/distribution.proto b/6_micro-cluster/example-2-mono-repo/third_party/google/api/distribution.proto similarity index 100% rename from a_micro-grpc-http-protobuf/third_party/google/api/distribution.proto rename to 6_micro-cluster/example-2-mono-repo/third_party/google/api/distribution.proto diff --git a/a_micro-grpc-http-protobuf/third_party/google/api/documentation.proto b/6_micro-cluster/example-2-mono-repo/third_party/google/api/documentation.proto similarity index 100% rename from a_micro-grpc-http-protobuf/third_party/google/api/documentation.proto rename to 6_micro-cluster/example-2-mono-repo/third_party/google/api/documentation.proto diff --git a/a_micro-grpc-http-protobuf/third_party/google/api/endpoint.proto b/6_micro-cluster/example-2-mono-repo/third_party/google/api/endpoint.proto similarity index 100% rename from a_micro-grpc-http-protobuf/third_party/google/api/endpoint.proto rename to 6_micro-cluster/example-2-mono-repo/third_party/google/api/endpoint.proto diff --git a/a_micro-grpc-http-protobuf/third_party/google/api/expr/BUILD.bazel b/6_micro-cluster/example-2-mono-repo/third_party/google/api/expr/BUILD.bazel similarity index 100% rename from a_micro-grpc-http-protobuf/third_party/google/api/expr/BUILD.bazel rename to 6_micro-cluster/example-2-mono-repo/third_party/google/api/expr/BUILD.bazel diff --git a/a_micro-grpc-http-protobuf/third_party/google/api/expr/cel.yaml b/6_micro-cluster/example-2-mono-repo/third_party/google/api/expr/cel.yaml similarity index 100% rename from a_micro-grpc-http-protobuf/third_party/google/api/expr/cel.yaml rename to 6_micro-cluster/example-2-mono-repo/third_party/google/api/expr/cel.yaml diff --git a/a_micro-grpc-http-protobuf/third_party/google/api/expr/v1alpha1/BUILD.bazel b/6_micro-cluster/example-2-mono-repo/third_party/google/api/expr/v1alpha1/BUILD.bazel similarity index 100% rename from a_micro-grpc-http-protobuf/third_party/google/api/expr/v1alpha1/BUILD.bazel rename to 6_micro-cluster/example-2-mono-repo/third_party/google/api/expr/v1alpha1/BUILD.bazel diff --git a/a_micro-grpc-http-protobuf/third_party/google/api/expr/v1alpha1/checked.proto 
b/6_micro-cluster/example-2-mono-repo/third_party/google/api/expr/v1alpha1/checked.proto similarity index 100% rename from a_micro-grpc-http-protobuf/third_party/google/api/expr/v1alpha1/checked.proto rename to 6_micro-cluster/example-2-mono-repo/third_party/google/api/expr/v1alpha1/checked.proto diff --git a/a_micro-grpc-http-protobuf/third_party/google/api/expr/v1alpha1/conformance_service.proto b/6_micro-cluster/example-2-mono-repo/third_party/google/api/expr/v1alpha1/conformance_service.proto similarity index 100% rename from a_micro-grpc-http-protobuf/third_party/google/api/expr/v1alpha1/conformance_service.proto rename to 6_micro-cluster/example-2-mono-repo/third_party/google/api/expr/v1alpha1/conformance_service.proto diff --git a/a_micro-grpc-http-protobuf/third_party/google/api/expr/v1alpha1/eval.proto b/6_micro-cluster/example-2-mono-repo/third_party/google/api/expr/v1alpha1/eval.proto similarity index 100% rename from a_micro-grpc-http-protobuf/third_party/google/api/expr/v1alpha1/eval.proto rename to 6_micro-cluster/example-2-mono-repo/third_party/google/api/expr/v1alpha1/eval.proto diff --git a/a_micro-grpc-http-protobuf/third_party/google/api/expr/v1alpha1/explain.proto b/6_micro-cluster/example-2-mono-repo/third_party/google/api/expr/v1alpha1/explain.proto similarity index 100% rename from a_micro-grpc-http-protobuf/third_party/google/api/expr/v1alpha1/explain.proto rename to 6_micro-cluster/example-2-mono-repo/third_party/google/api/expr/v1alpha1/explain.proto diff --git a/a_micro-grpc-http-protobuf/third_party/google/api/expr/v1alpha1/syntax.proto b/6_micro-cluster/example-2-mono-repo/third_party/google/api/expr/v1alpha1/syntax.proto similarity index 100% rename from a_micro-grpc-http-protobuf/third_party/google/api/expr/v1alpha1/syntax.proto rename to 6_micro-cluster/example-2-mono-repo/third_party/google/api/expr/v1alpha1/syntax.proto diff --git a/a_micro-grpc-http-protobuf/third_party/google/api/expr/v1alpha1/value.proto b/6_micro-cluster/example-2-mono-repo/third_party/google/api/expr/v1alpha1/value.proto similarity index 100% rename from a_micro-grpc-http-protobuf/third_party/google/api/expr/v1alpha1/value.proto rename to 6_micro-cluster/example-2-mono-repo/third_party/google/api/expr/v1alpha1/value.proto diff --git a/a_micro-grpc-http-protobuf/third_party/google/api/expr/v1beta1/BUILD.bazel b/6_micro-cluster/example-2-mono-repo/third_party/google/api/expr/v1beta1/BUILD.bazel similarity index 100% rename from a_micro-grpc-http-protobuf/third_party/google/api/expr/v1beta1/BUILD.bazel rename to 6_micro-cluster/example-2-mono-repo/third_party/google/api/expr/v1beta1/BUILD.bazel diff --git a/a_micro-grpc-http-protobuf/third_party/google/api/expr/v1beta1/decl.proto b/6_micro-cluster/example-2-mono-repo/third_party/google/api/expr/v1beta1/decl.proto similarity index 100% rename from a_micro-grpc-http-protobuf/third_party/google/api/expr/v1beta1/decl.proto rename to 6_micro-cluster/example-2-mono-repo/third_party/google/api/expr/v1beta1/decl.proto diff --git a/a_micro-grpc-http-protobuf/third_party/google/api/expr/v1beta1/eval.proto b/6_micro-cluster/example-2-mono-repo/third_party/google/api/expr/v1beta1/eval.proto similarity index 100% rename from a_micro-grpc-http-protobuf/third_party/google/api/expr/v1beta1/eval.proto rename to 6_micro-cluster/example-2-mono-repo/third_party/google/api/expr/v1beta1/eval.proto diff --git a/a_micro-grpc-http-protobuf/third_party/google/api/expr/v1beta1/expr.proto 
b/6_micro-cluster/example-2-mono-repo/third_party/google/api/expr/v1beta1/expr.proto similarity index 100% rename from a_micro-grpc-http-protobuf/third_party/google/api/expr/v1beta1/expr.proto rename to 6_micro-cluster/example-2-mono-repo/third_party/google/api/expr/v1beta1/expr.proto diff --git a/a_micro-grpc-http-protobuf/third_party/google/api/expr/v1beta1/source.proto b/6_micro-cluster/example-2-mono-repo/third_party/google/api/expr/v1beta1/source.proto similarity index 100% rename from a_micro-grpc-http-protobuf/third_party/google/api/expr/v1beta1/source.proto rename to 6_micro-cluster/example-2-mono-repo/third_party/google/api/expr/v1beta1/source.proto diff --git a/a_micro-grpc-http-protobuf/third_party/google/api/expr/v1beta1/value.proto b/6_micro-cluster/example-2-mono-repo/third_party/google/api/expr/v1beta1/value.proto similarity index 100% rename from a_micro-grpc-http-protobuf/third_party/google/api/expr/v1beta1/value.proto rename to 6_micro-cluster/example-2-mono-repo/third_party/google/api/expr/v1beta1/value.proto diff --git a/a_micro-grpc-http-protobuf/third_party/google/api/field_behavior.proto b/6_micro-cluster/example-2-mono-repo/third_party/google/api/field_behavior.proto similarity index 100% rename from a_micro-grpc-http-protobuf/third_party/google/api/field_behavior.proto rename to 6_micro-cluster/example-2-mono-repo/third_party/google/api/field_behavior.proto diff --git a/a_micro-grpc-http-protobuf/third_party/google/api/http.proto b/6_micro-cluster/example-2-mono-repo/third_party/google/api/http.proto similarity index 100% rename from a_micro-grpc-http-protobuf/third_party/google/api/http.proto rename to 6_micro-cluster/example-2-mono-repo/third_party/google/api/http.proto diff --git a/a_micro-grpc-http-protobuf/third_party/google/api/httpbody.proto b/6_micro-cluster/example-2-mono-repo/third_party/google/api/httpbody.proto similarity index 100% rename from a_micro-grpc-http-protobuf/third_party/google/api/httpbody.proto rename to 6_micro-cluster/example-2-mono-repo/third_party/google/api/httpbody.proto diff --git a/a_micro-grpc-http-protobuf/third_party/google/api/label.proto b/6_micro-cluster/example-2-mono-repo/third_party/google/api/label.proto similarity index 100% rename from a_micro-grpc-http-protobuf/third_party/google/api/label.proto rename to 6_micro-cluster/example-2-mono-repo/third_party/google/api/label.proto diff --git a/a_micro-grpc-http-protobuf/third_party/google/api/launch_stage.proto b/6_micro-cluster/example-2-mono-repo/third_party/google/api/launch_stage.proto similarity index 100% rename from a_micro-grpc-http-protobuf/third_party/google/api/launch_stage.proto rename to 6_micro-cluster/example-2-mono-repo/third_party/google/api/launch_stage.proto diff --git a/a_micro-grpc-http-protobuf/third_party/google/api/log.proto b/6_micro-cluster/example-2-mono-repo/third_party/google/api/log.proto similarity index 100% rename from a_micro-grpc-http-protobuf/third_party/google/api/log.proto rename to 6_micro-cluster/example-2-mono-repo/third_party/google/api/log.proto diff --git a/a_micro-grpc-http-protobuf/third_party/google/api/logging.proto b/6_micro-cluster/example-2-mono-repo/third_party/google/api/logging.proto similarity index 100% rename from a_micro-grpc-http-protobuf/third_party/google/api/logging.proto rename to 6_micro-cluster/example-2-mono-repo/third_party/google/api/logging.proto diff --git a/a_micro-grpc-http-protobuf/third_party/google/api/metric.proto b/6_micro-cluster/example-2-mono-repo/third_party/google/api/metric.proto similarity 
index 100% rename from a_micro-grpc-http-protobuf/third_party/google/api/metric.proto rename to 6_micro-cluster/example-2-mono-repo/third_party/google/api/metric.proto diff --git a/a_micro-grpc-http-protobuf/third_party/google/api/monitored_resource.proto b/6_micro-cluster/example-2-mono-repo/third_party/google/api/monitored_resource.proto similarity index 100% rename from a_micro-grpc-http-protobuf/third_party/google/api/monitored_resource.proto rename to 6_micro-cluster/example-2-mono-repo/third_party/google/api/monitored_resource.proto diff --git a/a_micro-grpc-http-protobuf/third_party/google/api/monitoring.proto b/6_micro-cluster/example-2-mono-repo/third_party/google/api/monitoring.proto similarity index 100% rename from a_micro-grpc-http-protobuf/third_party/google/api/monitoring.proto rename to 6_micro-cluster/example-2-mono-repo/third_party/google/api/monitoring.proto diff --git a/a_micro-grpc-http-protobuf/third_party/google/api/quota.proto b/6_micro-cluster/example-2-mono-repo/third_party/google/api/quota.proto similarity index 100% rename from a_micro-grpc-http-protobuf/third_party/google/api/quota.proto rename to 6_micro-cluster/example-2-mono-repo/third_party/google/api/quota.proto diff --git a/a_micro-grpc-http-protobuf/third_party/google/api/resource.proto b/6_micro-cluster/example-2-mono-repo/third_party/google/api/resource.proto similarity index 100% rename from a_micro-grpc-http-protobuf/third_party/google/api/resource.proto rename to 6_micro-cluster/example-2-mono-repo/third_party/google/api/resource.proto diff --git a/a_micro-grpc-http-protobuf/third_party/google/api/service.proto b/6_micro-cluster/example-2-mono-repo/third_party/google/api/service.proto similarity index 100% rename from a_micro-grpc-http-protobuf/third_party/google/api/service.proto rename to 6_micro-cluster/example-2-mono-repo/third_party/google/api/service.proto diff --git a/a_micro-grpc-http-protobuf/third_party/google/api/serviceconfig.yaml b/6_micro-cluster/example-2-mono-repo/third_party/google/api/serviceconfig.yaml similarity index 100% rename from a_micro-grpc-http-protobuf/third_party/google/api/serviceconfig.yaml rename to 6_micro-cluster/example-2-mono-repo/third_party/google/api/serviceconfig.yaml diff --git a/a_micro-grpc-http-protobuf/third_party/google/api/servicecontrol/BUILD.bazel b/6_micro-cluster/example-2-mono-repo/third_party/google/api/servicecontrol/BUILD.bazel similarity index 100% rename from a_micro-grpc-http-protobuf/third_party/google/api/servicecontrol/BUILD.bazel rename to 6_micro-cluster/example-2-mono-repo/third_party/google/api/servicecontrol/BUILD.bazel diff --git a/a_micro-grpc-http-protobuf/third_party/google/api/servicecontrol/README.md b/6_micro-cluster/example-2-mono-repo/third_party/google/api/servicecontrol/README.md similarity index 100% rename from a_micro-grpc-http-protobuf/third_party/google/api/servicecontrol/README.md rename to 6_micro-cluster/example-2-mono-repo/third_party/google/api/servicecontrol/README.md diff --git a/a_micro-grpc-http-protobuf/third_party/google/api/servicecontrol/v1/BUILD.bazel b/6_micro-cluster/example-2-mono-repo/third_party/google/api/servicecontrol/v1/BUILD.bazel similarity index 100% rename from a_micro-grpc-http-protobuf/third_party/google/api/servicecontrol/v1/BUILD.bazel rename to 6_micro-cluster/example-2-mono-repo/third_party/google/api/servicecontrol/v1/BUILD.bazel diff --git a/a_micro-grpc-http-protobuf/third_party/google/api/servicecontrol/v1/check_error.proto 
b/6_micro-cluster/example-2-mono-repo/third_party/google/api/servicecontrol/v1/check_error.proto similarity index 100% rename from a_micro-grpc-http-protobuf/third_party/google/api/servicecontrol/v1/check_error.proto rename to 6_micro-cluster/example-2-mono-repo/third_party/google/api/servicecontrol/v1/check_error.proto diff --git a/a_micro-grpc-http-protobuf/third_party/google/api/servicecontrol/v1/distribution.proto b/6_micro-cluster/example-2-mono-repo/third_party/google/api/servicecontrol/v1/distribution.proto similarity index 100% rename from a_micro-grpc-http-protobuf/third_party/google/api/servicecontrol/v1/distribution.proto rename to 6_micro-cluster/example-2-mono-repo/third_party/google/api/servicecontrol/v1/distribution.proto diff --git a/a_micro-grpc-http-protobuf/third_party/google/api/servicecontrol/v1/http_request.proto b/6_micro-cluster/example-2-mono-repo/third_party/google/api/servicecontrol/v1/http_request.proto similarity index 100% rename from a_micro-grpc-http-protobuf/third_party/google/api/servicecontrol/v1/http_request.proto rename to 6_micro-cluster/example-2-mono-repo/third_party/google/api/servicecontrol/v1/http_request.proto diff --git a/a_micro-grpc-http-protobuf/third_party/google/api/servicecontrol/v1/log_entry.proto b/6_micro-cluster/example-2-mono-repo/third_party/google/api/servicecontrol/v1/log_entry.proto similarity index 100% rename from a_micro-grpc-http-protobuf/third_party/google/api/servicecontrol/v1/log_entry.proto rename to 6_micro-cluster/example-2-mono-repo/third_party/google/api/servicecontrol/v1/log_entry.proto diff --git a/a_micro-grpc-http-protobuf/third_party/google/api/servicecontrol/v1/metric_value.proto b/6_micro-cluster/example-2-mono-repo/third_party/google/api/servicecontrol/v1/metric_value.proto similarity index 100% rename from a_micro-grpc-http-protobuf/third_party/google/api/servicecontrol/v1/metric_value.proto rename to 6_micro-cluster/example-2-mono-repo/third_party/google/api/servicecontrol/v1/metric_value.proto diff --git a/a_micro-grpc-http-protobuf/third_party/google/api/servicecontrol/v1/operation.proto b/6_micro-cluster/example-2-mono-repo/third_party/google/api/servicecontrol/v1/operation.proto similarity index 100% rename from a_micro-grpc-http-protobuf/third_party/google/api/servicecontrol/v1/operation.proto rename to 6_micro-cluster/example-2-mono-repo/third_party/google/api/servicecontrol/v1/operation.proto diff --git a/a_micro-grpc-http-protobuf/third_party/google/api/servicecontrol/v1/quota_controller.proto b/6_micro-cluster/example-2-mono-repo/third_party/google/api/servicecontrol/v1/quota_controller.proto similarity index 100% rename from a_micro-grpc-http-protobuf/third_party/google/api/servicecontrol/v1/quota_controller.proto rename to 6_micro-cluster/example-2-mono-repo/third_party/google/api/servicecontrol/v1/quota_controller.proto diff --git a/a_micro-grpc-http-protobuf/third_party/google/api/servicecontrol/v1/service_controller.proto b/6_micro-cluster/example-2-mono-repo/third_party/google/api/servicecontrol/v1/service_controller.proto similarity index 100% rename from a_micro-grpc-http-protobuf/third_party/google/api/servicecontrol/v1/service_controller.proto rename to 6_micro-cluster/example-2-mono-repo/third_party/google/api/servicecontrol/v1/service_controller.proto diff --git a/a_micro-grpc-http-protobuf/third_party/google/api/servicecontrol/v1/servicecontrol.yaml b/6_micro-cluster/example-2-mono-repo/third_party/google/api/servicecontrol/v1/servicecontrol.yaml similarity index 100% rename from 
a_micro-grpc-http-protobuf/third_party/google/api/servicecontrol/v1/servicecontrol.yaml rename to 6_micro-cluster/example-2-mono-repo/third_party/google/api/servicecontrol/v1/servicecontrol.yaml diff --git a/a_micro-grpc-http-protobuf/third_party/google/api/servicemanagement/BUILD.bazel b/6_micro-cluster/example-2-mono-repo/third_party/google/api/servicemanagement/BUILD.bazel similarity index 100% rename from a_micro-grpc-http-protobuf/third_party/google/api/servicemanagement/BUILD.bazel rename to 6_micro-cluster/example-2-mono-repo/third_party/google/api/servicemanagement/BUILD.bazel diff --git a/a_micro-grpc-http-protobuf/third_party/google/api/servicemanagement/README.md b/6_micro-cluster/example-2-mono-repo/third_party/google/api/servicemanagement/README.md similarity index 100% rename from a_micro-grpc-http-protobuf/third_party/google/api/servicemanagement/README.md rename to 6_micro-cluster/example-2-mono-repo/third_party/google/api/servicemanagement/README.md diff --git a/a_micro-grpc-http-protobuf/third_party/google/api/servicemanagement/v1/BUILD.bazel b/6_micro-cluster/example-2-mono-repo/third_party/google/api/servicemanagement/v1/BUILD.bazel similarity index 100% rename from a_micro-grpc-http-protobuf/third_party/google/api/servicemanagement/v1/BUILD.bazel rename to 6_micro-cluster/example-2-mono-repo/third_party/google/api/servicemanagement/v1/BUILD.bazel diff --git a/a_micro-grpc-http-protobuf/third_party/google/api/servicemanagement/v1/resources.proto b/6_micro-cluster/example-2-mono-repo/third_party/google/api/servicemanagement/v1/resources.proto similarity index 100% rename from a_micro-grpc-http-protobuf/third_party/google/api/servicemanagement/v1/resources.proto rename to 6_micro-cluster/example-2-mono-repo/third_party/google/api/servicemanagement/v1/resources.proto diff --git a/a_micro-grpc-http-protobuf/third_party/google/api/servicemanagement/v1/servicemanagement_gapic.legacy.yaml b/6_micro-cluster/example-2-mono-repo/third_party/google/api/servicemanagement/v1/servicemanagement_gapic.legacy.yaml similarity index 100% rename from a_micro-grpc-http-protobuf/third_party/google/api/servicemanagement/v1/servicemanagement_gapic.legacy.yaml rename to 6_micro-cluster/example-2-mono-repo/third_party/google/api/servicemanagement/v1/servicemanagement_gapic.legacy.yaml diff --git a/a_micro-grpc-http-protobuf/third_party/google/api/servicemanagement/v1/servicemanagement_gapic.yaml b/6_micro-cluster/example-2-mono-repo/third_party/google/api/servicemanagement/v1/servicemanagement_gapic.yaml similarity index 100% rename from a_micro-grpc-http-protobuf/third_party/google/api/servicemanagement/v1/servicemanagement_gapic.yaml rename to 6_micro-cluster/example-2-mono-repo/third_party/google/api/servicemanagement/v1/servicemanagement_gapic.yaml diff --git a/a_micro-grpc-http-protobuf/third_party/google/api/servicemanagement/v1/servicemanagement_grpc_service_config.json b/6_micro-cluster/example-2-mono-repo/third_party/google/api/servicemanagement/v1/servicemanagement_grpc_service_config.json similarity index 100% rename from a_micro-grpc-http-protobuf/third_party/google/api/servicemanagement/v1/servicemanagement_grpc_service_config.json rename to 6_micro-cluster/example-2-mono-repo/third_party/google/api/servicemanagement/v1/servicemanagement_grpc_service_config.json diff --git a/a_micro-grpc-http-protobuf/third_party/google/api/servicemanagement/v1/servicemanagement_v1.yaml b/6_micro-cluster/example-2-mono-repo/third_party/google/api/servicemanagement/v1/servicemanagement_v1.yaml 
similarity index 100% rename from a_micro-grpc-http-protobuf/third_party/google/api/servicemanagement/v1/servicemanagement_v1.yaml rename to 6_micro-cluster/example-2-mono-repo/third_party/google/api/servicemanagement/v1/servicemanagement_v1.yaml diff --git a/a_micro-grpc-http-protobuf/third_party/google/api/servicemanagement/v1/servicemanager.proto b/6_micro-cluster/example-2-mono-repo/third_party/google/api/servicemanagement/v1/servicemanager.proto similarity index 100% rename from a_micro-grpc-http-protobuf/third_party/google/api/servicemanagement/v1/servicemanager.proto rename to 6_micro-cluster/example-2-mono-repo/third_party/google/api/servicemanagement/v1/servicemanager.proto diff --git a/a_micro-grpc-http-protobuf/third_party/google/api/source_info.proto b/6_micro-cluster/example-2-mono-repo/third_party/google/api/source_info.proto similarity index 100% rename from a_micro-grpc-http-protobuf/third_party/google/api/source_info.proto rename to 6_micro-cluster/example-2-mono-repo/third_party/google/api/source_info.proto diff --git a/a_micro-grpc-http-protobuf/third_party/google/api/system_parameter.proto b/6_micro-cluster/example-2-mono-repo/third_party/google/api/system_parameter.proto similarity index 100% rename from a_micro-grpc-http-protobuf/third_party/google/api/system_parameter.proto rename to 6_micro-cluster/example-2-mono-repo/third_party/google/api/system_parameter.proto diff --git a/a_micro-grpc-http-protobuf/third_party/google/api/usage.proto b/6_micro-cluster/example-2-mono-repo/third_party/google/api/usage.proto similarity index 100% rename from a_micro-grpc-http-protobuf/third_party/google/api/usage.proto rename to 6_micro-cluster/example-2-mono-repo/third_party/google/api/usage.proto diff --git a/a_micro-grpc-http-protobuf/third_party/google/protobuf/annotations.proto b/6_micro-cluster/example-2-mono-repo/third_party/google/protobuf/annotations.proto similarity index 100% rename from a_micro-grpc-http-protobuf/third_party/google/protobuf/annotations.proto rename to 6_micro-cluster/example-2-mono-repo/third_party/google/protobuf/annotations.proto diff --git a/a_micro-grpc-http-protobuf/third_party/google/protobuf/any.proto b/6_micro-cluster/example-2-mono-repo/third_party/google/protobuf/any.proto similarity index 100% rename from a_micro-grpc-http-protobuf/third_party/google/protobuf/any.proto rename to 6_micro-cluster/example-2-mono-repo/third_party/google/protobuf/any.proto diff --git a/a_micro-grpc-http-protobuf/third_party/google/protobuf/api.proto b/6_micro-cluster/example-2-mono-repo/third_party/google/protobuf/api.proto similarity index 100% rename from a_micro-grpc-http-protobuf/third_party/google/protobuf/api.proto rename to 6_micro-cluster/example-2-mono-repo/third_party/google/protobuf/api.proto diff --git a/a_micro-grpc-http-protobuf/third_party/google/protobuf/compiler/plugin.proto b/6_micro-cluster/example-2-mono-repo/third_party/google/protobuf/compiler/plugin.proto similarity index 100% rename from a_micro-grpc-http-protobuf/third_party/google/protobuf/compiler/plugin.proto rename to 6_micro-cluster/example-2-mono-repo/third_party/google/protobuf/compiler/plugin.proto diff --git a/a_micro-grpc-http-protobuf/third_party/google/protobuf/descriptor.proto b/6_micro-cluster/example-2-mono-repo/third_party/google/protobuf/descriptor.proto similarity index 100% rename from a_micro-grpc-http-protobuf/third_party/google/protobuf/descriptor.proto rename to 6_micro-cluster/example-2-mono-repo/third_party/google/protobuf/descriptor.proto diff --git 
a/a_micro-grpc-http-protobuf/third_party/google/protobuf/duration.proto b/6_micro-cluster/example-2-mono-repo/third_party/google/protobuf/duration.proto similarity index 100% rename from a_micro-grpc-http-protobuf/third_party/google/protobuf/duration.proto rename to 6_micro-cluster/example-2-mono-repo/third_party/google/protobuf/duration.proto diff --git a/a_micro-grpc-http-protobuf/third_party/google/protobuf/empty.proto b/6_micro-cluster/example-2-mono-repo/third_party/google/protobuf/empty.proto similarity index 100% rename from a_micro-grpc-http-protobuf/third_party/google/protobuf/empty.proto rename to 6_micro-cluster/example-2-mono-repo/third_party/google/protobuf/empty.proto diff --git a/a_micro-grpc-http-protobuf/third_party/google/protobuf/field_mask.proto b/6_micro-cluster/example-2-mono-repo/third_party/google/protobuf/field_mask.proto similarity index 100% rename from a_micro-grpc-http-protobuf/third_party/google/protobuf/field_mask.proto rename to 6_micro-cluster/example-2-mono-repo/third_party/google/protobuf/field_mask.proto diff --git a/a_micro-grpc-http-protobuf/third_party/google/protobuf/source_context.proto b/6_micro-cluster/example-2-mono-repo/third_party/google/protobuf/source_context.proto similarity index 100% rename from a_micro-grpc-http-protobuf/third_party/google/protobuf/source_context.proto rename to 6_micro-cluster/example-2-mono-repo/third_party/google/protobuf/source_context.proto diff --git a/a_micro-grpc-http-protobuf/third_party/google/protobuf/struct.proto b/6_micro-cluster/example-2-mono-repo/third_party/google/protobuf/struct.proto similarity index 100% rename from a_micro-grpc-http-protobuf/third_party/google/protobuf/struct.proto rename to 6_micro-cluster/example-2-mono-repo/third_party/google/protobuf/struct.proto diff --git a/a_micro-grpc-http-protobuf/third_party/google/protobuf/timestamp.proto b/6_micro-cluster/example-2-mono-repo/third_party/google/protobuf/timestamp.proto similarity index 100% rename from a_micro-grpc-http-protobuf/third_party/google/protobuf/timestamp.proto rename to 6_micro-cluster/example-2-mono-repo/third_party/google/protobuf/timestamp.proto diff --git a/a_micro-grpc-http-protobuf/third_party/google/protobuf/type.proto b/6_micro-cluster/example-2-mono-repo/third_party/google/protobuf/type.proto similarity index 100% rename from a_micro-grpc-http-protobuf/third_party/google/protobuf/type.proto rename to 6_micro-cluster/example-2-mono-repo/third_party/google/protobuf/type.proto diff --git a/a_micro-grpc-http-protobuf/third_party/google/protobuf/wrappers.proto b/6_micro-cluster/example-2-mono-repo/third_party/google/protobuf/wrappers.proto similarity index 100% rename from a_micro-grpc-http-protobuf/third_party/google/protobuf/wrappers.proto rename to 6_micro-cluster/example-2-mono-repo/third_party/google/protobuf/wrappers.proto diff --git a/a_micro-grpc-http-protobuf/third_party/protoc-gen-openapiv2/options/annotations.proto b/6_micro-cluster/example-2-mono-repo/third_party/protoc-gen-openapiv2/options/annotations.proto similarity index 100% rename from a_micro-grpc-http-protobuf/third_party/protoc-gen-openapiv2/options/annotations.proto rename to 6_micro-cluster/example-2-mono-repo/third_party/protoc-gen-openapiv2/options/annotations.proto diff --git a/a_micro-grpc-http-protobuf/third_party/protoc-gen-openapiv2/options/openapiv2.proto b/6_micro-cluster/example-2-mono-repo/third_party/protoc-gen-openapiv2/options/openapiv2.proto similarity index 100% rename from 
a_micro-grpc-http-protobuf/third_party/protoc-gen-openapiv2/options/openapiv2.proto rename to 6_micro-cluster/example-2-mono-repo/third_party/protoc-gen-openapiv2/options/openapiv2.proto diff --git a/a_micro-grpc-http-protobuf/third_party/tagger/tagger.proto b/6_micro-cluster/example-2-mono-repo/third_party/tagger/tagger.proto similarity index 100% rename from a_micro-grpc-http-protobuf/third_party/tagger/tagger.proto rename to 6_micro-cluster/example-2-mono-repo/third_party/tagger/tagger.proto diff --git a/a_micro-grpc-http-protobuf/third_party/validate/validate.proto b/6_micro-cluster/example-2-mono-repo/third_party/validate/validate.proto similarity index 100% rename from a_micro-grpc-http-protobuf/third_party/validate/validate.proto rename to 6_micro-cluster/example-2-mono-repo/third_party/validate/validate.proto diff --git a/README.md b/README.md index 78cd23f..c888b32 100644 --- a/README.md +++ b/README.md @@ -1,9 +1,7 @@ ## Sponge Examples -[Sponge](https://github.com/zhufuyi/sponge) is a powerful development framework that integrates `code auto generation`, `gin and grpc framework`. It is easy to build a complete project from development to deployment, just fill in the business logic code on the generated template code, implementation of "low-code way" development projects. - -Here are some examples of using sponge to develop go projects, the database type used in the example is mysql, also support database mongodb, postgresql, tidb, sqlite. +Here are some examples of using sponge to develop go projects. [Sponge](https://github.com/zhufuyi/sponge) is a powerful development framework that integrates `code auto generation` with the `gin and grpc framework`. It makes it easy to build a complete project from development to deployment: just fill in the business logic code on the generated template code and develop projects in a "low-code" way.
@@ -14,14 +12,17 @@ Here are some examples of using sponge to develop go projects, the database type - [Create **web** service based on **protobuf**](https://github.com/zhufuyi/sponge_examples/tree/main/3_web-gin-protobuf) - [Create **grpc** service based on **protobuf** ](https://github.com/zhufuyi/sponge_examples/tree/main/4_micro-grpc-protobuf) - [Create **grpc gateway** service based on **protobuf**](https://github.com/zhufuyi/sponge_examples/tree/main/5_micro-gin-rpc-gateway) -- [Create **grpc+http** service based on **protobuf**](https://github.com/zhufuyi/sponge_examples/tree/main/a_micro-grpc-http-protobuf) -- [Build a microservice cluster](https://github.com/zhufuyi/sponge_examples/tree/main/6_micro-cluster) +- [Create **grpc+http** service based on **protobuf**](https://github.com/zhufuyi/sponge_examples/tree/main/_10_micro-grpc-http-protobuf) +- [Build a simple microservice cluster (multi-repo and mono-repo)](https://github.com/zhufuyi/sponge_examples/tree/main/6_micro-cluster) ### Examples of developing a complete project using sponge - [Simple community web backend service](https://github.com/zhufuyi/sponge_examples/tree/main/7_community-single) - [Simple community web service broken down into microservice](https://github.com/zhufuyi/sponge_examples/tree/main/8_community-cluster) -### Distributed transaction examples +### Sponge+DTM distributed transaction examples +- [Service registration and discovery (consul,etcd,nacos)](https://github.com/zhufuyi/sponge_examples/tree/main/_11_sponge-dtm-service-registration-discovery) +- [Flash sale](https://github.com/zhufuyi/sponge_examples/tree/main/_12_sponge-dtm-flashSale) +- [Cache consistency (redis, mysql)](https://github.com/zhufuyi/sponge_examples/tree/main/_13_sponge-dtm-cache) - [Simple distributed order system](https://github.com/zhufuyi/sponge_examples/tree/main/9_order-grpc-distributed-transaction) diff --git a/_10_micro-grpc-http-protobuf/go.mod b/_10_micro-grpc-http-protobuf/go.mod index d873573..49e25fd 100644 --- a/_10_micro-grpc-http-protobuf/go.mod +++ b/_10_micro-grpc-http-protobuf/go.mod @@ -1,13 +1,13 @@ module user -go 1.20 +go 1.21 require ( github.com/gin-gonic/gin v1.9.1 github.com/grpc-ecosystem/go-grpc-middleware v1.3.0 github.com/jinzhu/copier v0.3.5 github.com/stretchr/testify v1.9.0 - github.com/zhufuyi/sponge v1.9.2 + github.com/zhufuyi/sponge v1.10.1 go.uber.org/zap v1.24.0 google.golang.org/grpc v1.61.0 google.golang.org/protobuf v1.34.2 diff --git a/_10_micro-grpc-http-protobuf/go.sum b/_10_micro-grpc-http-protobuf/go.sum index bbe0f22..c0fc4b0 100644 --- a/_10_micro-grpc-http-protobuf/go.sum +++ b/_10_micro-grpc-http-protobuf/go.sum @@ -85,6 +85,7 @@ github.com/armon/go-metrics v0.3.10/go.mod h1:4O98XIr/9W0sxpJ8UaYkvjk10Iff7SnFrb github.com/armon/go-radix v0.0.0-20180808171621-7fddfc383310/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8= github.com/armon/go-radix v1.0.0/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8= github.com/benbjohnson/clock v1.1.0 h1:Q92kusRqC1XV2MjkWETPvjJVqKetz1OzxZB7mHJLju8= +github.com/benbjohnson/clock v1.1.0/go.mod h1:J11/hYXuz8f4ySSvYwY0FKfm+ezbsZBKZxNJlLklBHA= github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q= github.com/beorn7/perks v1.0.0/go.mod h1:KWe93zE9D1o94FZ5RNwFwVgaQK1VOXiVxmqh+CedLV8= github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM= @@ -153,6 +154,7 @@ github.com/fatih/color v1.13.0/go.mod h1:kLAiJbzzSOZDVNGyDpeOxJ47H46qBXwg5ILebYF github.com/felixge/fgprof 
v0.9.3 h1:VvyZxILNuCiUCSXtPtYmmtGvb65nqXh2QFWc0Wpf2/g= github.com/felixge/fgprof v0.9.3/go.mod h1:RdbpDgzqYVh/T9fPELJyV7EYJuHB55UTEULNun8eiPw= github.com/frankban/quicktest v1.14.3 h1:FJKSZTDHjyhriyC81FLQ0LY93eSai0ZyR/ZIkd3ZUKE= +github.com/frankban/quicktest v1.14.3/go.mod h1:mgiwOwqx65TmIk1wJ6Q7wvnVMocbUorkibMOrVTHZps= github.com/fsnotify/fsnotify v1.5.4 h1:jRbGcIw6P2Meqdwuo0H1p6JVLbL5DHKAKlYndzMwVZI= github.com/fsnotify/fsnotify v1.5.4/go.mod h1:OVB6XrOHzAwXMpEM7uPOzcehqUV2UqJxmVXmkdnm1bU= github.com/gabriel-vasile/mimetype v1.4.2 h1:w5qFW6JKBz9Y393Y4q372O9A7cUSequkh1Q7OhCmWKU= @@ -198,6 +200,7 @@ github.com/go-openapi/swag v0.19.15 h1:D2NRCBzS9/pEY3gP9Nl8aDqGUcPFrwG2p+CNFrLyr github.com/go-openapi/swag v0.19.15/go.mod h1:QYRuS/SOXUCsnplDa677K7+DxSOj6IPNl/eQntq43wQ= github.com/go-playground/assert/v2 v2.0.1/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4= github.com/go-playground/assert/v2 v2.2.0 h1:JvknZsQTYeFEAhQwI4qEt9cyV5ONwRHC+lYKSsYSR8s= +github.com/go-playground/assert/v2 v2.2.0/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4= github.com/go-playground/locales v0.12.1/go.mod h1:IUMDtCfWo/w/mtMfIE/IG2K+Ey3ygWanZIBtBW0W2TM= github.com/go-playground/locales v0.14.0/go.mod h1:sawfccIbzZTqEDETgFXqTho0QybSa7l++s0DH+LDiLs= github.com/go-playground/locales v0.14.1 h1:EWaQ/wswjilfKLTECiXz7Rh+3BjFhfDFKv/oXslEjJA= @@ -267,6 +270,7 @@ github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/ github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI= +github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs= github.com/google/martian/v3 v3.0.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0= @@ -318,6 +322,7 @@ github.com/hashicorp/go-msgpack v0.5.3/go.mod h1:ahLV/dePpqEmjfWmKiqvPkv/twdG7iP github.com/hashicorp/go-multierror v1.0.0/go.mod h1:dHtQlpGsu+cZNNAkkCN/P3hoUDHhCYQXV3UM06sGGrk= github.com/hashicorp/go-multierror v1.1.0/go.mod h1:spPvp8C1qA32ftKqdAHm4hHTbPw+vmowP0z+KUhOZdA= github.com/hashicorp/go-multierror v1.1.1 h1:H5DkEtf6CXdFp0N0Em5UCwQpXMWke8IA0+lD48awMYo= +github.com/hashicorp/go-multierror v1.1.1/go.mod h1:iw975J/qwKPdAO1clOe2L8331t/9/fmwbPZ6JB6eMoM= github.com/hashicorp/go-retryablehttp v0.5.3/go.mod h1:9B5zBasrRhHXnJnui7y6sL7es7NDiJgTc6Er0maI1Xs= github.com/hashicorp/go-rootcerts v1.0.2 h1:jzhAVGtqPKbwpyCPELlgNWhE1znq+qwJtW5Oi2viEzc= github.com/hashicorp/go-rootcerts v1.0.2/go.mod h1:pqUvnprVnM5bf7AOirdbb01K4ccR319Vf4pU3K5EGc8= @@ -327,6 +332,7 @@ github.com/hashicorp/go-syslog v1.0.0/go.mod h1:qPfqrKkXGihmCqbJM2mZgkZGvKG1dFdv github.com/hashicorp/go-uuid v1.0.0/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= github.com/hashicorp/go-uuid v1.0.1/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= github.com/hashicorp/go-uuid v1.0.3 h1:2gKiV6YVmrJ1i2CKKa9obLvRieoRGviZFL26PcT/Co8= +github.com/hashicorp/go-uuid v1.0.3/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= github.com/hashicorp/golang-lru v0.5.4 
h1:YDjusn29QI/Das2iO9M0BHnIbxPeyuCHsjMW+lJfyTc= @@ -545,6 +551,7 @@ github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+ github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo= github.com/stretchr/objx v0.5.2 h1:xuMeJ0Sdp5ZMRXx/aWO6RZxdr3beISkG5/G/aIRr3pY= +github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/8L+MA= github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= @@ -592,8 +599,10 @@ github.com/yuin/goldmark v1.4.0/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1 github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= github.com/yusufpapurcu/wmi v1.2.3 h1:E1ctvB7uKFMOJw3fdOW32DwGE9I7t++CRUEMKvFoFiw= github.com/yusufpapurcu/wmi v1.2.3/go.mod h1:SBZ9tNy3G9/m5Oi98Zks0QjeHVDvuK0qfxQmPyzfmi0= -github.com/zhufuyi/sponge v1.9.2 h1:6zqJCWhcsnHfygN09qGgh2YRMwbZHNyiFPwx+9FYuJ0= -github.com/zhufuyi/sponge v1.9.2/go.mod h1:Dc/e5m8wIDyhD9hu/fF8GCiFjhXbxqBadfvTZZ1rbfY= +github.com/zhufuyi/sponge v1.9.3 h1:Zv2Gr756fau41XSEnWaZlcVljfjLZO1WZNKsrQ63ENM= +github.com/zhufuyi/sponge v1.9.3/go.mod h1:Dc/e5m8wIDyhD9hu/fF8GCiFjhXbxqBadfvTZZ1rbfY= +github.com/zhufuyi/sponge v1.10.1 h1:feB75axQtMJ5EkC9M6WgUC+VIqZlB+K6zmNul9xlB0c= +github.com/zhufuyi/sponge v1.10.1/go.mod h1:g6oDmwPTUrCL9+RJbSbjQEtmc0yhJ1vUD5rCdF1QNG4= go.etcd.io/etcd/api/v3 v3.5.4 h1:OHVyt3TopwtUQ2GKdd5wu3PmmipR4FTwCqoEjSyRdIc= go.etcd.io/etcd/api/v3 v3.5.4/go.mod h1:5GB2vv4A4AOn3yk7MftYGHkUfGtDHnEraIjym4dYz5A= go.etcd.io/etcd/client/pkg/v3 v3.5.4 h1:lrneYvz923dvC14R54XcA7FXoZ3mlGZAgmwhfm7HqOg= @@ -690,6 +699,7 @@ golang.org/x/mod v0.4.1/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= golang.org/x/mod v0.17.0 h1:zY54UmvipHiNd+pm+m0x9KhZ9hl1/7QNMyxXbc6ICqA= +golang.org/x/mod v0.17.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c= golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= diff --git a/_11_sponge-dtm-service-registration-discovery/go.mod b/_11_sponge-dtm-service-registration-discovery/go.mod index 82993a6..2dbe2f1 100644 --- a/_11_sponge-dtm-service-registration-discovery/go.mod +++ b/_11_sponge-dtm-service-registration-discovery/go.mod @@ -1,13 +1,13 @@ module transfer -go 1.19 +go 1.21 require ( github.com/dtm-labs/client v1.18.7 github.com/grpc-ecosystem/go-grpc-middleware v1.3.0 github.com/stretchr/testify v1.9.0 github.com/zhufuyi/dtmdriver-sponge v0.0.2 - github.com/zhufuyi/sponge v1.9.2 + github.com/zhufuyi/sponge v1.10.1 google.golang.org/grpc v1.61.0 google.golang.org/protobuf v1.34.2 ) diff --git a/_11_sponge-dtm-service-registration-discovery/go.sum b/_11_sponge-dtm-service-registration-discovery/go.sum index 3590504..cb8c928 100644 --- a/_11_sponge-dtm-service-registration-discovery/go.sum +++ 
b/_11_sponge-dtm-service-registration-discovery/go.sum @@ -160,6 +160,7 @@ github.com/fatih/color v1.14.1/go.mod h1:2oHN61fhTpgcxD3TSWCgKDiH1+x4OiDVVGH8Wlg github.com/felixge/fgprof v0.9.3 h1:VvyZxILNuCiUCSXtPtYmmtGvb65nqXh2QFWc0Wpf2/g= github.com/felixge/fgprof v0.9.3/go.mod h1:RdbpDgzqYVh/T9fPELJyV7EYJuHB55UTEULNun8eiPw= github.com/frankban/quicktest v1.14.3 h1:FJKSZTDHjyhriyC81FLQ0LY93eSai0ZyR/ZIkd3ZUKE= +github.com/frankban/quicktest v1.14.3/go.mod h1:mgiwOwqx65TmIk1wJ6Q7wvnVMocbUorkibMOrVTHZps= github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo= github.com/fsnotify/fsnotify v1.4.9/go.mod h1:znqG4EE+3YCdAaPaxE2ZRY/06pZUdp0tY4IgpuI1SZQ= github.com/fsnotify/fsnotify v1.5.4 h1:jRbGcIw6P2Meqdwuo0H1p6JVLbL5DHKAKlYndzMwVZI= @@ -190,6 +191,7 @@ github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre github.com/go-ole/go-ole v1.2.6 h1:/Fpf6oFPoeFik9ty7siob0G6Ke8QvQEuVcuChpwXzpY= github.com/go-ole/go-ole v1.2.6/go.mod h1:pprOEPIfldk/42T2oK7lQ4v4JSDwmV0As9GaiUsvbm0= github.com/go-playground/assert/v2 v2.2.0 h1:JvknZsQTYeFEAhQwI4qEt9cyV5ONwRHC+lYKSsYSR8s= +github.com/go-playground/assert/v2 v2.2.0/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4= github.com/go-playground/locales v0.14.1 h1:EWaQ/wswjilfKLTECiXz7Rh+3BjFhfDFKv/oXslEjJA= github.com/go-playground/locales v0.14.1/go.mod h1:hxrqLVvrK65+Rwrd5Fc6F2O76J/NuW9t0sjnWqG1slY= github.com/go-playground/universal-translator v0.18.1 h1:Bcnm0ZwsGyWbCzImXv+pAJnYK9S473LQFuzCbDbfSFY= @@ -261,6 +263,7 @@ github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/ github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI= +github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs= github.com/google/martian/v3 v3.0.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0= @@ -294,6 +297,7 @@ github.com/grpc-ecosystem/grpc-gateway v1.16.0/go.mod h1:BDjrQk3hbvj6Nolgz8mAMFb github.com/hashicorp/consul/api v1.19.1 h1:GLeK1WD4VIRvt4wRhQKHFudztEkRb8pDs+uRiJgNwes= github.com/hashicorp/consul/api v1.19.1/go.mod h1:jAt316eYgWGNLJtxkMQrcqRpuDE/kFJdqkEFwRXFv8U= github.com/hashicorp/consul/sdk v0.13.1 h1:EygWVWWMczTzXGpO93awkHFzfUka6hLYJ0qhETd+6lY= +github.com/hashicorp/consul/sdk v0.13.1/go.mod h1:SW/mM4LbKfqmMvcFu8v+eiQQ7oitXEFeiBe9StxERb0= github.com/hashicorp/errwrap v1.0.0 h1:hLrqtEDnRye3+sgx6z4qVLNuviH3MR5aQ0ykNJa/UYA= github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= github.com/hashicorp/go-cleanhttp v0.5.0/go.mod h1:JpRdi6/HCYpAwUzNwuwqhbovhLtngrth3wmdIIUrZ80= @@ -309,16 +313,20 @@ github.com/hashicorp/go-msgpack v0.5.3/go.mod h1:ahLV/dePpqEmjfWmKiqvPkv/twdG7iP github.com/hashicorp/go-multierror v1.0.0/go.mod h1:dHtQlpGsu+cZNNAkkCN/P3hoUDHhCYQXV3UM06sGGrk= github.com/hashicorp/go-multierror v1.1.0/go.mod h1:spPvp8C1qA32ftKqdAHm4hHTbPw+vmowP0z+KUhOZdA= github.com/hashicorp/go-multierror v1.1.1 h1:H5DkEtf6CXdFp0N0Em5UCwQpXMWke8IA0+lD48awMYo= +github.com/hashicorp/go-multierror v1.1.1/go.mod h1:iw975J/qwKPdAO1clOe2L8331t/9/fmwbPZ6JB6eMoM= github.com/hashicorp/go-retryablehttp v0.5.3/go.mod 
h1:9B5zBasrRhHXnJnui7y6sL7es7NDiJgTc6Er0maI1Xs= github.com/hashicorp/go-rootcerts v1.0.2 h1:jzhAVGtqPKbwpyCPELlgNWhE1znq+qwJtW5Oi2viEzc= github.com/hashicorp/go-rootcerts v1.0.2/go.mod h1:pqUvnprVnM5bf7AOirdbb01K4ccR319Vf4pU3K5EGc8= github.com/hashicorp/go-sockaddr v1.0.0/go.mod h1:7Xibr9yA9JjQq1JpNB2Vw7kxv8xerXegt+ozgdvDeDU= github.com/hashicorp/go-sockaddr v1.0.2 h1:ztczhD1jLxIRjVejw8gFomI1BQZOe2WoVOu0SyteCQc= +github.com/hashicorp/go-sockaddr v1.0.2/go.mod h1:rB4wwRAUzs07qva3c5SdrY/NEtAUjGlgmH/UkBUC97A= github.com/hashicorp/go-syslog v1.0.0/go.mod h1:qPfqrKkXGihmCqbJM2mZgkZGvKG1dFdvsLplgctolz4= github.com/hashicorp/go-uuid v1.0.0/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= github.com/hashicorp/go-uuid v1.0.1/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= github.com/hashicorp/go-uuid v1.0.3 h1:2gKiV6YVmrJ1i2CKKa9obLvRieoRGviZFL26PcT/Co8= +github.com/hashicorp/go-uuid v1.0.3/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= github.com/hashicorp/go-version v1.4.0 h1:aAQzgqIrRKRa7w75CKpbBxYsmUoPjzVm1W59ca1L0J4= +github.com/hashicorp/go-version v1.4.0/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA= github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= github.com/hashicorp/golang-lru v0.5.4 h1:YDjusn29QI/Das2iO9M0BHnIbxPeyuCHsjMW+lJfyTc= @@ -373,9 +381,11 @@ github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFB github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= github.com/kr/pretty v0.2.1/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI= github.com/kr/pretty v0.3.0 h1:WgNl7dwNpEZ6jJ9k1snq4pZsg7DOEN8hP9Xw0Tsjwk0= +github.com/kr/pretty v0.3.0/go.mod h1:640gp4NfQd8pI5XOwp5fnNeVWj67G7CFk/SaSQn7NBk= github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= +github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= github.com/leodido/go-urn v1.2.4 h1:XlAE/cm/ms7TE/VMVoduSpNBoyc2dOxHs5MZSwAN63Q= github.com/leodido/go-urn v1.2.4/go.mod h1:7ZrI8mTSeBSHl/UaRyKQW1qZeMgak41ANeCNaVckg+4= github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0 h1:6E+4a0GO5zZEnZ81pIr0yLvtUWk2if982qA3F3QD6H4= @@ -526,6 +536,7 @@ github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+ github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo= github.com/stretchr/objx v0.5.2 h1:xuMeJ0Sdp5ZMRXx/aWO6RZxdr3beISkG5/G/aIRr3pY= +github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/8L+MA= github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= @@ -573,8 +584,10 @@ github.com/yusufpapurcu/wmi v1.2.3 h1:E1ctvB7uKFMOJw3fdOW32DwGE9I7t++CRUEMKvFoFi github.com/yusufpapurcu/wmi v1.2.3/go.mod h1:SBZ9tNy3G9/m5Oi98Zks0QjeHVDvuK0qfxQmPyzfmi0= github.com/zhufuyi/dtmdriver-sponge v0.0.2 h1:YCXEGtO+9ThVzOlbbcOUPai69CfBUcNm6KuYFGDySec= github.com/zhufuyi/dtmdriver-sponge v0.0.2/go.mod h1:IIXUNeJis54f2CL5CNJqIpqRAdRKNpQdsA1LQMYv+vo= 
-github.com/zhufuyi/sponge v1.9.2 h1:6zqJCWhcsnHfygN09qGgh2YRMwbZHNyiFPwx+9FYuJ0= -github.com/zhufuyi/sponge v1.9.2/go.mod h1:Dc/e5m8wIDyhD9hu/fF8GCiFjhXbxqBadfvTZZ1rbfY= +github.com/zhufuyi/sponge v1.9.3 h1:Zv2Gr756fau41XSEnWaZlcVljfjLZO1WZNKsrQ63ENM= +github.com/zhufuyi/sponge v1.9.3/go.mod h1:Dc/e5m8wIDyhD9hu/fF8GCiFjhXbxqBadfvTZZ1rbfY= +github.com/zhufuyi/sponge v1.10.1 h1:feB75axQtMJ5EkC9M6WgUC+VIqZlB+K6zmNul9xlB0c= +github.com/zhufuyi/sponge v1.10.1/go.mod h1:g6oDmwPTUrCL9+RJbSbjQEtmc0yhJ1vUD5rCdF1QNG4= go.etcd.io/etcd/api/v3 v3.5.5 h1:BX4JIbQ7hl7+jL+g+2j5UAr0o1bctCm6/Ct+ArBGkf0= go.etcd.io/etcd/api/v3 v3.5.5/go.mod h1:KFtNaxGDw4Yx/BA4iPPwevUTAuqcsPxzyX8PHydchN8= go.etcd.io/etcd/client/pkg/v3 v3.5.5 h1:9S0JUVvmrVl7wCF39iTQthdaaNIiAaQbmK75ogO6GU8= diff --git a/_12_sponge-dtm-flashSale/README.md b/_12_sponge-dtm-flashSale/README.md index e32d0e1..9466e50 100644 --- a/_12_sponge-dtm-flashSale/README.md +++ b/_12_sponge-dtm-flashSale/README.md @@ -1,6 +1,6 @@ ## Sponge DTM Flash Sale -## Overview +### Overview -- [**http**](http): http Flash Sale service. -- [**grpc+http**](grpc+http): Flash Sale service that supports both grpc and http protocols. +- [**http**](http): Example of HTTP service with flash sale. +- [**grpc+http**](grpc+http): Example of gRPC+HTTP service with flash sale. diff --git a/_12_sponge-dtm-flashSale/grpc+http/README.md b/_12_sponge-dtm-flashSale/grpc+http/README.md index 5a12aa6..7ac26bc 100644 --- a/_12_sponge-dtm-flashSale/grpc+http/README.md +++ b/_12_sponge-dtm-flashSale/grpc+http/README.md @@ -21,7 +21,7 @@ This is a sample project that demonstrates how to use Sponge to implement a flas 4. **Compile and Start the Service** - you can compile and run the service using the following command: ```bash - cd cmd/stock + cd cmd/flashSale go run main.go ``` - Alternatively, if sponge is installed, you can directly run the service with: diff --git a/_12_sponge-dtm-flashSale/grpc+http/go.mod b/_12_sponge-dtm-flashSale/grpc+http/go.mod index a03a6c5..7df7255 100644 --- a/_12_sponge-dtm-flashSale/grpc+http/go.mod +++ b/_12_sponge-dtm-flashSale/grpc+http/go.mod @@ -10,7 +10,7 @@ require ( github.com/jinzhu/copier v0.3.5 github.com/stretchr/testify v1.9.0 github.com/zhufuyi/dtmdriver-sponge v1.0.0 - github.com/zhufuyi/sponge v1.9.2 + github.com/zhufuyi/sponge v1.10.1 go.uber.org/zap v1.24.0 google.golang.org/grpc v1.61.0 google.golang.org/protobuf v1.34.2 diff --git a/_12_sponge-dtm-flashSale/grpc+http/go.sum b/_12_sponge-dtm-flashSale/grpc+http/go.sum index 88ec541..64874b1 100644 --- a/_12_sponge-dtm-flashSale/grpc+http/go.sum +++ b/_12_sponge-dtm-flashSale/grpc+http/go.sum @@ -656,8 +656,10 @@ github.com/yusufpapurcu/wmi v1.2.3 h1:E1ctvB7uKFMOJw3fdOW32DwGE9I7t++CRUEMKvFoFi github.com/yusufpapurcu/wmi v1.2.3/go.mod h1:SBZ9tNy3G9/m5Oi98Zks0QjeHVDvuK0qfxQmPyzfmi0= github.com/zhufuyi/dtmdriver-sponge v1.0.0 h1:4cMdQAQAVrGVYPt89u1/kJlF/gVUTOcJROkE03sddk0= github.com/zhufuyi/dtmdriver-sponge v1.0.0/go.mod h1:eQz+TsQnW4BX0cuXlwbiQxwl8u19o1BnuNlntFxDmAU= -github.com/zhufuyi/sponge v1.9.2 h1:6zqJCWhcsnHfygN09qGgh2YRMwbZHNyiFPwx+9FYuJ0= -github.com/zhufuyi/sponge v1.9.2/go.mod h1:Dc/e5m8wIDyhD9hu/fF8GCiFjhXbxqBadfvTZZ1rbfY= +github.com/zhufuyi/sponge v1.9.3 h1:Zv2Gr756fau41XSEnWaZlcVljfjLZO1WZNKsrQ63ENM= +github.com/zhufuyi/sponge v1.9.3/go.mod h1:Dc/e5m8wIDyhD9hu/fF8GCiFjhXbxqBadfvTZZ1rbfY= +github.com/zhufuyi/sponge v1.10.1 h1:feB75axQtMJ5EkC9M6WgUC+VIqZlB+K6zmNul9xlB0c= +github.com/zhufuyi/sponge v1.10.1/go.mod h1:g6oDmwPTUrCL9+RJbSbjQEtmc0yhJ1vUD5rCdF1QNG4= 
go.etcd.io/etcd/api/v3 v3.5.5 h1:BX4JIbQ7hl7+jL+g+2j5UAr0o1bctCm6/Ct+ArBGkf0= go.etcd.io/etcd/api/v3 v3.5.5/go.mod h1:KFtNaxGDw4Yx/BA4iPPwevUTAuqcsPxzyX8PHydchN8= go.etcd.io/etcd/client/pkg/v3 v3.5.5 h1:9S0JUVvmrVl7wCF39iTQthdaaNIiAaQbmK75ogO6GU8= diff --git a/_12_sponge-dtm-flashSale/grpc+http/internal/service/flashSale.go b/_12_sponge-dtm-flashSale/grpc+http/internal/service/flashSale.go index 908ac7b..85f39be 100644 --- a/_12_sponge-dtm-flashSale/grpc+http/internal/service/flashSale.go +++ b/_12_sponge-dtm-flashSale/grpc+http/internal/service/flashSale.go @@ -139,9 +139,7 @@ func (s *flashSale) SendSubmitOrderNotify(ctx context.Context, req *flashSaleV1. } func newGid() string { - // 年月日时分秒毫秒微妙+随机数, 长度为26,可以使用uuid、雪花算法等替换 - dt := time.Now().Format("20060102150405.000000") - return dt[:14] + dt[15:] + krand.String(krand.R_NUM, 6) + return krand.NewSeriesID() } func getStockKey(productID uint64) string { diff --git a/_12_sponge-dtm-flashSale/http/README.md b/_12_sponge-dtm-flashSale/http/README.md index 8ed2a97..f50bd7e 100644 --- a/_12_sponge-dtm-flashSale/http/README.md +++ b/_12_sponge-dtm-flashSale/http/README.md @@ -19,7 +19,7 @@ This is a sample project that demonstrates how to use Sponge to implement a flas 4. **Compile and Start the Service** - you can compile and run the service using the following command: ```bash - cd cmd/stock + cd cmd/flashSale go run main.go ``` - Alternatively, if sponge is installed, you can directly run the service with: diff --git a/_12_sponge-dtm-flashSale/http/go.mod b/_12_sponge-dtm-flashSale/http/go.mod index 9042848..6e69ab4 100644 --- a/_12_sponge-dtm-flashSale/http/go.mod +++ b/_12_sponge-dtm-flashSale/http/go.mod @@ -1,6 +1,6 @@ module flashSale -go 1.20 +go 1.21 require ( github.com/dtm-labs/dtmcli v1.15.0 @@ -8,7 +8,7 @@ require ( github.com/go-redis/redis/v8 v8.11.5 github.com/jinzhu/copier v0.3.5 github.com/stretchr/testify v1.9.0 - github.com/zhufuyi/sponge v1.9.2 + github.com/zhufuyi/sponge v1.10.1 go.uber.org/zap v1.24.0 google.golang.org/protobuf v1.34.2 ) diff --git a/_12_sponge-dtm-flashSale/http/go.sum b/_12_sponge-dtm-flashSale/http/go.sum index 24d5a06..59e9ce5 100644 --- a/_12_sponge-dtm-flashSale/http/go.sum +++ b/_12_sponge-dtm-flashSale/http/go.sum @@ -39,6 +39,7 @@ dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7 github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= github.com/BurntSushi/toml v1.0.0/go.mod h1:CxXYINrC8qIiEnFrOxCa7Jy5BFHlXnUU2pbicEuybxQ= github.com/BurntSushi/toml v1.1.0 h1:ksErzDEI1khOiGPgpwuI7x2ebx/uXQNw7xJpn9Eq1+I= +github.com/BurntSushi/toml v1.1.0/go.mod h1:CxXYINrC8qIiEnFrOxCa7Jy5BFHlXnUU2pbicEuybxQ= github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo= github.com/DataDog/datadog-go v3.2.0+incompatible/go.mod h1:LButxg5PwREeZtORoXG3tL4fMGNddJ+vMq1mwgfaqoQ= github.com/KyleBanks/depth v1.2.1 h1:5h8fQADFrWtarTdtDudMmGsC7GPbOAu6RVB3ffsVFHc= @@ -132,6 +133,7 @@ github.com/fatih/color v1.13.0/go.mod h1:kLAiJbzzSOZDVNGyDpeOxJ47H46qBXwg5ILebYF github.com/felixge/fgprof v0.9.3 h1:VvyZxILNuCiUCSXtPtYmmtGvb65nqXh2QFWc0Wpf2/g= github.com/felixge/fgprof v0.9.3/go.mod h1:RdbpDgzqYVh/T9fPELJyV7EYJuHB55UTEULNun8eiPw= github.com/frankban/quicktest v1.14.3 h1:FJKSZTDHjyhriyC81FLQ0LY93eSai0ZyR/ZIkd3ZUKE= +github.com/frankban/quicktest v1.14.3/go.mod h1:mgiwOwqx65TmIk1wJ6Q7wvnVMocbUorkibMOrVTHZps= github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo= 
github.com/fsnotify/fsnotify v1.4.9/go.mod h1:znqG4EE+3YCdAaPaxE2ZRY/06pZUdp0tY4IgpuI1SZQ= github.com/fsnotify/fsnotify v1.5.4 h1:jRbGcIw6P2Meqdwuo0H1p6JVLbL5DHKAKlYndzMwVZI= @@ -179,6 +181,7 @@ github.com/go-openapi/swag v0.19.15 h1:D2NRCBzS9/pEY3gP9Nl8aDqGUcPFrwG2p+CNFrLyr github.com/go-openapi/swag v0.19.15/go.mod h1:QYRuS/SOXUCsnplDa677K7+DxSOj6IPNl/eQntq43wQ= github.com/go-playground/assert/v2 v2.0.1/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4= github.com/go-playground/assert/v2 v2.2.0 h1:JvknZsQTYeFEAhQwI4qEt9cyV5ONwRHC+lYKSsYSR8s= +github.com/go-playground/assert/v2 v2.2.0/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4= github.com/go-playground/locales v0.12.1/go.mod h1:IUMDtCfWo/w/mtMfIE/IG2K+Ey3ygWanZIBtBW0W2TM= github.com/go-playground/locales v0.14.0/go.mod h1:sawfccIbzZTqEDETgFXqTho0QybSa7l++s0DH+LDiLs= github.com/go-playground/locales v0.14.1 h1:EWaQ/wswjilfKLTECiXz7Rh+3BjFhfDFKv/oXslEjJA= @@ -257,6 +260,7 @@ github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/ github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI= +github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs= github.com/google/martian/v3 v3.0.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0= @@ -302,6 +306,7 @@ github.com/hashicorp/go-msgpack v0.5.3/go.mod h1:ahLV/dePpqEmjfWmKiqvPkv/twdG7iP github.com/hashicorp/go-multierror v1.0.0/go.mod h1:dHtQlpGsu+cZNNAkkCN/P3hoUDHhCYQXV3UM06sGGrk= github.com/hashicorp/go-multierror v1.1.0/go.mod h1:spPvp8C1qA32ftKqdAHm4hHTbPw+vmowP0z+KUhOZdA= github.com/hashicorp/go-multierror v1.1.1 h1:H5DkEtf6CXdFp0N0Em5UCwQpXMWke8IA0+lD48awMYo= +github.com/hashicorp/go-multierror v1.1.1/go.mod h1:iw975J/qwKPdAO1clOe2L8331t/9/fmwbPZ6JB6eMoM= github.com/hashicorp/go-retryablehttp v0.5.3/go.mod h1:9B5zBasrRhHXnJnui7y6sL7es7NDiJgTc6Er0maI1Xs= github.com/hashicorp/go-rootcerts v1.0.2 h1:jzhAVGtqPKbwpyCPELlgNWhE1znq+qwJtW5Oi2viEzc= github.com/hashicorp/go-rootcerts v1.0.2/go.mod h1:pqUvnprVnM5bf7AOirdbb01K4ccR319Vf4pU3K5EGc8= @@ -311,6 +316,7 @@ github.com/hashicorp/go-syslog v1.0.0/go.mod h1:qPfqrKkXGihmCqbJM2mZgkZGvKG1dFdv github.com/hashicorp/go-uuid v1.0.0/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= github.com/hashicorp/go-uuid v1.0.1/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= github.com/hashicorp/go-uuid v1.0.3 h1:2gKiV6YVmrJ1i2CKKa9obLvRieoRGviZFL26PcT/Co8= +github.com/hashicorp/go-uuid v1.0.3/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= github.com/hashicorp/golang-lru v0.5.4 h1:YDjusn29QI/Das2iO9M0BHnIbxPeyuCHsjMW+lJfyTc= @@ -433,10 +439,12 @@ github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+W github.com/onsi/ginkgo v1.12.1/go.mod h1:zj2OWP4+oCPe1qIXoGWkgMRwljMUYCdkwsT2108oapk= github.com/onsi/ginkgo v1.16.4/go.mod h1:dX+/inL/fNMqNlz0e9LfyB9TswhZpCVdJM/Z6Vvnwo0= github.com/onsi/ginkgo v1.16.5 h1:8xi0RTUf59SOSfEtZMvwTvXYMzG4gV23XVHOZiXNtnE= +github.com/onsi/ginkgo v1.16.5/go.mod 
h1:+E8gABHa3K6zRBolWtd+ROzc/U5bkGt0FwiG042wbpU= github.com/onsi/gomega v1.7.1/go.mod h1:XdKZgCCFLUoM/7CFJVPcG8C1xQ1AJ0vpAezJrB7JYyY= github.com/onsi/gomega v1.10.1/go.mod h1:iN09h71vgCQne3DLsj+A5owkum+a2tYe+TOCB1ybHNo= github.com/onsi/gomega v1.16.0/go.mod h1:HnhC7FXeEQY45zxNK3PPoIUhzk/80Xly9PcubAlGdZY= github.com/onsi/gomega v1.18.1 h1:M1GfJqGRrBrrGGsbxzV5dqM2U2ApXefZCQpkukxYRLE= +github.com/onsi/gomega v1.18.1/go.mod h1:0q+aL8jAiMXy9hbwj2mr5GziHiwhAIQpFmmtT5hitRs= github.com/otiai10/copy v1.7.0/go.mod h1:rmRl6QPdJj6EiUqXQ/4Nn2lLXoNQjFCQbbNrxgc/t3U= github.com/otiai10/curr v0.0.0-20150429015615-9b4961190c95/go.mod h1:9qAhocn7zKJG+0mI8eUu6xqkFDYS2kb2saOteoSB3cE= github.com/otiai10/curr v1.0.0/go.mod h1:LskTG5wDwr8Rs+nNQ+1LlxRjAtTZZjtJW4rMXl6j4vs= @@ -528,6 +536,7 @@ github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+ github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo= github.com/stretchr/objx v0.5.2 h1:xuMeJ0Sdp5ZMRXx/aWO6RZxdr3beISkG5/G/aIRr3pY= +github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/8L+MA= github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= @@ -586,8 +595,10 @@ github.com/yuin/goldmark v1.4.0/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1 github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= github.com/yusufpapurcu/wmi v1.2.3 h1:E1ctvB7uKFMOJw3fdOW32DwGE9I7t++CRUEMKvFoFiw= github.com/yusufpapurcu/wmi v1.2.3/go.mod h1:SBZ9tNy3G9/m5Oi98Zks0QjeHVDvuK0qfxQmPyzfmi0= -github.com/zhufuyi/sponge v1.9.2 h1:6zqJCWhcsnHfygN09qGgh2YRMwbZHNyiFPwx+9FYuJ0= -github.com/zhufuyi/sponge v1.9.2/go.mod h1:Dc/e5m8wIDyhD9hu/fF8GCiFjhXbxqBadfvTZZ1rbfY= +github.com/zhufuyi/sponge v1.9.3 h1:Zv2Gr756fau41XSEnWaZlcVljfjLZO1WZNKsrQ63ENM= +github.com/zhufuyi/sponge v1.9.3/go.mod h1:Dc/e5m8wIDyhD9hu/fF8GCiFjhXbxqBadfvTZZ1rbfY= +github.com/zhufuyi/sponge v1.10.1 h1:feB75axQtMJ5EkC9M6WgUC+VIqZlB+K6zmNul9xlB0c= +github.com/zhufuyi/sponge v1.10.1/go.mod h1:g6oDmwPTUrCL9+RJbSbjQEtmc0yhJ1vUD5rCdF1QNG4= go.etcd.io/etcd/api/v3 v3.5.4 h1:OHVyt3TopwtUQ2GKdd5wu3PmmipR4FTwCqoEjSyRdIc= go.etcd.io/etcd/api/v3 v3.5.4/go.mod h1:5GB2vv4A4AOn3yk7MftYGHkUfGtDHnEraIjym4dYz5A= go.etcd.io/etcd/client/pkg/v3 v3.5.4 h1:lrneYvz923dvC14R54XcA7FXoZ3mlGZAgmwhfm7HqOg= @@ -684,6 +695,7 @@ golang.org/x/mod v0.4.1/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= golang.org/x/mod v0.17.0 h1:zY54UmvipHiNd+pm+m0x9KhZ9hl1/7QNMyxXbc6ICqA= +golang.org/x/mod v0.17.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c= golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= diff --git a/_12_sponge-dtm-flashSale/http/internal/handler/flashSale.go b/_12_sponge-dtm-flashSale/http/internal/handler/flashSale.go index b34de8f..9340614 100644 --- 
a/_12_sponge-dtm-flashSale/http/internal/handler/flashSale.go +++ b/_12_sponge-dtm-flashSale/http/internal/handler/flashSale.go @@ -131,9 +131,7 @@ func (h *flashSaleHandler) RedisQueryPrepared(ctx context.Context, req *flashSal } func newGid() string { - // 这里生成的订单号格式为:年月日时分秒毫秒微妙+随机数, 长度为26,可以使用uuid、雪花算法等替换 - dt := time.Now().Format("20060102150405.000000") - return dt[:14] + dt[15:] + krand.String(krand.R_NUM, 6) + return krand.NewSeriesID() } func getCallbackFlashSaleAddr() string { diff --git a/_13_sponge-dtm-cache/README.md b/_13_sponge-dtm-cache/README.md new file mode 100644 index 0000000..c8c79d7 --- /dev/null +++ b/_13_sponge-dtm-cache/README.md @@ -0,0 +1,6 @@ +## Sponge DTM Cache Consistency + +### Overview + +- [**http**](http): example of HTTP service with cache consistency. +- [**grpc+http**](grpc+http): Example of gRPC+HTTP service with cache consistency. diff --git a/a_micro-grpc-http-protobuf/.gitignore b/_13_sponge-dtm-cache/grpc+http/.gitignore similarity index 95% rename from a_micro-grpc-http-protobuf/.gitignore rename to _13_sponge-dtm-cache/grpc+http/.gitignore index 955faad..f91912d 100644 --- a/a_micro-grpc-http-protobuf/.gitignore +++ b/_13_sponge-dtm-cache/grpc+http/.gitignore @@ -22,5 +22,5 @@ dist/ *.ipr *.iws -cmd/user/user +cmd/stock/stock diff --git a/_13_sponge-dtm-cache/grpc+http/.golangci.yml b/_13_sponge-dtm-cache/grpc+http/.golangci.yml new file mode 100644 index 0000000..3994c5b --- /dev/null +++ b/_13_sponge-dtm-cache/grpc+http/.golangci.yml @@ -0,0 +1,342 @@ +# This file configures stock. + +run: + # timeout for analysis, e.g. 30s, 5m, default is 1m + timeout: 10m + # default concurrency is available CPU number + concurrency: 4 + # include test files or not, default is true + tests: false + # which dirs to skip: issues from them won't be reported; + # can use regexp here: generated.*, regexp is applied on full path; + # default value is empty list, but default dirs are skipped independently + # from this option's value (see skip-dirs-use-default). + skip-dirs: + - docs + - api + # which files to skip: they will be analyzed, but issues from them + # won't be reported. Default value is empty list, but there is + # no need to include all autogenerated files, we confidently recognize + # autogenerated files. If it's not please let us know. + skip-files: + - _test.go + + # exit code when at least one issue was found, default is 1 + issues-exit-code: 1 + + # list of build tags, all linters use it. Default is empty list. + build-tags: + - mytag + + # default is true. Enables skipping of directories: + # vendor$, third_party$, testdata$, examples$, Godeps$, builtin$ + skip-dirs-use-default: true + + +linters: + # please, do not use `enable-all`: it's deprecated and will be removed soon. 
+ # inverted configuration with `enable-all` and `disable` is not scalable during updates of golangci-lint + disable-all: true + enable: + - revive + - goimports + - gofmt + - unused + #- depguard + - dogsled + - errcheck + #- gochecknoinits + - goconst + - gocyclo + - gosimple + - govet + - lll + - misspell + - typecheck + - unconvert + - whitespace + - staticcheck + #- bodyclose + #- dupl + #- goprintffuncname + #- gosec + #- unparam + #- ineffassign + + +linters-settings: + revive: + rules: + - name: argument-limit + arguments: [ 8 ] + - name: atomic + - name: bare-return + - name: blank-imports + - name: bool-literal-in-expr + - name: call-to-gc + - name: confusing-naming + - name: confusing-results + - name: constant-logical-expr + - name: context-as-argument + - name: context-keys-type + - name: deep-exit + - name: defer + - name: dot-imports + - name: duplicated-imports + - name: early-return + - name: empty-block + #- name: empty-lines + - name: error-naming + - name: error-return + - name: error-strings + - name: errorf + - name: function-result-limit + arguments: [ 3 ] + - name: identical-branches + - name: if-return + - name: import-shadowing + - name: increment-decrement + - name: indent-error-flow + - name: modifies-parameter + - name: modifies-value-receiver + - name: package-comments + - name: range + - name: range-val-address + - name: range-val-in-closure + - name: receiver-naming + - name: redefines-builtin-id + - name: string-of-int + - name: struct-tag + - name: superfluous-else + - name: time-naming + - name: unconditional-recursion + - name: unexported-naming + - name: unnecessary-stmt + - name: unreachable-code + - name: unused-parameter + - name: var-declaration + - name: var-naming + - name: waitgroup-by-value + + dogsled: + # checks assignments with too many blank identifiers; default is 2 + max-blank-identifiers: 2 + + dupl: + # tokens count to trigger issue, 150 by default + threshold: 100 + + errcheck: + # report about not checking of errors in type assertions: `a := b.(MyStruct)`; + # default is false: such cases aren't reported by default. + check-type-assertions: false + + # report about assignment of errors to blank identifier: `num, _ := strconv.Atoi(numStr)`; + # default is false: such cases aren't reported by default. + check-blank: false + + # [deprecated] comma-separated list of pairs of the form pkg:regex + # the regex is used to ignore names within pkg. (default "fmt:.*"). 
+ # see https://github.com/kisielk/errcheck#the-deprecated-method for details + ignore: fmt:.*,io/ioutil:^Read.* + + # path to a file containing a list of functions to exclude from checking + # see https://github.com/kisielk/errcheck#excluding-functions for details + # exclude: /path/to/file.txt + funlen: + lines: 60 + statements: 40 + + gocognit: + # minimal code complexity to report, 30 by default (but we recommend 10-20) + min-complexity: 10 + + goconst: + # minimal length of string constant, 3 by default + min-len: 4 + # minimal occurrences count to trigger, 3 by default + min-occurrences: 4 + + gocyclo: + # minimal code complexity to report, 30 by default (but we recommend 10-20) + min-complexity: 20 + + godox: + # report any comments starting with keywords, this is useful for TODO or FIXME comments that + # might be left in the code accidentally and should be resolved before merging + keywords: # default keywords are TODO, BUG, and FIXME, these can be overwritten by this setting + - NOTE + - OPTIMIZE # marks code that should be optimized before merging + - HACK # marks hack-arounds that should be removed before merging + + gofmt: + # simplify code: gofmt with `-s` option, true by default + simplify: true + + goimports: + # put imports beginning with prefix after 3rd-party packages; + # it's a comma-separated list of prefixes + local-prefixes: stock + + gomnd: + settings: + mnd: + # the list of enabled checks, see https://github.com/tommy-muehle/go-mnd/#checks for description. + checks: argument,case,condition,operation,return,assign + + govet: + # report about shadowed variables + check-shadowing: true + + # settings per analyzer + settings: + printf: # analyzer name, run `go tool vet help` to see all analyzers + funcs: # run `go tool vet help printf` to see available settings for `printf` analyzer + - (github.com/golangci/golangci-lint/pkg/logutils.Log).Infof + - (github.com/golangci/golangci-lint/pkg/logutils.Log).Warnf + - (github.com/golangci/golangci-lint/pkg/logutils.Log).Errorf + - (github.com/golangci/golangci-lint/pkg/logutils.Log).Fatalf + + # enable or disable analyzers by name + enable: + - atomicalign + enable-all: false + disable: + - shadow + disable-all: false + + depguard: + list-type: blacklist + include-go-root: false + #packages: + # - github.com/user/name + #packages-with-error-message: + # specify an error message to output when a blacklisted package is used + # - github.com/user/name: "logging is allowed only by logutils.Log" + + lll: + # max line length, lines longer will be reported. Default is 120. + # '\t' is counted as 1 character by default, and can be changed with the tab-width option + line-length: 200 + # tab width in spaces. Default to 1. + tab-width: 1 + + maligned: + # print struct with more effective memory layout or not, false by default + suggest-new: true + + misspell: + # Correct spellings using locale preferences for US or UK. + # Default is to use a neutral variety of English. + # Setting locale to US will correct the British spelling of 'colour' to 'color'. + locale: US + ignore-words: + - someword + + nakedret: + # make an issue if func has more lines of code than this setting and it has naked returns; default is 30 + max-func-lines: 30 + + prealloc: + # XXX: we don't recommend using this linter before doing performance profiling. + # For most programs usage of prealloc will be a premature optimization. + + # Report preallocation suggestions only on simple loops that have no returns/breaks/continues/gotos in them. + # True by default. 
+ simple: true + range-loops: true # Report preallocation suggestions on range loops, true by default + for-loops: false # Report preallocation suggestions on for loops, false by default + + #rowserrcheck: + # packages: + # - github.com/user/name + + unparam: + # Inspect exported functions, default is false. Set to true if no external program/library imports your code. + # XXX: if you enable this setting, unparam will report a lot of false-positives in text editors: + # if it's called for subdir of a project it can't find external interfaces. All text editor integrations + # with golangci-lint call it on a directory with the changed file. + check-exported: false + + unused: + # treat code as a program (not a library) and report unused exported identifiers; default is false. + # XXX: if you enable this setting, unused will report a lot of false-positives in text editors: + # if it's called for subdir of a project it can't find funcs usages. All text editor integrations + # with golangci-lint call it on a directory with the changed file. + check-exported: false + + whitespace: + multi-if: false # Enforces newlines (or comments) after every multi-line if statement + multi-func: false # Enforces newlines (or comments) after every multi-line function signature + + wsl: + # If true append is only allowed to be cuddled if appending value is + # matching variables, fields or types on line above. Default is true. + strict-append: true + # Allow calls and assignments to be cuddled as long as the lines have any + # matching variables, fields or types. Default is true. + allow-assign-and-call: true + # Allow multiline assignments to be cuddled. Default is true. + allow-multiline-assign: true + # Allow declarations (var) to be cuddled. + allow-cuddle-declarations: false + # Allow trailing comments in ending of blocks + allow-trailing-comment: false + # Force newlines in end of case at this limit (0 = never). + force-case-trailing-whitespace: 0 + +issues: + # List of regexps of issue texts to exclude, empty list by default. + # But independently from this option we use default exclude patterns, + # it can be disabled by `exclude-use-default: false`. To list all + # excluded by default patterns execute `golangci-lint run --help` + exclude: + - abcdef + + # Excluding configuration per-path, per-linter, per-text and per-source + exclude-rules: + # Exclude some linters from running on tests files. + - path: _test\.go + linters: + - gocyclo + - errcheck + - dupl + - gosec + + # Exclude known linters from partially hard-vendored code, + # which is impossible to exclude via "nolint" comments. + - path: internal/hmac/ + text: "weak cryptographic primitive" + linters: + - gosec + + # Exclude lll issues for long lines with go:generate + - linters: + - lll + source: "^//go:generate " + + # Independently from option `exclude` we use default exclude patterns, + # it can be disabled by this option. To list all + # excluded by default patterns execute `golangci-lint run --help`. + # Default value for this option is true. + exclude-use-default: false + + # Maximum issues count per one linter. Set to 0 to disable. Default is 50. + max-issues-per-linter: 0 + + # Maximum count of issues with the same text. Set to 0 to disable. Default is 3. + max-same-issues: 0 + + # Show only new issues: if there are unstaged changes or untracked files, + # only those changes are analyzed, else only changes in HEAD~ are analyzed. + # It's a super-useful option for integration of golangci-lint into existing + # large codebase. 
It's not practical to fix all existing issues at the moment + # of integration: much better don't allow issues in new code. + # Default is false. + new: false + + # Show only new issues created after git revision `REV` + new-from-rev: "" + +service: + golangci-lint-version: 1.48.0 # use the fixed version to not introduce new linters unexpectedly diff --git a/_13_sponge-dtm-cache/grpc+http/Jenkinsfile b/_13_sponge-dtm-cache/grpc+http/Jenkinsfile new file mode 100644 index 0000000..cc76915 --- /dev/null +++ b/_13_sponge-dtm-cache/grpc+http/Jenkinsfile @@ -0,0 +1,200 @@ +pipeline { + agent any + + stages { + stage("Check Build Branch") { + steps { + echo "Checking build branch in progress ......" + script { + if (env.GIT_BRANCH ==~ /^v([0-9])+\.([0-9])+\.([0-9])+.*/) { + echo "building production environment, tag=${env.GIT_BRANCH}" + } else if (env.GIT_BRANCH ==~ /^test-([0-9])+\.([0-9])+\.([0-9])+.*/) { + echo "building test environment, tag=${env.GIT_BRANCH}" + } else if (env.GIT_BRANCH ==~ /(origin\/develop)/) { + echo "building development environment, /origin/develop" + } else { + echo "The build branch ${env.GIT_BRANCH} is not legal, allowing to build the development environment branch (/origin/develop), the test environment branch (e.g. test-1.0.0), and the production environment branch (e.g. v1.0.0)" + sh 'exit 1' + } + } + echo "Check build branch complete." + } + } + + stage("Check Code") { + steps { + echo "Checking code in progress ......" + sh 'make ci-lint' + echo "Check code complete." + } + } + + stage("Unit Testing") { + steps { + echo "Unit testing in progress ......" + sh 'make test' + echo "Unit testing complete." + } + } + + stage("Compile Code") { + steps { + echo "Compiling code in progress ......" + sh 'make build' + echo "compile code complete." + } + } + + stage("Build Image") { + steps { + echo "building image in progress ......" + script { + registryHost="" + tagName="" + if (env.GIT_BRANCH ==~ /^v([0-9])+\.([0-9])+\.([0-9])+.*/) { + if (env.PROD_REPO_HOST == null) { + echo "The value of environment variable PROD_REPO_HOST is empty, please set the value of PROD_REPO_HOST in [Jenkins Management] --> [System Settings] --> [Environment Variables]." + sh 'exit 1' + } + echo "Use the production environment image repository ${env.PROD_REPO_HOST}" + registryHost=env.PROD_REPO_HOST + tagName=env.GIT_BRANCH + } + else if (env.GIT_BRANCH ==~ /^test-([0-9])+\.([0-9])+\.([0-9])+.*/) { + if (env.TEST_REPO_HOST == null) { + echo "The value of environment variable TEST_REPO_HOST is empty, please set the value of TEST_REPO_HOST in [Jenkins Management] --> [System Settings] --> [Environment Variables]." + sh 'exit 1' + } + echo "Use the test environment image repository ${env.TEST_REPO_HOST}" + registryHost=env.TEST_REPO_HOST + tagName=env.GIT_BRANCH + } + else { + if (env.DEV_REPO_HOST == null) { + echo "The value of environment variable DEV_REPO_HOST is empty, please set the value of DEV_REPO_HOST in [Jenkins Management] --> [System Settings] --> [Environment Variables]." + sh 'exit 1' + } + echo "Using the development environment ${env.DEV_REPO_HOST}" + registryHost=env.DEV_REPO_HOST + } + sh "make image-build REPO_HOST=$registryHost TAG=$tagName" + } + echo "Build image complete" + } + } + + stage("Push Image") { + steps { + echo "pushing image in progress ......" 
+ script { + registryHost="" + tagName="" + if (env.GIT_BRANCH ==~ /^v([0-9])+\.([0-9])+\.([0-9])+.*/) { + if (env.PROD_REPO_HOST == null) { + echo "The value of environment variable PROD_REPO_HOST is empty, please set the value of PROD_REPO_HOST in [Jenkins Management] --> [System Settings] --> [Environment Variables]." + sh 'exit 1' + } + echo "Use the production environment image repository ${env.PROD_REPO_HOST}" + registryHost=env.PROD_REPO_HOST + tagName=env.GIT_BRANCH + } + else if (env.GIT_BRANCH ==~ /^test-([0-9])+\.([0-9])+\.([0-9])+.*/) { + if (env.TEST_REPO_HOST == null) { + echo "The value of environment variable TEST_REPO_HOST is empty, please set the value of TEST_REPO_HOST in [Jenkins Management] --> [System Settings] --> [Environment Variables]." + sh 'exit 1' + } + echo "Use the test environment image repository ${env.TEST_REPO_HOST}" + registryHost=env.TEST_REPO_HOST + tagName=env.GIT_BRANCH + } + else { + if (env.DEV_REPO_HOST == null) { + echo "The value of environment variable DEV_REPO_HOST is empty, please set the value of DEV_REPO_HOST in [Jenkins Management] --> [System Settings] --> [Environment Variables]." + sh 'exit 1' + } + echo "Using the development environment ${env.DEV_REPO_HOST}" + registryHost=env.DEV_REPO_HOST + } + sh "make image-push REPO_HOST=$registryHost TAG=$tagName" + } + echo "push image complete, clear image complete." + } + } + + stage("Deploy to k8s") { + when { expression { return env.GIT_BRANCH ==~ /(origin\/staging|origin\/develop)/ } } + steps { + echo "Deploying to k8s in progress ......" + sh 'make deploy-k8s' + echo "Deploy to k8s complete." + } + } + } + + post { + always { + echo 'One way or another, I have finished' + echo sh(returnStdout: true, script: 'env') + deleteDir() /* clean up our workspace */ + } + success { + SendDingding("success") + //SendEmail("success") + echo 'structure success' + } + failure { + SendDingding("failure") + //SendEmail("failure") + echo 'structure failure' + } + } +} + +// Notifications using dingding +void SendDingding(res) +{ + // Fill in the corresponding cell phone number and specify a person to be notified in the pinned group + tel_num="xxxxxxxxxxx" + dingding_url="https://oapi.dingtalk.com/robot/send\\?access_token\\=your dingding robot token" + + branchName="" + if (env.GIT_BRANCH ==~ /^v([0-9])+\.([0-9])+\.([0-9])+.*/) { + branchName="${env.SERVER_PLATFORM} production environment, tag=${env.GIT_BRANCH}, ${env.JOB_NAME}" + } + else if (env.GIT_BRANCH ==~ /^test-([0-9])+\.([0-9])+\.([0-9])+.*/){ + branchName="${env.SERVER_PLATFORM} test environment, tag=${env.GIT_BRANCH}, ${env.JOB_NAME}" + } + else { + branchName="${env.SERVER_PLATFORM} develop environment, branch=${env.GIT_BRANCH}, ${env.JOB_NAME}" + } + + json_msg="" + if( res == "success" ) { + json_msg='{\\"msgtype\\":\\"text\\",\\"text\\":{\\"content\\":\\"@' + tel_num +' [OK] ' + "${branchName} ${env.BUILD_NUMBER}th " + 'build success. \\"},\\"at\\":{\\"atMobiles\\":[\\"' + tel_num + '\\"],\\"isAtAll\\":false}}' + } + else { + json_msg='{\\"msgtype\\":\\"text\\",\\"text\\":{\\"content\\":\\"@' + tel_num +' [cry] ' + "${branchName} ${env.BUILD_NUMBER}th " + 'build failed, please deal with it promptly! 
\\"},\\"at\\":{\\"atMobiles\\":[\\"' + tel_num + '\\"],\\"isAtAll\\":false}}' + } + + post_header="Content-Type:application/json;charset=utf-8" + sh_cmd="curl -X POST " + dingding_url + " -H " + "\'" + post_header + "\'" + " -d " + "\"" + json_msg + "\"" + sh sh_cmd +} + +// Notifications using email +void SendEmail(res) +{ + emailAddr="xxx@xxx.com" + if( res == "success" ) + { + mail to: emailAddr, + subject: "Build Success: ${currentBuild.fullDisplayName}", + body: "\nJob name: ${env.JOB_NAME} ${env.BUILD_NUMBER}th build. \n\n For more information, please see: ${env.BUILD_URL}" + } + else + { + mail to: emailAddr, + subject: "Build Failed: ${currentBuild.fullDisplayName}", + body: "\nJob name: ${env.JOB_NAME} ${env.BUILD_NUMBER}th build. \n\n For more information, please see: ${env.BUILD_URL}" + } +} diff --git a/b_sponge-dtm-msg/Makefile b/_13_sponge-dtm-cache/grpc+http/Makefile similarity index 62% rename from b_sponge-dtm-msg/Makefile rename to _13_sponge-dtm-cache/grpc+http/Makefile index 501f443..5cf4b55 100644 --- a/b_sponge-dtm-msg/Makefile +++ b/_13_sponge-dtm-cache/grpc+http/Makefile @@ -1,6 +1,6 @@ SHELL := /bin/bash -PROJECT_NAME := "transfer" +PROJECT_NAME := "stock" PKG := "$(PROJECT_NAME)" PKG_LIST := $(shell go list ${PKG}/... | grep -v /vendor/ | grep -v /api/) @@ -8,37 +8,37 @@ PKG_LIST := $(shell go list ${PKG}/... | grep -v /vendor/ | grep -v /api/) .PHONY: ci-lint -# check code formatting, naming conventions, security, maintainability, etc. the rules in the .golangci.yml file +# Check code formatting, naming conventions, security, maintainability, etc. the rules in the .golangci.yml file ci-lint: @gofmt -s -w . golangci-lint run ./... .PHONY: test -# go test *_test.go files, the parameter -count=1 means that caching is disabled +# Test *_test.go files, the parameter -count=1 means that caching is disabled test: go test -count=1 -short ${PKG_LIST} .PHONY: cover -# generate test coverage +# Generate test coverage cover: go test -short -coverprofile=cover.out -covermode=atomic ${PKG_LIST} go tool cover -html=cover.out .PHONY: graph -# generate interactive visual function dependency graphs +# Generate interactive visual function dependency graphs graph: @echo "generating graph ......" - @cp -f cmd/transfer/main.go . - go-callvis -skipbrowser -format=svg -nostd -file=transfer transfer - @rm -f main.go transfer.gv + @cp -f cmd/stock/main.go . + go-callvis -skipbrowser -format=svg -nostd -file=stock stock + @rm -f main.go stock.gv .PHONY: proto -# generate *.go and template code by proto files, the default is all the proto files in the api directory. you can specify the proto file, multiple files are separated by commas, e.g. make proto FILES=api/user/v1/user.proto +# Generate *.go and template code by proto files, the default is all the proto files in the api directory. you can specify the proto file, multiple files are separated by commas, e.g. 
make proto FILES=api/user/v1/user.proto proto: @bash scripts/protoc.sh $(FILES) go mod tidy @@ -46,118 +46,124 @@ proto: .PHONY: proto-doc -# generate doc from *.proto files +# Generate doc from *.proto files proto-doc: @bash scripts/proto-doc.sh .PHONY: build -# build transfer for linux amd64 binary +# Build stock for linux amd64 binary build: - @echo "building 'transfer', linux binary file will output to 'cmd/transfer'" - @cd cmd/transfer && CGO_ENABLED=0 GOOS=linux GOARCH=amd64 go build + @echo "building 'stock', linux binary file will output to 'cmd/stock'" + @cd cmd/stock && CGO_ENABLED=0 GOOS=linux GOARCH=amd64 go build .PHONY: run -# build and run service +# Build and run service run: @bash scripts/run.sh .PHONY: run-nohup -# run service with nohup in local, if you want to stop the server, pass the parameter stop, e.g. make run-nohup CMD=stop +# Run service with nohup in local, if you want to stop the server, pass the parameter stop, e.g. make run-nohup CMD=stop run-nohup: @bash scripts/run-nohup.sh $(CMD) .PHONY: run-docker -# run service in local docker, if you want to update the service, run the make run-docker command again. +# Run service in local docker, if you want to update the service, run the make run-docker command again. run-docker: image-build-local @bash scripts/deploy-docker.sh .PHONY: binary-package -# packaged binary files +# Packaged binary files binary-package: build @bash scripts/binary-package.sh .PHONY: deploy-binary -# deploy binary to remote linux server, e.g. make deploy-binary USER=root PWD=123456 IP=192.168.1.10 +# Deploy binary to remote linux server, e.g. make deploy-binary USER=root PWD=123456 IP=192.168.1.10 deploy-binary: binary-package @expect scripts/deploy-binary.sh $(USER) $(PWD) $(IP) .PHONY: image-build-local -# build image for local docker, tag=latest, use binary files to build +# Build image for local docker, tag=latest, use binary files to build image-build-local: build @bash scripts/image-build-local.sh .PHONY: image-build -# build image for remote repositories, use binary files to build, e.g. make image-build REPO_HOST=addr TAG=latest +# Build image for remote repositories, use binary files to build, e.g. make image-build REPO_HOST=addr TAG=latest image-build: @bash scripts/image-build.sh $(REPO_HOST) $(TAG) .PHONY: image-build2 -# build image for remote repositories, phase II build, e.g. make image-build2 REPO_HOST=addr TAG=latest +# Build image for remote repositories, phase II build, e.g. make image-build2 REPO_HOST=addr TAG=latest image-build2: @bash scripts/image-build2.sh $(REPO_HOST) $(TAG) .PHONY: image-push -# push docker image to remote repositories, e.g. make image-push REPO_HOST=addr TAG=latest +# Push docker image to remote repositories, e.g. make image-push REPO_HOST=addr TAG=latest image-push: @bash scripts/image-push.sh $(REPO_HOST) $(TAG) .PHONY: deploy-k8s -# deploy service to k8s +# Deploy service to k8s deploy-k8s: @bash scripts/deploy-k8s.sh .PHONY: image-build-rpc-test -# build grpc test image for remote repositories, e.g. make image-build-rpc-test REPO_HOST=addr TAG=latest +# Build grpc test image for remote repositories, e.g. make image-build-rpc-test REPO_HOST=addr TAG=latest image-build-rpc-test: @bash scripts/image-rpc-test.sh $(REPO_HOST) $(TAG) .PHONY: patch -# patch some dependent code, e.g. make patch TYPE=types-pb , make patch TYPE=init-your_db_driver, replace "your_db_driver" with mysql, mongodb, postgresql, tidb, sqlite +# Patch some dependent code, e.g. 
make patch TYPE=types-pb , make patch TYPE=init-, your_db_driver is mysql, mongodb, postgresql, tidb, sqlite, for example: make patch TYPE=init-mysql patch: @bash scripts/patch.sh $(TYPE) .PHONY: copy-proto -# copy proto file from the grpc server directory, multiple directories or proto files separated by commas. default is to copy all proto files, e.g. make copy-proto SERVER=yourServerDir, copy specified proto files, e.g. make copy-proto SERVER=yourServerDir PROTO_FILE=yourProtoFile1,yourProtoFile2 +# Copy proto file from the grpc server directory, multiple directories or proto files separated by commas. default is to copy all proto files, e.g. make copy-proto SERVER=yourServerDir, copy specified proto files, e.g. make copy-proto SERVER=yourServerDir PROTO_FILE=yourProtoFile1,yourProtoFile2 copy-proto: @sponge patch copy-proto --server-dir=$(SERVER) --proto-file=$(PROTO_FILE) +.PHONY: modify-proto-pkg-name +# Modify the 'package' and 'go_package' names of all proto files in the 'api' directory. +modify-proto-pkg-name: + @sponge patch modify-proto-package --dir=api --server-dir=. + + .PHONY: update-config -# update internal/config code base on yaml file +# Update internal/config code base on yaml file update-config: @sponge config --server-dir=. .PHONY: clean -# clean binary file, cover.out, template file +# Clean binary file, cover.out, template file clean: - @rm -vrf cmd/transfer/transfer* + @rm -vrf cmd/stock/stock* @rm -vrf cover.out - @rm -vrf main.go transfer.gv + @rm -vrf main.go stock.gv @rm -vrf internal/ecode/*.go.gen* @rm -vrf internal/routers/*.go.gen* @rm -vrf internal/handler/*.go.gen* @rm -vrf internal/service/*.go.gen* - @rm -rf transfer-binary.tar.gz + @rm -rf stock-binary.tar.gz @echo "clean finished" -# show help +# Show help help: @echo '' @echo 'Usage:' diff --git a/_13_sponge-dtm-cache/grpc+http/README.md b/_13_sponge-dtm-cache/grpc+http/README.md new file mode 100644 index 0000000..302a0a2 --- /dev/null +++ b/_13_sponge-dtm-cache/grpc+http/README.md @@ -0,0 +1,76 @@ +## Cache Consistency + +Using a gRPC+HTTP hybrid service created by [Sponge](https://github.com/zhufuyi/sponge) combined with [DTM](https://github.com/dtm-labs/dtm) and [RocksCache](https://github.com/dtm-labs/rockscache), this example demonstrates cache consistency (with Redis and MySQL) including `eventual consistency`, `atomicity`, `strong consistency`, and `strong consistency during downgrade and upgrade`. + +
+ +### Quick Start + +- Start the Redis service. +- Start the MySQL service and import the [stock.sql](test/stock.sql) file into the database. +- Download the [DTM](https://github.com/dtm-labs/dtm/releases/tag/v1.18.0) executable, modify the default DTM configuration to use Redis, then start the DTM service with: `dtm -c conf.yml`. +- Clone the project code locally and modify the IP addresses in the MySQL, Redis, and DTM configurations in [config.yml](configs/stock.yml) (replace the default IP addresses 192.168.3.37 and 192.168.3.90). + +Compile and start the service: + +```bash +make run +``` + +Test the four cache consistency schemes over both the HTTP and gRPC protocols: + +1. Open [http://localhost:8080/apis/swagger/index.html](http://localhost:8080/apis/swagger/index.html) in your browser to test the four different cache consistency approaches. + + ![cache-http-pb-swagger](https://raw.githubusercontent.com/zhufuyi/sponge_examples/main/assets/cache-http-pb-swagger.png) + +2. You can also test the four cache consistency approaches through the gRPC client code in `xxx_client_test.go` under the **internal/service** directory. + + ![cache-grpc-http-pb-swagger](https://raw.githubusercontent.com/zhufuyi/sponge_examples/main/assets/cache-http-pb-test.png) + +#### Eventual Consistency + +The "mark deletion" strategy for the cache thoroughly solves the inconsistency between the database and the cache that merely deleting the cache cannot resolve, ensuring eventual consistency even under extreme conditions. + +**Example code: [final.go](internal/service/final.go).** + +
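The gist of the pattern in [final.go](internal/service/final.go) is: read through RocksCache, and on writes update the database first and then mark the cached key as deleted rather than deleting it outright. The following is a minimal, illustrative Go sketch only, assuming the `github.com/dtm-labs/rockscache` client API (`NewClient`, `Fetch`, `TagAsDeleted`) with a go-redis v8 client and a local Redis; the key name and query callback are placeholders, not the project's actual code, and import paths may differ by version.

```go
package main

import (
	"fmt"
	"time"

	"github.com/dtm-labs/rockscache"
	"github.com/go-redis/redis/v8"
)

func main() {
	// Assumed local Redis address; the real service reads it from configs/stock.yml.
	rdb := redis.NewClient(&redis.Options{Addr: "localhost:6379"})
	rc := rockscache.NewClient(rdb, rockscache.NewDefaultOptions())

	key := "stock:1" // illustrative cache key

	// Read path: Fetch returns the cached value, or runs the callback to load it
	// from the database and then repopulates the cache.
	val, err := rc.Fetch(key, 5*time.Minute, func() (string, error) {
		// In the real service this callback would query the stock row in MySQL.
		return "100", nil
	})
	fmt.Println(val, err)

	// Write path: update the database first (omitted here), then tag the cached
	// entry as deleted instead of deleting it outright; the next read reloads the
	// fresh value, which is what gives eventual consistency even if a step is retried.
	_ = rc.TagAsDeleted(key)
}
```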
+
+#### Atomicity
+
+This approach guarantees that, even if the process crashes, the database update and the cache update either both succeed or both fail, and it is simpler than alternative architectures such as local message tables, transactional messages, or binlog listeners.
+
+**Example code: [atomic.go](internal/service/atomic.go).**
+
+<br>
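+A sketch of the core of the atomic update, based on the two-phase message pattern from the [DTM cache docs](https://dtm.pub/app/cache.html). The callback routes `/api/v1/stock/deleteCache` and `/api/v1/stock/queryPrepared` come from this project's callback service; the import path and helper names below are assumptions rather than the exact code in [atomic.go](internal/service/atomic.go):
+
+```go
+package service
+
+import (
+	"database/sql"
+	"fmt"
+
+	"github.com/dtm-labs/client/dtmcli" // import path depends on the DTM client version in go.mod
+)
+
+// updateStockAtomic updates the database row and deletes the cache as one
+// unit: DTM's two-phase message runs the DeleteCache branch if and only if
+// the local transaction below commits, even if this process crashes.
+func updateStockAtomic(db *sql.DB, dtmServer, busiServer string, id uint64, stock uint32) error {
+	gid := dtmcli.MustGenGid(dtmServer)
+	msg := dtmcli.NewMsg(dtmServer, gid).
+		Add(busiServer+"/api/v1/stock/deleteCache", map[string]string{"key": fmt.Sprintf("stock:%d", id)})
+	// queryPrepared is the back-check endpoint DTM calls to find out whether
+	// the local transaction committed if the submit message was lost.
+	return msg.DoAndSubmitDB(busiServer+"/api/v1/stock/queryPrepared", db, func(tx *sql.Tx) error {
+		_, err := tx.Exec("UPDATE stock SET stock = ? WHERE id = ?", stock, id)
+		return err
+	})
+}
+```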
+
+#### Strong Consistency
+
+The prerequisite for strong consistency is that "all data reads must come from the cache". As long as every read of the data held in the database and Redis is served by the cache alone, strong consistency is easy to achieve. The `Fetch` function in RocksCache provides strongly consistent cache reads: it never returns outdated data and instead waits synchronously for the latest result.
+
+For example, in a recharge scenario, after a user recharges successfully, if the user queries the business result (by checking whether the two-phase global transaction has succeeded), the system reports the status as incomplete until the global transaction completes, even though the database has already been updated.
+
+Strong consistency comes at a cost, mainly in performance. Compared to eventual consistency, strongly consistent reads must wait for the latest result, which increases response latency, and may also have to wait on results from other processes, which consumes extra resources.
+
+**Example code: [strong.go](internal/service/strong.go).**
+
+<br>
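+In code, the strongly consistent read mostly comes down to enabling RocksCache's `StrongConsistency` option so that `Fetch` waits for the latest value instead of serving a possibly stale one. A minimal sketch (import paths follow whatever versions the project pins; see [strong.go](internal/service/strong.go) for the real setup):
+
+```go
+package service
+
+import (
+	"github.com/dtm-labs/rockscache" // import path/version as pinned in go.mod
+	"github.com/redis/go-redis/v9"   // ditto for the Redis client
+)
+
+// newStrongCache returns a RocksCache client whose Fetch never serves stale
+// data: readers block until the latest value has been written back to Redis.
+func newStrongCache(redisAddr string) *rockscache.Client {
+	rdb := redis.NewClient(&redis.Options{Addr: redisAddr})
+	opts := rockscache.NewDefaultOptions()
+	opts.StrongConsistency = true // all reads go through the cache and wait for fresh data
+	return rockscache.NewClient(rdb, opts)
+}
+```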
+ +#### Strong Consistency During Downgrade and Upgrade + +Downgrade refers to reading data from the database when the cache is faulty, while upgrade refers to reading data from the cache after the cache recovers. During the short time window of downgrading and upgrading, strong consistency can still be maintained. + +- Use DTM's Saga mode to update data, ensuring atomicity of the three operations: locking the cache, updating the database, and deleting the cache. +- After updating the database but before updating the cache, the system can inform the user that the business is complete (unlike the earlier strong consistency scenario, this condition is relaxed). +- In strong consistency access mode, queries will wait for the data update result. +- During downgrade, first disable cache reads and wait until no read operations access the cache, then disable cache deletion. +- During upgrade, first enable cache deletion to ensure all database updates are reflected in the cache, then enable cache reads. + +**Example code: [downgrade.go](internal/service/downgrade.go).** + +
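+The downgrade/upgrade switches map onto RocksCache's `DisableCacheRead` and `DisableCacheDelete` options. The sketch below only shows how the flags correspond to the steps above; in [downgrade.go](internal/service/downgrade.go) they are driven by configuration at runtime rather than hard-coded:
+
+```go
+package service
+
+import "github.com/dtm-labs/rockscache" // import path/version as pinned in go.mod
+
+// Downgrade (cache is faulty): stop cache reads first; only after no reads
+// hit Redis any more, stop cache deletions as well.
+func downgradeStep1(opts *rockscache.Options) { opts.DisableCacheRead = true }
+func downgradeStep2(opts *rockscache.Options) { opts.DisableCacheDelete = true }
+
+// Upgrade (cache has recovered): re-enable cache deletion first, so every DB
+// update invalidates the cache again, then re-enable cache reads.
+func upgradeStep1(opts *rockscache.Options) { opts.DisableCacheDelete = false }
+func upgradeStep2(opts *rockscache.Options) { opts.DisableCacheRead = false }
+```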
+ +Reference: + +- https://dtm.pub/app/cache.html +- https://github.com/dtm-labs/dtm-cases/tree/main/cache + diff --git a/_13_sponge-dtm-cache/grpc+http/api/stock/v1/atomic.pb.go b/_13_sponge-dtm-cache/grpc+http/api/stock/v1/atomic.pb.go new file mode 100644 index 0000000..009de1e --- /dev/null +++ b/_13_sponge-dtm-cache/grpc+http/api/stock/v1/atomic.pb.go @@ -0,0 +1,379 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// versions: +// protoc-gen-go v1.28.0 +// protoc v4.25.2 +// source: api/stock/v1/atomic.proto + +package v1 + +import ( + protoreflect "google.golang.org/protobuf/reflect/protoreflect" + protoimpl "google.golang.org/protobuf/runtime/protoimpl" + reflect "reflect" + sync "sync" +) + +const ( + // Verify that this generated code is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) + // Verify that runtime/protoimpl is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) +) + +type UpdateAtomicRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Id uint64 `protobuf:"varint,1,opt,name=id,proto3" json:"id" uri:"id"` + Stock uint32 `protobuf:"varint,2,opt,name=stock,proto3" json:"stock"` // 库存数量 +} + +func (x *UpdateAtomicRequest) Reset() { + *x = UpdateAtomicRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_api_stock_v1_atomic_proto_msgTypes[0] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *UpdateAtomicRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*UpdateAtomicRequest) ProtoMessage() {} + +func (x *UpdateAtomicRequest) ProtoReflect() protoreflect.Message { + mi := &file_api_stock_v1_atomic_proto_msgTypes[0] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use UpdateAtomicRequest.ProtoReflect.Descriptor instead. +func (*UpdateAtomicRequest) Descriptor() ([]byte, []int) { + return file_api_stock_v1_atomic_proto_rawDescGZIP(), []int{0} +} + +func (x *UpdateAtomicRequest) GetId() uint64 { + if x != nil { + return x.Id + } + return 0 +} + +func (x *UpdateAtomicRequest) GetStock() uint32 { + if x != nil { + return x.Stock + } + return 0 +} + +type UpdateAtomicRequestReply struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields +} + +func (x *UpdateAtomicRequestReply) Reset() { + *x = UpdateAtomicRequestReply{} + if protoimpl.UnsafeEnabled { + mi := &file_api_stock_v1_atomic_proto_msgTypes[1] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *UpdateAtomicRequestReply) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*UpdateAtomicRequestReply) ProtoMessage() {} + +func (x *UpdateAtomicRequestReply) ProtoReflect() protoreflect.Message { + mi := &file_api_stock_v1_atomic_proto_msgTypes[1] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use UpdateAtomicRequestReply.ProtoReflect.Descriptor instead. 
+func (*UpdateAtomicRequestReply) Descriptor() ([]byte, []int) { + return file_api_stock_v1_atomic_proto_rawDescGZIP(), []int{1} +} + +type QueryAtomicRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Id uint64 `protobuf:"varint,1,opt,name=id,proto3" json:"id" uri:"id"` +} + +func (x *QueryAtomicRequest) Reset() { + *x = QueryAtomicRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_api_stock_v1_atomic_proto_msgTypes[2] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *QueryAtomicRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*QueryAtomicRequest) ProtoMessage() {} + +func (x *QueryAtomicRequest) ProtoReflect() protoreflect.Message { + mi := &file_api_stock_v1_atomic_proto_msgTypes[2] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use QueryAtomicRequest.ProtoReflect.Descriptor instead. +func (*QueryAtomicRequest) Descriptor() ([]byte, []int) { + return file_api_stock_v1_atomic_proto_rawDescGZIP(), []int{2} +} + +func (x *QueryAtomicRequest) GetId() uint64 { + if x != nil { + return x.Id + } + return 0 +} + +type QueryAtomicReply struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Stock uint32 `protobuf:"varint,1,opt,name=stock,proto3" json:"stock"` // 库存数量 +} + +func (x *QueryAtomicReply) Reset() { + *x = QueryAtomicReply{} + if protoimpl.UnsafeEnabled { + mi := &file_api_stock_v1_atomic_proto_msgTypes[3] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *QueryAtomicReply) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*QueryAtomicReply) ProtoMessage() {} + +func (x *QueryAtomicReply) ProtoReflect() protoreflect.Message { + mi := &file_api_stock_v1_atomic_proto_msgTypes[3] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use QueryAtomicReply.ProtoReflect.Descriptor instead. 
+func (*QueryAtomicReply) Descriptor() ([]byte, []int) { + return file_api_stock_v1_atomic_proto_rawDescGZIP(), []int{3} +} + +func (x *QueryAtomicReply) GetStock() uint32 { + if x != nil { + return x.Stock + } + return 0 +} + +var File_api_stock_v1_atomic_proto protoreflect.FileDescriptor + +var file_api_stock_v1_atomic_proto_rawDesc = []byte{ + 0x0a, 0x19, 0x61, 0x70, 0x69, 0x2f, 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x2f, 0x76, 0x31, 0x2f, 0x61, + 0x74, 0x6f, 0x6d, 0x69, 0x63, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x0c, 0x61, 0x70, 0x69, + 0x2e, 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x2e, 0x76, 0x31, 0x1a, 0x1c, 0x67, 0x6f, 0x6f, 0x67, 0x6c, + 0x65, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x61, 0x6e, 0x6e, 0x6f, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, + 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x2d, + 0x67, 0x65, 0x6e, 0x2d, 0x6f, 0x70, 0x65, 0x6e, 0x61, 0x70, 0x69, 0x76, 0x32, 0x2f, 0x6f, 0x70, + 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2f, 0x61, 0x6e, 0x6e, 0x6f, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, + 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x13, 0x74, 0x61, 0x67, 0x67, 0x65, 0x72, 0x2f, + 0x74, 0x61, 0x67, 0x67, 0x65, 0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x17, 0x76, 0x61, + 0x6c, 0x69, 0x64, 0x61, 0x74, 0x65, 0x2f, 0x76, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x65, 0x2e, + 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, 0x5a, 0x0a, 0x13, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x41, + 0x74, 0x6f, 0x6d, 0x69, 0x63, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x24, 0x0a, 0x02, + 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x42, 0x14, 0xfa, 0x42, 0x04, 0x32, 0x02, 0x20, + 0x00, 0x9a, 0x84, 0x9e, 0x03, 0x08, 0x75, 0x72, 0x69, 0x3a, 0x22, 0x69, 0x64, 0x22, 0x52, 0x02, + 0x69, 0x64, 0x12, 0x1d, 0x0a, 0x05, 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x18, 0x02, 0x20, 0x01, 0x28, + 0x0d, 0x42, 0x07, 0xfa, 0x42, 0x04, 0x2a, 0x02, 0x20, 0x00, 0x52, 0x05, 0x73, 0x74, 0x6f, 0x63, + 0x6b, 0x22, 0x1a, 0x0a, 0x18, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x41, 0x74, 0x6f, 0x6d, 0x69, + 0x63, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0x3a, 0x0a, + 0x12, 0x51, 0x75, 0x65, 0x72, 0x79, 0x41, 0x74, 0x6f, 0x6d, 0x69, 0x63, 0x52, 0x65, 0x71, 0x75, + 0x65, 0x73, 0x74, 0x12, 0x24, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x42, + 0x14, 0xfa, 0x42, 0x04, 0x32, 0x02, 0x20, 0x00, 0x9a, 0x84, 0x9e, 0x03, 0x08, 0x75, 0x72, 0x69, + 0x3a, 0x22, 0x69, 0x64, 0x22, 0x52, 0x02, 0x69, 0x64, 0x22, 0x28, 0x0a, 0x10, 0x51, 0x75, 0x65, + 0x72, 0x79, 0x41, 0x74, 0x6f, 0x6d, 0x69, 0x63, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x12, 0x14, 0x0a, + 0x05, 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x05, 0x73, 0x74, + 0x6f, 0x63, 0x6b, 0x32, 0xee, 0x02, 0x0a, 0x06, 0x61, 0x74, 0x6f, 0x6d, 0x69, 0x63, 0x12, 0xc2, + 0x01, 0x0a, 0x06, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x12, 0x21, 0x2e, 0x61, 0x70, 0x69, 0x2e, + 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x2e, 0x76, 0x31, 0x2e, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x41, + 0x74, 0x6f, 0x6d, 0x69, 0x63, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x26, 0x2e, 0x61, + 0x70, 0x69, 0x2e, 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x2e, 0x76, 0x31, 0x2e, 0x55, 0x70, 0x64, 0x61, + 0x74, 0x65, 0x41, 0x74, 0x6f, 0x6d, 0x69, 0x63, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x52, + 0x65, 0x70, 0x6c, 0x79, 0x22, 0x6d, 0x92, 0x41, 0x46, 0x0a, 0x11, 0x63, 0x61, 0x73, 0x65, 0x20, + 0x32, 0x3a, 0x20, 0xe5, 0x8e, 0x9f, 0xe5, 0xad, 0x90, 0xe6, 0x80, 0xa7, 0x12, 0x0c, 0xe6, 0x9b, + 0xb4, 0xe6, 0x96, 0xb0, 0xe6, 0x95, 0xb0, 0xe6, 0x8d, 0xae, 0x1a, 
0x23, 0xe6, 0x9b, 0xb4, 0xe6, + 0x96, 0xb0, 0xe6, 0x95, 0xb0, 0xe6, 0x8d, 0xae, 0xef, 0xbc, 0x8c, 0x44, 0x42, 0xe5, 0x92, 0x8c, + 0xe7, 0xbc, 0x93, 0xe5, 0xad, 0x98, 0xe5, 0x8e, 0x9f, 0xe5, 0xad, 0x90, 0xe6, 0x80, 0xa7, 0x82, + 0xd3, 0xe4, 0x93, 0x02, 0x1e, 0x1a, 0x19, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x76, 0x31, 0x2f, 0x73, + 0x74, 0x6f, 0x63, 0x6b, 0x2f, 0x7b, 0x69, 0x64, 0x7d, 0x2f, 0x61, 0x74, 0x6f, 0x6d, 0x69, 0x63, + 0x3a, 0x01, 0x2a, 0x12, 0x9e, 0x01, 0x0a, 0x05, 0x51, 0x75, 0x65, 0x72, 0x79, 0x12, 0x20, 0x2e, + 0x61, 0x70, 0x69, 0x2e, 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x2e, 0x76, 0x31, 0x2e, 0x51, 0x75, 0x65, + 0x72, 0x79, 0x41, 0x74, 0x6f, 0x6d, 0x69, 0x63, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, + 0x1e, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x2e, 0x76, 0x31, 0x2e, 0x51, + 0x75, 0x65, 0x72, 0x79, 0x41, 0x74, 0x6f, 0x6d, 0x69, 0x63, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, + 0x53, 0x92, 0x41, 0x2f, 0x0a, 0x11, 0x63, 0x61, 0x73, 0x65, 0x20, 0x32, 0x3a, 0x20, 0xe5, 0x8e, + 0x9f, 0xe5, 0xad, 0x90, 0xe6, 0x80, 0xa7, 0x12, 0x0c, 0xe6, 0x9f, 0xa5, 0xe8, 0xaf, 0xa2, 0xe6, + 0x95, 0xb0, 0xe6, 0x8d, 0xae, 0x1a, 0x0c, 0xe6, 0x9f, 0xa5, 0xe8, 0xaf, 0xa2, 0xe6, 0x95, 0xb0, + 0xe6, 0x8d, 0xae, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x1b, 0x12, 0x19, 0x2f, 0x61, 0x70, 0x69, 0x2f, + 0x76, 0x31, 0x2f, 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x2f, 0x7b, 0x69, 0x64, 0x7d, 0x2f, 0x61, 0x74, + 0x6f, 0x6d, 0x69, 0x63, 0x42, 0xb4, 0x01, 0x5a, 0x15, 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x2f, 0x61, + 0x70, 0x69, 0x2f, 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x2f, 0x76, 0x31, 0x3b, 0x76, 0x31, 0x92, 0x41, + 0x99, 0x01, 0x12, 0x15, 0x0a, 0x0e, 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x20, 0x61, 0x70, 0x69, 0x20, + 0x64, 0x6f, 0x63, 0x73, 0x32, 0x03, 0x32, 0x2e, 0x30, 0x1a, 0x0e, 0x6c, 0x6f, 0x63, 0x61, 0x6c, + 0x68, 0x6f, 0x73, 0x74, 0x3a, 0x38, 0x30, 0x38, 0x30, 0x2a, 0x02, 0x01, 0x02, 0x32, 0x10, 0x61, + 0x70, 0x70, 0x6c, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2f, 0x6a, 0x73, 0x6f, 0x6e, 0x3a, + 0x10, 0x61, 0x70, 0x70, 0x6c, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2f, 0x6a, 0x73, 0x6f, + 0x6e, 0x5a, 0x48, 0x0a, 0x46, 0x0a, 0x0a, 0x42, 0x65, 0x61, 0x72, 0x65, 0x72, 0x41, 0x75, 0x74, + 0x68, 0x12, 0x38, 0x08, 0x02, 0x12, 0x23, 0x54, 0x79, 0x70, 0x65, 0x20, 0x42, 0x65, 0x61, 0x72, + 0x65, 0x72, 0x20, 0x79, 0x6f, 0x75, 0x72, 0x2d, 0x6a, 0x77, 0x74, 0x2d, 0x74, 0x6f, 0x6b, 0x65, + 0x6e, 0x20, 0x74, 0x6f, 0x20, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x1a, 0x0d, 0x41, 0x75, 0x74, 0x68, + 0x6f, 0x72, 0x69, 0x7a, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x02, 0x62, 0x06, 0x70, 0x72, 0x6f, + 0x74, 0x6f, 0x33, +} + +var ( + file_api_stock_v1_atomic_proto_rawDescOnce sync.Once + file_api_stock_v1_atomic_proto_rawDescData = file_api_stock_v1_atomic_proto_rawDesc +) + +func file_api_stock_v1_atomic_proto_rawDescGZIP() []byte { + file_api_stock_v1_atomic_proto_rawDescOnce.Do(func() { + file_api_stock_v1_atomic_proto_rawDescData = protoimpl.X.CompressGZIP(file_api_stock_v1_atomic_proto_rawDescData) + }) + return file_api_stock_v1_atomic_proto_rawDescData +} + +var file_api_stock_v1_atomic_proto_msgTypes = make([]protoimpl.MessageInfo, 4) +var file_api_stock_v1_atomic_proto_goTypes = []interface{}{ + (*UpdateAtomicRequest)(nil), // 0: api.stock.v1.UpdateAtomicRequest + (*UpdateAtomicRequestReply)(nil), // 1: api.stock.v1.UpdateAtomicRequestReply + (*QueryAtomicRequest)(nil), // 2: api.stock.v1.QueryAtomicRequest + (*QueryAtomicReply)(nil), // 3: api.stock.v1.QueryAtomicReply +} +var file_api_stock_v1_atomic_proto_depIdxs = []int32{ + 0, // 0: 
api.stock.v1.atomic.Update:input_type -> api.stock.v1.UpdateAtomicRequest + 2, // 1: api.stock.v1.atomic.Query:input_type -> api.stock.v1.QueryAtomicRequest + 1, // 2: api.stock.v1.atomic.Update:output_type -> api.stock.v1.UpdateAtomicRequestReply + 3, // 3: api.stock.v1.atomic.Query:output_type -> api.stock.v1.QueryAtomicReply + 2, // [2:4] is the sub-list for method output_type + 0, // [0:2] is the sub-list for method input_type + 0, // [0:0] is the sub-list for extension type_name + 0, // [0:0] is the sub-list for extension extendee + 0, // [0:0] is the sub-list for field type_name +} + +func init() { file_api_stock_v1_atomic_proto_init() } +func file_api_stock_v1_atomic_proto_init() { + if File_api_stock_v1_atomic_proto != nil { + return + } + if !protoimpl.UnsafeEnabled { + file_api_stock_v1_atomic_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*UpdateAtomicRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_api_stock_v1_atomic_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*UpdateAtomicRequestReply); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_api_stock_v1_atomic_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*QueryAtomicRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_api_stock_v1_atomic_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*QueryAtomicReply); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + } + type x struct{} + out := protoimpl.TypeBuilder{ + File: protoimpl.DescBuilder{ + GoPackagePath: reflect.TypeOf(x{}).PkgPath(), + RawDescriptor: file_api_stock_v1_atomic_proto_rawDesc, + NumEnums: 0, + NumMessages: 4, + NumExtensions: 0, + NumServices: 1, + }, + GoTypes: file_api_stock_v1_atomic_proto_goTypes, + DependencyIndexes: file_api_stock_v1_atomic_proto_depIdxs, + MessageInfos: file_api_stock_v1_atomic_proto_msgTypes, + }.Build() + File_api_stock_v1_atomic_proto = out.File + file_api_stock_v1_atomic_proto_rawDesc = nil + file_api_stock_v1_atomic_proto_goTypes = nil + file_api_stock_v1_atomic_proto_depIdxs = nil +} diff --git a/_13_sponge-dtm-cache/grpc+http/api/stock/v1/atomic.pb.validate.go b/_13_sponge-dtm-cache/grpc+http/api/stock/v1/atomic.pb.validate.go new file mode 100644 index 0000000..d43026a --- /dev/null +++ b/_13_sponge-dtm-cache/grpc+http/api/stock/v1/atomic.pb.validate.go @@ -0,0 +1,477 @@ +// Code generated by protoc-gen-validate. DO NOT EDIT. +// source: api/stock/v1/atomic.proto + +package v1 + +import ( + "bytes" + "errors" + "fmt" + "net" + "net/mail" + "net/url" + "regexp" + "sort" + "strings" + "time" + "unicode/utf8" + + "google.golang.org/protobuf/types/known/anypb" +) + +// ensure the imports are used +var ( + _ = bytes.MinRead + _ = errors.New("") + _ = fmt.Print + _ = utf8.UTFMax + _ = (*regexp.Regexp)(nil) + _ = (*strings.Reader)(nil) + _ = net.IPv4len + _ = time.Duration(0) + _ = (*url.URL)(nil) + _ = (*mail.Address)(nil) + _ = anypb.Any{} + _ = sort.Sort +) + +// Validate checks the field values on UpdateAtomicRequest with the rules +// defined in the proto definition for this message. 
If any rules are +// violated, the first error encountered is returned, or nil if there are no violations. +func (m *UpdateAtomicRequest) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on UpdateAtomicRequest with the rules +// defined in the proto definition for this message. If any rules are +// violated, the result is a list of violation errors wrapped in +// UpdateAtomicRequestMultiError, or nil if none found. +func (m *UpdateAtomicRequest) ValidateAll() error { + return m.validate(true) +} + +func (m *UpdateAtomicRequest) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + if m.GetId() <= 0 { + err := UpdateAtomicRequestValidationError{ + field: "Id", + reason: "value must be greater than 0", + } + if !all { + return err + } + errors = append(errors, err) + } + + if m.GetStock() <= 0 { + err := UpdateAtomicRequestValidationError{ + field: "Stock", + reason: "value must be greater than 0", + } + if !all { + return err + } + errors = append(errors, err) + } + + if len(errors) > 0 { + return UpdateAtomicRequestMultiError(errors) + } + + return nil +} + +// UpdateAtomicRequestMultiError is an error wrapping multiple validation +// errors returned by UpdateAtomicRequest.ValidateAll() if the designated +// constraints aren't met. +type UpdateAtomicRequestMultiError []error + +// Error returns a concatenation of all the error messages it wraps. +func (m UpdateAtomicRequestMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m UpdateAtomicRequestMultiError) AllErrors() []error { return m } + +// UpdateAtomicRequestValidationError is the validation error returned by +// UpdateAtomicRequest.Validate if the designated constraints aren't met. +type UpdateAtomicRequestValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e UpdateAtomicRequestValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e UpdateAtomicRequestValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e UpdateAtomicRequestValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e UpdateAtomicRequestValidationError) Key() bool { return e.key } + +// ErrorName returns error name. +func (e UpdateAtomicRequestValidationError) ErrorName() string { + return "UpdateAtomicRequestValidationError" +} + +// Error satisfies the builtin error interface +func (e UpdateAtomicRequestValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sUpdateAtomicRequest.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = UpdateAtomicRequestValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = UpdateAtomicRequestValidationError{} + +// Validate checks the field values on UpdateAtomicRequestReply with the rules +// defined in the proto definition for this message. If any rules are +// violated, the first error encountered is returned, or nil if there are no violations. 
+func (m *UpdateAtomicRequestReply) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on UpdateAtomicRequestReply with the +// rules defined in the proto definition for this message. If any rules are +// violated, the result is a list of violation errors wrapped in +// UpdateAtomicRequestReplyMultiError, or nil if none found. +func (m *UpdateAtomicRequestReply) ValidateAll() error { + return m.validate(true) +} + +func (m *UpdateAtomicRequestReply) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + if len(errors) > 0 { + return UpdateAtomicRequestReplyMultiError(errors) + } + + return nil +} + +// UpdateAtomicRequestReplyMultiError is an error wrapping multiple validation +// errors returned by UpdateAtomicRequestReply.ValidateAll() if the designated +// constraints aren't met. +type UpdateAtomicRequestReplyMultiError []error + +// Error returns a concatenation of all the error messages it wraps. +func (m UpdateAtomicRequestReplyMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m UpdateAtomicRequestReplyMultiError) AllErrors() []error { return m } + +// UpdateAtomicRequestReplyValidationError is the validation error returned by +// UpdateAtomicRequestReply.Validate if the designated constraints aren't met. +type UpdateAtomicRequestReplyValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e UpdateAtomicRequestReplyValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e UpdateAtomicRequestReplyValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e UpdateAtomicRequestReplyValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e UpdateAtomicRequestReplyValidationError) Key() bool { return e.key } + +// ErrorName returns error name. +func (e UpdateAtomicRequestReplyValidationError) ErrorName() string { + return "UpdateAtomicRequestReplyValidationError" +} + +// Error satisfies the builtin error interface +func (e UpdateAtomicRequestReplyValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sUpdateAtomicRequestReply.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = UpdateAtomicRequestReplyValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = UpdateAtomicRequestReplyValidationError{} + +// Validate checks the field values on QueryAtomicRequest with the rules +// defined in the proto definition for this message. If any rules are +// violated, the first error encountered is returned, or nil if there are no violations. +func (m *QueryAtomicRequest) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on QueryAtomicRequest with the rules +// defined in the proto definition for this message. If any rules are +// violated, the result is a list of violation errors wrapped in +// QueryAtomicRequestMultiError, or nil if none found. 
+func (m *QueryAtomicRequest) ValidateAll() error { + return m.validate(true) +} + +func (m *QueryAtomicRequest) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + if m.GetId() <= 0 { + err := QueryAtomicRequestValidationError{ + field: "Id", + reason: "value must be greater than 0", + } + if !all { + return err + } + errors = append(errors, err) + } + + if len(errors) > 0 { + return QueryAtomicRequestMultiError(errors) + } + + return nil +} + +// QueryAtomicRequestMultiError is an error wrapping multiple validation errors +// returned by QueryAtomicRequest.ValidateAll() if the designated constraints +// aren't met. +type QueryAtomicRequestMultiError []error + +// Error returns a concatenation of all the error messages it wraps. +func (m QueryAtomicRequestMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m QueryAtomicRequestMultiError) AllErrors() []error { return m } + +// QueryAtomicRequestValidationError is the validation error returned by +// QueryAtomicRequest.Validate if the designated constraints aren't met. +type QueryAtomicRequestValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e QueryAtomicRequestValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e QueryAtomicRequestValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e QueryAtomicRequestValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e QueryAtomicRequestValidationError) Key() bool { return e.key } + +// ErrorName returns error name. +func (e QueryAtomicRequestValidationError) ErrorName() string { + return "QueryAtomicRequestValidationError" +} + +// Error satisfies the builtin error interface +func (e QueryAtomicRequestValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sQueryAtomicRequest.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = QueryAtomicRequestValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = QueryAtomicRequestValidationError{} + +// Validate checks the field values on QueryAtomicReply with the rules defined +// in the proto definition for this message. If any rules are violated, the +// first error encountered is returned, or nil if there are no violations. +func (m *QueryAtomicReply) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on QueryAtomicReply with the rules +// defined in the proto definition for this message. If any rules are +// violated, the result is a list of violation errors wrapped in +// QueryAtomicReplyMultiError, or nil if none found. 
+func (m *QueryAtomicReply) ValidateAll() error { + return m.validate(true) +} + +func (m *QueryAtomicReply) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + // no validation rules for Stock + + if len(errors) > 0 { + return QueryAtomicReplyMultiError(errors) + } + + return nil +} + +// QueryAtomicReplyMultiError is an error wrapping multiple validation errors +// returned by QueryAtomicReply.ValidateAll() if the designated constraints +// aren't met. +type QueryAtomicReplyMultiError []error + +// Error returns a concatenation of all the error messages it wraps. +func (m QueryAtomicReplyMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m QueryAtomicReplyMultiError) AllErrors() []error { return m } + +// QueryAtomicReplyValidationError is the validation error returned by +// QueryAtomicReply.Validate if the designated constraints aren't met. +type QueryAtomicReplyValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e QueryAtomicReplyValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e QueryAtomicReplyValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e QueryAtomicReplyValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e QueryAtomicReplyValidationError) Key() bool { return e.key } + +// ErrorName returns error name. +func (e QueryAtomicReplyValidationError) ErrorName() string { return "QueryAtomicReplyValidationError" } + +// Error satisfies the builtin error interface +func (e QueryAtomicReplyValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sQueryAtomicReply.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = QueryAtomicReplyValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = QueryAtomicReplyValidationError{} diff --git a/_13_sponge-dtm-cache/grpc+http/api/stock/v1/atomic.proto b/_13_sponge-dtm-cache/grpc+http/api/stock/v1/atomic.proto new file mode 100644 index 0000000..9867bc7 --- /dev/null +++ b/_13_sponge-dtm-cache/grpc+http/api/stock/v1/atomic.proto @@ -0,0 +1,82 @@ +syntax = "proto3"; + +package api.stock.v1; + +import "google/api/annotations.proto"; +import "protoc-gen-openapiv2/options/annotations.proto"; +import "tagger/tagger.proto"; +import "validate/validate.proto"; + +option go_package = "stock/api/stock/v1;v1"; + +// Default settings for generating swagger documents +// NOTE: because json does not support 64 bits, the int64 and uint64 types under *.swagger.json are automatically converted to string types +// Reference https://github.com/grpc-ecosystem/grpc-gateway/blob/db7fbefff7c04877cdb32e16d4a248a024428207/examples/internal/proto/examplepb/a_bit_of_everything.proto +option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_swagger) = { + host: "localhost:8080" + base_path: "" + info: { + title: "stock api docs"; + version: "2.0"; + } + schemes: HTTP; + schemes: HTTPS; + consumes: "application/json"; + produces: "application/json"; + security_definitions: { + security: { + key: "BearerAuth"; + 
value: { + type: TYPE_API_KEY; + in: IN_HEADER; + name: "Authorization"; + description: "Type Bearer your-jwt-token to Value"; + } + } + } +}; + +// 使用dtm+rockscache实现原子性更新,比本地消息表、事务消息、binlog监听的这些架构更加简单 +service atomic{ + // 更新数据,保证DB与缓存操作的原子性。 + rpc Update(UpdateAtomicRequest) returns (UpdateAtomicRequestReply) { + option (google.api.http) = { + put: "/api/v1/stock/{id}/atomic" + body: "*" + }; + option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { + summary: "更新数据", + description: "更新数据,DB和缓存原子性", + tags: "case 2: 原子性" + }; + } + + // 查询 + rpc Query(QueryAtomicRequest) returns (QueryAtomicReply) { + option (google.api.http) = { + get: "/api/v1/stock/{id}/atomic" + }; + option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { + summary: "查询数据", + description: "查询数据", + tags: "case 2: 原子性" + }; + } +} + +message UpdateAtomicRequest { + uint64 id = 1 [(validate.rules).uint64.gt = 0, (tagger.tags) = "uri:\"id\"" ]; + uint32 stock = 2 [(validate.rules).uint32.gt = 0]; // 库存数量 +} + +message UpdateAtomicRequestReply { + +} + +message QueryAtomicRequest { + uint64 id =1 [(validate.rules).uint64.gt = 0, (tagger.tags) = "uri:\"id\"" ]; +} + +message QueryAtomicReply { + uint32 stock = 1; // 库存数量 +} diff --git a/_13_sponge-dtm-cache/grpc+http/api/stock/v1/atomic_grpc.pb.go b/_13_sponge-dtm-cache/grpc+http/api/stock/v1/atomic_grpc.pb.go new file mode 100644 index 0000000..e903a89 --- /dev/null +++ b/_13_sponge-dtm-cache/grpc+http/api/stock/v1/atomic_grpc.pb.go @@ -0,0 +1,150 @@ +// Code generated by protoc-gen-go-grpc. DO NOT EDIT. +// versions: +// - protoc-gen-go-grpc v1.3.0 +// - protoc v4.25.2 +// source: api/stock/v1/atomic.proto + +package v1 + +import ( + context "context" + grpc "google.golang.org/grpc" + codes "google.golang.org/grpc/codes" + status "google.golang.org/grpc/status" +) + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +// Requires gRPC-Go v1.32.0 or later. +const _ = grpc.SupportPackageIsVersion7 + +const ( + Atomic_Update_FullMethodName = "/api.stock.v1.atomic/Update" + Atomic_Query_FullMethodName = "/api.stock.v1.atomic/Query" +) + +// AtomicClient is the client API for Atomic service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://pkg.go.dev/google.golang.org/grpc/?tab=doc#ClientConn.NewStream. +type AtomicClient interface { + // 更新数据,保证DB与缓存操作的原子性。 + Update(ctx context.Context, in *UpdateAtomicRequest, opts ...grpc.CallOption) (*UpdateAtomicRequestReply, error) + // 查询 + Query(ctx context.Context, in *QueryAtomicRequest, opts ...grpc.CallOption) (*QueryAtomicReply, error) +} + +type atomicClient struct { + cc grpc.ClientConnInterface +} + +func NewAtomicClient(cc grpc.ClientConnInterface) AtomicClient { + return &atomicClient{cc} +} + +func (c *atomicClient) Update(ctx context.Context, in *UpdateAtomicRequest, opts ...grpc.CallOption) (*UpdateAtomicRequestReply, error) { + out := new(UpdateAtomicRequestReply) + err := c.cc.Invoke(ctx, Atomic_Update_FullMethodName, in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *atomicClient) Query(ctx context.Context, in *QueryAtomicRequest, opts ...grpc.CallOption) (*QueryAtomicReply, error) { + out := new(QueryAtomicReply) + err := c.cc.Invoke(ctx, Atomic_Query_FullMethodName, in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// AtomicServer is the server API for Atomic service. 
+// All implementations must embed UnimplementedAtomicServer +// for forward compatibility +type AtomicServer interface { + // 更新数据,保证DB与缓存操作的原子性。 + Update(context.Context, *UpdateAtomicRequest) (*UpdateAtomicRequestReply, error) + // 查询 + Query(context.Context, *QueryAtomicRequest) (*QueryAtomicReply, error) + mustEmbedUnimplementedAtomicServer() +} + +// UnimplementedAtomicServer must be embedded to have forward compatible implementations. +type UnimplementedAtomicServer struct { +} + +func (UnimplementedAtomicServer) Update(context.Context, *UpdateAtomicRequest) (*UpdateAtomicRequestReply, error) { + return nil, status.Errorf(codes.Unimplemented, "method Update not implemented") +} +func (UnimplementedAtomicServer) Query(context.Context, *QueryAtomicRequest) (*QueryAtomicReply, error) { + return nil, status.Errorf(codes.Unimplemented, "method Query not implemented") +} +func (UnimplementedAtomicServer) mustEmbedUnimplementedAtomicServer() {} + +// UnsafeAtomicServer may be embedded to opt out of forward compatibility for this service. +// Use of this interface is not recommended, as added methods to AtomicServer will +// result in compilation errors. +type UnsafeAtomicServer interface { + mustEmbedUnimplementedAtomicServer() +} + +func RegisterAtomicServer(s grpc.ServiceRegistrar, srv AtomicServer) { + s.RegisterService(&Atomic_ServiceDesc, srv) +} + +func _Atomic_Update_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UpdateAtomicRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(AtomicServer).Update(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: Atomic_Update_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(AtomicServer).Update(ctx, req.(*UpdateAtomicRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Atomic_Query_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(QueryAtomicRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(AtomicServer).Query(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: Atomic_Query_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(AtomicServer).Query(ctx, req.(*QueryAtomicRequest)) + } + return interceptor(ctx, in, info, handler) +} + +// Atomic_ServiceDesc is the grpc.ServiceDesc for Atomic service. +// It's only intended for direct use with grpc.RegisterService, +// and not to be introspected or modified (even as a copy) +var Atomic_ServiceDesc = grpc.ServiceDesc{ + ServiceName: "api.stock.v1.atomic", + HandlerType: (*AtomicServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "Update", + Handler: _Atomic_Update_Handler, + }, + { + MethodName: "Query", + Handler: _Atomic_Query_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "api/stock/v1/atomic.proto", +} diff --git a/_13_sponge-dtm-cache/grpc+http/api/stock/v1/atomic_router.pb.go b/_13_sponge-dtm-cache/grpc+http/api/stock/v1/atomic_router.pb.go new file mode 100644 index 0000000..e551613 --- /dev/null +++ b/_13_sponge-dtm-cache/grpc+http/api/stock/v1/atomic_router.pb.go @@ -0,0 +1,221 @@ +// Code generated by https://github.com/zhufuyi/sponge, DO NOT EDIT. 
+ +package v1 + +import ( + "context" + "errors" + "strings" + + "github.com/gin-gonic/gin" + "go.uber.org/zap" + + "github.com/zhufuyi/sponge/pkg/errcode" + "github.com/zhufuyi/sponge/pkg/gin/middleware" +) + +type AtomicLogicer interface { + Update(ctx context.Context, req *UpdateAtomicRequest) (*UpdateAtomicRequestReply, error) + Query(ctx context.Context, req *QueryAtomicRequest) (*QueryAtomicReply, error) +} + +type AtomicOption func(*atomicOptions) + +type atomicOptions struct { + isFromRPC bool + responser errcode.Responser + zapLog *zap.Logger + httpErrors []*errcode.Error + rpcStatus []*errcode.RPCStatus + wrapCtxFn func(c *gin.Context) context.Context +} + +func (o *atomicOptions) apply(opts ...AtomicOption) { + for _, opt := range opts { + opt(o) + } +} + +func WithAtomicHTTPResponse() AtomicOption { + return func(o *atomicOptions) { + o.isFromRPC = false + } +} + +func WithAtomicRPCResponse() AtomicOption { + return func(o *atomicOptions) { + o.isFromRPC = true + } +} + +func WithAtomicResponser(responser errcode.Responser) AtomicOption { + return func(o *atomicOptions) { + o.responser = responser + } +} + +func WithAtomicLogger(zapLog *zap.Logger) AtomicOption { + return func(o *atomicOptions) { + o.zapLog = zapLog + } +} + +func WithAtomicErrorToHTTPCode(e ...*errcode.Error) AtomicOption { + return func(o *atomicOptions) { + o.httpErrors = e + } +} + +func WithAtomicRPCStatusToHTTPCode(s ...*errcode.RPCStatus) AtomicOption { + return func(o *atomicOptions) { + o.rpcStatus = s + } +} + +func WithAtomicWrapCtx(wrapCtxFn func(c *gin.Context) context.Context) AtomicOption { + return func(o *atomicOptions) { + o.wrapCtxFn = wrapCtxFn + } +} + +func RegisterAtomicRouter( + iRouter gin.IRouter, + groupPathMiddlewares map[string][]gin.HandlerFunc, + singlePathMiddlewares map[string][]gin.HandlerFunc, + iLogic AtomicLogicer, + opts ...AtomicOption) { + + o := &atomicOptions{} + o.apply(opts...) + + if o.responser == nil { + o.responser = errcode.NewResponser(o.isFromRPC, o.httpErrors, o.rpcStatus) + } + if o.zapLog == nil { + o.zapLog, _ = zap.NewProduction() + } + + r := &atomicRouter{ + iRouter: iRouter, + groupPathMiddlewares: groupPathMiddlewares, + singlePathMiddlewares: singlePathMiddlewares, + iLogic: iLogic, + iResponse: o.responser, + zapLog: o.zapLog, + wrapCtxFn: o.wrapCtxFn, + } + r.register() +} + +type atomicRouter struct { + iRouter gin.IRouter + groupPathMiddlewares map[string][]gin.HandlerFunc + singlePathMiddlewares map[string][]gin.HandlerFunc + iLogic AtomicLogicer + iResponse errcode.Responser + zapLog *zap.Logger + wrapCtxFn func(c *gin.Context) context.Context +} + +func (r *atomicRouter) register() { + r.iRouter.Handle("PUT", "/api/v1/stock/:id/atomic", r.withMiddleware("PUT", "/api/v1/stock/:id/atomic", r.Update_0)...) + r.iRouter.Handle("GET", "/api/v1/stock/:id/atomic", r.withMiddleware("GET", "/api/v1/stock/:id/atomic", r.Query_0)...) + +} + +func (r *atomicRouter) withMiddleware(method string, path string, fn gin.HandlerFunc) []gin.HandlerFunc { + handlerFns := []gin.HandlerFunc{} + + // determine if a route group is hit or miss, left prefix rule + for groupPath, fns := range r.groupPathMiddlewares { + if groupPath == "" || groupPath == "/" { + handlerFns = append(handlerFns, fns...) + continue + } + size := len(groupPath) + if len(path) < size { + continue + } + if groupPath == path[:size] { + handlerFns = append(handlerFns, fns...) 
+ } + } + + // determine if a single route has been hit + key := strings.ToUpper(method) + "->" + path + if fns, ok := r.singlePathMiddlewares[key]; ok { + handlerFns = append(handlerFns, fns...) + } + + return append(handlerFns, fn) +} + +func (r *atomicRouter) Update_0(c *gin.Context) { + req := &UpdateAtomicRequest{} + var err error + + if err = c.ShouldBindUri(req); err != nil { + r.zapLog.Warn("ShouldBindUri error", zap.Error(err), middleware.GCtxRequestIDField(c)) + r.iResponse.ParamError(c, err) + return + } + + if err = c.ShouldBindJSON(req); err != nil { + r.zapLog.Warn("ShouldBindJSON error", zap.Error(err), middleware.GCtxRequestIDField(c)) + r.iResponse.ParamError(c, err) + return + } + + var ctx context.Context + if r.wrapCtxFn != nil { + ctx = r.wrapCtxFn(c) + } else { + ctx = middleware.WrapCtx(c) + } + + out, err := r.iLogic.Update(ctx, req) + if err != nil { + if errors.Is(err, errcode.SkipResponse) { + return + } + r.iResponse.Error(c, err) + return + } + + r.iResponse.Success(c, out) +} + +func (r *atomicRouter) Query_0(c *gin.Context) { + req := &QueryAtomicRequest{} + var err error + + if err = c.ShouldBindUri(req); err != nil { + r.zapLog.Warn("ShouldBindUri error", zap.Error(err), middleware.GCtxRequestIDField(c)) + r.iResponse.ParamError(c, err) + return + } + + if err = c.ShouldBindQuery(req); err != nil { + r.zapLog.Warn("ShouldBindQuery error", zap.Error(err), middleware.GCtxRequestIDField(c)) + r.iResponse.ParamError(c, err) + return + } + + var ctx context.Context + if r.wrapCtxFn != nil { + ctx = r.wrapCtxFn(c) + } else { + ctx = middleware.WrapCtx(c) + } + + out, err := r.iLogic.Query(ctx, req) + if err != nil { + if errors.Is(err, errcode.SkipResponse) { + return + } + r.iResponse.Error(c, err) + return + } + + r.iResponse.Success(c, out) +} diff --git a/_13_sponge-dtm-cache/grpc+http/api/stock/v1/callback.pb.go b/_13_sponge-dtm-cache/grpc+http/api/stock/v1/callback.pb.go new file mode 100644 index 0000000..6745cfd --- /dev/null +++ b/_13_sponge-dtm-cache/grpc+http/api/stock/v1/callback.pb.go @@ -0,0 +1,325 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// versions: +// protoc-gen-go v1.28.0 +// protoc v4.25.2 +// source: api/stock/v1/callback.proto + +package v1 + +import ( + protoreflect "google.golang.org/protobuf/reflect/protoreflect" + protoimpl "google.golang.org/protobuf/runtime/protoimpl" + reflect "reflect" + sync "sync" +) + +const ( + // Verify that this generated code is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) + // Verify that runtime/protoimpl is sufficiently up-to-date. 
+ _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) +) + +type QueryPreparedRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields +} + +func (x *QueryPreparedRequest) Reset() { + *x = QueryPreparedRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_api_stock_v1_callback_proto_msgTypes[0] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *QueryPreparedRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*QueryPreparedRequest) ProtoMessage() {} + +func (x *QueryPreparedRequest) ProtoReflect() protoreflect.Message { + mi := &file_api_stock_v1_callback_proto_msgTypes[0] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use QueryPreparedRequest.ProtoReflect.Descriptor instead. +func (*QueryPreparedRequest) Descriptor() ([]byte, []int) { + return file_api_stock_v1_callback_proto_rawDescGZIP(), []int{0} +} + +type QueryPreparedReply struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields +} + +func (x *QueryPreparedReply) Reset() { + *x = QueryPreparedReply{} + if protoimpl.UnsafeEnabled { + mi := &file_api_stock_v1_callback_proto_msgTypes[1] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *QueryPreparedReply) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*QueryPreparedReply) ProtoMessage() {} + +func (x *QueryPreparedReply) ProtoReflect() protoreflect.Message { + mi := &file_api_stock_v1_callback_proto_msgTypes[1] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use QueryPreparedReply.ProtoReflect.Descriptor instead. +func (*QueryPreparedReply) Descriptor() ([]byte, []int) { + return file_api_stock_v1_callback_proto_rawDescGZIP(), []int{1} +} + +type DeleteCacheRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Key string `protobuf:"bytes,1,opt,name=key,proto3" json:"key"` +} + +func (x *DeleteCacheRequest) Reset() { + *x = DeleteCacheRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_api_stock_v1_callback_proto_msgTypes[2] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *DeleteCacheRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*DeleteCacheRequest) ProtoMessage() {} + +func (x *DeleteCacheRequest) ProtoReflect() protoreflect.Message { + mi := &file_api_stock_v1_callback_proto_msgTypes[2] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use DeleteCacheRequest.ProtoReflect.Descriptor instead. 
+func (*DeleteCacheRequest) Descriptor() ([]byte, []int) { + return file_api_stock_v1_callback_proto_rawDescGZIP(), []int{2} +} + +func (x *DeleteCacheRequest) GetKey() string { + if x != nil { + return x.Key + } + return "" +} + +type DeleteCacheReply struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields +} + +func (x *DeleteCacheReply) Reset() { + *x = DeleteCacheReply{} + if protoimpl.UnsafeEnabled { + mi := &file_api_stock_v1_callback_proto_msgTypes[3] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *DeleteCacheReply) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*DeleteCacheReply) ProtoMessage() {} + +func (x *DeleteCacheReply) ProtoReflect() protoreflect.Message { + mi := &file_api_stock_v1_callback_proto_msgTypes[3] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use DeleteCacheReply.ProtoReflect.Descriptor instead. +func (*DeleteCacheReply) Descriptor() ([]byte, []int) { + return file_api_stock_v1_callback_proto_rawDescGZIP(), []int{3} +} + +var File_api_stock_v1_callback_proto protoreflect.FileDescriptor + +var file_api_stock_v1_callback_proto_rawDesc = []byte{ + 0x0a, 0x1b, 0x61, 0x70, 0x69, 0x2f, 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x2f, 0x76, 0x31, 0x2f, 0x63, + 0x61, 0x6c, 0x6c, 0x62, 0x61, 0x63, 0x6b, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x0c, 0x61, + 0x70, 0x69, 0x2e, 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x2e, 0x76, 0x31, 0x1a, 0x1c, 0x67, 0x6f, 0x6f, + 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x61, 0x6e, 0x6e, 0x6f, 0x74, 0x61, 0x74, 0x69, + 0x6f, 0x6e, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x17, 0x76, 0x61, 0x6c, 0x69, 0x64, + 0x61, 0x74, 0x65, 0x2f, 0x76, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x65, 0x2e, 0x70, 0x72, 0x6f, + 0x74, 0x6f, 0x22, 0x16, 0x0a, 0x14, 0x51, 0x75, 0x65, 0x72, 0x79, 0x50, 0x72, 0x65, 0x70, 0x61, + 0x72, 0x65, 0x64, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x22, 0x14, 0x0a, 0x12, 0x51, 0x75, + 0x65, 0x72, 0x79, 0x50, 0x72, 0x65, 0x70, 0x61, 0x72, 0x65, 0x64, 0x52, 0x65, 0x70, 0x6c, 0x79, + 0x22, 0x2f, 0x0a, 0x12, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x43, 0x61, 0x63, 0x68, 0x65, 0x52, + 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x19, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, + 0x01, 0x28, 0x09, 0x42, 0x07, 0xfa, 0x42, 0x04, 0x72, 0x02, 0x10, 0x01, 0x52, 0x03, 0x6b, 0x65, + 0x79, 0x22, 0x12, 0x0a, 0x10, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x43, 0x61, 0x63, 0x68, 0x65, + 0x52, 0x65, 0x70, 0x6c, 0x79, 0x32, 0x85, 0x02, 0x0a, 0x08, 0x63, 0x61, 0x6c, 0x6c, 0x62, 0x61, + 0x63, 0x6b, 0x12, 0x81, 0x01, 0x0a, 0x0d, 0x51, 0x75, 0x65, 0x72, 0x79, 0x50, 0x72, 0x65, 0x70, + 0x61, 0x72, 0x65, 0x64, 0x12, 0x22, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x73, 0x74, 0x6f, 0x63, 0x6b, + 0x2e, 0x76, 0x31, 0x2e, 0x51, 0x75, 0x65, 0x72, 0x79, 0x50, 0x72, 0x65, 0x70, 0x61, 0x72, 0x65, + 0x64, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x20, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x73, + 0x74, 0x6f, 0x63, 0x6b, 0x2e, 0x76, 0x31, 0x2e, 0x51, 0x75, 0x65, 0x72, 0x79, 0x50, 0x72, 0x65, + 0x70, 0x61, 0x72, 0x65, 0x64, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0x2a, 0x82, 0xd3, 0xe4, 0x93, + 0x02, 0x24, 0x0a, 0x05, 0x5b, 0x63, 0x74, 0x78, 0x5d, 0x12, 0x1b, 0x2f, 0x61, 0x70, 0x69, 0x2f, + 0x76, 0x31, 0x2f, 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x2f, 0x71, 0x75, 0x65, 0x72, 0x79, 0x50, 
0x72, + 0x65, 0x70, 0x61, 0x72, 0x65, 0x64, 0x12, 0x75, 0x0a, 0x0b, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, + 0x43, 0x61, 0x63, 0x68, 0x65, 0x12, 0x20, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x73, 0x74, 0x6f, 0x63, + 0x6b, 0x2e, 0x76, 0x31, 0x2e, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x43, 0x61, 0x63, 0x68, 0x65, + 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1e, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x73, 0x74, + 0x6f, 0x63, 0x6b, 0x2e, 0x76, 0x31, 0x2e, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x43, 0x61, 0x63, + 0x68, 0x65, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0x24, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x1e, 0x22, + 0x19, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x76, 0x31, 0x2f, 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x2f, 0x64, + 0x65, 0x6c, 0x65, 0x74, 0x65, 0x43, 0x61, 0x63, 0x68, 0x65, 0x3a, 0x01, 0x2a, 0x42, 0x17, 0x5a, + 0x15, 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x73, 0x74, 0x6f, 0x63, 0x6b, + 0x2f, 0x76, 0x31, 0x3b, 0x76, 0x31, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, +} + +var ( + file_api_stock_v1_callback_proto_rawDescOnce sync.Once + file_api_stock_v1_callback_proto_rawDescData = file_api_stock_v1_callback_proto_rawDesc +) + +func file_api_stock_v1_callback_proto_rawDescGZIP() []byte { + file_api_stock_v1_callback_proto_rawDescOnce.Do(func() { + file_api_stock_v1_callback_proto_rawDescData = protoimpl.X.CompressGZIP(file_api_stock_v1_callback_proto_rawDescData) + }) + return file_api_stock_v1_callback_proto_rawDescData +} + +var file_api_stock_v1_callback_proto_msgTypes = make([]protoimpl.MessageInfo, 4) +var file_api_stock_v1_callback_proto_goTypes = []interface{}{ + (*QueryPreparedRequest)(nil), // 0: api.stock.v1.QueryPreparedRequest + (*QueryPreparedReply)(nil), // 1: api.stock.v1.QueryPreparedReply + (*DeleteCacheRequest)(nil), // 2: api.stock.v1.DeleteCacheRequest + (*DeleteCacheReply)(nil), // 3: api.stock.v1.DeleteCacheReply +} +var file_api_stock_v1_callback_proto_depIdxs = []int32{ + 0, // 0: api.stock.v1.callback.QueryPrepared:input_type -> api.stock.v1.QueryPreparedRequest + 2, // 1: api.stock.v1.callback.DeleteCache:input_type -> api.stock.v1.DeleteCacheRequest + 1, // 2: api.stock.v1.callback.QueryPrepared:output_type -> api.stock.v1.QueryPreparedReply + 3, // 3: api.stock.v1.callback.DeleteCache:output_type -> api.stock.v1.DeleteCacheReply + 2, // [2:4] is the sub-list for method output_type + 0, // [0:2] is the sub-list for method input_type + 0, // [0:0] is the sub-list for extension type_name + 0, // [0:0] is the sub-list for extension extendee + 0, // [0:0] is the sub-list for field type_name +} + +func init() { file_api_stock_v1_callback_proto_init() } +func file_api_stock_v1_callback_proto_init() { + if File_api_stock_v1_callback_proto != nil { + return + } + if !protoimpl.UnsafeEnabled { + file_api_stock_v1_callback_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*QueryPreparedRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_api_stock_v1_callback_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*QueryPreparedReply); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_api_stock_v1_callback_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*DeleteCacheRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return 
nil + } + } + file_api_stock_v1_callback_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*DeleteCacheReply); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + } + type x struct{} + out := protoimpl.TypeBuilder{ + File: protoimpl.DescBuilder{ + GoPackagePath: reflect.TypeOf(x{}).PkgPath(), + RawDescriptor: file_api_stock_v1_callback_proto_rawDesc, + NumEnums: 0, + NumMessages: 4, + NumExtensions: 0, + NumServices: 1, + }, + GoTypes: file_api_stock_v1_callback_proto_goTypes, + DependencyIndexes: file_api_stock_v1_callback_proto_depIdxs, + MessageInfos: file_api_stock_v1_callback_proto_msgTypes, + }.Build() + File_api_stock_v1_callback_proto = out.File + file_api_stock_v1_callback_proto_rawDesc = nil + file_api_stock_v1_callback_proto_goTypes = nil + file_api_stock_v1_callback_proto_depIdxs = nil +} diff --git a/_13_sponge-dtm-cache/grpc+http/api/stock/v1/callback.pb.validate.go b/_13_sponge-dtm-cache/grpc+http/api/stock/v1/callback.pb.validate.go new file mode 100644 index 0000000..aa2578e --- /dev/null +++ b/_13_sponge-dtm-cache/grpc+http/api/stock/v1/callback.pb.validate.go @@ -0,0 +1,453 @@ +// Code generated by protoc-gen-validate. DO NOT EDIT. +// source: api/stock/v1/callback.proto + +package v1 + +import ( + "bytes" + "errors" + "fmt" + "net" + "net/mail" + "net/url" + "regexp" + "sort" + "strings" + "time" + "unicode/utf8" + + "google.golang.org/protobuf/types/known/anypb" +) + +// ensure the imports are used +var ( + _ = bytes.MinRead + _ = errors.New("") + _ = fmt.Print + _ = utf8.UTFMax + _ = (*regexp.Regexp)(nil) + _ = (*strings.Reader)(nil) + _ = net.IPv4len + _ = time.Duration(0) + _ = (*url.URL)(nil) + _ = (*mail.Address)(nil) + _ = anypb.Any{} + _ = sort.Sort +) + +// Validate checks the field values on QueryPreparedRequest with the rules +// defined in the proto definition for this message. If any rules are +// violated, the first error encountered is returned, or nil if there are no violations. +func (m *QueryPreparedRequest) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on QueryPreparedRequest with the rules +// defined in the proto definition for this message. If any rules are +// violated, the result is a list of violation errors wrapped in +// QueryPreparedRequestMultiError, or nil if none found. +func (m *QueryPreparedRequest) ValidateAll() error { + return m.validate(true) +} + +func (m *QueryPreparedRequest) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + if len(errors) > 0 { + return QueryPreparedRequestMultiError(errors) + } + + return nil +} + +// QueryPreparedRequestMultiError is an error wrapping multiple validation +// errors returned by QueryPreparedRequest.ValidateAll() if the designated +// constraints aren't met. +type QueryPreparedRequestMultiError []error + +// Error returns a concatenation of all the error messages it wraps. +func (m QueryPreparedRequestMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m QueryPreparedRequestMultiError) AllErrors() []error { return m } + +// QueryPreparedRequestValidationError is the validation error returned by +// QueryPreparedRequest.Validate if the designated constraints aren't met. 
+type QueryPreparedRequestValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e QueryPreparedRequestValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e QueryPreparedRequestValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e QueryPreparedRequestValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e QueryPreparedRequestValidationError) Key() bool { return e.key } + +// ErrorName returns error name. +func (e QueryPreparedRequestValidationError) ErrorName() string { + return "QueryPreparedRequestValidationError" +} + +// Error satisfies the builtin error interface +func (e QueryPreparedRequestValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sQueryPreparedRequest.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = QueryPreparedRequestValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = QueryPreparedRequestValidationError{} + +// Validate checks the field values on QueryPreparedReply with the rules +// defined in the proto definition for this message. If any rules are +// violated, the first error encountered is returned, or nil if there are no violations. +func (m *QueryPreparedReply) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on QueryPreparedReply with the rules +// defined in the proto definition for this message. If any rules are +// violated, the result is a list of violation errors wrapped in +// QueryPreparedReplyMultiError, or nil if none found. +func (m *QueryPreparedReply) ValidateAll() error { + return m.validate(true) +} + +func (m *QueryPreparedReply) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + if len(errors) > 0 { + return QueryPreparedReplyMultiError(errors) + } + + return nil +} + +// QueryPreparedReplyMultiError is an error wrapping multiple validation errors +// returned by QueryPreparedReply.ValidateAll() if the designated constraints +// aren't met. +type QueryPreparedReplyMultiError []error + +// Error returns a concatenation of all the error messages it wraps. +func (m QueryPreparedReplyMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m QueryPreparedReplyMultiError) AllErrors() []error { return m } + +// QueryPreparedReplyValidationError is the validation error returned by +// QueryPreparedReply.Validate if the designated constraints aren't met. +type QueryPreparedReplyValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e QueryPreparedReplyValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e QueryPreparedReplyValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e QueryPreparedReplyValidationError) Cause() error { return e.cause } + +// Key function returns key value. 
+func (e QueryPreparedReplyValidationError) Key() bool { return e.key } + +// ErrorName returns error name. +func (e QueryPreparedReplyValidationError) ErrorName() string { + return "QueryPreparedReplyValidationError" +} + +// Error satisfies the builtin error interface +func (e QueryPreparedReplyValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sQueryPreparedReply.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = QueryPreparedReplyValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = QueryPreparedReplyValidationError{} + +// Validate checks the field values on DeleteCacheRequest with the rules +// defined in the proto definition for this message. If any rules are +// violated, the first error encountered is returned, or nil if there are no violations. +func (m *DeleteCacheRequest) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on DeleteCacheRequest with the rules +// defined in the proto definition for this message. If any rules are +// violated, the result is a list of violation errors wrapped in +// DeleteCacheRequestMultiError, or nil if none found. +func (m *DeleteCacheRequest) ValidateAll() error { + return m.validate(true) +} + +func (m *DeleteCacheRequest) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + if utf8.RuneCountInString(m.GetKey()) < 1 { + err := DeleteCacheRequestValidationError{ + field: "Key", + reason: "value length must be at least 1 runes", + } + if !all { + return err + } + errors = append(errors, err) + } + + if len(errors) > 0 { + return DeleteCacheRequestMultiError(errors) + } + + return nil +} + +// DeleteCacheRequestMultiError is an error wrapping multiple validation errors +// returned by DeleteCacheRequest.ValidateAll() if the designated constraints +// aren't met. +type DeleteCacheRequestMultiError []error + +// Error returns a concatenation of all the error messages it wraps. +func (m DeleteCacheRequestMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m DeleteCacheRequestMultiError) AllErrors() []error { return m } + +// DeleteCacheRequestValidationError is the validation error returned by +// DeleteCacheRequest.Validate if the designated constraints aren't met. +type DeleteCacheRequestValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e DeleteCacheRequestValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e DeleteCacheRequestValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e DeleteCacheRequestValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e DeleteCacheRequestValidationError) Key() bool { return e.key } + +// ErrorName returns error name. 
+func (e DeleteCacheRequestValidationError) ErrorName() string { + return "DeleteCacheRequestValidationError" +} + +// Error satisfies the builtin error interface +func (e DeleteCacheRequestValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sDeleteCacheRequest.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = DeleteCacheRequestValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = DeleteCacheRequestValidationError{} + +// Validate checks the field values on DeleteCacheReply with the rules defined +// in the proto definition for this message. If any rules are violated, the +// first error encountered is returned, or nil if there are no violations. +func (m *DeleteCacheReply) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on DeleteCacheReply with the rules +// defined in the proto definition for this message. If any rules are +// violated, the result is a list of violation errors wrapped in +// DeleteCacheReplyMultiError, or nil if none found. +func (m *DeleteCacheReply) ValidateAll() error { + return m.validate(true) +} + +func (m *DeleteCacheReply) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + if len(errors) > 0 { + return DeleteCacheReplyMultiError(errors) + } + + return nil +} + +// DeleteCacheReplyMultiError is an error wrapping multiple validation errors +// returned by DeleteCacheReply.ValidateAll() if the designated constraints +// aren't met. +type DeleteCacheReplyMultiError []error + +// Error returns a concatenation of all the error messages it wraps. +func (m DeleteCacheReplyMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m DeleteCacheReplyMultiError) AllErrors() []error { return m } + +// DeleteCacheReplyValidationError is the validation error returned by +// DeleteCacheReply.Validate if the designated constraints aren't met. +type DeleteCacheReplyValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e DeleteCacheReplyValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e DeleteCacheReplyValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e DeleteCacheReplyValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e DeleteCacheReplyValidationError) Key() bool { return e.key } + +// ErrorName returns error name. 
+func (e DeleteCacheReplyValidationError) ErrorName() string { return "DeleteCacheReplyValidationError" } + +// Error satisfies the builtin error interface +func (e DeleteCacheReplyValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sDeleteCacheReply.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = DeleteCacheReplyValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = DeleteCacheReplyValidationError{} diff --git a/_13_sponge-dtm-cache/grpc+http/api/stock/v1/callback.proto b/_13_sponge-dtm-cache/grpc+http/api/stock/v1/callback.proto new file mode 100644 index 0000000..3da37df --- /dev/null +++ b/_13_sponge-dtm-cache/grpc+http/api/stock/v1/callback.proto @@ -0,0 +1,34 @@ +syntax = "proto3"; + +package api.stock.v1; + +import "google/api/annotations.proto"; +import "validate/validate.proto"; + +option go_package = "stock/api/stock/v1;v1"; + +service callback { + // 反查数据 + rpc QueryPrepared(QueryPreparedRequest) returns (QueryPreparedReply) { + option (google.api.http) = { + get: "/api/v1/stock/queryPrepared" + selector: "[ctx]" + }; + } + + // 删除缓存 + rpc DeleteCache(DeleteCacheRequest) returns (DeleteCacheReply) { + option (google.api.http) = { + post: "/api/v1/stock/deleteCache" + body: "*" + }; + } +} + +message QueryPreparedRequest {} +message QueryPreparedReply {} + +message DeleteCacheRequest { + string key = 1 [(validate.rules).string.min_len = 1]; +} +message DeleteCacheReply {} diff --git a/_13_sponge-dtm-cache/grpc+http/api/stock/v1/callback_grpc.pb.go b/_13_sponge-dtm-cache/grpc+http/api/stock/v1/callback_grpc.pb.go new file mode 100644 index 0000000..f793b92 --- /dev/null +++ b/_13_sponge-dtm-cache/grpc+http/api/stock/v1/callback_grpc.pb.go @@ -0,0 +1,150 @@ +// Code generated by protoc-gen-go-grpc. DO NOT EDIT. +// versions: +// - protoc-gen-go-grpc v1.3.0 +// - protoc v4.25.2 +// source: api/stock/v1/callback.proto + +package v1 + +import ( + context "context" + grpc "google.golang.org/grpc" + codes "google.golang.org/grpc/codes" + status "google.golang.org/grpc/status" +) + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +// Requires gRPC-Go v1.32.0 or later. +const _ = grpc.SupportPackageIsVersion7 + +const ( + Callback_QueryPrepared_FullMethodName = "/api.stock.v1.callback/QueryPrepared" + Callback_DeleteCache_FullMethodName = "/api.stock.v1.callback/DeleteCache" +) + +// CallbackClient is the client API for Callback service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://pkg.go.dev/google.golang.org/grpc/?tab=doc#ClientConn.NewStream. 
+type CallbackClient interface { + // 反查数据 + QueryPrepared(ctx context.Context, in *QueryPreparedRequest, opts ...grpc.CallOption) (*QueryPreparedReply, error) + // 删除缓存 + DeleteCache(ctx context.Context, in *DeleteCacheRequest, opts ...grpc.CallOption) (*DeleteCacheReply, error) +} + +type callbackClient struct { + cc grpc.ClientConnInterface +} + +func NewCallbackClient(cc grpc.ClientConnInterface) CallbackClient { + return &callbackClient{cc} +} + +func (c *callbackClient) QueryPrepared(ctx context.Context, in *QueryPreparedRequest, opts ...grpc.CallOption) (*QueryPreparedReply, error) { + out := new(QueryPreparedReply) + err := c.cc.Invoke(ctx, Callback_QueryPrepared_FullMethodName, in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *callbackClient) DeleteCache(ctx context.Context, in *DeleteCacheRequest, opts ...grpc.CallOption) (*DeleteCacheReply, error) { + out := new(DeleteCacheReply) + err := c.cc.Invoke(ctx, Callback_DeleteCache_FullMethodName, in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// CallbackServer is the server API for Callback service. +// All implementations must embed UnimplementedCallbackServer +// for forward compatibility +type CallbackServer interface { + // 反查数据 + QueryPrepared(context.Context, *QueryPreparedRequest) (*QueryPreparedReply, error) + // 删除缓存 + DeleteCache(context.Context, *DeleteCacheRequest) (*DeleteCacheReply, error) + mustEmbedUnimplementedCallbackServer() +} + +// UnimplementedCallbackServer must be embedded to have forward compatible implementations. +type UnimplementedCallbackServer struct { +} + +func (UnimplementedCallbackServer) QueryPrepared(context.Context, *QueryPreparedRequest) (*QueryPreparedReply, error) { + return nil, status.Errorf(codes.Unimplemented, "method QueryPrepared not implemented") +} +func (UnimplementedCallbackServer) DeleteCache(context.Context, *DeleteCacheRequest) (*DeleteCacheReply, error) { + return nil, status.Errorf(codes.Unimplemented, "method DeleteCache not implemented") +} +func (UnimplementedCallbackServer) mustEmbedUnimplementedCallbackServer() {} + +// UnsafeCallbackServer may be embedded to opt out of forward compatibility for this service. +// Use of this interface is not recommended, as added methods to CallbackServer will +// result in compilation errors. 
+type UnsafeCallbackServer interface { + mustEmbedUnimplementedCallbackServer() +} + +func RegisterCallbackServer(s grpc.ServiceRegistrar, srv CallbackServer) { + s.RegisterService(&Callback_ServiceDesc, srv) +} + +func _Callback_QueryPrepared_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(QueryPreparedRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CallbackServer).QueryPrepared(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: Callback_QueryPrepared_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CallbackServer).QueryPrepared(ctx, req.(*QueryPreparedRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Callback_DeleteCache_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteCacheRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CallbackServer).DeleteCache(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: Callback_DeleteCache_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CallbackServer).DeleteCache(ctx, req.(*DeleteCacheRequest)) + } + return interceptor(ctx, in, info, handler) +} + +// Callback_ServiceDesc is the grpc.ServiceDesc for Callback service. +// It's only intended for direct use with grpc.RegisterService, +// and not to be introspected or modified (even as a copy) +var Callback_ServiceDesc = grpc.ServiceDesc{ + ServiceName: "api.stock.v1.callback", + HandlerType: (*CallbackServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "QueryPrepared", + Handler: _Callback_QueryPrepared_Handler, + }, + { + MethodName: "DeleteCache", + Handler: _Callback_DeleteCache_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "api/stock/v1/callback.proto", +} diff --git a/_13_sponge-dtm-cache/grpc+http/api/stock/v1/callback_router.pb.go b/_13_sponge-dtm-cache/grpc+http/api/stock/v1/callback_router.pb.go new file mode 100644 index 0000000..d98730b --- /dev/null +++ b/_13_sponge-dtm-cache/grpc+http/api/stock/v1/callback_router.pb.go @@ -0,0 +1,204 @@ +// Code generated by https://github.com/zhufuyi/sponge, DO NOT EDIT. 
+ +package v1 + +import ( + "context" + "errors" + "strings" + + "github.com/gin-gonic/gin" + "go.uber.org/zap" + + "github.com/zhufuyi/sponge/pkg/errcode" + "github.com/zhufuyi/sponge/pkg/gin/middleware" +) + +type CallbackLogicer interface { + QueryPrepared(ctx context.Context, req *QueryPreparedRequest) (*QueryPreparedReply, error) + DeleteCache(ctx context.Context, req *DeleteCacheRequest) (*DeleteCacheReply, error) +} + +type CallbackOption func(*callbackOptions) + +type callbackOptions struct { + isFromRPC bool + responser errcode.Responser + zapLog *zap.Logger + httpErrors []*errcode.Error + rpcStatus []*errcode.RPCStatus + wrapCtxFn func(c *gin.Context) context.Context +} + +func (o *callbackOptions) apply(opts ...CallbackOption) { + for _, opt := range opts { + opt(o) + } +} + +func WithCallbackHTTPResponse() CallbackOption { + return func(o *callbackOptions) { + o.isFromRPC = false + } +} + +func WithCallbackRPCResponse() CallbackOption { + return func(o *callbackOptions) { + o.isFromRPC = true + } +} + +func WithCallbackResponser(responser errcode.Responser) CallbackOption { + return func(o *callbackOptions) { + o.responser = responser + } +} + +func WithCallbackLogger(zapLog *zap.Logger) CallbackOption { + return func(o *callbackOptions) { + o.zapLog = zapLog + } +} + +func WithCallbackErrorToHTTPCode(e ...*errcode.Error) CallbackOption { + return func(o *callbackOptions) { + o.httpErrors = e + } +} + +func WithCallbackRPCStatusToHTTPCode(s ...*errcode.RPCStatus) CallbackOption { + return func(o *callbackOptions) { + o.rpcStatus = s + } +} + +func WithCallbackWrapCtx(wrapCtxFn func(c *gin.Context) context.Context) CallbackOption { + return func(o *callbackOptions) { + o.wrapCtxFn = wrapCtxFn + } +} + +func RegisterCallbackRouter( + iRouter gin.IRouter, + groupPathMiddlewares map[string][]gin.HandlerFunc, + singlePathMiddlewares map[string][]gin.HandlerFunc, + iLogic CallbackLogicer, + opts ...CallbackOption) { + + o := &callbackOptions{} + o.apply(opts...) + + if o.responser == nil { + o.responser = errcode.NewResponser(o.isFromRPC, o.httpErrors, o.rpcStatus) + } + if o.zapLog == nil { + o.zapLog, _ = zap.NewProduction() + } + + r := &callbackRouter{ + iRouter: iRouter, + groupPathMiddlewares: groupPathMiddlewares, + singlePathMiddlewares: singlePathMiddlewares, + iLogic: iLogic, + iResponse: o.responser, + zapLog: o.zapLog, + wrapCtxFn: o.wrapCtxFn, + } + r.register() +} + +type callbackRouter struct { + iRouter gin.IRouter + groupPathMiddlewares map[string][]gin.HandlerFunc + singlePathMiddlewares map[string][]gin.HandlerFunc + iLogic CallbackLogicer + iResponse errcode.Responser + zapLog *zap.Logger + wrapCtxFn func(c *gin.Context) context.Context +} + +func (r *callbackRouter) register() { + r.iRouter.Handle("GET", "/api/v1/stock/queryPrepared", r.withMiddleware("GET", "/api/v1/stock/queryPrepared", r.QueryPrepared_0)...) + r.iRouter.Handle("POST", "/api/v1/stock/deleteCache", r.withMiddleware("POST", "/api/v1/stock/deleteCache", r.DeleteCache_0)...) + +} + +func (r *callbackRouter) withMiddleware(method string, path string, fn gin.HandlerFunc) []gin.HandlerFunc { + handlerFns := []gin.HandlerFunc{} + + // determine if a route group is hit or miss, left prefix rule + for groupPath, fns := range r.groupPathMiddlewares { + if groupPath == "" || groupPath == "/" { + handlerFns = append(handlerFns, fns...) + continue + } + size := len(groupPath) + if len(path) < size { + continue + } + if groupPath == path[:size] { + handlerFns = append(handlerFns, fns...) 
+ } + } + + // determine if a single route has been hit + key := strings.ToUpper(method) + "->" + path + if fns, ok := r.singlePathMiddlewares[key]; ok { + handlerFns = append(handlerFns, fns...) + } + + return append(handlerFns, fn) +} + +func (r *callbackRouter) QueryPrepared_0(c *gin.Context) { + req := &QueryPreparedRequest{} + var err error + + if err = c.ShouldBindQuery(req); err != nil { + r.zapLog.Warn("ShouldBindQuery error", zap.Error(err), middleware.GCtxRequestIDField(c)) + r.iResponse.ParamError(c, err) + return + } + + var ctx context.Context = c + + out, err := r.iLogic.QueryPrepared(ctx, req) + if err != nil { + if errors.Is(err, errcode.SkipResponse) { + return + } + r.iResponse.Error(c, err) + return + } + + r.iResponse.Success(c, out) +} + +func (r *callbackRouter) DeleteCache_0(c *gin.Context) { + req := &DeleteCacheRequest{} + var err error + + if err = c.ShouldBindJSON(req); err != nil { + r.zapLog.Warn("ShouldBindJSON error", zap.Error(err), middleware.GCtxRequestIDField(c)) + r.iResponse.ParamError(c, err) + return + } + + var ctx context.Context + if r.wrapCtxFn != nil { + ctx = r.wrapCtxFn(c) + } else { + ctx = middleware.WrapCtx(c) + } + + out, err := r.iLogic.DeleteCache(ctx, req) + if err != nil { + if errors.Is(err, errcode.SkipResponse) { + return + } + r.iResponse.Error(c, err) + return + } + + r.iResponse.Success(c, out) +} diff --git a/_13_sponge-dtm-cache/grpc+http/api/stock/v1/downgrade.pb.go b/_13_sponge-dtm-cache/grpc+http/api/stock/v1/downgrade.pb.go new file mode 100644 index 0000000..43e8185 --- /dev/null +++ b/_13_sponge-dtm-cache/grpc+http/api/stock/v1/downgrade.pb.go @@ -0,0 +1,556 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// versions: +// protoc-gen-go v1.28.0 +// protoc v4.25.2 +// source: api/stock/v1/downgrade.proto + +package v1 + +import ( + protoreflect "google.golang.org/protobuf/reflect/protoreflect" + protoimpl "google.golang.org/protobuf/runtime/protoimpl" + reflect "reflect" + sync "sync" +) + +const ( + // Verify that this generated code is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) + // Verify that runtime/protoimpl is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) +) + +type UpdateDowngradeRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Id uint64 `protobuf:"varint,1,opt,name=id,proto3" json:"id" uri:"id"` + Stock uint32 `protobuf:"varint,2,opt,name=stock,proto3" json:"stock"` // 库存数量 +} + +func (x *UpdateDowngradeRequest) Reset() { + *x = UpdateDowngradeRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_api_stock_v1_downgrade_proto_msgTypes[0] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *UpdateDowngradeRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*UpdateDowngradeRequest) ProtoMessage() {} + +func (x *UpdateDowngradeRequest) ProtoReflect() protoreflect.Message { + mi := &file_api_stock_v1_downgrade_proto_msgTypes[0] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use UpdateDowngradeRequest.ProtoReflect.Descriptor instead. 
+func (*UpdateDowngradeRequest) Descriptor() ([]byte, []int) { + return file_api_stock_v1_downgrade_proto_rawDescGZIP(), []int{0} +} + +func (x *UpdateDowngradeRequest) GetId() uint64 { + if x != nil { + return x.Id + } + return 0 +} + +func (x *UpdateDowngradeRequest) GetStock() uint32 { + if x != nil { + return x.Stock + } + return 0 +} + +type UpdateDowngradeRequestReply struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields +} + +func (x *UpdateDowngradeRequestReply) Reset() { + *x = UpdateDowngradeRequestReply{} + if protoimpl.UnsafeEnabled { + mi := &file_api_stock_v1_downgrade_proto_msgTypes[1] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *UpdateDowngradeRequestReply) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*UpdateDowngradeRequestReply) ProtoMessage() {} + +func (x *UpdateDowngradeRequestReply) ProtoReflect() protoreflect.Message { + mi := &file_api_stock_v1_downgrade_proto_msgTypes[1] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use UpdateDowngradeRequestReply.ProtoReflect.Descriptor instead. +func (*UpdateDowngradeRequestReply) Descriptor() ([]byte, []int) { + return file_api_stock_v1_downgrade_proto_rawDescGZIP(), []int{1} +} + +type QueryDowngradeRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Id uint64 `protobuf:"varint,1,opt,name=id,proto3" json:"id" uri:"id"` +} + +func (x *QueryDowngradeRequest) Reset() { + *x = QueryDowngradeRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_api_stock_v1_downgrade_proto_msgTypes[2] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *QueryDowngradeRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*QueryDowngradeRequest) ProtoMessage() {} + +func (x *QueryDowngradeRequest) ProtoReflect() protoreflect.Message { + mi := &file_api_stock_v1_downgrade_proto_msgTypes[2] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use QueryDowngradeRequest.ProtoReflect.Descriptor instead. 
+func (*QueryDowngradeRequest) Descriptor() ([]byte, []int) { + return file_api_stock_v1_downgrade_proto_rawDescGZIP(), []int{2} +} + +func (x *QueryDowngradeRequest) GetId() uint64 { + if x != nil { + return x.Id + } + return 0 +} + +type QueryDowngradeReply struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Id uint64 `protobuf:"varint,1,opt,name=id,proto3" json:"id"` + Stock uint32 `protobuf:"varint,2,opt,name=stock,proto3" json:"stock"` // 库存数量 +} + +func (x *QueryDowngradeReply) Reset() { + *x = QueryDowngradeReply{} + if protoimpl.UnsafeEnabled { + mi := &file_api_stock_v1_downgrade_proto_msgTypes[3] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *QueryDowngradeReply) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*QueryDowngradeReply) ProtoMessage() {} + +func (x *QueryDowngradeReply) ProtoReflect() protoreflect.Message { + mi := &file_api_stock_v1_downgrade_proto_msgTypes[3] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use QueryDowngradeReply.ProtoReflect.Descriptor instead. +func (*QueryDowngradeReply) Descriptor() ([]byte, []int) { + return file_api_stock_v1_downgrade_proto_rawDescGZIP(), []int{3} +} + +func (x *QueryDowngradeReply) GetId() uint64 { + if x != nil { + return x.Id + } + return 0 +} + +func (x *QueryDowngradeReply) GetStock() uint32 { + if x != nil { + return x.Stock + } + return 0 +} + +type DowngradeBranchRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Gid string `protobuf:"bytes,1,opt,name=gid,proto3" json:"gid"` // dtm gid + Key string `protobuf:"bytes,2,opt,name=key,proto3" json:"key"` // 缓存key + Id uint64 `protobuf:"varint,3,opt,name=id,proto3" json:"id"` + Stock uint32 `protobuf:"varint,4,opt,name=stock,proto3" json:"stock"` // 库存数量 +} + +func (x *DowngradeBranchRequest) Reset() { + *x = DowngradeBranchRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_api_stock_v1_downgrade_proto_msgTypes[4] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *DowngradeBranchRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*DowngradeBranchRequest) ProtoMessage() {} + +func (x *DowngradeBranchRequest) ProtoReflect() protoreflect.Message { + mi := &file_api_stock_v1_downgrade_proto_msgTypes[4] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use DowngradeBranchRequest.ProtoReflect.Descriptor instead. 
+func (*DowngradeBranchRequest) Descriptor() ([]byte, []int) { + return file_api_stock_v1_downgrade_proto_rawDescGZIP(), []int{4} +} + +func (x *DowngradeBranchRequest) GetGid() string { + if x != nil { + return x.Gid + } + return "" +} + +func (x *DowngradeBranchRequest) GetKey() string { + if x != nil { + return x.Key + } + return "" +} + +func (x *DowngradeBranchRequest) GetId() uint64 { + if x != nil { + return x.Id + } + return 0 +} + +func (x *DowngradeBranchRequest) GetStock() uint32 { + if x != nil { + return x.Stock + } + return 0 +} + +type DowngradeBranchReply struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields +} + +func (x *DowngradeBranchReply) Reset() { + *x = DowngradeBranchReply{} + if protoimpl.UnsafeEnabled { + mi := &file_api_stock_v1_downgrade_proto_msgTypes[5] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *DowngradeBranchReply) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*DowngradeBranchReply) ProtoMessage() {} + +func (x *DowngradeBranchReply) ProtoReflect() protoreflect.Message { + mi := &file_api_stock_v1_downgrade_proto_msgTypes[5] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use DowngradeBranchReply.ProtoReflect.Descriptor instead. +func (*DowngradeBranchReply) Descriptor() ([]byte, []int) { + return file_api_stock_v1_downgrade_proto_rawDescGZIP(), []int{5} +} + +var File_api_stock_v1_downgrade_proto protoreflect.FileDescriptor + +var file_api_stock_v1_downgrade_proto_rawDesc = []byte{ + 0x0a, 0x1c, 0x61, 0x70, 0x69, 0x2f, 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x2f, 0x76, 0x31, 0x2f, 0x64, + 0x6f, 0x77, 0x6e, 0x67, 0x72, 0x61, 0x64, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x0c, + 0x61, 0x70, 0x69, 0x2e, 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x2e, 0x76, 0x31, 0x1a, 0x1c, 0x67, 0x6f, + 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x61, 0x6e, 0x6e, 0x6f, 0x74, 0x61, 0x74, + 0x69, 0x6f, 0x6e, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x2e, 0x70, 0x72, 0x6f, 0x74, + 0x6f, 0x63, 0x2d, 0x67, 0x65, 0x6e, 0x2d, 0x6f, 0x70, 0x65, 0x6e, 0x61, 0x70, 0x69, 0x76, 0x32, + 0x2f, 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2f, 0x61, 0x6e, 0x6e, 0x6f, 0x74, 0x61, 0x74, + 0x69, 0x6f, 0x6e, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x13, 0x74, 0x61, 0x67, 0x67, + 0x65, 0x72, 0x2f, 0x74, 0x61, 0x67, 0x67, 0x65, 0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, + 0x17, 0x76, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x65, 0x2f, 0x76, 0x61, 0x6c, 0x69, 0x64, 0x61, + 0x74, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, 0x5d, 0x0a, 0x16, 0x55, 0x70, 0x64, 0x61, + 0x74, 0x65, 0x44, 0x6f, 0x77, 0x6e, 0x67, 0x72, 0x61, 0x64, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, + 0x73, 0x74, 0x12, 0x24, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x42, 0x14, + 0xfa, 0x42, 0x04, 0x32, 0x02, 0x20, 0x00, 0x9a, 0x84, 0x9e, 0x03, 0x08, 0x75, 0x72, 0x69, 0x3a, + 0x22, 0x69, 0x64, 0x22, 0x52, 0x02, 0x69, 0x64, 0x12, 0x1d, 0x0a, 0x05, 0x73, 0x74, 0x6f, 0x63, + 0x6b, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0d, 0x42, 0x07, 0xfa, 0x42, 0x04, 0x2a, 0x02, 0x20, 0x00, + 0x52, 0x05, 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x22, 0x1d, 0x0a, 0x1b, 0x55, 0x70, 0x64, 0x61, 0x74, + 0x65, 0x44, 0x6f, 0x77, 0x6e, 0x67, 0x72, 0x61, 0x64, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, + 0x74, 0x52, 0x65, 0x70, 0x6c, 
0x79, 0x22, 0x3d, 0x0a, 0x15, 0x51, 0x75, 0x65, 0x72, 0x79, 0x44, + 0x6f, 0x77, 0x6e, 0x67, 0x72, 0x61, 0x64, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, + 0x24, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x42, 0x14, 0xfa, 0x42, 0x04, + 0x32, 0x02, 0x20, 0x00, 0x9a, 0x84, 0x9e, 0x03, 0x08, 0x75, 0x72, 0x69, 0x3a, 0x22, 0x69, 0x64, + 0x22, 0x52, 0x02, 0x69, 0x64, 0x22, 0x3b, 0x0a, 0x13, 0x51, 0x75, 0x65, 0x72, 0x79, 0x44, 0x6f, + 0x77, 0x6e, 0x67, 0x72, 0x61, 0x64, 0x65, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x12, 0x0e, 0x0a, 0x02, + 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x52, 0x02, 0x69, 0x64, 0x12, 0x14, 0x0a, 0x05, + 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x05, 0x73, 0x74, 0x6f, + 0x63, 0x6b, 0x22, 0x86, 0x01, 0x0a, 0x16, 0x44, 0x6f, 0x77, 0x6e, 0x67, 0x72, 0x61, 0x64, 0x65, + 0x42, 0x72, 0x61, 0x6e, 0x63, 0x68, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x19, 0x0a, + 0x03, 0x67, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x42, 0x07, 0xfa, 0x42, 0x04, 0x72, + 0x02, 0x10, 0x01, 0x52, 0x03, 0x67, 0x69, 0x64, 0x12, 0x19, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, + 0x02, 0x20, 0x01, 0x28, 0x09, 0x42, 0x07, 0xfa, 0x42, 0x04, 0x72, 0x02, 0x10, 0x01, 0x52, 0x03, + 0x6b, 0x65, 0x79, 0x12, 0x17, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x03, 0x20, 0x01, 0x28, 0x04, 0x42, + 0x07, 0xfa, 0x42, 0x04, 0x32, 0x02, 0x20, 0x00, 0x52, 0x02, 0x69, 0x64, 0x12, 0x1d, 0x0a, 0x05, + 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0d, 0x42, 0x07, 0xfa, 0x42, 0x04, + 0x2a, 0x02, 0x20, 0x00, 0x52, 0x05, 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x22, 0x16, 0x0a, 0x14, 0x44, + 0x6f, 0x77, 0x6e, 0x67, 0x72, 0x61, 0x64, 0x65, 0x42, 0x72, 0x61, 0x6e, 0x63, 0x68, 0x52, 0x65, + 0x70, 0x6c, 0x79, 0x32, 0xb8, 0x05, 0x0a, 0x09, 0x64, 0x6f, 0x77, 0x6e, 0x67, 0x72, 0x61, 0x64, + 0x65, 0x12, 0xf0, 0x01, 0x0a, 0x06, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x12, 0x24, 0x2e, 0x61, + 0x70, 0x69, 0x2e, 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x2e, 0x76, 0x31, 0x2e, 0x55, 0x70, 0x64, 0x61, + 0x74, 0x65, 0x44, 0x6f, 0x77, 0x6e, 0x67, 0x72, 0x61, 0x64, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, + 0x73, 0x74, 0x1a, 0x29, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x2e, 0x76, + 0x31, 0x2e, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x44, 0x6f, 0x77, 0x6e, 0x67, 0x72, 0x61, 0x64, + 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0x94, 0x01, + 0x92, 0x41, 0x6a, 0x0a, 0x23, 0x63, 0x61, 0x73, 0x65, 0x20, 0x34, 0x3a, 0x20, 0xe5, 0x8d, 0x87, + 0xe9, 0x99, 0x8d, 0xe7, 0xba, 0xa7, 0xe4, 0xb8, 0xad, 0xe7, 0x9a, 0x84, 0xe5, 0xbc, 0xba, 0xe4, + 0xb8, 0x80, 0xe8, 0x87, 0xb4, 0xe6, 0x80, 0xa7, 0x12, 0x0c, 0xe6, 0x9b, 0xb4, 0xe6, 0x96, 0xb0, + 0xe6, 0x95, 0xb0, 0xe6, 0x8d, 0xae, 0x1a, 0x35, 0xe6, 0x9b, 0xb4, 0xe6, 0x96, 0xb0, 0xe6, 0x95, + 0xb0, 0xe6, 0x8d, 0xae, 0xef, 0xbc, 0x8c, 0xe5, 0x8d, 0x87, 0xe9, 0x99, 0x8d, 0xe7, 0xba, 0xa7, + 0xe4, 0xb8, 0xad, 0xe7, 0x9a, 0x84, 0x44, 0x42, 0xe5, 0x92, 0x8c, 0xe7, 0xbc, 0x93, 0xe5, 0xad, + 0x98, 0xe5, 0xbc, 0xba, 0xe4, 0xb8, 0x80, 0xe8, 0x87, 0xb4, 0xe6, 0x80, 0xa7, 0x82, 0xd3, 0xe4, + 0x93, 0x02, 0x21, 0x1a, 0x1c, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x76, 0x31, 0x2f, 0x73, 0x74, 0x6f, + 0x63, 0x6b, 0x2f, 0x7b, 0x69, 0x64, 0x7d, 0x2f, 0x64, 0x6f, 0x77, 0x6e, 0x67, 0x72, 0x61, 0x64, + 0x65, 0x3a, 0x01, 0x2a, 0x12, 0xb9, 0x01, 0x0a, 0x05, 0x51, 0x75, 0x65, 0x72, 0x79, 0x12, 0x23, + 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x2e, 0x76, 0x31, 0x2e, 0x51, 0x75, + 0x65, 0x72, 0x79, 0x44, 0x6f, 0x77, 0x6e, 0x67, 0x72, 
0x61, 0x64, 0x65, 0x52, 0x65, 0x71, 0x75, + 0x65, 0x73, 0x74, 0x1a, 0x21, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x2e, + 0x76, 0x31, 0x2e, 0x51, 0x75, 0x65, 0x72, 0x79, 0x44, 0x6f, 0x77, 0x6e, 0x67, 0x72, 0x61, 0x64, + 0x65, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0x68, 0x92, 0x41, 0x41, 0x0a, 0x23, 0x63, 0x61, 0x73, + 0x65, 0x20, 0x34, 0x3a, 0x20, 0xe5, 0x8d, 0x87, 0xe9, 0x99, 0x8d, 0xe7, 0xba, 0xa7, 0xe4, 0xb8, + 0xad, 0xe7, 0x9a, 0x84, 0xe5, 0xbc, 0xba, 0xe4, 0xb8, 0x80, 0xe8, 0x87, 0xb4, 0xe6, 0x80, 0xa7, + 0x12, 0x0c, 0xe6, 0x9f, 0xa5, 0xe8, 0xaf, 0xa2, 0xe6, 0x95, 0xb0, 0xe6, 0x8d, 0xae, 0x1a, 0x0c, + 0xe6, 0x9f, 0xa5, 0xe8, 0xaf, 0xa2, 0xe6, 0x95, 0xb0, 0xe6, 0x8d, 0xae, 0x82, 0xd3, 0xe4, 0x93, + 0x02, 0x1e, 0x12, 0x1c, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x76, 0x31, 0x2f, 0x73, 0x74, 0x6f, 0x63, + 0x6b, 0x2f, 0x7b, 0x69, 0x64, 0x7d, 0x2f, 0x64, 0x6f, 0x77, 0x6e, 0x67, 0x72, 0x61, 0x64, 0x65, + 0x12, 0xfb, 0x01, 0x0a, 0x0f, 0x44, 0x6f, 0x77, 0x6e, 0x67, 0x72, 0x61, 0x64, 0x65, 0x42, 0x72, + 0x61, 0x6e, 0x63, 0x68, 0x12, 0x24, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x73, 0x74, 0x6f, 0x63, 0x6b, + 0x2e, 0x76, 0x31, 0x2e, 0x44, 0x6f, 0x77, 0x6e, 0x67, 0x72, 0x61, 0x64, 0x65, 0x42, 0x72, 0x61, + 0x6e, 0x63, 0x68, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x22, 0x2e, 0x61, 0x70, 0x69, + 0x2e, 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x2e, 0x76, 0x31, 0x2e, 0x44, 0x6f, 0x77, 0x6e, 0x67, 0x72, + 0x61, 0x64, 0x65, 0x42, 0x72, 0x61, 0x6e, 0x63, 0x68, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0x9d, + 0x01, 0x92, 0x41, 0x6b, 0x0a, 0x23, 0x63, 0x61, 0x73, 0x65, 0x20, 0x34, 0x3a, 0x20, 0xe5, 0x8d, + 0x87, 0xe9, 0x99, 0x8d, 0xe7, 0xba, 0xa7, 0xe4, 0xb8, 0xad, 0xe7, 0x9a, 0x84, 0xe5, 0xbc, 0xba, + 0xe4, 0xb8, 0x80, 0xe8, 0x87, 0xb4, 0xe6, 0x80, 0xa7, 0x12, 0x21, 0xe5, 0x8d, 0x87, 0xe9, 0x99, + 0x8d, 0xe7, 0xba, 0xa7, 0xe4, 0xb8, 0xad, 0xe7, 0x9a, 0x84, 0xe5, 0xbc, 0xba, 0xe4, 0xb8, 0x80, + 0xe8, 0x87, 0xb4, 0xe6, 0x80, 0xa7, 0xe5, 0x88, 0x86, 0xe6, 0x94, 0xaf, 0x1a, 0x21, 0xe5, 0x8d, + 0x87, 0xe9, 0x99, 0x8d, 0xe7, 0xba, 0xa7, 0xe4, 0xb8, 0xad, 0xe7, 0x9a, 0x84, 0xe5, 0xbc, 0xba, + 0xe4, 0xb8, 0x80, 0xe8, 0x87, 0xb4, 0xe6, 0x80, 0xa7, 0xe5, 0x88, 0x86, 0xe6, 0x94, 0xaf, 0x82, + 0xd3, 0xe4, 0x93, 0x02, 0x29, 0x0a, 0x05, 0x5b, 0x63, 0x74, 0x78, 0x5d, 0x22, 0x1d, 0x2f, 0x61, + 0x70, 0x69, 0x2f, 0x76, 0x31, 0x2f, 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x2f, 0x64, 0x6f, 0x77, 0x6e, + 0x67, 0x72, 0x61, 0x64, 0x65, 0x42, 0x72, 0x61, 0x6e, 0x63, 0x68, 0x3a, 0x01, 0x2a, 0x42, 0xb4, + 0x01, 0x5a, 0x15, 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x73, 0x74, 0x6f, + 0x63, 0x6b, 0x2f, 0x76, 0x31, 0x3b, 0x76, 0x31, 0x92, 0x41, 0x99, 0x01, 0x12, 0x15, 0x0a, 0x0e, + 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x20, 0x61, 0x70, 0x69, 0x20, 0x64, 0x6f, 0x63, 0x73, 0x32, 0x03, + 0x32, 0x2e, 0x30, 0x1a, 0x0e, 0x6c, 0x6f, 0x63, 0x61, 0x6c, 0x68, 0x6f, 0x73, 0x74, 0x3a, 0x38, + 0x30, 0x38, 0x30, 0x2a, 0x02, 0x01, 0x02, 0x32, 0x10, 0x61, 0x70, 0x70, 0x6c, 0x69, 0x63, 0x61, + 0x74, 0x69, 0x6f, 0x6e, 0x2f, 0x6a, 0x73, 0x6f, 0x6e, 0x3a, 0x10, 0x61, 0x70, 0x70, 0x6c, 0x69, + 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2f, 0x6a, 0x73, 0x6f, 0x6e, 0x5a, 0x48, 0x0a, 0x46, 0x0a, + 0x0a, 0x42, 0x65, 0x61, 0x72, 0x65, 0x72, 0x41, 0x75, 0x74, 0x68, 0x12, 0x38, 0x08, 0x02, 0x12, + 0x23, 0x54, 0x79, 0x70, 0x65, 0x20, 0x42, 0x65, 0x61, 0x72, 0x65, 0x72, 0x20, 0x79, 0x6f, 0x75, + 0x72, 0x2d, 0x6a, 0x77, 0x74, 0x2d, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x20, 0x74, 0x6f, 0x20, 0x56, + 0x61, 0x6c, 0x75, 0x65, 0x1a, 0x0d, 0x41, 0x75, 0x74, 0x68, 0x6f, 0x72, 0x69, 
0x7a, 0x61, 0x74, + 0x69, 0x6f, 0x6e, 0x20, 0x02, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, +} + +var ( + file_api_stock_v1_downgrade_proto_rawDescOnce sync.Once + file_api_stock_v1_downgrade_proto_rawDescData = file_api_stock_v1_downgrade_proto_rawDesc +) + +func file_api_stock_v1_downgrade_proto_rawDescGZIP() []byte { + file_api_stock_v1_downgrade_proto_rawDescOnce.Do(func() { + file_api_stock_v1_downgrade_proto_rawDescData = protoimpl.X.CompressGZIP(file_api_stock_v1_downgrade_proto_rawDescData) + }) + return file_api_stock_v1_downgrade_proto_rawDescData +} + +var file_api_stock_v1_downgrade_proto_msgTypes = make([]protoimpl.MessageInfo, 6) +var file_api_stock_v1_downgrade_proto_goTypes = []interface{}{ + (*UpdateDowngradeRequest)(nil), // 0: api.stock.v1.UpdateDowngradeRequest + (*UpdateDowngradeRequestReply)(nil), // 1: api.stock.v1.UpdateDowngradeRequestReply + (*QueryDowngradeRequest)(nil), // 2: api.stock.v1.QueryDowngradeRequest + (*QueryDowngradeReply)(nil), // 3: api.stock.v1.QueryDowngradeReply + (*DowngradeBranchRequest)(nil), // 4: api.stock.v1.DowngradeBranchRequest + (*DowngradeBranchReply)(nil), // 5: api.stock.v1.DowngradeBranchReply +} +var file_api_stock_v1_downgrade_proto_depIdxs = []int32{ + 0, // 0: api.stock.v1.downgrade.Update:input_type -> api.stock.v1.UpdateDowngradeRequest + 2, // 1: api.stock.v1.downgrade.Query:input_type -> api.stock.v1.QueryDowngradeRequest + 4, // 2: api.stock.v1.downgrade.DowngradeBranch:input_type -> api.stock.v1.DowngradeBranchRequest + 1, // 3: api.stock.v1.downgrade.Update:output_type -> api.stock.v1.UpdateDowngradeRequestReply + 3, // 4: api.stock.v1.downgrade.Query:output_type -> api.stock.v1.QueryDowngradeReply + 5, // 5: api.stock.v1.downgrade.DowngradeBranch:output_type -> api.stock.v1.DowngradeBranchReply + 3, // [3:6] is the sub-list for method output_type + 0, // [0:3] is the sub-list for method input_type + 0, // [0:0] is the sub-list for extension type_name + 0, // [0:0] is the sub-list for extension extendee + 0, // [0:0] is the sub-list for field type_name +} + +func init() { file_api_stock_v1_downgrade_proto_init() } +func file_api_stock_v1_downgrade_proto_init() { + if File_api_stock_v1_downgrade_proto != nil { + return + } + if !protoimpl.UnsafeEnabled { + file_api_stock_v1_downgrade_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*UpdateDowngradeRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_api_stock_v1_downgrade_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*UpdateDowngradeRequestReply); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_api_stock_v1_downgrade_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*QueryDowngradeRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_api_stock_v1_downgrade_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*QueryDowngradeReply); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_api_stock_v1_downgrade_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*DowngradeBranchRequest); i { + case 0: + return &v.state + case 1: 
+ return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_api_stock_v1_downgrade_proto_msgTypes[5].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*DowngradeBranchReply); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + } + type x struct{} + out := protoimpl.TypeBuilder{ + File: protoimpl.DescBuilder{ + GoPackagePath: reflect.TypeOf(x{}).PkgPath(), + RawDescriptor: file_api_stock_v1_downgrade_proto_rawDesc, + NumEnums: 0, + NumMessages: 6, + NumExtensions: 0, + NumServices: 1, + }, + GoTypes: file_api_stock_v1_downgrade_proto_goTypes, + DependencyIndexes: file_api_stock_v1_downgrade_proto_depIdxs, + MessageInfos: file_api_stock_v1_downgrade_proto_msgTypes, + }.Build() + File_api_stock_v1_downgrade_proto = out.File + file_api_stock_v1_downgrade_proto_rawDesc = nil + file_api_stock_v1_downgrade_proto_goTypes = nil + file_api_stock_v1_downgrade_proto_depIdxs = nil +} diff --git a/_13_sponge-dtm-cache/grpc+http/api/stock/v1/downgrade.pb.validate.go b/_13_sponge-dtm-cache/grpc+http/api/stock/v1/downgrade.pb.validate.go new file mode 100644 index 0000000..a08a3cb --- /dev/null +++ b/_13_sponge-dtm-cache/grpc+http/api/stock/v1/downgrade.pb.validate.go @@ -0,0 +1,730 @@ +// Code generated by protoc-gen-validate. DO NOT EDIT. +// source: api/stock/v1/downgrade.proto + +package v1 + +import ( + "bytes" + "errors" + "fmt" + "net" + "net/mail" + "net/url" + "regexp" + "sort" + "strings" + "time" + "unicode/utf8" + + "google.golang.org/protobuf/types/known/anypb" +) + +// ensure the imports are used +var ( + _ = bytes.MinRead + _ = errors.New("") + _ = fmt.Print + _ = utf8.UTFMax + _ = (*regexp.Regexp)(nil) + _ = (*strings.Reader)(nil) + _ = net.IPv4len + _ = time.Duration(0) + _ = (*url.URL)(nil) + _ = (*mail.Address)(nil) + _ = anypb.Any{} + _ = sort.Sort +) + +// Validate checks the field values on UpdateDowngradeRequest with the rules +// defined in the proto definition for this message. If any rules are +// violated, the first error encountered is returned, or nil if there are no violations. +func (m *UpdateDowngradeRequest) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on UpdateDowngradeRequest with the rules +// defined in the proto definition for this message. If any rules are +// violated, the result is a list of violation errors wrapped in +// UpdateDowngradeRequestMultiError, or nil if none found. +func (m *UpdateDowngradeRequest) ValidateAll() error { + return m.validate(true) +} + +func (m *UpdateDowngradeRequest) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + if m.GetId() <= 0 { + err := UpdateDowngradeRequestValidationError{ + field: "Id", + reason: "value must be greater than 0", + } + if !all { + return err + } + errors = append(errors, err) + } + + if m.GetStock() <= 0 { + err := UpdateDowngradeRequestValidationError{ + field: "Stock", + reason: "value must be greater than 0", + } + if !all { + return err + } + errors = append(errors, err) + } + + if len(errors) > 0 { + return UpdateDowngradeRequestMultiError(errors) + } + + return nil +} + +// UpdateDowngradeRequestMultiError is an error wrapping multiple validation +// errors returned by UpdateDowngradeRequest.ValidateAll() if the designated +// constraints aren't met. +type UpdateDowngradeRequestMultiError []error + +// Error returns a concatenation of all the error messages it wraps. 
+func (m UpdateDowngradeRequestMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m UpdateDowngradeRequestMultiError) AllErrors() []error { return m } + +// UpdateDowngradeRequestValidationError is the validation error returned by +// UpdateDowngradeRequest.Validate if the designated constraints aren't met. +type UpdateDowngradeRequestValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e UpdateDowngradeRequestValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e UpdateDowngradeRequestValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e UpdateDowngradeRequestValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e UpdateDowngradeRequestValidationError) Key() bool { return e.key } + +// ErrorName returns error name. +func (e UpdateDowngradeRequestValidationError) ErrorName() string { + return "UpdateDowngradeRequestValidationError" +} + +// Error satisfies the builtin error interface +func (e UpdateDowngradeRequestValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sUpdateDowngradeRequest.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = UpdateDowngradeRequestValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = UpdateDowngradeRequestValidationError{} + +// Validate checks the field values on UpdateDowngradeRequestReply with the +// rules defined in the proto definition for this message. If any rules are +// violated, the first error encountered is returned, or nil if there are no violations. +func (m *UpdateDowngradeRequestReply) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on UpdateDowngradeRequestReply with the +// rules defined in the proto definition for this message. If any rules are +// violated, the result is a list of violation errors wrapped in +// UpdateDowngradeRequestReplyMultiError, or nil if none found. +func (m *UpdateDowngradeRequestReply) ValidateAll() error { + return m.validate(true) +} + +func (m *UpdateDowngradeRequestReply) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + if len(errors) > 0 { + return UpdateDowngradeRequestReplyMultiError(errors) + } + + return nil +} + +// UpdateDowngradeRequestReplyMultiError is an error wrapping multiple +// validation errors returned by UpdateDowngradeRequestReply.ValidateAll() if +// the designated constraints aren't met. +type UpdateDowngradeRequestReplyMultiError []error + +// Error returns a concatenation of all the error messages it wraps. +func (m UpdateDowngradeRequestReplyMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. 
+func (m UpdateDowngradeRequestReplyMultiError) AllErrors() []error { return m } + +// UpdateDowngradeRequestReplyValidationError is the validation error returned +// by UpdateDowngradeRequestReply.Validate if the designated constraints +// aren't met. +type UpdateDowngradeRequestReplyValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e UpdateDowngradeRequestReplyValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e UpdateDowngradeRequestReplyValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e UpdateDowngradeRequestReplyValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e UpdateDowngradeRequestReplyValidationError) Key() bool { return e.key } + +// ErrorName returns error name. +func (e UpdateDowngradeRequestReplyValidationError) ErrorName() string { + return "UpdateDowngradeRequestReplyValidationError" +} + +// Error satisfies the builtin error interface +func (e UpdateDowngradeRequestReplyValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sUpdateDowngradeRequestReply.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = UpdateDowngradeRequestReplyValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = UpdateDowngradeRequestReplyValidationError{} + +// Validate checks the field values on QueryDowngradeRequest with the rules +// defined in the proto definition for this message. If any rules are +// violated, the first error encountered is returned, or nil if there are no violations. +func (m *QueryDowngradeRequest) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on QueryDowngradeRequest with the rules +// defined in the proto definition for this message. If any rules are +// violated, the result is a list of violation errors wrapped in +// QueryDowngradeRequestMultiError, or nil if none found. +func (m *QueryDowngradeRequest) ValidateAll() error { + return m.validate(true) +} + +func (m *QueryDowngradeRequest) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + if m.GetId() <= 0 { + err := QueryDowngradeRequestValidationError{ + field: "Id", + reason: "value must be greater than 0", + } + if !all { + return err + } + errors = append(errors, err) + } + + if len(errors) > 0 { + return QueryDowngradeRequestMultiError(errors) + } + + return nil +} + +// QueryDowngradeRequestMultiError is an error wrapping multiple validation +// errors returned by QueryDowngradeRequest.ValidateAll() if the designated +// constraints aren't met. +type QueryDowngradeRequestMultiError []error + +// Error returns a concatenation of all the error messages it wraps. +func (m QueryDowngradeRequestMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m QueryDowngradeRequestMultiError) AllErrors() []error { return m } + +// QueryDowngradeRequestValidationError is the validation error returned by +// QueryDowngradeRequest.Validate if the designated constraints aren't met. 
+type QueryDowngradeRequestValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e QueryDowngradeRequestValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e QueryDowngradeRequestValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e QueryDowngradeRequestValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e QueryDowngradeRequestValidationError) Key() bool { return e.key } + +// ErrorName returns error name. +func (e QueryDowngradeRequestValidationError) ErrorName() string { + return "QueryDowngradeRequestValidationError" +} + +// Error satisfies the builtin error interface +func (e QueryDowngradeRequestValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sQueryDowngradeRequest.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = QueryDowngradeRequestValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = QueryDowngradeRequestValidationError{} + +// Validate checks the field values on QueryDowngradeReply with the rules +// defined in the proto definition for this message. If any rules are +// violated, the first error encountered is returned, or nil if there are no violations. +func (m *QueryDowngradeReply) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on QueryDowngradeReply with the rules +// defined in the proto definition for this message. If any rules are +// violated, the result is a list of violation errors wrapped in +// QueryDowngradeReplyMultiError, or nil if none found. +func (m *QueryDowngradeReply) ValidateAll() error { + return m.validate(true) +} + +func (m *QueryDowngradeReply) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + // no validation rules for Id + + // no validation rules for Stock + + if len(errors) > 0 { + return QueryDowngradeReplyMultiError(errors) + } + + return nil +} + +// QueryDowngradeReplyMultiError is an error wrapping multiple validation +// errors returned by QueryDowngradeReply.ValidateAll() if the designated +// constraints aren't met. +type QueryDowngradeReplyMultiError []error + +// Error returns a concatenation of all the error messages it wraps. +func (m QueryDowngradeReplyMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m QueryDowngradeReplyMultiError) AllErrors() []error { return m } + +// QueryDowngradeReplyValidationError is the validation error returned by +// QueryDowngradeReply.Validate if the designated constraints aren't met. +type QueryDowngradeReplyValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e QueryDowngradeReplyValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e QueryDowngradeReplyValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e QueryDowngradeReplyValidationError) Cause() error { return e.cause } + +// Key function returns key value. 
+func (e QueryDowngradeReplyValidationError) Key() bool { return e.key } + +// ErrorName returns error name. +func (e QueryDowngradeReplyValidationError) ErrorName() string { + return "QueryDowngradeReplyValidationError" +} + +// Error satisfies the builtin error interface +func (e QueryDowngradeReplyValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sQueryDowngradeReply.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = QueryDowngradeReplyValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = QueryDowngradeReplyValidationError{} + +// Validate checks the field values on DowngradeBranchRequest with the rules +// defined in the proto definition for this message. If any rules are +// violated, the first error encountered is returned, or nil if there are no violations. +func (m *DowngradeBranchRequest) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on DowngradeBranchRequest with the rules +// defined in the proto definition for this message. If any rules are +// violated, the result is a list of violation errors wrapped in +// DowngradeBranchRequestMultiError, or nil if none found. +func (m *DowngradeBranchRequest) ValidateAll() error { + return m.validate(true) +} + +func (m *DowngradeBranchRequest) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + if utf8.RuneCountInString(m.GetGid()) < 1 { + err := DowngradeBranchRequestValidationError{ + field: "Gid", + reason: "value length must be at least 1 runes", + } + if !all { + return err + } + errors = append(errors, err) + } + + if utf8.RuneCountInString(m.GetKey()) < 1 { + err := DowngradeBranchRequestValidationError{ + field: "Key", + reason: "value length must be at least 1 runes", + } + if !all { + return err + } + errors = append(errors, err) + } + + if m.GetId() <= 0 { + err := DowngradeBranchRequestValidationError{ + field: "Id", + reason: "value must be greater than 0", + } + if !all { + return err + } + errors = append(errors, err) + } + + if m.GetStock() <= 0 { + err := DowngradeBranchRequestValidationError{ + field: "Stock", + reason: "value must be greater than 0", + } + if !all { + return err + } + errors = append(errors, err) + } + + if len(errors) > 0 { + return DowngradeBranchRequestMultiError(errors) + } + + return nil +} + +// DowngradeBranchRequestMultiError is an error wrapping multiple validation +// errors returned by DowngradeBranchRequest.ValidateAll() if the designated +// constraints aren't met. +type DowngradeBranchRequestMultiError []error + +// Error returns a concatenation of all the error messages it wraps. +func (m DowngradeBranchRequestMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m DowngradeBranchRequestMultiError) AllErrors() []error { return m } + +// DowngradeBranchRequestValidationError is the validation error returned by +// DowngradeBranchRequest.Validate if the designated constraints aren't met. +type DowngradeBranchRequestValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. 
+func (e DowngradeBranchRequestValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e DowngradeBranchRequestValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e DowngradeBranchRequestValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e DowngradeBranchRequestValidationError) Key() bool { return e.key } + +// ErrorName returns error name. +func (e DowngradeBranchRequestValidationError) ErrorName() string { + return "DowngradeBranchRequestValidationError" +} + +// Error satisfies the builtin error interface +func (e DowngradeBranchRequestValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sDowngradeBranchRequest.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = DowngradeBranchRequestValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = DowngradeBranchRequestValidationError{} + +// Validate checks the field values on DowngradeBranchReply with the rules +// defined in the proto definition for this message. If any rules are +// violated, the first error encountered is returned, or nil if there are no violations. +func (m *DowngradeBranchReply) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on DowngradeBranchReply with the rules +// defined in the proto definition for this message. If any rules are +// violated, the result is a list of violation errors wrapped in +// DowngradeBranchReplyMultiError, or nil if none found. +func (m *DowngradeBranchReply) ValidateAll() error { + return m.validate(true) +} + +func (m *DowngradeBranchReply) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + if len(errors) > 0 { + return DowngradeBranchReplyMultiError(errors) + } + + return nil +} + +// DowngradeBranchReplyMultiError is an error wrapping multiple validation +// errors returned by DowngradeBranchReply.ValidateAll() if the designated +// constraints aren't met. +type DowngradeBranchReplyMultiError []error + +// Error returns a concatenation of all the error messages it wraps. +func (m DowngradeBranchReplyMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m DowngradeBranchReplyMultiError) AllErrors() []error { return m } + +// DowngradeBranchReplyValidationError is the validation error returned by +// DowngradeBranchReply.Validate if the designated constraints aren't met. +type DowngradeBranchReplyValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e DowngradeBranchReplyValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e DowngradeBranchReplyValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e DowngradeBranchReplyValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e DowngradeBranchReplyValidationError) Key() bool { return e.key } + +// ErrorName returns error name. 
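
The generated Validate and ValidateAll methods above differ in how they report problems: Validate returns only the first violation it hits, while ValidateAll collects every violation into the corresponding MultiError type. A minimal, hypothetical sketch (for example in a _test.go file of this package, assuming an import of "fmt"; field names follow the generated getters shown above):

// Hypothetical example, not part of the generated code: Gid and Id both violate their rules.
func ExampleDowngradeBranchRequest_validation() {
	req := &DowngradeBranchRequest{Gid: "", Key: "k", Id: 0, Stock: 1}

	// Validate stops at the first violation (Gid).
	fmt.Println(req.Validate() != nil)

	// ValidateAll returns a DowngradeBranchRequestMultiError wrapping all violations.
	if multi, ok := req.ValidateAll().(DowngradeBranchRequestMultiError); ok {
		fmt.Println(len(multi.AllErrors()))
	}

	// Output:
	// true
	// 2
}
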
+func (e DowngradeBranchReplyValidationError) ErrorName() string { + return "DowngradeBranchReplyValidationError" +} + +// Error satisfies the builtin error interface +func (e DowngradeBranchReplyValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sDowngradeBranchReply.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = DowngradeBranchReplyValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = DowngradeBranchReplyValidationError{} diff --git a/_13_sponge-dtm-cache/grpc+http/api/stock/v1/downgrade.proto b/_13_sponge-dtm-cache/grpc+http/api/stock/v1/downgrade.proto new file mode 100644 index 0000000..f948e9c --- /dev/null +++ b/_13_sponge-dtm-cache/grpc+http/api/stock/v1/downgrade.proto @@ -0,0 +1,108 @@ +syntax = "proto3"; + +package api.stock.v1; + +import "google/api/annotations.proto"; +import "protoc-gen-openapiv2/options/annotations.proto"; +import "tagger/tagger.proto"; +import "validate/validate.proto"; + +option go_package = "stock/api/stock/v1;v1"; + +// Default settings for generating swagger documents +// NOTE: because json does not support 64 bits, the int64 and uint64 types under *.swagger.json are automatically converted to string types +// Reference https://github.com/grpc-ecosystem/grpc-gateway/blob/db7fbefff7c04877cdb32e16d4a248a024428207/examples/internal/proto/examplepb/a_bit_of_everything.proto +option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_swagger) = { + host: "localhost:8080" + base_path: "" + info: { + title: "stock api docs"; + version: "2.0"; + } + schemes: HTTP; + schemes: HTTPS; + consumes: "application/json"; + produces: "application/json"; + security_definitions: { + security: { + key: "BearerAuth"; + value: { + type: TYPE_API_KEY; + in: IN_HEADER; + name: "Authorization"; + description: "Type Bearer your-jwt-token to Value"; + } + } + } +}; + +service downgrade{ + // 更新数据,升降级中的DB和缓存强一致性 + rpc Update(UpdateDowngradeRequest) returns (UpdateDowngradeRequestReply) { + option (google.api.http) = { + put: "/api/v1/stock/{id}/downgrade" + body: "*" + }; + option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { + summary: "更新数据", + description: "更新数据,升降级中的DB和缓存强一致性", + tags: "case 4: 升降级中的强一致性" + }; + } + + // 查询 + rpc Query(QueryDowngradeRequest) returns (QueryDowngradeReply) { + option (google.api.http) = { + get: "/api/v1/stock/{id}/downgrade" + }; + option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { + summary: "查询数据", + description: "查询数据", + tags: "case 4: 升降级中的强一致性" + }; + } + + // 升降级中的强一致性分支 + rpc DowngradeBranch(DowngradeBranchRequest) returns (DowngradeBranchReply) { + option (google.api.http) = { + post: "/api/v1/stock/downgradeBranch" + body: "*" + selector: "[ctx]" + }; + option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { + summary: "升降级中的强一致性分支", + description: "升降级中的强一致性分支", + tags: "case 4: 升降级中的强一致性" + }; + } +} + +message UpdateDowngradeRequest { + uint64 id = 1 [(validate.rules).uint64.gt = 0, (tagger.tags) = "uri:\"id\"" ]; + uint32 stock = 2 [(validate.rules).uint32.gt = 0]; // 库存数量 +} + +message UpdateDowngradeRequestReply { + +} + +message QueryDowngradeRequest { + uint64 id =1 [(validate.rules).uint64.gt = 0, (tagger.tags) = "uri:\"id\"" ]; +} + +message QueryDowngradeReply { + uint64 id = 1; + uint32 stock = 2; // 库存数量 
+} + +message DowngradeBranchRequest { + string gid = 1 [(validate.rules).string.min_len = 1]; // dtm gid + string key = 2 [(validate.rules).string.min_len = 1]; // 缓存key + + uint64 id = 3 [(validate.rules).uint64.gt = 0]; + uint32 stock = 4 [(validate.rules).uint32.gt = 0]; // 库存数量 +} + +message DowngradeBranchReply { + +} \ No newline at end of file diff --git a/_13_sponge-dtm-cache/grpc+http/api/stock/v1/downgrade_grpc.pb.go b/_13_sponge-dtm-cache/grpc+http/api/stock/v1/downgrade_grpc.pb.go new file mode 100644 index 0000000..6e6f67c --- /dev/null +++ b/_13_sponge-dtm-cache/grpc+http/api/stock/v1/downgrade_grpc.pb.go @@ -0,0 +1,189 @@ +// Code generated by protoc-gen-go-grpc. DO NOT EDIT. +// versions: +// - protoc-gen-go-grpc v1.3.0 +// - protoc v4.25.2 +// source: api/stock/v1/downgrade.proto + +package v1 + +import ( + context "context" + grpc "google.golang.org/grpc" + codes "google.golang.org/grpc/codes" + status "google.golang.org/grpc/status" +) + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +// Requires gRPC-Go v1.32.0 or later. +const _ = grpc.SupportPackageIsVersion7 + +const ( + Downgrade_Update_FullMethodName = "/api.stock.v1.downgrade/Update" + Downgrade_Query_FullMethodName = "/api.stock.v1.downgrade/Query" + Downgrade_DowngradeBranch_FullMethodName = "/api.stock.v1.downgrade/DowngradeBranch" +) + +// DowngradeClient is the client API for Downgrade service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://pkg.go.dev/google.golang.org/grpc/?tab=doc#ClientConn.NewStream. +type DowngradeClient interface { + // 更新数据,升降级中的DB和缓存强一致性 + Update(ctx context.Context, in *UpdateDowngradeRequest, opts ...grpc.CallOption) (*UpdateDowngradeRequestReply, error) + // 查询 + Query(ctx context.Context, in *QueryDowngradeRequest, opts ...grpc.CallOption) (*QueryDowngradeReply, error) + // 升降级中的强一致性分支 + DowngradeBranch(ctx context.Context, in *DowngradeBranchRequest, opts ...grpc.CallOption) (*DowngradeBranchReply, error) +} + +type downgradeClient struct { + cc grpc.ClientConnInterface +} + +func NewDowngradeClient(cc grpc.ClientConnInterface) DowngradeClient { + return &downgradeClient{cc} +} + +func (c *downgradeClient) Update(ctx context.Context, in *UpdateDowngradeRequest, opts ...grpc.CallOption) (*UpdateDowngradeRequestReply, error) { + out := new(UpdateDowngradeRequestReply) + err := c.cc.Invoke(ctx, Downgrade_Update_FullMethodName, in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *downgradeClient) Query(ctx context.Context, in *QueryDowngradeRequest, opts ...grpc.CallOption) (*QueryDowngradeReply, error) { + out := new(QueryDowngradeReply) + err := c.cc.Invoke(ctx, Downgrade_Query_FullMethodName, in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *downgradeClient) DowngradeBranch(ctx context.Context, in *DowngradeBranchRequest, opts ...grpc.CallOption) (*DowngradeBranchReply, error) { + out := new(DowngradeBranchReply) + err := c.cc.Invoke(ctx, Downgrade_DowngradeBranch_FullMethodName, in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// DowngradeServer is the server API for Downgrade service. 
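
The client surface generated above can be exercised like any other gRPC-Go client. A minimal sketch; the target address and the use of insecure transport credentials are illustrative assumptions, not part of this project:

// Hypothetical caller, not part of the generated file.
// Assumes extra imports: "log", "google.golang.org/grpc/credentials/insecure".
func exampleDowngradeClient() {
	conn, err := grpc.Dial("127.0.0.1:8282", grpc.WithTransportCredentials(insecure.NewCredentials()))
	if err != nil {
		log.Fatal(err)
	}
	defer conn.Close()

	client := NewDowngradeClient(conn)

	// Update stock for id=1; the validate rules above require id > 0 and stock > 0.
	if _, err := client.Update(context.Background(), &UpdateDowngradeRequest{Id: 1, Stock: 100}); err != nil {
		log.Fatal(err)
	}

	reply, err := client.Query(context.Background(), &QueryDowngradeRequest{Id: 1})
	if err != nil {
		log.Fatal(err)
	}
	log.Println(reply.GetStock())
}
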
+// All implementations must embed UnimplementedDowngradeServer +// for forward compatibility +type DowngradeServer interface { + // 更新数据,升降级中的DB和缓存强一致性 + Update(context.Context, *UpdateDowngradeRequest) (*UpdateDowngradeRequestReply, error) + // 查询 + Query(context.Context, *QueryDowngradeRequest) (*QueryDowngradeReply, error) + // 升降级中的强一致性分支 + DowngradeBranch(context.Context, *DowngradeBranchRequest) (*DowngradeBranchReply, error) + mustEmbedUnimplementedDowngradeServer() +} + +// UnimplementedDowngradeServer must be embedded to have forward compatible implementations. +type UnimplementedDowngradeServer struct { +} + +func (UnimplementedDowngradeServer) Update(context.Context, *UpdateDowngradeRequest) (*UpdateDowngradeRequestReply, error) { + return nil, status.Errorf(codes.Unimplemented, "method Update not implemented") +} +func (UnimplementedDowngradeServer) Query(context.Context, *QueryDowngradeRequest) (*QueryDowngradeReply, error) { + return nil, status.Errorf(codes.Unimplemented, "method Query not implemented") +} +func (UnimplementedDowngradeServer) DowngradeBranch(context.Context, *DowngradeBranchRequest) (*DowngradeBranchReply, error) { + return nil, status.Errorf(codes.Unimplemented, "method DowngradeBranch not implemented") +} +func (UnimplementedDowngradeServer) mustEmbedUnimplementedDowngradeServer() {} + +// UnsafeDowngradeServer may be embedded to opt out of forward compatibility for this service. +// Use of this interface is not recommended, as added methods to DowngradeServer will +// result in compilation errors. +type UnsafeDowngradeServer interface { + mustEmbedUnimplementedDowngradeServer() +} + +func RegisterDowngradeServer(s grpc.ServiceRegistrar, srv DowngradeServer) { + s.RegisterService(&Downgrade_ServiceDesc, srv) +} + +func _Downgrade_Update_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UpdateDowngradeRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DowngradeServer).Update(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: Downgrade_Update_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DowngradeServer).Update(ctx, req.(*UpdateDowngradeRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Downgrade_Query_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(QueryDowngradeRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DowngradeServer).Query(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: Downgrade_Query_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DowngradeServer).Query(ctx, req.(*QueryDowngradeRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Downgrade_DowngradeBranch_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DowngradeBranchRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DowngradeServer).DowngradeBranch(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: Downgrade_DowngradeBranch_FullMethodName, + } + handler := func(ctx context.Context, req 
interface{}) (interface{}, error) { + return srv.(DowngradeServer).DowngradeBranch(ctx, req.(*DowngradeBranchRequest)) + } + return interceptor(ctx, in, info, handler) +} + +// Downgrade_ServiceDesc is the grpc.ServiceDesc for Downgrade service. +// It's only intended for direct use with grpc.RegisterService, +// and not to be introspected or modified (even as a copy) +var Downgrade_ServiceDesc = grpc.ServiceDesc{ + ServiceName: "api.stock.v1.downgrade", + HandlerType: (*DowngradeServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "Update", + Handler: _Downgrade_Update_Handler, + }, + { + MethodName: "Query", + Handler: _Downgrade_Query_Handler, + }, + { + MethodName: "DowngradeBranch", + Handler: _Downgrade_DowngradeBranch_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "api/stock/v1/downgrade.proto", +} diff --git a/_13_sponge-dtm-cache/grpc+http/api/stock/v1/downgrade_router.pb.go b/_13_sponge-dtm-cache/grpc+http/api/stock/v1/downgrade_router.pb.go new file mode 100644 index 0000000..db33621 --- /dev/null +++ b/_13_sponge-dtm-cache/grpc+http/api/stock/v1/downgrade_router.pb.go @@ -0,0 +1,247 @@ +// Code generated by https://github.com/zhufuyi/sponge, DO NOT EDIT. + +package v1 + +import ( + "context" + "errors" + "strings" + + "github.com/gin-gonic/gin" + "go.uber.org/zap" + + "github.com/zhufuyi/sponge/pkg/errcode" + "github.com/zhufuyi/sponge/pkg/gin/middleware" +) + +type DowngradeLogicer interface { + Update(ctx context.Context, req *UpdateDowngradeRequest) (*UpdateDowngradeRequestReply, error) + Query(ctx context.Context, req *QueryDowngradeRequest) (*QueryDowngradeReply, error) + DowngradeBranch(ctx context.Context, req *DowngradeBranchRequest) (*DowngradeBranchReply, error) +} + +type DowngradeOption func(*downgradeOptions) + +type downgradeOptions struct { + isFromRPC bool + responser errcode.Responser + zapLog *zap.Logger + httpErrors []*errcode.Error + rpcStatus []*errcode.RPCStatus + wrapCtxFn func(c *gin.Context) context.Context +} + +func (o *downgradeOptions) apply(opts ...DowngradeOption) { + for _, opt := range opts { + opt(o) + } +} + +func WithDowngradeHTTPResponse() DowngradeOption { + return func(o *downgradeOptions) { + o.isFromRPC = false + } +} + +func WithDowngradeRPCResponse() DowngradeOption { + return func(o *downgradeOptions) { + o.isFromRPC = true + } +} + +func WithDowngradeResponser(responser errcode.Responser) DowngradeOption { + return func(o *downgradeOptions) { + o.responser = responser + } +} + +func WithDowngradeLogger(zapLog *zap.Logger) DowngradeOption { + return func(o *downgradeOptions) { + o.zapLog = zapLog + } +} + +func WithDowngradeErrorToHTTPCode(e ...*errcode.Error) DowngradeOption { + return func(o *downgradeOptions) { + o.httpErrors = e + } +} + +func WithDowngradeRPCStatusToHTTPCode(s ...*errcode.RPCStatus) DowngradeOption { + return func(o *downgradeOptions) { + o.rpcStatus = s + } +} + +func WithDowngradeWrapCtx(wrapCtxFn func(c *gin.Context) context.Context) DowngradeOption { + return func(o *downgradeOptions) { + o.wrapCtxFn = wrapCtxFn + } +} + +func RegisterDowngradeRouter( + iRouter gin.IRouter, + groupPathMiddlewares map[string][]gin.HandlerFunc, + singlePathMiddlewares map[string][]gin.HandlerFunc, + iLogic DowngradeLogicer, + opts ...DowngradeOption) { + + o := &downgradeOptions{} + o.apply(opts...) 
+ + if o.responser == nil { + o.responser = errcode.NewResponser(o.isFromRPC, o.httpErrors, o.rpcStatus) + } + if o.zapLog == nil { + o.zapLog, _ = zap.NewProduction() + } + + r := &downgradeRouter{ + iRouter: iRouter, + groupPathMiddlewares: groupPathMiddlewares, + singlePathMiddlewares: singlePathMiddlewares, + iLogic: iLogic, + iResponse: o.responser, + zapLog: o.zapLog, + wrapCtxFn: o.wrapCtxFn, + } + r.register() +} + +type downgradeRouter struct { + iRouter gin.IRouter + groupPathMiddlewares map[string][]gin.HandlerFunc + singlePathMiddlewares map[string][]gin.HandlerFunc + iLogic DowngradeLogicer + iResponse errcode.Responser + zapLog *zap.Logger + wrapCtxFn func(c *gin.Context) context.Context +} + +func (r *downgradeRouter) register() { + r.iRouter.Handle("PUT", "/api/v1/stock/:id/downgrade", r.withMiddleware("PUT", "/api/v1/stock/:id/downgrade", r.Update_2)...) + r.iRouter.Handle("GET", "/api/v1/stock/:id/downgrade", r.withMiddleware("GET", "/api/v1/stock/:id/downgrade", r.Query_2)...) + r.iRouter.Handle("POST", "/api/v1/stock/downgradeBranch", r.withMiddleware("POST", "/api/v1/stock/downgradeBranch", r.DowngradeBranch_0)...) + +} + +func (r *downgradeRouter) withMiddleware(method string, path string, fn gin.HandlerFunc) []gin.HandlerFunc { + handlerFns := []gin.HandlerFunc{} + + // determine if a route group is hit or miss, left prefix rule + for groupPath, fns := range r.groupPathMiddlewares { + if groupPath == "" || groupPath == "/" { + handlerFns = append(handlerFns, fns...) + continue + } + size := len(groupPath) + if len(path) < size { + continue + } + if groupPath == path[:size] { + handlerFns = append(handlerFns, fns...) + } + } + + // determine if a single route has been hit + key := strings.ToUpper(method) + "->" + path + if fns, ok := r.singlePathMiddlewares[key]; ok { + handlerFns = append(handlerFns, fns...) 
+ } + + return append(handlerFns, fn) +} + +func (r *downgradeRouter) Update_2(c *gin.Context) { + req := &UpdateDowngradeRequest{} + var err error + + if err = c.ShouldBindUri(req); err != nil { + r.zapLog.Warn("ShouldBindUri error", zap.Error(err), middleware.GCtxRequestIDField(c)) + r.iResponse.ParamError(c, err) + return + } + + if err = c.ShouldBindJSON(req); err != nil { + r.zapLog.Warn("ShouldBindJSON error", zap.Error(err), middleware.GCtxRequestIDField(c)) + r.iResponse.ParamError(c, err) + return + } + + var ctx context.Context + if r.wrapCtxFn != nil { + ctx = r.wrapCtxFn(c) + } else { + ctx = middleware.WrapCtx(c) + } + + out, err := r.iLogic.Update(ctx, req) + if err != nil { + if errors.Is(err, errcode.SkipResponse) { + return + } + r.iResponse.Error(c, err) + return + } + + r.iResponse.Success(c, out) +} + +func (r *downgradeRouter) Query_2(c *gin.Context) { + req := &QueryDowngradeRequest{} + var err error + + if err = c.ShouldBindUri(req); err != nil { + r.zapLog.Warn("ShouldBindUri error", zap.Error(err), middleware.GCtxRequestIDField(c)) + r.iResponse.ParamError(c, err) + return + } + + if err = c.ShouldBindQuery(req); err != nil { + r.zapLog.Warn("ShouldBindQuery error", zap.Error(err), middleware.GCtxRequestIDField(c)) + r.iResponse.ParamError(c, err) + return + } + + var ctx context.Context + if r.wrapCtxFn != nil { + ctx = r.wrapCtxFn(c) + } else { + ctx = middleware.WrapCtx(c) + } + + out, err := r.iLogic.Query(ctx, req) + if err != nil { + if errors.Is(err, errcode.SkipResponse) { + return + } + r.iResponse.Error(c, err) + return + } + + r.iResponse.Success(c, out) +} + +func (r *downgradeRouter) DowngradeBranch_0(c *gin.Context) { + req := &DowngradeBranchRequest{} + var err error + + if err = c.ShouldBindJSON(req); err != nil { + r.zapLog.Warn("ShouldBindJSON error", zap.Error(err), middleware.GCtxRequestIDField(c)) + r.iResponse.ParamError(c, err) + return + } + + var ctx context.Context = c + + out, err := r.iLogic.DowngradeBranch(ctx, req) + if err != nil { + if errors.Is(err, errcode.SkipResponse) { + return + } + r.iResponse.Error(c, err) + return + } + + r.iResponse.Success(c, out) +} diff --git a/_13_sponge-dtm-cache/grpc+http/api/stock/v1/final.pb.go b/_13_sponge-dtm-cache/grpc+http/api/stock/v1/final.pb.go new file mode 100644 index 0000000..1a6229a --- /dev/null +++ b/_13_sponge-dtm-cache/grpc+http/api/stock/v1/final.pb.go @@ -0,0 +1,379 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// versions: +// protoc-gen-go v1.28.0 +// protoc v4.25.2 +// source: api/stock/v1/final.proto + +package v1 + +import ( + protoreflect "google.golang.org/protobuf/reflect/protoreflect" + protoimpl "google.golang.org/protobuf/runtime/protoimpl" + reflect "reflect" + sync "sync" +) + +const ( + // Verify that this generated code is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) + // Verify that runtime/protoimpl is sufficiently up-to-date. 
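
RegisterDowngradeRouter in the router file that ends just above takes a gin router, middleware maps keyed by path prefix and by "METHOD->path", a DowngradeLogicer implementation, and options. A rough sketch of the wiring, where downgradeLogic is a placeholder for a real DowngradeLogicer implementation:

// Hypothetical wiring, not part of the generated code; downgradeLogic is assumed to
// implement DowngradeLogicer.
engine := gin.Default()

// Applied by left-prefix match on the registered path, so "/api/v1" covers all three routes.
groupMiddlewares := map[string][]gin.HandlerFunc{
	"/api/v1": {func(c *gin.Context) { c.Next() }}, // placeholder middleware
}

// Applied only to the exact "METHOD->path" entry.
singleMiddlewares := map[string][]gin.HandlerFunc{
	"GET->/api/v1/stock/:id/downgrade": {func(c *gin.Context) { c.Next() }},
}

RegisterDowngradeRouter(engine, groupMiddlewares, singleMiddlewares, downgradeLogic,
	WithDowngradeHTTPResponse(),
	WithDowngradeLogger(zap.NewExample()),
)
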
+ _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) +) + +type UpdateFinalRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Id uint64 `protobuf:"varint,1,opt,name=id,proto3" json:"id" uri:"id"` + Stock uint32 `protobuf:"varint,2,opt,name=stock,proto3" json:"stock"` // 库存数量 +} + +func (x *UpdateFinalRequest) Reset() { + *x = UpdateFinalRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_api_stock_v1_final_proto_msgTypes[0] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *UpdateFinalRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*UpdateFinalRequest) ProtoMessage() {} + +func (x *UpdateFinalRequest) ProtoReflect() protoreflect.Message { + mi := &file_api_stock_v1_final_proto_msgTypes[0] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use UpdateFinalRequest.ProtoReflect.Descriptor instead. +func (*UpdateFinalRequest) Descriptor() ([]byte, []int) { + return file_api_stock_v1_final_proto_rawDescGZIP(), []int{0} +} + +func (x *UpdateFinalRequest) GetId() uint64 { + if x != nil { + return x.Id + } + return 0 +} + +func (x *UpdateFinalRequest) GetStock() uint32 { + if x != nil { + return x.Stock + } + return 0 +} + +type UpdateFinalRequestReply struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields +} + +func (x *UpdateFinalRequestReply) Reset() { + *x = UpdateFinalRequestReply{} + if protoimpl.UnsafeEnabled { + mi := &file_api_stock_v1_final_proto_msgTypes[1] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *UpdateFinalRequestReply) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*UpdateFinalRequestReply) ProtoMessage() {} + +func (x *UpdateFinalRequestReply) ProtoReflect() protoreflect.Message { + mi := &file_api_stock_v1_final_proto_msgTypes[1] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use UpdateFinalRequestReply.ProtoReflect.Descriptor instead. 
+func (*UpdateFinalRequestReply) Descriptor() ([]byte, []int) { + return file_api_stock_v1_final_proto_rawDescGZIP(), []int{1} +} + +type QueryFinalRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Id uint64 `protobuf:"varint,1,opt,name=id,proto3" json:"id" uri:"id"` +} + +func (x *QueryFinalRequest) Reset() { + *x = QueryFinalRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_api_stock_v1_final_proto_msgTypes[2] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *QueryFinalRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*QueryFinalRequest) ProtoMessage() {} + +func (x *QueryFinalRequest) ProtoReflect() protoreflect.Message { + mi := &file_api_stock_v1_final_proto_msgTypes[2] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use QueryFinalRequest.ProtoReflect.Descriptor instead. +func (*QueryFinalRequest) Descriptor() ([]byte, []int) { + return file_api_stock_v1_final_proto_rawDescGZIP(), []int{2} +} + +func (x *QueryFinalRequest) GetId() uint64 { + if x != nil { + return x.Id + } + return 0 +} + +type QueryFinalReply struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Stock uint32 `protobuf:"varint,1,opt,name=stock,proto3" json:"stock"` // 库存数量 +} + +func (x *QueryFinalReply) Reset() { + *x = QueryFinalReply{} + if protoimpl.UnsafeEnabled { + mi := &file_api_stock_v1_final_proto_msgTypes[3] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *QueryFinalReply) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*QueryFinalReply) ProtoMessage() {} + +func (x *QueryFinalReply) ProtoReflect() protoreflect.Message { + mi := &file_api_stock_v1_final_proto_msgTypes[3] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use QueryFinalReply.ProtoReflect.Descriptor instead. 
+func (*QueryFinalReply) Descriptor() ([]byte, []int) { + return file_api_stock_v1_final_proto_rawDescGZIP(), []int{3} +} + +func (x *QueryFinalReply) GetStock() uint32 { + if x != nil { + return x.Stock + } + return 0 +} + +var File_api_stock_v1_final_proto protoreflect.FileDescriptor + +var file_api_stock_v1_final_proto_rawDesc = []byte{ + 0x0a, 0x18, 0x61, 0x70, 0x69, 0x2f, 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x2f, 0x76, 0x31, 0x2f, 0x66, + 0x69, 0x6e, 0x61, 0x6c, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x0c, 0x61, 0x70, 0x69, 0x2e, + 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x2e, 0x76, 0x31, 0x1a, 0x1c, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, + 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x61, 0x6e, 0x6e, 0x6f, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, + 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x2d, 0x67, + 0x65, 0x6e, 0x2d, 0x6f, 0x70, 0x65, 0x6e, 0x61, 0x70, 0x69, 0x76, 0x32, 0x2f, 0x6f, 0x70, 0x74, + 0x69, 0x6f, 0x6e, 0x73, 0x2f, 0x61, 0x6e, 0x6e, 0x6f, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, + 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x13, 0x74, 0x61, 0x67, 0x67, 0x65, 0x72, 0x2f, 0x74, + 0x61, 0x67, 0x67, 0x65, 0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x17, 0x76, 0x61, 0x6c, + 0x69, 0x64, 0x61, 0x74, 0x65, 0x2f, 0x76, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x65, 0x2e, 0x70, + 0x72, 0x6f, 0x74, 0x6f, 0x22, 0x59, 0x0a, 0x12, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x46, 0x69, + 0x6e, 0x61, 0x6c, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x24, 0x0a, 0x02, 0x69, 0x64, + 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x42, 0x14, 0xfa, 0x42, 0x04, 0x32, 0x02, 0x20, 0x00, 0x9a, + 0x84, 0x9e, 0x03, 0x08, 0x75, 0x72, 0x69, 0x3a, 0x22, 0x69, 0x64, 0x22, 0x52, 0x02, 0x69, 0x64, + 0x12, 0x1d, 0x0a, 0x05, 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0d, 0x42, + 0x07, 0xfa, 0x42, 0x04, 0x2a, 0x02, 0x20, 0x00, 0x52, 0x05, 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x22, + 0x19, 0x0a, 0x17, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x46, 0x69, 0x6e, 0x61, 0x6c, 0x52, 0x65, + 0x71, 0x75, 0x65, 0x73, 0x74, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0x39, 0x0a, 0x11, 0x51, 0x75, + 0x65, 0x72, 0x79, 0x46, 0x69, 0x6e, 0x61, 0x6c, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, + 0x24, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x42, 0x14, 0xfa, 0x42, 0x04, + 0x32, 0x02, 0x20, 0x00, 0x9a, 0x84, 0x9e, 0x03, 0x08, 0x75, 0x72, 0x69, 0x3a, 0x22, 0x69, 0x64, + 0x22, 0x52, 0x02, 0x69, 0x64, 0x22, 0x27, 0x0a, 0x0f, 0x51, 0x75, 0x65, 0x72, 0x79, 0x46, 0x69, + 0x6e, 0x61, 0x6c, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x73, 0x74, 0x6f, 0x63, + 0x6b, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x05, 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x32, 0xf9, + 0x02, 0x0a, 0x05, 0x66, 0x69, 0x6e, 0x61, 0x6c, 0x12, 0xcb, 0x01, 0x0a, 0x06, 0x55, 0x70, 0x64, + 0x61, 0x74, 0x65, 0x12, 0x20, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x2e, + 0x76, 0x31, 0x2e, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x46, 0x69, 0x6e, 0x61, 0x6c, 0x52, 0x65, + 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x25, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x73, 0x74, 0x6f, 0x63, + 0x6b, 0x2e, 0x76, 0x31, 0x2e, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x46, 0x69, 0x6e, 0x61, 0x6c, + 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0x78, 0x92, 0x41, + 0x52, 0x0a, 0x17, 0x63, 0x61, 0x73, 0x65, 0x20, 0x31, 0x3a, 0x20, 0xe6, 0x9c, 0x80, 0xe7, 0xbb, + 0x88, 0xe4, 0xb8, 0x80, 0xe8, 0x87, 0xb4, 0xe6, 0x80, 0xa7, 0x12, 0x0c, 0xe6, 0x9b, 0xb4, 0xe6, + 0x96, 0xb0, 0xe6, 0x95, 0xb0, 0xe6, 0x8d, 0xae, 0x1a, 0x29, 0xe6, 0x9b, 
0xb4, 0xe6, 0x96, 0xb0, + 0xe6, 0x95, 0xb0, 0xe6, 0x8d, 0xae, 0xef, 0xbc, 0x8c, 0x44, 0x42, 0xe5, 0x92, 0x8c, 0xe7, 0xbc, + 0x93, 0xe5, 0xad, 0x98, 0xe6, 0x9c, 0x80, 0xe7, 0xbb, 0x88, 0xe4, 0xb8, 0x80, 0xe8, 0x87, 0xb4, + 0xe6, 0x80, 0xa7, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x1d, 0x1a, 0x18, 0x2f, 0x61, 0x70, 0x69, 0x2f, + 0x76, 0x31, 0x2f, 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x2f, 0x7b, 0x69, 0x64, 0x7d, 0x2f, 0x66, 0x69, + 0x6e, 0x61, 0x6c, 0x3a, 0x01, 0x2a, 0x12, 0xa1, 0x01, 0x0a, 0x05, 0x51, 0x75, 0x65, 0x72, 0x79, + 0x12, 0x1f, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x2e, 0x76, 0x31, 0x2e, + 0x51, 0x75, 0x65, 0x72, 0x79, 0x46, 0x69, 0x6e, 0x61, 0x6c, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, + 0x74, 0x1a, 0x1d, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x2e, 0x76, 0x31, + 0x2e, 0x51, 0x75, 0x65, 0x72, 0x79, 0x46, 0x69, 0x6e, 0x61, 0x6c, 0x52, 0x65, 0x70, 0x6c, 0x79, + 0x22, 0x58, 0x92, 0x41, 0x35, 0x0a, 0x17, 0x63, 0x61, 0x73, 0x65, 0x20, 0x31, 0x3a, 0x20, 0xe6, + 0x9c, 0x80, 0xe7, 0xbb, 0x88, 0xe4, 0xb8, 0x80, 0xe8, 0x87, 0xb4, 0xe6, 0x80, 0xa7, 0x12, 0x0c, + 0xe6, 0x9f, 0xa5, 0xe8, 0xaf, 0xa2, 0xe6, 0x95, 0xb0, 0xe6, 0x8d, 0xae, 0x1a, 0x0c, 0xe6, 0x9f, + 0xa5, 0xe8, 0xaf, 0xa2, 0xe6, 0x95, 0xb0, 0xe6, 0x8d, 0xae, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x1a, + 0x12, 0x18, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x76, 0x31, 0x2f, 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x2f, + 0x7b, 0x69, 0x64, 0x7d, 0x2f, 0x66, 0x69, 0x6e, 0x61, 0x6c, 0x42, 0xb4, 0x01, 0x5a, 0x15, 0x73, + 0x74, 0x6f, 0x63, 0x6b, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x2f, 0x76, + 0x31, 0x3b, 0x76, 0x31, 0x92, 0x41, 0x99, 0x01, 0x12, 0x15, 0x0a, 0x0e, 0x73, 0x74, 0x6f, 0x63, + 0x6b, 0x20, 0x61, 0x70, 0x69, 0x20, 0x64, 0x6f, 0x63, 0x73, 0x32, 0x03, 0x32, 0x2e, 0x30, 0x1a, + 0x0e, 0x6c, 0x6f, 0x63, 0x61, 0x6c, 0x68, 0x6f, 0x73, 0x74, 0x3a, 0x38, 0x30, 0x38, 0x30, 0x2a, + 0x02, 0x01, 0x02, 0x32, 0x10, 0x61, 0x70, 0x70, 0x6c, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, + 0x2f, 0x6a, 0x73, 0x6f, 0x6e, 0x3a, 0x10, 0x61, 0x70, 0x70, 0x6c, 0x69, 0x63, 0x61, 0x74, 0x69, + 0x6f, 0x6e, 0x2f, 0x6a, 0x73, 0x6f, 0x6e, 0x5a, 0x48, 0x0a, 0x46, 0x0a, 0x0a, 0x42, 0x65, 0x61, + 0x72, 0x65, 0x72, 0x41, 0x75, 0x74, 0x68, 0x12, 0x38, 0x08, 0x02, 0x12, 0x23, 0x54, 0x79, 0x70, + 0x65, 0x20, 0x42, 0x65, 0x61, 0x72, 0x65, 0x72, 0x20, 0x79, 0x6f, 0x75, 0x72, 0x2d, 0x6a, 0x77, + 0x74, 0x2d, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x20, 0x74, 0x6f, 0x20, 0x56, 0x61, 0x6c, 0x75, 0x65, + 0x1a, 0x0d, 0x41, 0x75, 0x74, 0x68, 0x6f, 0x72, 0x69, 0x7a, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x20, + 0x02, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, +} + +var ( + file_api_stock_v1_final_proto_rawDescOnce sync.Once + file_api_stock_v1_final_proto_rawDescData = file_api_stock_v1_final_proto_rawDesc +) + +func file_api_stock_v1_final_proto_rawDescGZIP() []byte { + file_api_stock_v1_final_proto_rawDescOnce.Do(func() { + file_api_stock_v1_final_proto_rawDescData = protoimpl.X.CompressGZIP(file_api_stock_v1_final_proto_rawDescData) + }) + return file_api_stock_v1_final_proto_rawDescData +} + +var file_api_stock_v1_final_proto_msgTypes = make([]protoimpl.MessageInfo, 4) +var file_api_stock_v1_final_proto_goTypes = []interface{}{ + (*UpdateFinalRequest)(nil), // 0: api.stock.v1.UpdateFinalRequest + (*UpdateFinalRequestReply)(nil), // 1: api.stock.v1.UpdateFinalRequestReply + (*QueryFinalRequest)(nil), // 2: api.stock.v1.QueryFinalRequest + (*QueryFinalReply)(nil), // 3: api.stock.v1.QueryFinalReply +} +var file_api_stock_v1_final_proto_depIdxs = []int32{ + 0, // 0: 
api.stock.v1.final.Update:input_type -> api.stock.v1.UpdateFinalRequest + 2, // 1: api.stock.v1.final.Query:input_type -> api.stock.v1.QueryFinalRequest + 1, // 2: api.stock.v1.final.Update:output_type -> api.stock.v1.UpdateFinalRequestReply + 3, // 3: api.stock.v1.final.Query:output_type -> api.stock.v1.QueryFinalReply + 2, // [2:4] is the sub-list for method output_type + 0, // [0:2] is the sub-list for method input_type + 0, // [0:0] is the sub-list for extension type_name + 0, // [0:0] is the sub-list for extension extendee + 0, // [0:0] is the sub-list for field type_name +} + +func init() { file_api_stock_v1_final_proto_init() } +func file_api_stock_v1_final_proto_init() { + if File_api_stock_v1_final_proto != nil { + return + } + if !protoimpl.UnsafeEnabled { + file_api_stock_v1_final_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*UpdateFinalRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_api_stock_v1_final_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*UpdateFinalRequestReply); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_api_stock_v1_final_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*QueryFinalRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_api_stock_v1_final_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*QueryFinalReply); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + } + type x struct{} + out := protoimpl.TypeBuilder{ + File: protoimpl.DescBuilder{ + GoPackagePath: reflect.TypeOf(x{}).PkgPath(), + RawDescriptor: file_api_stock_v1_final_proto_rawDesc, + NumEnums: 0, + NumMessages: 4, + NumExtensions: 0, + NumServices: 1, + }, + GoTypes: file_api_stock_v1_final_proto_goTypes, + DependencyIndexes: file_api_stock_v1_final_proto_depIdxs, + MessageInfos: file_api_stock_v1_final_proto_msgTypes, + }.Build() + File_api_stock_v1_final_proto = out.File + file_api_stock_v1_final_proto_rawDesc = nil + file_api_stock_v1_final_proto_goTypes = nil + file_api_stock_v1_final_proto_depIdxs = nil +} diff --git a/_13_sponge-dtm-cache/grpc+http/api/stock/v1/final.pb.validate.go b/_13_sponge-dtm-cache/grpc+http/api/stock/v1/final.pb.validate.go new file mode 100644 index 0000000..de240cd --- /dev/null +++ b/_13_sponge-dtm-cache/grpc+http/api/stock/v1/final.pb.validate.go @@ -0,0 +1,477 @@ +// Code generated by protoc-gen-validate. DO NOT EDIT. +// source: api/stock/v1/final.proto + +package v1 + +import ( + "bytes" + "errors" + "fmt" + "net" + "net/mail" + "net/url" + "regexp" + "sort" + "strings" + "time" + "unicode/utf8" + + "google.golang.org/protobuf/types/known/anypb" +) + +// ensure the imports are used +var ( + _ = bytes.MinRead + _ = errors.New("") + _ = fmt.Print + _ = utf8.UTFMax + _ = (*regexp.Regexp)(nil) + _ = (*strings.Reader)(nil) + _ = net.IPv4len + _ = time.Duration(0) + _ = (*url.URL)(nil) + _ = (*mail.Address)(nil) + _ = anypb.Any{} + _ = sort.Sort +) + +// Validate checks the field values on UpdateFinalRequest with the rules +// defined in the proto definition for this message. 
If any rules are +// violated, the first error encountered is returned, or nil if there are no violations. +func (m *UpdateFinalRequest) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on UpdateFinalRequest with the rules +// defined in the proto definition for this message. If any rules are +// violated, the result is a list of violation errors wrapped in +// UpdateFinalRequestMultiError, or nil if none found. +func (m *UpdateFinalRequest) ValidateAll() error { + return m.validate(true) +} + +func (m *UpdateFinalRequest) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + if m.GetId() <= 0 { + err := UpdateFinalRequestValidationError{ + field: "Id", + reason: "value must be greater than 0", + } + if !all { + return err + } + errors = append(errors, err) + } + + if m.GetStock() <= 0 { + err := UpdateFinalRequestValidationError{ + field: "Stock", + reason: "value must be greater than 0", + } + if !all { + return err + } + errors = append(errors, err) + } + + if len(errors) > 0 { + return UpdateFinalRequestMultiError(errors) + } + + return nil +} + +// UpdateFinalRequestMultiError is an error wrapping multiple validation errors +// returned by UpdateFinalRequest.ValidateAll() if the designated constraints +// aren't met. +type UpdateFinalRequestMultiError []error + +// Error returns a concatenation of all the error messages it wraps. +func (m UpdateFinalRequestMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m UpdateFinalRequestMultiError) AllErrors() []error { return m } + +// UpdateFinalRequestValidationError is the validation error returned by +// UpdateFinalRequest.Validate if the designated constraints aren't met. +type UpdateFinalRequestValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e UpdateFinalRequestValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e UpdateFinalRequestValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e UpdateFinalRequestValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e UpdateFinalRequestValidationError) Key() bool { return e.key } + +// ErrorName returns error name. +func (e UpdateFinalRequestValidationError) ErrorName() string { + return "UpdateFinalRequestValidationError" +} + +// Error satisfies the builtin error interface +func (e UpdateFinalRequestValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sUpdateFinalRequest.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = UpdateFinalRequestValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = UpdateFinalRequestValidationError{} + +// Validate checks the field values on UpdateFinalRequestReply with the rules +// defined in the proto definition for this message. If any rules are +// violated, the first error encountered is returned, or nil if there are no violations. 
+func (m *UpdateFinalRequestReply) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on UpdateFinalRequestReply with the +// rules defined in the proto definition for this message. If any rules are +// violated, the result is a list of violation errors wrapped in +// UpdateFinalRequestReplyMultiError, or nil if none found. +func (m *UpdateFinalRequestReply) ValidateAll() error { + return m.validate(true) +} + +func (m *UpdateFinalRequestReply) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + if len(errors) > 0 { + return UpdateFinalRequestReplyMultiError(errors) + } + + return nil +} + +// UpdateFinalRequestReplyMultiError is an error wrapping multiple validation +// errors returned by UpdateFinalRequestReply.ValidateAll() if the designated +// constraints aren't met. +type UpdateFinalRequestReplyMultiError []error + +// Error returns a concatenation of all the error messages it wraps. +func (m UpdateFinalRequestReplyMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m UpdateFinalRequestReplyMultiError) AllErrors() []error { return m } + +// UpdateFinalRequestReplyValidationError is the validation error returned by +// UpdateFinalRequestReply.Validate if the designated constraints aren't met. +type UpdateFinalRequestReplyValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e UpdateFinalRequestReplyValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e UpdateFinalRequestReplyValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e UpdateFinalRequestReplyValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e UpdateFinalRequestReplyValidationError) Key() bool { return e.key } + +// ErrorName returns error name. +func (e UpdateFinalRequestReplyValidationError) ErrorName() string { + return "UpdateFinalRequestReplyValidationError" +} + +// Error satisfies the builtin error interface +func (e UpdateFinalRequestReplyValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sUpdateFinalRequestReply.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = UpdateFinalRequestReplyValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = UpdateFinalRequestReplyValidationError{} + +// Validate checks the field values on QueryFinalRequest with the rules defined +// in the proto definition for this message. If any rules are violated, the +// first error encountered is returned, or nil if there are no violations. +func (m *QueryFinalRequest) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on QueryFinalRequest with the rules +// defined in the proto definition for this message. If any rules are +// violated, the result is a list of violation errors wrapped in +// QueryFinalRequestMultiError, or nil if none found. 
+func (m *QueryFinalRequest) ValidateAll() error { + return m.validate(true) +} + +func (m *QueryFinalRequest) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + if m.GetId() <= 0 { + err := QueryFinalRequestValidationError{ + field: "Id", + reason: "value must be greater than 0", + } + if !all { + return err + } + errors = append(errors, err) + } + + if len(errors) > 0 { + return QueryFinalRequestMultiError(errors) + } + + return nil +} + +// QueryFinalRequestMultiError is an error wrapping multiple validation errors +// returned by QueryFinalRequest.ValidateAll() if the designated constraints +// aren't met. +type QueryFinalRequestMultiError []error + +// Error returns a concatenation of all the error messages it wraps. +func (m QueryFinalRequestMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m QueryFinalRequestMultiError) AllErrors() []error { return m } + +// QueryFinalRequestValidationError is the validation error returned by +// QueryFinalRequest.Validate if the designated constraints aren't met. +type QueryFinalRequestValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e QueryFinalRequestValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e QueryFinalRequestValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e QueryFinalRequestValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e QueryFinalRequestValidationError) Key() bool { return e.key } + +// ErrorName returns error name. +func (e QueryFinalRequestValidationError) ErrorName() string { + return "QueryFinalRequestValidationError" +} + +// Error satisfies the builtin error interface +func (e QueryFinalRequestValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sQueryFinalRequest.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = QueryFinalRequestValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = QueryFinalRequestValidationError{} + +// Validate checks the field values on QueryFinalReply with the rules defined +// in the proto definition for this message. If any rules are violated, the +// first error encountered is returned, or nil if there are no violations. +func (m *QueryFinalReply) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on QueryFinalReply with the rules +// defined in the proto definition for this message. If any rules are +// violated, the result is a list of violation errors wrapped in +// QueryFinalReplyMultiError, or nil if none found. 
+func (m *QueryFinalReply) ValidateAll() error { + return m.validate(true) +} + +func (m *QueryFinalReply) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + // no validation rules for Stock + + if len(errors) > 0 { + return QueryFinalReplyMultiError(errors) + } + + return nil +} + +// QueryFinalReplyMultiError is an error wrapping multiple validation errors +// returned by QueryFinalReply.ValidateAll() if the designated constraints +// aren't met. +type QueryFinalReplyMultiError []error + +// Error returns a concatenation of all the error messages it wraps. +func (m QueryFinalReplyMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m QueryFinalReplyMultiError) AllErrors() []error { return m } + +// QueryFinalReplyValidationError is the validation error returned by +// QueryFinalReply.Validate if the designated constraints aren't met. +type QueryFinalReplyValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e QueryFinalReplyValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e QueryFinalReplyValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e QueryFinalReplyValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e QueryFinalReplyValidationError) Key() bool { return e.key } + +// ErrorName returns error name. +func (e QueryFinalReplyValidationError) ErrorName() string { return "QueryFinalReplyValidationError" } + +// Error satisfies the builtin error interface +func (e QueryFinalReplyValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sQueryFinalReply.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = QueryFinalReplyValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = QueryFinalReplyValidationError{} diff --git a/_13_sponge-dtm-cache/grpc+http/api/stock/v1/final.proto b/_13_sponge-dtm-cache/grpc+http/api/stock/v1/final.proto new file mode 100644 index 0000000..90ef8a5 --- /dev/null +++ b/_13_sponge-dtm-cache/grpc+http/api/stock/v1/final.proto @@ -0,0 +1,81 @@ +syntax = "proto3"; + +package api.stock.v1; + +import "google/api/annotations.proto"; +import "protoc-gen-openapiv2/options/annotations.proto"; +import "tagger/tagger.proto"; +import "validate/validate.proto"; + +option go_package = "stock/api/stock/v1;v1"; + +// Default settings for generating swagger documents +// NOTE: because json does not support 64 bits, the int64 and uint64 types under *.swagger.json are automatically converted to string types +// Reference https://github.com/grpc-ecosystem/grpc-gateway/blob/db7fbefff7c04877cdb32e16d4a248a024428207/examples/internal/proto/examplepb/a_bit_of_everything.proto +option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_swagger) = { + host: "localhost:8080" + base_path: "" + info: { + title: "stock api docs"; + version: "2.0"; + } + schemes: HTTP; + schemes: HTTPS; + consumes: "application/json"; + produces: "application/json"; + security_definitions: { + security: { + key: "BearerAuth"; + value: { + type: 
TYPE_API_KEY; + in: IN_HEADER; + name: "Authorization"; + description: "Type Bearer your-jwt-token to Value"; + } + } + } +}; + +service final{ + // 更新数据,DB和缓存最终一致性 + rpc Update(UpdateFinalRequest) returns (UpdateFinalRequestReply) { + option (google.api.http) = { + put: "/api/v1/stock/{id}/final" + body: "*" + }; + option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { + summary: "更新数据", + description: "更新数据,DB和缓存最终一致性", + tags: "case 1: 最终一致性" + }; + } + + // 查询 + rpc Query(QueryFinalRequest) returns (QueryFinalReply) { + option (google.api.http) = { + get: "/api/v1/stock/{id}/final" + }; + option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { + summary: "查询数据", + description: "查询数据", + tags: "case 1: 最终一致性" + }; + } +} + +message UpdateFinalRequest { + uint64 id = 1 [(validate.rules).uint64.gt = 0, (tagger.tags) = "uri:\"id\"" ]; + uint32 stock = 2 [(validate.rules).uint32.gt = 0]; // 库存数量 +} + +message UpdateFinalRequestReply { + +} + +message QueryFinalRequest { + uint64 id =1 [(validate.rules).uint64.gt = 0, (tagger.tags) = "uri:\"id\"" ]; +} + +message QueryFinalReply { + uint32 stock = 1; // 库存数量 +} diff --git a/_13_sponge-dtm-cache/grpc+http/api/stock/v1/final_grpc.pb.go b/_13_sponge-dtm-cache/grpc+http/api/stock/v1/final_grpc.pb.go new file mode 100644 index 0000000..265d4c1 --- /dev/null +++ b/_13_sponge-dtm-cache/grpc+http/api/stock/v1/final_grpc.pb.go @@ -0,0 +1,150 @@ +// Code generated by protoc-gen-go-grpc. DO NOT EDIT. +// versions: +// - protoc-gen-go-grpc v1.3.0 +// - protoc v4.25.2 +// source: api/stock/v1/final.proto + +package v1 + +import ( + context "context" + grpc "google.golang.org/grpc" + codes "google.golang.org/grpc/codes" + status "google.golang.org/grpc/status" +) + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +// Requires gRPC-Go v1.32.0 or later. +const _ = grpc.SupportPackageIsVersion7 + +const ( + Final_Update_FullMethodName = "/api.stock.v1.final/Update" + Final_Query_FullMethodName = "/api.stock.v1.final/Query" +) + +// FinalClient is the client API for Final service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://pkg.go.dev/google.golang.org/grpc/?tab=doc#ClientConn.NewStream. +type FinalClient interface { + // 更新数据,DB和缓存最终一致性 + Update(ctx context.Context, in *UpdateFinalRequest, opts ...grpc.CallOption) (*UpdateFinalRequestReply, error) + // 查询 + Query(ctx context.Context, in *QueryFinalRequest, opts ...grpc.CallOption) (*QueryFinalReply, error) +} + +type finalClient struct { + cc grpc.ClientConnInterface +} + +func NewFinalClient(cc grpc.ClientConnInterface) FinalClient { + return &finalClient{cc} +} + +func (c *finalClient) Update(ctx context.Context, in *UpdateFinalRequest, opts ...grpc.CallOption) (*UpdateFinalRequestReply, error) { + out := new(UpdateFinalRequestReply) + err := c.cc.Invoke(ctx, Final_Update_FullMethodName, in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *finalClient) Query(ctx context.Context, in *QueryFinalRequest, opts ...grpc.CallOption) (*QueryFinalReply, error) { + out := new(QueryFinalReply) + err := c.cc.Invoke(ctx, Final_Query_FullMethodName, in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// FinalServer is the server API for Final service. 
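
The google.api.http bindings in final.proto above map Update to PUT /api/v1/stock/{id}/final (id bound from the URI via the tagger option, stock from the JSON body) and Query to GET on the same path. A hypothetical HTTP caller against the host named in the swagger options (localhost:8080); the concrete id and stock values are illustrative:

// Hypothetical HTTP client calls, not part of the generated code.
// Assumes imports: "bytes", "net/http".
func exampleFinalHTTPCalls() error {
	// case 1: eventual consistency - update stock for id=1
	body := bytes.NewBufferString(`{"stock": 100}`)
	req, err := http.NewRequest(http.MethodPut, "http://localhost:8080/api/v1/stock/1/final", body)
	if err != nil {
		return err
	}
	req.Header.Set("Content-Type", "application/json")
	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		return err
	}
	resp.Body.Close()

	// Query it back.
	getResp, err := http.Get("http://localhost:8080/api/v1/stock/1/final")
	if err != nil {
		return err
	}
	getResp.Body.Close()
	return nil
}
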
+// All implementations must embed UnimplementedFinalServer +// for forward compatibility +type FinalServer interface { + // 更新数据,DB和缓存最终一致性 + Update(context.Context, *UpdateFinalRequest) (*UpdateFinalRequestReply, error) + // 查询 + Query(context.Context, *QueryFinalRequest) (*QueryFinalReply, error) + mustEmbedUnimplementedFinalServer() +} + +// UnimplementedFinalServer must be embedded to have forward compatible implementations. +type UnimplementedFinalServer struct { +} + +func (UnimplementedFinalServer) Update(context.Context, *UpdateFinalRequest) (*UpdateFinalRequestReply, error) { + return nil, status.Errorf(codes.Unimplemented, "method Update not implemented") +} +func (UnimplementedFinalServer) Query(context.Context, *QueryFinalRequest) (*QueryFinalReply, error) { + return nil, status.Errorf(codes.Unimplemented, "method Query not implemented") +} +func (UnimplementedFinalServer) mustEmbedUnimplementedFinalServer() {} + +// UnsafeFinalServer may be embedded to opt out of forward compatibility for this service. +// Use of this interface is not recommended, as added methods to FinalServer will +// result in compilation errors. +type UnsafeFinalServer interface { + mustEmbedUnimplementedFinalServer() +} + +func RegisterFinalServer(s grpc.ServiceRegistrar, srv FinalServer) { + s.RegisterService(&Final_ServiceDesc, srv) +} + +func _Final_Update_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UpdateFinalRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(FinalServer).Update(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: Final_Update_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(FinalServer).Update(ctx, req.(*UpdateFinalRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Final_Query_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(QueryFinalRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(FinalServer).Query(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: Final_Query_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(FinalServer).Query(ctx, req.(*QueryFinalRequest)) + } + return interceptor(ctx, in, info, handler) +} + +// Final_ServiceDesc is the grpc.ServiceDesc for Final service. +// It's only intended for direct use with grpc.RegisterService, +// and not to be introspected or modified (even as a copy) +var Final_ServiceDesc = grpc.ServiceDesc{ + ServiceName: "api.stock.v1.final", + HandlerType: (*FinalServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "Update", + Handler: _Final_Update_Handler, + }, + { + MethodName: "Query", + Handler: _Final_Query_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "api/stock/v1/final.proto", +} diff --git a/_13_sponge-dtm-cache/grpc+http/api/stock/v1/final_router.pb.go b/_13_sponge-dtm-cache/grpc+http/api/stock/v1/final_router.pb.go new file mode 100644 index 0000000..dbaa9cb --- /dev/null +++ b/_13_sponge-dtm-cache/grpc+http/api/stock/v1/final_router.pb.go @@ -0,0 +1,221 @@ +// Code generated by https://github.com/zhufuyi/sponge, DO NOT EDIT. 
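+//
+// Illustrative usage sketch (not part of the generated code): one way this
+// router might be mounted on a gin engine. The group path, middlewares and
+// the finalService value below are assumptions; finalService stands for any
+// type implementing FinalLogicer.
+//
+//	engine := gin.New()
+//	RegisterFinalRouter(
+//		engine,
+//		map[string][]gin.HandlerFunc{"/api/v1": {gin.Logger()}},
+//		map[string][]gin.HandlerFunc{"GET->/api/v1/stock/:id/final": {gin.Recovery()}},
+//		&finalService{},
+//	)
+//	// engine now serves PUT /api/v1/stock/{id}/final and GET /api/v1/stock/{id}/final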
+ +package v1 + +import ( + "context" + "errors" + "strings" + + "github.com/gin-gonic/gin" + "go.uber.org/zap" + + "github.com/zhufuyi/sponge/pkg/errcode" + "github.com/zhufuyi/sponge/pkg/gin/middleware" +) + +type FinalLogicer interface { + Update(ctx context.Context, req *UpdateFinalRequest) (*UpdateFinalRequestReply, error) + Query(ctx context.Context, req *QueryFinalRequest) (*QueryFinalReply, error) +} + +type FinalOption func(*finalOptions) + +type finalOptions struct { + isFromRPC bool + responser errcode.Responser + zapLog *zap.Logger + httpErrors []*errcode.Error + rpcStatus []*errcode.RPCStatus + wrapCtxFn func(c *gin.Context) context.Context +} + +func (o *finalOptions) apply(opts ...FinalOption) { + for _, opt := range opts { + opt(o) + } +} + +func WithFinalHTTPResponse() FinalOption { + return func(o *finalOptions) { + o.isFromRPC = false + } +} + +func WithFinalRPCResponse() FinalOption { + return func(o *finalOptions) { + o.isFromRPC = true + } +} + +func WithFinalResponser(responser errcode.Responser) FinalOption { + return func(o *finalOptions) { + o.responser = responser + } +} + +func WithFinalLogger(zapLog *zap.Logger) FinalOption { + return func(o *finalOptions) { + o.zapLog = zapLog + } +} + +func WithFinalErrorToHTTPCode(e ...*errcode.Error) FinalOption { + return func(o *finalOptions) { + o.httpErrors = e + } +} + +func WithFinalRPCStatusToHTTPCode(s ...*errcode.RPCStatus) FinalOption { + return func(o *finalOptions) { + o.rpcStatus = s + } +} + +func WithFinalWrapCtx(wrapCtxFn func(c *gin.Context) context.Context) FinalOption { + return func(o *finalOptions) { + o.wrapCtxFn = wrapCtxFn + } +} + +func RegisterFinalRouter( + iRouter gin.IRouter, + groupPathMiddlewares map[string][]gin.HandlerFunc, + singlePathMiddlewares map[string][]gin.HandlerFunc, + iLogic FinalLogicer, + opts ...FinalOption) { + + o := &finalOptions{} + o.apply(opts...) + + if o.responser == nil { + o.responser = errcode.NewResponser(o.isFromRPC, o.httpErrors, o.rpcStatus) + } + if o.zapLog == nil { + o.zapLog, _ = zap.NewProduction() + } + + r := &finalRouter{ + iRouter: iRouter, + groupPathMiddlewares: groupPathMiddlewares, + singlePathMiddlewares: singlePathMiddlewares, + iLogic: iLogic, + iResponse: o.responser, + zapLog: o.zapLog, + wrapCtxFn: o.wrapCtxFn, + } + r.register() +} + +type finalRouter struct { + iRouter gin.IRouter + groupPathMiddlewares map[string][]gin.HandlerFunc + singlePathMiddlewares map[string][]gin.HandlerFunc + iLogic FinalLogicer + iResponse errcode.Responser + zapLog *zap.Logger + wrapCtxFn func(c *gin.Context) context.Context +} + +func (r *finalRouter) register() { + r.iRouter.Handle("PUT", "/api/v1/stock/:id/final", r.withMiddleware("PUT", "/api/v1/stock/:id/final", r.Update_4)...) + r.iRouter.Handle("GET", "/api/v1/stock/:id/final", r.withMiddleware("GET", "/api/v1/stock/:id/final", r.Query_4)...) + +} + +func (r *finalRouter) withMiddleware(method string, path string, fn gin.HandlerFunc) []gin.HandlerFunc { + handlerFns := []gin.HandlerFunc{} + + // determine if a route group is hit or miss, left prefix rule + for groupPath, fns := range r.groupPathMiddlewares { + if groupPath == "" || groupPath == "/" { + handlerFns = append(handlerFns, fns...) + continue + } + size := len(groupPath) + if len(path) < size { + continue + } + if groupPath == path[:size] { + handlerFns = append(handlerFns, fns...) 
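+ // Note: the group match above is a purely lexical left-prefix comparison; for
+ // example, a group path of "/api/v1/stock" also matches the registered route
+ // "/api/v1/stock/:id/final", because only the first len(groupPath) bytes of
+ // the route path are compared.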
+ } + } + + // determine if a single route has been hit + key := strings.ToUpper(method) + "->" + path + if fns, ok := r.singlePathMiddlewares[key]; ok { + handlerFns = append(handlerFns, fns...) + } + + return append(handlerFns, fn) +} + +func (r *finalRouter) Update_4(c *gin.Context) { + req := &UpdateFinalRequest{} + var err error + + if err = c.ShouldBindUri(req); err != nil { + r.zapLog.Warn("ShouldBindUri error", zap.Error(err), middleware.GCtxRequestIDField(c)) + r.iResponse.ParamError(c, err) + return + } + + if err = c.ShouldBindJSON(req); err != nil { + r.zapLog.Warn("ShouldBindJSON error", zap.Error(err), middleware.GCtxRequestIDField(c)) + r.iResponse.ParamError(c, err) + return + } + + var ctx context.Context + if r.wrapCtxFn != nil { + ctx = r.wrapCtxFn(c) + } else { + ctx = middleware.WrapCtx(c) + } + + out, err := r.iLogic.Update(ctx, req) + if err != nil { + if errors.Is(err, errcode.SkipResponse) { + return + } + r.iResponse.Error(c, err) + return + } + + r.iResponse.Success(c, out) +} + +func (r *finalRouter) Query_4(c *gin.Context) { + req := &QueryFinalRequest{} + var err error + + if err = c.ShouldBindUri(req); err != nil { + r.zapLog.Warn("ShouldBindUri error", zap.Error(err), middleware.GCtxRequestIDField(c)) + r.iResponse.ParamError(c, err) + return + } + + if err = c.ShouldBindQuery(req); err != nil { + r.zapLog.Warn("ShouldBindQuery error", zap.Error(err), middleware.GCtxRequestIDField(c)) + r.iResponse.ParamError(c, err) + return + } + + var ctx context.Context + if r.wrapCtxFn != nil { + ctx = r.wrapCtxFn(c) + } else { + ctx = middleware.WrapCtx(c) + } + + out, err := r.iLogic.Query(ctx, req) + if err != nil { + if errors.Is(err, errcode.SkipResponse) { + return + } + r.iResponse.Error(c, err) + return + } + + r.iResponse.Success(c, out) +} diff --git a/_13_sponge-dtm-cache/grpc+http/api/stock/v1/stock.pb.go b/_13_sponge-dtm-cache/grpc+http/api/stock/v1/stock.pb.go new file mode 100644 index 0000000..696d469 --- /dev/null +++ b/_13_sponge-dtm-cache/grpc+http/api/stock/v1/stock.pb.go @@ -0,0 +1,918 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// versions: +// protoc-gen-go v1.28.0 +// protoc v4.25.2 +// source: api/stock/v1/stock.proto + +package v1 + +import ( + protoreflect "google.golang.org/protobuf/reflect/protoreflect" + protoimpl "google.golang.org/protobuf/runtime/protoimpl" + reflect "reflect" + types "stock/api/types" + sync "sync" +) + +const ( + // Verify that this generated code is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) + // Verify that runtime/protoimpl is sufficiently up-to-date. 
+ _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) +) + +type CreateStockRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + ProductID uint64 `protobuf:"varint,1,opt,name=productID,proto3" json:"productID"` // 商品id + Stock uint32 `protobuf:"varint,2,opt,name=stock,proto3" json:"stock"` // 库存 +} + +func (x *CreateStockRequest) Reset() { + *x = CreateStockRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_api_stock_v1_stock_proto_msgTypes[0] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *CreateStockRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*CreateStockRequest) ProtoMessage() {} + +func (x *CreateStockRequest) ProtoReflect() protoreflect.Message { + mi := &file_api_stock_v1_stock_proto_msgTypes[0] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use CreateStockRequest.ProtoReflect.Descriptor instead. +func (*CreateStockRequest) Descriptor() ([]byte, []int) { + return file_api_stock_v1_stock_proto_rawDescGZIP(), []int{0} +} + +func (x *CreateStockRequest) GetProductID() uint64 { + if x != nil { + return x.ProductID + } + return 0 +} + +func (x *CreateStockRequest) GetStock() uint32 { + if x != nil { + return x.Stock + } + return 0 +} + +type CreateStockReply struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Id uint64 `protobuf:"varint,1,opt,name=id,proto3" json:"id"` +} + +func (x *CreateStockReply) Reset() { + *x = CreateStockReply{} + if protoimpl.UnsafeEnabled { + mi := &file_api_stock_v1_stock_proto_msgTypes[1] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *CreateStockReply) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*CreateStockReply) ProtoMessage() {} + +func (x *CreateStockReply) ProtoReflect() protoreflect.Message { + mi := &file_api_stock_v1_stock_proto_msgTypes[1] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use CreateStockReply.ProtoReflect.Descriptor instead. 
+func (*CreateStockReply) Descriptor() ([]byte, []int) { + return file_api_stock_v1_stock_proto_rawDescGZIP(), []int{1} +} + +func (x *CreateStockReply) GetId() uint64 { + if x != nil { + return x.Id + } + return 0 +} + +type DeleteStockByIDRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Id uint64 `protobuf:"varint,1,opt,name=id,proto3" json:"id" uri:"id"` +} + +func (x *DeleteStockByIDRequest) Reset() { + *x = DeleteStockByIDRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_api_stock_v1_stock_proto_msgTypes[2] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *DeleteStockByIDRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*DeleteStockByIDRequest) ProtoMessage() {} + +func (x *DeleteStockByIDRequest) ProtoReflect() protoreflect.Message { + mi := &file_api_stock_v1_stock_proto_msgTypes[2] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use DeleteStockByIDRequest.ProtoReflect.Descriptor instead. +func (*DeleteStockByIDRequest) Descriptor() ([]byte, []int) { + return file_api_stock_v1_stock_proto_rawDescGZIP(), []int{2} +} + +func (x *DeleteStockByIDRequest) GetId() uint64 { + if x != nil { + return x.Id + } + return 0 +} + +type DeleteStockByIDReply struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields +} + +func (x *DeleteStockByIDReply) Reset() { + *x = DeleteStockByIDReply{} + if protoimpl.UnsafeEnabled { + mi := &file_api_stock_v1_stock_proto_msgTypes[3] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *DeleteStockByIDReply) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*DeleteStockByIDReply) ProtoMessage() {} + +func (x *DeleteStockByIDReply) ProtoReflect() protoreflect.Message { + mi := &file_api_stock_v1_stock_proto_msgTypes[3] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use DeleteStockByIDReply.ProtoReflect.Descriptor instead. 
+func (*DeleteStockByIDReply) Descriptor() ([]byte, []int) { + return file_api_stock_v1_stock_proto_rawDescGZIP(), []int{3} +} + +type UpdateStockByIDRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Id uint64 `protobuf:"varint,1,opt,name=id,proto3" json:"id" uri:"id"` + ProductID uint64 `protobuf:"varint,2,opt,name=productID,proto3" json:"productID"` // 商品id + Stock uint32 `protobuf:"varint,3,opt,name=stock,proto3" json:"stock"` // 库存 +} + +func (x *UpdateStockByIDRequest) Reset() { + *x = UpdateStockByIDRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_api_stock_v1_stock_proto_msgTypes[4] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *UpdateStockByIDRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*UpdateStockByIDRequest) ProtoMessage() {} + +func (x *UpdateStockByIDRequest) ProtoReflect() protoreflect.Message { + mi := &file_api_stock_v1_stock_proto_msgTypes[4] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use UpdateStockByIDRequest.ProtoReflect.Descriptor instead. +func (*UpdateStockByIDRequest) Descriptor() ([]byte, []int) { + return file_api_stock_v1_stock_proto_rawDescGZIP(), []int{4} +} + +func (x *UpdateStockByIDRequest) GetId() uint64 { + if x != nil { + return x.Id + } + return 0 +} + +func (x *UpdateStockByIDRequest) GetProductID() uint64 { + if x != nil { + return x.ProductID + } + return 0 +} + +func (x *UpdateStockByIDRequest) GetStock() uint32 { + if x != nil { + return x.Stock + } + return 0 +} + +type UpdateStockByIDReply struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields +} + +func (x *UpdateStockByIDReply) Reset() { + *x = UpdateStockByIDReply{} + if protoimpl.UnsafeEnabled { + mi := &file_api_stock_v1_stock_proto_msgTypes[5] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *UpdateStockByIDReply) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*UpdateStockByIDReply) ProtoMessage() {} + +func (x *UpdateStockByIDReply) ProtoReflect() protoreflect.Message { + mi := &file_api_stock_v1_stock_proto_msgTypes[5] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use UpdateStockByIDReply.ProtoReflect.Descriptor instead. 
+func (*UpdateStockByIDReply) Descriptor() ([]byte, []int) { + return file_api_stock_v1_stock_proto_rawDescGZIP(), []int{5} +} + +type Stock struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Id uint64 `protobuf:"varint,1,opt,name=id,proto3" json:"id"` + ProductID uint64 `protobuf:"varint,2,opt,name=productID,proto3" json:"productID"` // 商品id + Stock uint32 `protobuf:"varint,3,opt,name=stock,proto3" json:"stock"` // 库存 + CreatedAt string `protobuf:"bytes,4,opt,name=createdAt,proto3" json:"createdAt"` + UpdatedAt string `protobuf:"bytes,5,opt,name=updatedAt,proto3" json:"updatedAt"` +} + +func (x *Stock) Reset() { + *x = Stock{} + if protoimpl.UnsafeEnabled { + mi := &file_api_stock_v1_stock_proto_msgTypes[6] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Stock) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Stock) ProtoMessage() {} + +func (x *Stock) ProtoReflect() protoreflect.Message { + mi := &file_api_stock_v1_stock_proto_msgTypes[6] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Stock.ProtoReflect.Descriptor instead. +func (*Stock) Descriptor() ([]byte, []int) { + return file_api_stock_v1_stock_proto_rawDescGZIP(), []int{6} +} + +func (x *Stock) GetId() uint64 { + if x != nil { + return x.Id + } + return 0 +} + +func (x *Stock) GetProductID() uint64 { + if x != nil { + return x.ProductID + } + return 0 +} + +func (x *Stock) GetStock() uint32 { + if x != nil { + return x.Stock + } + return 0 +} + +func (x *Stock) GetCreatedAt() string { + if x != nil { + return x.CreatedAt + } + return "" +} + +func (x *Stock) GetUpdatedAt() string { + if x != nil { + return x.UpdatedAt + } + return "" +} + +type GetStockByIDRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Id uint64 `protobuf:"varint,1,opt,name=id,proto3" json:"id" uri:"id"` +} + +func (x *GetStockByIDRequest) Reset() { + *x = GetStockByIDRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_api_stock_v1_stock_proto_msgTypes[7] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *GetStockByIDRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*GetStockByIDRequest) ProtoMessage() {} + +func (x *GetStockByIDRequest) ProtoReflect() protoreflect.Message { + mi := &file_api_stock_v1_stock_proto_msgTypes[7] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use GetStockByIDRequest.ProtoReflect.Descriptor instead. 
+func (*GetStockByIDRequest) Descriptor() ([]byte, []int) { + return file_api_stock_v1_stock_proto_rawDescGZIP(), []int{7} +} + +func (x *GetStockByIDRequest) GetId() uint64 { + if x != nil { + return x.Id + } + return 0 +} + +type GetStockByIDReply struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Stock *Stock `protobuf:"bytes,1,opt,name=stock,proto3" json:"stock"` +} + +func (x *GetStockByIDReply) Reset() { + *x = GetStockByIDReply{} + if protoimpl.UnsafeEnabled { + mi := &file_api_stock_v1_stock_proto_msgTypes[8] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *GetStockByIDReply) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*GetStockByIDReply) ProtoMessage() {} + +func (x *GetStockByIDReply) ProtoReflect() protoreflect.Message { + mi := &file_api_stock_v1_stock_proto_msgTypes[8] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use GetStockByIDReply.ProtoReflect.Descriptor instead. +func (*GetStockByIDReply) Descriptor() ([]byte, []int) { + return file_api_stock_v1_stock_proto_rawDescGZIP(), []int{8} +} + +func (x *GetStockByIDReply) GetStock() *Stock { + if x != nil { + return x.Stock + } + return nil +} + +type ListStockRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Params *types.Params `protobuf:"bytes,1,opt,name=params,proto3" json:"params"` +} + +func (x *ListStockRequest) Reset() { + *x = ListStockRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_api_stock_v1_stock_proto_msgTypes[9] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ListStockRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ListStockRequest) ProtoMessage() {} + +func (x *ListStockRequest) ProtoReflect() protoreflect.Message { + mi := &file_api_stock_v1_stock_proto_msgTypes[9] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ListStockRequest.ProtoReflect.Descriptor instead. 
+func (*ListStockRequest) Descriptor() ([]byte, []int) { + return file_api_stock_v1_stock_proto_rawDescGZIP(), []int{9} +} + +func (x *ListStockRequest) GetParams() *types.Params { + if x != nil { + return x.Params + } + return nil +} + +type ListStockReply struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Total int64 `protobuf:"varint,1,opt,name=total,proto3" json:"total"` + Stocks []*Stock `protobuf:"bytes,2,rep,name=stocks,proto3" json:"stocks"` +} + +func (x *ListStockReply) Reset() { + *x = ListStockReply{} + if protoimpl.UnsafeEnabled { + mi := &file_api_stock_v1_stock_proto_msgTypes[10] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ListStockReply) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ListStockReply) ProtoMessage() {} + +func (x *ListStockReply) ProtoReflect() protoreflect.Message { + mi := &file_api_stock_v1_stock_proto_msgTypes[10] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ListStockReply.ProtoReflect.Descriptor instead. +func (*ListStockReply) Descriptor() ([]byte, []int) { + return file_api_stock_v1_stock_proto_rawDescGZIP(), []int{10} +} + +func (x *ListStockReply) GetTotal() int64 { + if x != nil { + return x.Total + } + return 0 +} + +func (x *ListStockReply) GetStocks() []*Stock { + if x != nil { + return x.Stocks + } + return nil +} + +var File_api_stock_v1_stock_proto protoreflect.FileDescriptor + +var file_api_stock_v1_stock_proto_rawDesc = []byte{ + 0x0a, 0x18, 0x61, 0x70, 0x69, 0x2f, 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x2f, 0x76, 0x31, 0x2f, 0x73, + 0x74, 0x6f, 0x63, 0x6b, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x0c, 0x61, 0x70, 0x69, 0x2e, + 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x2e, 0x76, 0x31, 0x1a, 0x15, 0x61, 0x70, 0x69, 0x2f, 0x74, 0x79, + 0x70, 0x65, 0x73, 0x2f, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, + 0x1c, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x61, 0x6e, 0x6e, 0x6f, + 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x2e, 0x70, + 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x2d, 0x67, 0x65, 0x6e, 0x2d, 0x6f, 0x70, 0x65, 0x6e, 0x61, 0x70, + 0x69, 0x76, 0x32, 0x2f, 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2f, 0x61, 0x6e, 0x6e, 0x6f, + 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x13, 0x74, + 0x61, 0x67, 0x67, 0x65, 0x72, 0x2f, 0x74, 0x61, 0x67, 0x67, 0x65, 0x72, 0x2e, 0x70, 0x72, 0x6f, + 0x74, 0x6f, 0x1a, 0x17, 0x76, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x65, 0x2f, 0x76, 0x61, 0x6c, + 0x69, 0x64, 0x61, 0x74, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, 0x48, 0x0a, 0x12, 0x43, + 0x72, 0x65, 0x61, 0x74, 0x65, 0x53, 0x74, 0x6f, 0x63, 0x6b, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, + 0x74, 0x12, 0x1c, 0x0a, 0x09, 0x70, 0x72, 0x6f, 0x64, 0x75, 0x63, 0x74, 0x49, 0x44, 0x18, 0x01, + 0x20, 0x01, 0x28, 0x04, 0x52, 0x09, 0x70, 0x72, 0x6f, 0x64, 0x75, 0x63, 0x74, 0x49, 0x44, 0x12, + 0x14, 0x0a, 0x05, 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x05, + 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x22, 0x22, 0x0a, 0x10, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x53, + 0x74, 0x6f, 0x63, 0x6b, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, + 0x01, 0x20, 0x01, 0x28, 0x04, 0x52, 0x02, 0x69, 0x64, 
0x22, 0x3e, 0x0a, 0x16, 0x44, 0x65, 0x6c, + 0x65, 0x74, 0x65, 0x53, 0x74, 0x6f, 0x63, 0x6b, 0x42, 0x79, 0x49, 0x44, 0x52, 0x65, 0x71, 0x75, + 0x65, 0x73, 0x74, 0x12, 0x24, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x42, + 0x14, 0xfa, 0x42, 0x04, 0x32, 0x02, 0x20, 0x00, 0x9a, 0x84, 0x9e, 0x03, 0x08, 0x75, 0x72, 0x69, + 0x3a, 0x22, 0x69, 0x64, 0x22, 0x52, 0x02, 0x69, 0x64, 0x22, 0x16, 0x0a, 0x14, 0x44, 0x65, 0x6c, + 0x65, 0x74, 0x65, 0x53, 0x74, 0x6f, 0x63, 0x6b, 0x42, 0x79, 0x49, 0x44, 0x52, 0x65, 0x70, 0x6c, + 0x79, 0x22, 0x6b, 0x0a, 0x16, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x53, 0x74, 0x6f, 0x63, 0x6b, + 0x42, 0x79, 0x49, 0x44, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1d, 0x0a, 0x02, 0x69, + 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x42, 0x0d, 0x9a, 0x84, 0x9e, 0x03, 0x08, 0x75, 0x72, + 0x69, 0x3a, 0x22, 0x69, 0x64, 0x22, 0x52, 0x02, 0x69, 0x64, 0x12, 0x1c, 0x0a, 0x09, 0x70, 0x72, + 0x6f, 0x64, 0x75, 0x63, 0x74, 0x49, 0x44, 0x18, 0x02, 0x20, 0x01, 0x28, 0x04, 0x52, 0x09, 0x70, + 0x72, 0x6f, 0x64, 0x75, 0x63, 0x74, 0x49, 0x44, 0x12, 0x14, 0x0a, 0x05, 0x73, 0x74, 0x6f, 0x63, + 0x6b, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x05, 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x22, 0x16, + 0x0a, 0x14, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x53, 0x74, 0x6f, 0x63, 0x6b, 0x42, 0x79, 0x49, + 0x44, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0x87, 0x01, 0x0a, 0x05, 0x53, 0x74, 0x6f, 0x63, 0x6b, + 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x52, 0x02, 0x69, 0x64, + 0x12, 0x1c, 0x0a, 0x09, 0x70, 0x72, 0x6f, 0x64, 0x75, 0x63, 0x74, 0x49, 0x44, 0x18, 0x02, 0x20, + 0x01, 0x28, 0x04, 0x52, 0x09, 0x70, 0x72, 0x6f, 0x64, 0x75, 0x63, 0x74, 0x49, 0x44, 0x12, 0x14, + 0x0a, 0x05, 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x05, 0x73, + 0x74, 0x6f, 0x63, 0x6b, 0x12, 0x1c, 0x0a, 0x09, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x64, 0x41, + 0x74, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x64, + 0x41, 0x74, 0x12, 0x1c, 0x0a, 0x09, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x64, 0x41, 0x74, 0x18, + 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x64, 0x41, 0x74, + 0x22, 0x3b, 0x0a, 0x13, 0x47, 0x65, 0x74, 0x53, 0x74, 0x6f, 0x63, 0x6b, 0x42, 0x79, 0x49, 0x44, + 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x24, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, + 0x01, 0x28, 0x04, 0x42, 0x14, 0xfa, 0x42, 0x04, 0x32, 0x02, 0x20, 0x00, 0x9a, 0x84, 0x9e, 0x03, + 0x08, 0x75, 0x72, 0x69, 0x3a, 0x22, 0x69, 0x64, 0x22, 0x52, 0x02, 0x69, 0x64, 0x22, 0x3e, 0x0a, + 0x11, 0x47, 0x65, 0x74, 0x53, 0x74, 0x6f, 0x63, 0x6b, 0x42, 0x79, 0x49, 0x44, 0x52, 0x65, 0x70, + 0x6c, 0x79, 0x12, 0x29, 0x0a, 0x05, 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x18, 0x01, 0x20, 0x01, 0x28, + 0x0b, 0x32, 0x13, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x2e, 0x76, 0x31, + 0x2e, 0x53, 0x74, 0x6f, 0x63, 0x6b, 0x52, 0x05, 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x22, 0x3d, 0x0a, + 0x10, 0x4c, 0x69, 0x73, 0x74, 0x53, 0x74, 0x6f, 0x63, 0x6b, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, + 0x74, 0x12, 0x29, 0x0a, 0x06, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x18, 0x01, 0x20, 0x01, 0x28, + 0x0b, 0x32, 0x11, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2e, 0x50, 0x61, + 0x72, 0x61, 0x6d, 0x73, 0x52, 0x06, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x22, 0x53, 0x0a, 0x0e, + 0x4c, 0x69, 0x73, 0x74, 0x53, 0x74, 0x6f, 0x63, 0x6b, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x12, 0x14, + 0x0a, 0x05, 0x74, 0x6f, 0x74, 0x61, 0x6c, 0x18, 0x01, 0x20, 0x01, 0x28, 0x03, 
0x52, 0x05, 0x74, + 0x6f, 0x74, 0x61, 0x6c, 0x12, 0x2b, 0x0a, 0x06, 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x73, 0x18, 0x02, + 0x20, 0x03, 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x73, 0x74, 0x6f, 0x63, 0x6b, + 0x2e, 0x76, 0x31, 0x2e, 0x53, 0x74, 0x6f, 0x63, 0x6b, 0x52, 0x06, 0x73, 0x74, 0x6f, 0x63, 0x6b, + 0x73, 0x32, 0xa3, 0x06, 0x0a, 0x05, 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x12, 0x99, 0x01, 0x0a, 0x06, + 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x12, 0x20, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x73, 0x74, 0x6f, + 0x63, 0x6b, 0x2e, 0x76, 0x31, 0x2e, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x53, 0x74, 0x6f, 0x63, + 0x6b, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1e, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x73, + 0x74, 0x6f, 0x63, 0x6b, 0x2e, 0x76, 0x31, 0x2e, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x53, 0x74, + 0x6f, 0x63, 0x6b, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0x4d, 0x92, 0x41, 0x32, 0x12, 0x0c, 0x63, + 0x72, 0x65, 0x61, 0x74, 0x65, 0x20, 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x1a, 0x22, 0x73, 0x75, 0x62, + 0x6d, 0x69, 0x74, 0x20, 0x69, 0x6e, 0x66, 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x20, + 0x74, 0x6f, 0x20, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x20, 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x82, + 0xd3, 0xe4, 0x93, 0x02, 0x12, 0x22, 0x0d, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x76, 0x31, 0x2f, 0x73, + 0x74, 0x6f, 0x63, 0x6b, 0x3a, 0x01, 0x2a, 0x12, 0x97, 0x01, 0x0a, 0x0a, 0x44, 0x65, 0x6c, 0x65, + 0x74, 0x65, 0x42, 0x79, 0x49, 0x44, 0x12, 0x24, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x73, 0x74, 0x6f, + 0x63, 0x6b, 0x2e, 0x76, 0x31, 0x2e, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x53, 0x74, 0x6f, 0x63, + 0x6b, 0x42, 0x79, 0x49, 0x44, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x22, 0x2e, 0x61, + 0x70, 0x69, 0x2e, 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x2e, 0x76, 0x31, 0x2e, 0x44, 0x65, 0x6c, 0x65, + 0x74, 0x65, 0x53, 0x74, 0x6f, 0x63, 0x6b, 0x42, 0x79, 0x49, 0x44, 0x52, 0x65, 0x70, 0x6c, 0x79, + 0x22, 0x3f, 0x92, 0x41, 0x22, 0x12, 0x0c, 0x64, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x20, 0x73, 0x74, + 0x6f, 0x63, 0x6b, 0x1a, 0x12, 0x64, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x20, 0x73, 0x74, 0x6f, 0x63, + 0x6b, 0x20, 0x62, 0x79, 0x20, 0x69, 0x64, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x14, 0x2a, 0x12, 0x2f, + 0x61, 0x70, 0x69, 0x2f, 0x76, 0x31, 0x2f, 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x2f, 0x7b, 0x69, 0x64, + 0x7d, 0x12, 0x9a, 0x01, 0x0a, 0x0a, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x42, 0x79, 0x49, 0x44, + 0x12, 0x24, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x2e, 0x76, 0x31, 0x2e, + 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x53, 0x74, 0x6f, 0x63, 0x6b, 0x42, 0x79, 0x49, 0x44, 0x52, + 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x22, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x73, 0x74, 0x6f, + 0x63, 0x6b, 0x2e, 0x76, 0x31, 0x2e, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x53, 0x74, 0x6f, 0x63, + 0x6b, 0x42, 0x79, 0x49, 0x44, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0x42, 0x92, 0x41, 0x22, 0x12, + 0x0c, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x20, 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x1a, 0x12, 0x75, + 0x70, 0x64, 0x61, 0x74, 0x65, 0x20, 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x20, 0x62, 0x79, 0x20, 0x69, + 0x64, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x17, 0x1a, 0x12, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x76, 0x31, + 0x2f, 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x2f, 0x7b, 0x69, 0x64, 0x7d, 0x3a, 0x01, 0x2a, 0x12, 0x96, + 0x01, 0x0a, 0x07, 0x47, 0x65, 0x74, 0x42, 0x79, 0x49, 0x44, 0x12, 0x21, 0x2e, 0x61, 0x70, 0x69, + 0x2e, 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x2e, 0x76, 0x31, 0x2e, 0x47, 0x65, 0x74, 0x53, 0x74, 0x6f, + 0x63, 0x6b, 0x42, 0x79, 0x49, 0x44, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1f, 0x2e, + 0x61, 
0x70, 0x69, 0x2e, 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x2e, 0x76, 0x31, 0x2e, 0x47, 0x65, 0x74, + 0x53, 0x74, 0x6f, 0x63, 0x6b, 0x42, 0x79, 0x49, 0x44, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0x47, + 0x92, 0x41, 0x2a, 0x12, 0x10, 0x67, 0x65, 0x74, 0x20, 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x20, 0x64, + 0x65, 0x74, 0x61, 0x69, 0x6c, 0x1a, 0x16, 0x67, 0x65, 0x74, 0x20, 0x73, 0x74, 0x6f, 0x63, 0x6b, + 0x20, 0x64, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x20, 0x62, 0x79, 0x20, 0x69, 0x64, 0x82, 0xd3, 0xe4, + 0x93, 0x02, 0x14, 0x12, 0x12, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x76, 0x31, 0x2f, 0x73, 0x74, 0x6f, + 0x63, 0x6b, 0x2f, 0x7b, 0x69, 0x64, 0x7d, 0x12, 0xad, 0x01, 0x0a, 0x04, 0x4c, 0x69, 0x73, 0x74, + 0x12, 0x1e, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x2e, 0x76, 0x31, 0x2e, + 0x4c, 0x69, 0x73, 0x74, 0x53, 0x74, 0x6f, 0x63, 0x6b, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, + 0x1a, 0x1c, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x2e, 0x76, 0x31, 0x2e, + 0x4c, 0x69, 0x73, 0x74, 0x53, 0x74, 0x6f, 0x63, 0x6b, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0x67, + 0x92, 0x41, 0x47, 0x12, 0x1c, 0x6c, 0x69, 0x73, 0x74, 0x20, 0x6f, 0x66, 0x20, 0x73, 0x74, 0x6f, + 0x63, 0x6b, 0x73, 0x20, 0x62, 0x79, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, + 0x73, 0x1a, 0x27, 0x6c, 0x69, 0x73, 0x74, 0x20, 0x6f, 0x66, 0x20, 0x73, 0x74, 0x6f, 0x63, 0x6b, + 0x73, 0x20, 0x62, 0x79, 0x20, 0x70, 0x61, 0x67, 0x69, 0x6e, 0x67, 0x20, 0x61, 0x6e, 0x64, 0x20, + 0x63, 0x6f, 0x6e, 0x64, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x17, + 0x22, 0x12, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x76, 0x31, 0x2f, 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x2f, + 0x6c, 0x69, 0x73, 0x74, 0x3a, 0x01, 0x2a, 0x42, 0xb4, 0x01, 0x5a, 0x15, 0x73, 0x74, 0x6f, 0x63, + 0x6b, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x2f, 0x76, 0x31, 0x3b, 0x76, + 0x31, 0x92, 0x41, 0x99, 0x01, 0x12, 0x15, 0x0a, 0x0e, 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x20, 0x61, + 0x70, 0x69, 0x20, 0x64, 0x6f, 0x63, 0x73, 0x32, 0x03, 0x32, 0x2e, 0x30, 0x1a, 0x0e, 0x6c, 0x6f, + 0x63, 0x61, 0x6c, 0x68, 0x6f, 0x73, 0x74, 0x3a, 0x38, 0x30, 0x38, 0x30, 0x2a, 0x02, 0x01, 0x02, + 0x32, 0x10, 0x61, 0x70, 0x70, 0x6c, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2f, 0x6a, 0x73, + 0x6f, 0x6e, 0x3a, 0x10, 0x61, 0x70, 0x70, 0x6c, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2f, + 0x6a, 0x73, 0x6f, 0x6e, 0x5a, 0x48, 0x0a, 0x46, 0x0a, 0x0a, 0x42, 0x65, 0x61, 0x72, 0x65, 0x72, + 0x41, 0x75, 0x74, 0x68, 0x12, 0x38, 0x08, 0x02, 0x12, 0x23, 0x54, 0x79, 0x70, 0x65, 0x20, 0x42, + 0x65, 0x61, 0x72, 0x65, 0x72, 0x20, 0x79, 0x6f, 0x75, 0x72, 0x2d, 0x6a, 0x77, 0x74, 0x2d, 0x74, + 0x6f, 0x6b, 0x65, 0x6e, 0x20, 0x74, 0x6f, 0x20, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x1a, 0x0d, 0x41, + 0x75, 0x74, 0x68, 0x6f, 0x72, 0x69, 0x7a, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x02, 0x62, 0x06, + 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, +} + +var ( + file_api_stock_v1_stock_proto_rawDescOnce sync.Once + file_api_stock_v1_stock_proto_rawDescData = file_api_stock_v1_stock_proto_rawDesc +) + +func file_api_stock_v1_stock_proto_rawDescGZIP() []byte { + file_api_stock_v1_stock_proto_rawDescOnce.Do(func() { + file_api_stock_v1_stock_proto_rawDescData = protoimpl.X.CompressGZIP(file_api_stock_v1_stock_proto_rawDescData) + }) + return file_api_stock_v1_stock_proto_rawDescData +} + +var file_api_stock_v1_stock_proto_msgTypes = make([]protoimpl.MessageInfo, 11) +var file_api_stock_v1_stock_proto_goTypes = []interface{}{ + (*CreateStockRequest)(nil), // 0: api.stock.v1.CreateStockRequest + (*CreateStockReply)(nil), 
// 1: api.stock.v1.CreateStockReply + (*DeleteStockByIDRequest)(nil), // 2: api.stock.v1.DeleteStockByIDRequest + (*DeleteStockByIDReply)(nil), // 3: api.stock.v1.DeleteStockByIDReply + (*UpdateStockByIDRequest)(nil), // 4: api.stock.v1.UpdateStockByIDRequest + (*UpdateStockByIDReply)(nil), // 5: api.stock.v1.UpdateStockByIDReply + (*Stock)(nil), // 6: api.stock.v1.Stock + (*GetStockByIDRequest)(nil), // 7: api.stock.v1.GetStockByIDRequest + (*GetStockByIDReply)(nil), // 8: api.stock.v1.GetStockByIDReply + (*ListStockRequest)(nil), // 9: api.stock.v1.ListStockRequest + (*ListStockReply)(nil), // 10: api.stock.v1.ListStockReply + (*types.Params)(nil), // 11: api.types.Params +} +var file_api_stock_v1_stock_proto_depIdxs = []int32{ + 6, // 0: api.stock.v1.GetStockByIDReply.stock:type_name -> api.stock.v1.Stock + 11, // 1: api.stock.v1.ListStockRequest.params:type_name -> api.types.Params + 6, // 2: api.stock.v1.ListStockReply.stocks:type_name -> api.stock.v1.Stock + 0, // 3: api.stock.v1.stock.Create:input_type -> api.stock.v1.CreateStockRequest + 2, // 4: api.stock.v1.stock.DeleteByID:input_type -> api.stock.v1.DeleteStockByIDRequest + 4, // 5: api.stock.v1.stock.UpdateByID:input_type -> api.stock.v1.UpdateStockByIDRequest + 7, // 6: api.stock.v1.stock.GetByID:input_type -> api.stock.v1.GetStockByIDRequest + 9, // 7: api.stock.v1.stock.List:input_type -> api.stock.v1.ListStockRequest + 1, // 8: api.stock.v1.stock.Create:output_type -> api.stock.v1.CreateStockReply + 3, // 9: api.stock.v1.stock.DeleteByID:output_type -> api.stock.v1.DeleteStockByIDReply + 5, // 10: api.stock.v1.stock.UpdateByID:output_type -> api.stock.v1.UpdateStockByIDReply + 8, // 11: api.stock.v1.stock.GetByID:output_type -> api.stock.v1.GetStockByIDReply + 10, // 12: api.stock.v1.stock.List:output_type -> api.stock.v1.ListStockReply + 8, // [8:13] is the sub-list for method output_type + 3, // [3:8] is the sub-list for method input_type + 3, // [3:3] is the sub-list for extension type_name + 3, // [3:3] is the sub-list for extension extendee + 0, // [0:3] is the sub-list for field type_name +} + +func init() { file_api_stock_v1_stock_proto_init() } +func file_api_stock_v1_stock_proto_init() { + if File_api_stock_v1_stock_proto != nil { + return + } + if !protoimpl.UnsafeEnabled { + file_api_stock_v1_stock_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*CreateStockRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_api_stock_v1_stock_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*CreateStockReply); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_api_stock_v1_stock_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*DeleteStockByIDRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_api_stock_v1_stock_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*DeleteStockByIDReply); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_api_stock_v1_stock_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*UpdateStockByIDRequest); i { + case 0: + return &v.state + case 1: + return 
&v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_api_stock_v1_stock_proto_msgTypes[5].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*UpdateStockByIDReply); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_api_stock_v1_stock_proto_msgTypes[6].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*Stock); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_api_stock_v1_stock_proto_msgTypes[7].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*GetStockByIDRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_api_stock_v1_stock_proto_msgTypes[8].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*GetStockByIDReply); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_api_stock_v1_stock_proto_msgTypes[9].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*ListStockRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_api_stock_v1_stock_proto_msgTypes[10].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*ListStockReply); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + } + type x struct{} + out := protoimpl.TypeBuilder{ + File: protoimpl.DescBuilder{ + GoPackagePath: reflect.TypeOf(x{}).PkgPath(), + RawDescriptor: file_api_stock_v1_stock_proto_rawDesc, + NumEnums: 0, + NumMessages: 11, + NumExtensions: 0, + NumServices: 1, + }, + GoTypes: file_api_stock_v1_stock_proto_goTypes, + DependencyIndexes: file_api_stock_v1_stock_proto_depIdxs, + MessageInfos: file_api_stock_v1_stock_proto_msgTypes, + }.Build() + File_api_stock_v1_stock_proto = out.File + file_api_stock_v1_stock_proto_rawDesc = nil + file_api_stock_v1_stock_proto_goTypes = nil + file_api_stock_v1_stock_proto_depIdxs = nil +} diff --git a/_13_sponge-dtm-cache/grpc+http/api/stock/v1/stock.pb.validate.go b/_13_sponge-dtm-cache/grpc+http/api/stock/v1/stock.pb.validate.go new file mode 100644 index 0000000..7e08c68 --- /dev/null +++ b/_13_sponge-dtm-cache/grpc+http/api/stock/v1/stock.pb.validate.go @@ -0,0 +1,1286 @@ +// Code generated by protoc-gen-validate. DO NOT EDIT. +// source: api/stock/v1/stock.proto + +package v1 + +import ( + "bytes" + "errors" + "fmt" + "net" + "net/mail" + "net/url" + "regexp" + "sort" + "strings" + "time" + "unicode/utf8" + + "google.golang.org/protobuf/types/known/anypb" +) + +// ensure the imports are used +var ( + _ = bytes.MinRead + _ = errors.New("") + _ = fmt.Print + _ = utf8.UTFMax + _ = (*regexp.Regexp)(nil) + _ = (*strings.Reader)(nil) + _ = net.IPv4len + _ = time.Duration(0) + _ = (*url.URL)(nil) + _ = (*mail.Address)(nil) + _ = anypb.Any{} + _ = sort.Sort +) + +// Validate checks the field values on CreateStockRequest with the rules +// defined in the proto definition for this message. If any rules are +// violated, the first error encountered is returned, or nil if there are no violations. 
+func (m *CreateStockRequest) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on CreateStockRequest with the rules +// defined in the proto definition for this message. If any rules are +// violated, the result is a list of violation errors wrapped in +// CreateStockRequestMultiError, or nil if none found. +func (m *CreateStockRequest) ValidateAll() error { + return m.validate(true) +} + +func (m *CreateStockRequest) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + // no validation rules for ProductID + + // no validation rules for Stock + + if len(errors) > 0 { + return CreateStockRequestMultiError(errors) + } + + return nil +} + +// CreateStockRequestMultiError is an error wrapping multiple validation errors +// returned by CreateStockRequest.ValidateAll() if the designated constraints +// aren't met. +type CreateStockRequestMultiError []error + +// Error returns a concatenation of all the error messages it wraps. +func (m CreateStockRequestMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m CreateStockRequestMultiError) AllErrors() []error { return m } + +// CreateStockRequestValidationError is the validation error returned by +// CreateStockRequest.Validate if the designated constraints aren't met. +type CreateStockRequestValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e CreateStockRequestValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e CreateStockRequestValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e CreateStockRequestValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e CreateStockRequestValidationError) Key() bool { return e.key } + +// ErrorName returns error name. +func (e CreateStockRequestValidationError) ErrorName() string { + return "CreateStockRequestValidationError" +} + +// Error satisfies the builtin error interface +func (e CreateStockRequestValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sCreateStockRequest.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = CreateStockRequestValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = CreateStockRequestValidationError{} + +// Validate checks the field values on CreateStockReply with the rules defined +// in the proto definition for this message. If any rules are violated, the +// first error encountered is returned, or nil if there are no violations. +func (m *CreateStockReply) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on CreateStockReply with the rules +// defined in the proto definition for this message. If any rules are +// violated, the result is a list of violation errors wrapped in +// CreateStockReplyMultiError, or nil if none found. 
+func (m *CreateStockReply) ValidateAll() error { + return m.validate(true) +} + +func (m *CreateStockReply) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + // no validation rules for Id + + if len(errors) > 0 { + return CreateStockReplyMultiError(errors) + } + + return nil +} + +// CreateStockReplyMultiError is an error wrapping multiple validation errors +// returned by CreateStockReply.ValidateAll() if the designated constraints +// aren't met. +type CreateStockReplyMultiError []error + +// Error returns a concatenation of all the error messages it wraps. +func (m CreateStockReplyMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m CreateStockReplyMultiError) AllErrors() []error { return m } + +// CreateStockReplyValidationError is the validation error returned by +// CreateStockReply.Validate if the designated constraints aren't met. +type CreateStockReplyValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e CreateStockReplyValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e CreateStockReplyValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e CreateStockReplyValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e CreateStockReplyValidationError) Key() bool { return e.key } + +// ErrorName returns error name. +func (e CreateStockReplyValidationError) ErrorName() string { return "CreateStockReplyValidationError" } + +// Error satisfies the builtin error interface +func (e CreateStockReplyValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sCreateStockReply.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = CreateStockReplyValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = CreateStockReplyValidationError{} + +// Validate checks the field values on DeleteStockByIDRequest with the rules +// defined in the proto definition for this message. If any rules are +// violated, the first error encountered is returned, or nil if there are no violations. +func (m *DeleteStockByIDRequest) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on DeleteStockByIDRequest with the rules +// defined in the proto definition for this message. If any rules are +// violated, the result is a list of violation errors wrapped in +// DeleteStockByIDRequestMultiError, or nil if none found. 
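+//
+// Illustrative sketch of the difference (the request value is an assumption):
+// Validate stops at the first violation, while ValidateAll collects all of them.
+//
+//	req := &DeleteStockByIDRequest{Id: 0}
+//	err := req.Validate()     // returns DeleteStockByIDRequestValidationError (field "Id": value must be greater than 0)
+//	errs := req.ValidateAll() // returns DeleteStockByIDRequestMultiError wrapping the same violation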
+func (m *DeleteStockByIDRequest) ValidateAll() error { + return m.validate(true) +} + +func (m *DeleteStockByIDRequest) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + if m.GetId() <= 0 { + err := DeleteStockByIDRequestValidationError{ + field: "Id", + reason: "value must be greater than 0", + } + if !all { + return err + } + errors = append(errors, err) + } + + if len(errors) > 0 { + return DeleteStockByIDRequestMultiError(errors) + } + + return nil +} + +// DeleteStockByIDRequestMultiError is an error wrapping multiple validation +// errors returned by DeleteStockByIDRequest.ValidateAll() if the designated +// constraints aren't met. +type DeleteStockByIDRequestMultiError []error + +// Error returns a concatenation of all the error messages it wraps. +func (m DeleteStockByIDRequestMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m DeleteStockByIDRequestMultiError) AllErrors() []error { return m } + +// DeleteStockByIDRequestValidationError is the validation error returned by +// DeleteStockByIDRequest.Validate if the designated constraints aren't met. +type DeleteStockByIDRequestValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e DeleteStockByIDRequestValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e DeleteStockByIDRequestValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e DeleteStockByIDRequestValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e DeleteStockByIDRequestValidationError) Key() bool { return e.key } + +// ErrorName returns error name. +func (e DeleteStockByIDRequestValidationError) ErrorName() string { + return "DeleteStockByIDRequestValidationError" +} + +// Error satisfies the builtin error interface +func (e DeleteStockByIDRequestValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sDeleteStockByIDRequest.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = DeleteStockByIDRequestValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = DeleteStockByIDRequestValidationError{} + +// Validate checks the field values on DeleteStockByIDReply with the rules +// defined in the proto definition for this message. If any rules are +// violated, the first error encountered is returned, or nil if there are no violations. +func (m *DeleteStockByIDReply) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on DeleteStockByIDReply with the rules +// defined in the proto definition for this message. If any rules are +// violated, the result is a list of violation errors wrapped in +// DeleteStockByIDReplyMultiError, or nil if none found. 
+func (m *DeleteStockByIDReply) ValidateAll() error { + return m.validate(true) +} + +func (m *DeleteStockByIDReply) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + if len(errors) > 0 { + return DeleteStockByIDReplyMultiError(errors) + } + + return nil +} + +// DeleteStockByIDReplyMultiError is an error wrapping multiple validation +// errors returned by DeleteStockByIDReply.ValidateAll() if the designated +// constraints aren't met. +type DeleteStockByIDReplyMultiError []error + +// Error returns a concatenation of all the error messages it wraps. +func (m DeleteStockByIDReplyMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m DeleteStockByIDReplyMultiError) AllErrors() []error { return m } + +// DeleteStockByIDReplyValidationError is the validation error returned by +// DeleteStockByIDReply.Validate if the designated constraints aren't met. +type DeleteStockByIDReplyValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e DeleteStockByIDReplyValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e DeleteStockByIDReplyValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e DeleteStockByIDReplyValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e DeleteStockByIDReplyValidationError) Key() bool { return e.key } + +// ErrorName returns error name. +func (e DeleteStockByIDReplyValidationError) ErrorName() string { + return "DeleteStockByIDReplyValidationError" +} + +// Error satisfies the builtin error interface +func (e DeleteStockByIDReplyValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sDeleteStockByIDReply.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = DeleteStockByIDReplyValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = DeleteStockByIDReplyValidationError{} + +// Validate checks the field values on UpdateStockByIDRequest with the rules +// defined in the proto definition for this message. If any rules are +// violated, the first error encountered is returned, or nil if there are no violations. +func (m *UpdateStockByIDRequest) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on UpdateStockByIDRequest with the rules +// defined in the proto definition for this message. If any rules are +// violated, the result is a list of violation errors wrapped in +// UpdateStockByIDRequestMultiError, or nil if none found. 
+func (m *UpdateStockByIDRequest) ValidateAll() error { + return m.validate(true) +} + +func (m *UpdateStockByIDRequest) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + // no validation rules for Id + + // no validation rules for ProductID + + // no validation rules for Stock + + if len(errors) > 0 { + return UpdateStockByIDRequestMultiError(errors) + } + + return nil +} + +// UpdateStockByIDRequestMultiError is an error wrapping multiple validation +// errors returned by UpdateStockByIDRequest.ValidateAll() if the designated +// constraints aren't met. +type UpdateStockByIDRequestMultiError []error + +// Error returns a concatenation of all the error messages it wraps. +func (m UpdateStockByIDRequestMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m UpdateStockByIDRequestMultiError) AllErrors() []error { return m } + +// UpdateStockByIDRequestValidationError is the validation error returned by +// UpdateStockByIDRequest.Validate if the designated constraints aren't met. +type UpdateStockByIDRequestValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e UpdateStockByIDRequestValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e UpdateStockByIDRequestValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e UpdateStockByIDRequestValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e UpdateStockByIDRequestValidationError) Key() bool { return e.key } + +// ErrorName returns error name. +func (e UpdateStockByIDRequestValidationError) ErrorName() string { + return "UpdateStockByIDRequestValidationError" +} + +// Error satisfies the builtin error interface +func (e UpdateStockByIDRequestValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sUpdateStockByIDRequest.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = UpdateStockByIDRequestValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = UpdateStockByIDRequestValidationError{} + +// Validate checks the field values on UpdateStockByIDReply with the rules +// defined in the proto definition for this message. If any rules are +// violated, the first error encountered is returned, or nil if there are no violations. +func (m *UpdateStockByIDReply) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on UpdateStockByIDReply with the rules +// defined in the proto definition for this message. If any rules are +// violated, the result is a list of violation errors wrapped in +// UpdateStockByIDReplyMultiError, or nil if none found. 
+func (m *UpdateStockByIDReply) ValidateAll() error { + return m.validate(true) +} + +func (m *UpdateStockByIDReply) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + if len(errors) > 0 { + return UpdateStockByIDReplyMultiError(errors) + } + + return nil +} + +// UpdateStockByIDReplyMultiError is an error wrapping multiple validation +// errors returned by UpdateStockByIDReply.ValidateAll() if the designated +// constraints aren't met. +type UpdateStockByIDReplyMultiError []error + +// Error returns a concatenation of all the error messages it wraps. +func (m UpdateStockByIDReplyMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m UpdateStockByIDReplyMultiError) AllErrors() []error { return m } + +// UpdateStockByIDReplyValidationError is the validation error returned by +// UpdateStockByIDReply.Validate if the designated constraints aren't met. +type UpdateStockByIDReplyValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e UpdateStockByIDReplyValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e UpdateStockByIDReplyValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e UpdateStockByIDReplyValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e UpdateStockByIDReplyValidationError) Key() bool { return e.key } + +// ErrorName returns error name. +func (e UpdateStockByIDReplyValidationError) ErrorName() string { + return "UpdateStockByIDReplyValidationError" +} + +// Error satisfies the builtin error interface +func (e UpdateStockByIDReplyValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sUpdateStockByIDReply.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = UpdateStockByIDReplyValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = UpdateStockByIDReplyValidationError{} + +// Validate checks the field values on Stock with the rules defined in the +// proto definition for this message. If any rules are violated, the first +// error encountered is returned, or nil if there are no violations. +func (m *Stock) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on Stock with the rules defined in the +// proto definition for this message. If any rules are violated, the result is +// a list of violation errors wrapped in StockMultiError, or nil if none found. +func (m *Stock) ValidateAll() error { + return m.validate(true) +} + +func (m *Stock) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + // no validation rules for Id + + // no validation rules for ProductID + + // no validation rules for Stock + + // no validation rules for CreatedAt + + // no validation rules for UpdatedAt + + if len(errors) > 0 { + return StockMultiError(errors) + } + + return nil +} + +// StockMultiError is an error wrapping multiple validation errors returned by +// Stock.ValidateAll() if the designated constraints aren't met. 
+type StockMultiError []error + +// Error returns a concatenation of all the error messages it wraps. +func (m StockMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m StockMultiError) AllErrors() []error { return m } + +// StockValidationError is the validation error returned by Stock.Validate if +// the designated constraints aren't met. +type StockValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e StockValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e StockValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e StockValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e StockValidationError) Key() bool { return e.key } + +// ErrorName returns error name. +func (e StockValidationError) ErrorName() string { return "StockValidationError" } + +// Error satisfies the builtin error interface +func (e StockValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sStock.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = StockValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = StockValidationError{} + +// Validate checks the field values on GetStockByIDRequest with the rules +// defined in the proto definition for this message. If any rules are +// violated, the first error encountered is returned, or nil if there are no violations. +func (m *GetStockByIDRequest) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on GetStockByIDRequest with the rules +// defined in the proto definition for this message. If any rules are +// violated, the result is a list of violation errors wrapped in +// GetStockByIDRequestMultiError, or nil if none found. +func (m *GetStockByIDRequest) ValidateAll() error { + return m.validate(true) +} + +func (m *GetStockByIDRequest) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + if m.GetId() <= 0 { + err := GetStockByIDRequestValidationError{ + field: "Id", + reason: "value must be greater than 0", + } + if !all { + return err + } + errors = append(errors, err) + } + + if len(errors) > 0 { + return GetStockByIDRequestMultiError(errors) + } + + return nil +} + +// GetStockByIDRequestMultiError is an error wrapping multiple validation +// errors returned by GetStockByIDRequest.ValidateAll() if the designated +// constraints aren't met. +type GetStockByIDRequestMultiError []error + +// Error returns a concatenation of all the error messages it wraps. +func (m GetStockByIDRequestMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m GetStockByIDRequestMultiError) AllErrors() []error { return m } + +// GetStockByIDRequestValidationError is the validation error returned by +// GetStockByIDRequest.Validate if the designated constraints aren't met. 
+type GetStockByIDRequestValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e GetStockByIDRequestValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e GetStockByIDRequestValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e GetStockByIDRequestValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e GetStockByIDRequestValidationError) Key() bool { return e.key } + +// ErrorName returns error name. +func (e GetStockByIDRequestValidationError) ErrorName() string { + return "GetStockByIDRequestValidationError" +} + +// Error satisfies the builtin error interface +func (e GetStockByIDRequestValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sGetStockByIDRequest.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = GetStockByIDRequestValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = GetStockByIDRequestValidationError{} + +// Validate checks the field values on GetStockByIDReply with the rules defined +// in the proto definition for this message. If any rules are violated, the +// first error encountered is returned, or nil if there are no violations. +func (m *GetStockByIDReply) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on GetStockByIDReply with the rules +// defined in the proto definition for this message. If any rules are +// violated, the result is a list of violation errors wrapped in +// GetStockByIDReplyMultiError, or nil if none found. +func (m *GetStockByIDReply) ValidateAll() error { + return m.validate(true) +} + +func (m *GetStockByIDReply) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + if all { + switch v := interface{}(m.GetStock()).(type) { + case interface{ ValidateAll() error }: + if err := v.ValidateAll(); err != nil { + errors = append(errors, GetStockByIDReplyValidationError{ + field: "Stock", + reason: "embedded message failed validation", + cause: err, + }) + } + case interface{ Validate() error }: + if err := v.Validate(); err != nil { + errors = append(errors, GetStockByIDReplyValidationError{ + field: "Stock", + reason: "embedded message failed validation", + cause: err, + }) + } + } + } else if v, ok := interface{}(m.GetStock()).(interface{ Validate() error }); ok { + if err := v.Validate(); err != nil { + return GetStockByIDReplyValidationError{ + field: "Stock", + reason: "embedded message failed validation", + cause: err, + } + } + } + + if len(errors) > 0 { + return GetStockByIDReplyMultiError(errors) + } + + return nil +} + +// GetStockByIDReplyMultiError is an error wrapping multiple validation errors +// returned by GetStockByIDReply.ValidateAll() if the designated constraints +// aren't met. +type GetStockByIDReplyMultiError []error + +// Error returns a concatenation of all the error messages it wraps. +func (m GetStockByIDReplyMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. 
+func (m GetStockByIDReplyMultiError) AllErrors() []error { return m } + +// GetStockByIDReplyValidationError is the validation error returned by +// GetStockByIDReply.Validate if the designated constraints aren't met. +type GetStockByIDReplyValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e GetStockByIDReplyValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e GetStockByIDReplyValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e GetStockByIDReplyValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e GetStockByIDReplyValidationError) Key() bool { return e.key } + +// ErrorName returns error name. +func (e GetStockByIDReplyValidationError) ErrorName() string { + return "GetStockByIDReplyValidationError" +} + +// Error satisfies the builtin error interface +func (e GetStockByIDReplyValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sGetStockByIDReply.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = GetStockByIDReplyValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = GetStockByIDReplyValidationError{} + +// Validate checks the field values on ListStockRequest with the rules defined +// in the proto definition for this message. If any rules are violated, the +// first error encountered is returned, or nil if there are no violations. +func (m *ListStockRequest) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on ListStockRequest with the rules +// defined in the proto definition for this message. If any rules are +// violated, the result is a list of violation errors wrapped in +// ListStockRequestMultiError, or nil if none found. +func (m *ListStockRequest) ValidateAll() error { + return m.validate(true) +} + +func (m *ListStockRequest) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + if all { + switch v := interface{}(m.GetParams()).(type) { + case interface{ ValidateAll() error }: + if err := v.ValidateAll(); err != nil { + errors = append(errors, ListStockRequestValidationError{ + field: "Params", + reason: "embedded message failed validation", + cause: err, + }) + } + case interface{ Validate() error }: + if err := v.Validate(); err != nil { + errors = append(errors, ListStockRequestValidationError{ + field: "Params", + reason: "embedded message failed validation", + cause: err, + }) + } + } + } else if v, ok := interface{}(m.GetParams()).(interface{ Validate() error }); ok { + if err := v.Validate(); err != nil { + return ListStockRequestValidationError{ + field: "Params", + reason: "embedded message failed validation", + cause: err, + } + } + } + + if len(errors) > 0 { + return ListStockRequestMultiError(errors) + } + + return nil +} + +// ListStockRequestMultiError is an error wrapping multiple validation errors +// returned by ListStockRequest.ValidateAll() if the designated constraints +// aren't met. +type ListStockRequestMultiError []error + +// Error returns a concatenation of all the error messages it wraps. 
+func (m ListStockRequestMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m ListStockRequestMultiError) AllErrors() []error { return m } + +// ListStockRequestValidationError is the validation error returned by +// ListStockRequest.Validate if the designated constraints aren't met. +type ListStockRequestValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e ListStockRequestValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e ListStockRequestValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e ListStockRequestValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e ListStockRequestValidationError) Key() bool { return e.key } + +// ErrorName returns error name. +func (e ListStockRequestValidationError) ErrorName() string { return "ListStockRequestValidationError" } + +// Error satisfies the builtin error interface +func (e ListStockRequestValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sListStockRequest.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = ListStockRequestValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = ListStockRequestValidationError{} + +// Validate checks the field values on ListStockReply with the rules defined in +// the proto definition for this message. If any rules are violated, the first +// error encountered is returned, or nil if there are no violations. +func (m *ListStockReply) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on ListStockReply with the rules defined +// in the proto definition for this message. If any rules are violated, the +// result is a list of violation errors wrapped in ListStockReplyMultiError, +// or nil if none found. 
+func (m *ListStockReply) ValidateAll() error { + return m.validate(true) +} + +func (m *ListStockReply) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + // no validation rules for Total + + for idx, item := range m.GetStocks() { + _, _ = idx, item + + if all { + switch v := interface{}(item).(type) { + case interface{ ValidateAll() error }: + if err := v.ValidateAll(); err != nil { + errors = append(errors, ListStockReplyValidationError{ + field: fmt.Sprintf("Stocks[%v]", idx), + reason: "embedded message failed validation", + cause: err, + }) + } + case interface{ Validate() error }: + if err := v.Validate(); err != nil { + errors = append(errors, ListStockReplyValidationError{ + field: fmt.Sprintf("Stocks[%v]", idx), + reason: "embedded message failed validation", + cause: err, + }) + } + } + } else if v, ok := interface{}(item).(interface{ Validate() error }); ok { + if err := v.Validate(); err != nil { + return ListStockReplyValidationError{ + field: fmt.Sprintf("Stocks[%v]", idx), + reason: "embedded message failed validation", + cause: err, + } + } + } + + } + + if len(errors) > 0 { + return ListStockReplyMultiError(errors) + } + + return nil +} + +// ListStockReplyMultiError is an error wrapping multiple validation errors +// returned by ListStockReply.ValidateAll() if the designated constraints +// aren't met. +type ListStockReplyMultiError []error + +// Error returns a concatenation of all the error messages it wraps. +func (m ListStockReplyMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m ListStockReplyMultiError) AllErrors() []error { return m } + +// ListStockReplyValidationError is the validation error returned by +// ListStockReply.Validate if the designated constraints aren't met. +type ListStockReplyValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e ListStockReplyValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e ListStockReplyValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e ListStockReplyValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e ListStockReplyValidationError) Key() bool { return e.key } + +// ErrorName returns error name. 
+func (e ListStockReplyValidationError) ErrorName() string { return "ListStockReplyValidationError" } + +// Error satisfies the builtin error interface +func (e ListStockReplyValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sListStockReply.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = ListStockReplyValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = ListStockReplyValidationError{} diff --git a/_13_sponge-dtm-cache/grpc+http/api/stock/v1/stock.proto b/_13_sponge-dtm-cache/grpc+http/api/stock/v1/stock.proto new file mode 100644 index 0000000..bbdaf79 --- /dev/null +++ b/_13_sponge-dtm-cache/grpc+http/api/stock/v1/stock.proto @@ -0,0 +1,198 @@ +syntax = "proto3"; + +package api.stock.v1; + +import "api/types/types.proto"; +import "google/api/annotations.proto"; +import "protoc-gen-openapiv2/options/annotations.proto"; +import "tagger/tagger.proto"; +import "validate/validate.proto"; + +option go_package = "stock/api/stock/v1;v1"; + +// Default settings for generating swagger documents +// NOTE: because json does not support 64 bits, the int64 and uint64 types under *.swagger.json are automatically converted to string types +// Reference https://github.com/grpc-ecosystem/grpc-gateway/blob/db7fbefff7c04877cdb32e16d4a248a024428207/examples/internal/proto/examplepb/a_bit_of_everything.proto +option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_swagger) = { + host: "localhost:8080" + base_path: "" + info: { + title: "stock api docs"; + version: "2.0"; + } + schemes: HTTP; + schemes: HTTPS; + consumes: "application/json"; + produces: "application/json"; + security_definitions: { + security: { + key: "BearerAuth"; + value: { + type: TYPE_API_KEY; + in: IN_HEADER; + name: "Authorization"; + description: "Type Bearer your-jwt-token to Value"; + } + } + } +}; + +service stock { + // create stock + rpc Create(CreateStockRequest) returns (CreateStockReply) { + option (google.api.http) = { + post: "/api/v1/stock" + body: "*" + }; + option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { + summary: "create stock", + description: "submit information to create stock", + //security: { + // security_requirement: { + // key: "BearerAuth"; + // value: {} + // } + //} + }; + } + + // delete stock by id + rpc DeleteByID(DeleteStockByIDRequest) returns (DeleteStockByIDReply) { + option (google.api.http) = { + delete: "/api/v1/stock/{id}" + }; + option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { + summary: "delete stock", + description: "delete stock by id", + //security: { + // security_requirement: { + // key: "BearerAuth"; + // value: {} + // } + //} + }; + } + + // update stock by id + rpc UpdateByID(UpdateStockByIDRequest) returns (UpdateStockByIDReply) { + option (google.api.http) = { + put: "/api/v1/stock/{id}" + body: "*" + }; + option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { + summary: "update stock", + description: "update stock by id", + //security: { + // security_requirement: { + // key: "BearerAuth"; + // value: {} + // } + //} + }; + } + + // get stock by id + rpc GetByID(GetStockByIDRequest) returns (GetStockByIDReply) { + option (google.api.http) = { + get: "/api/v1/stock/{id}" + }; + option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { + 
summary: "get stock detail", + description: "get stock detail by id", + //security: { + // security_requirement: { + // key: "BearerAuth"; + // value: {} + // } + //} + }; + } + + // list of stock by query parameters + rpc List(ListStockRequest) returns (ListStockReply) { + option (google.api.http) = { + post: "/api/v1/stock/list" + body: "*" + }; + option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { + summary: "list of stocks by parameters", + description: "list of stocks by paging and conditions", + //security: { + // security_requirement: { + // key: "BearerAuth"; + // value: {} + // } + //} + }; + } +} + + +// Some notes on defining fields under message: +// (1) Fill in the validate rules https://github.com/envoyproxy/protoc-gen-validate#constraint-rules +// (2) Suggest using camel hump naming for message field names, and for names ending in 'id', +// use xxxID naming format, such as userID, orderID, etc. +// (3) When using the protoc-gen-openapiv2 plugin, if the defined fields are snake case, +// you must add annotations for snake case names, such as string fieldName = 1 [json_name = "field_name"], +// to ensure that the front end and back end JSON naming is consistent. +// (4) If the route contains the path parameter, such as /api/v1/stock/{id}, the defined +// message must contain the name of the path parameter and the name should be +// added with a new tag, such as int64 id = 1 [(tagger.tags) = "uri:\"id\""]; +// (5) If the request url is followed by a query parameter, such as /api/v1/getStock?name=Tom, +// a form tag must be added when defining the query parameter in the message, +// such as string name = 1 [(tagger.tags) = "form:\"name\""]. + + +message CreateStockRequest { + uint64 productID = 1; // 商品id + uint32 stock = 2; // 库存 +} + +message CreateStockReply { + uint64 id = 1; +} + +message DeleteStockByIDRequest { + uint64 id =1 [(validate.rules).uint64.gt = 0, (tagger.tags) = "uri:\"id\""]; +} + +message DeleteStockByIDReply { + +} + +message UpdateStockByIDRequest { + uint64 id = 1 [(tagger.tags) = "uri:\"id\"" ]; + uint64 productID = 2; // 商品id + uint32 stock = 3; // 库存 +} + +message UpdateStockByIDReply { + +} + +message Stock { + uint64 id = 1; + uint64 productID = 2; // 商品id + uint32 stock = 3; // 库存 + string createdAt = 4; + string updatedAt = 5; +} + +message GetStockByIDRequest { + uint64 id =1 [(validate.rules).uint64.gt = 0, (tagger.tags) = "uri:\"id\"" ]; +} + +message GetStockByIDReply { + Stock stock = 1; +} + +message ListStockRequest { + api.types.Params params = 1; +} + +message ListStockReply { + int64 total = 1; + repeated Stock stocks = 2; +} + + diff --git a/_13_sponge-dtm-cache/grpc+http/api/stock/v1/stock_grpc.pb.go b/_13_sponge-dtm-cache/grpc+http/api/stock/v1/stock_grpc.pb.go new file mode 100644 index 0000000..2601ca6 --- /dev/null +++ b/_13_sponge-dtm-cache/grpc+http/api/stock/v1/stock_grpc.pb.go @@ -0,0 +1,267 @@ +// Code generated by protoc-gen-go-grpc. DO NOT EDIT. +// versions: +// - protoc-gen-go-grpc v1.3.0 +// - protoc v4.25.2 +// source: api/stock/v1/stock.proto + +package v1 + +import ( + context "context" + grpc "google.golang.org/grpc" + codes "google.golang.org/grpc/codes" + status "google.golang.org/grpc/status" +) + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +// Requires gRPC-Go v1.32.0 or later. 
+const _ = grpc.SupportPackageIsVersion7 + +const ( + Stock_Create_FullMethodName = "/api.stock.v1.stock/Create" + Stock_DeleteByID_FullMethodName = "/api.stock.v1.stock/DeleteByID" + Stock_UpdateByID_FullMethodName = "/api.stock.v1.stock/UpdateByID" + Stock_GetByID_FullMethodName = "/api.stock.v1.stock/GetByID" + Stock_List_FullMethodName = "/api.stock.v1.stock/List" +) + +// StockClient is the client API for Stock service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://pkg.go.dev/google.golang.org/grpc/?tab=doc#ClientConn.NewStream. +type StockClient interface { + // create stock + Create(ctx context.Context, in *CreateStockRequest, opts ...grpc.CallOption) (*CreateStockReply, error) + // delete stock by id + DeleteByID(ctx context.Context, in *DeleteStockByIDRequest, opts ...grpc.CallOption) (*DeleteStockByIDReply, error) + // update stock by id + UpdateByID(ctx context.Context, in *UpdateStockByIDRequest, opts ...grpc.CallOption) (*UpdateStockByIDReply, error) + // get stock by id + GetByID(ctx context.Context, in *GetStockByIDRequest, opts ...grpc.CallOption) (*GetStockByIDReply, error) + // list of stock by query parameters + List(ctx context.Context, in *ListStockRequest, opts ...grpc.CallOption) (*ListStockReply, error) +} + +type stockClient struct { + cc grpc.ClientConnInterface +} + +func NewStockClient(cc grpc.ClientConnInterface) StockClient { + return &stockClient{cc} +} + +func (c *stockClient) Create(ctx context.Context, in *CreateStockRequest, opts ...grpc.CallOption) (*CreateStockReply, error) { + out := new(CreateStockReply) + err := c.cc.Invoke(ctx, Stock_Create_FullMethodName, in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *stockClient) DeleteByID(ctx context.Context, in *DeleteStockByIDRequest, opts ...grpc.CallOption) (*DeleteStockByIDReply, error) { + out := new(DeleteStockByIDReply) + err := c.cc.Invoke(ctx, Stock_DeleteByID_FullMethodName, in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *stockClient) UpdateByID(ctx context.Context, in *UpdateStockByIDRequest, opts ...grpc.CallOption) (*UpdateStockByIDReply, error) { + out := new(UpdateStockByIDReply) + err := c.cc.Invoke(ctx, Stock_UpdateByID_FullMethodName, in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *stockClient) GetByID(ctx context.Context, in *GetStockByIDRequest, opts ...grpc.CallOption) (*GetStockByIDReply, error) { + out := new(GetStockByIDReply) + err := c.cc.Invoke(ctx, Stock_GetByID_FullMethodName, in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *stockClient) List(ctx context.Context, in *ListStockRequest, opts ...grpc.CallOption) (*ListStockReply, error) { + out := new(ListStockReply) + err := c.cc.Invoke(ctx, Stock_List_FullMethodName, in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// StockServer is the server API for Stock service. 
+// All implementations must embed UnimplementedStockServer +// for forward compatibility +type StockServer interface { + // create stock + Create(context.Context, *CreateStockRequest) (*CreateStockReply, error) + // delete stock by id + DeleteByID(context.Context, *DeleteStockByIDRequest) (*DeleteStockByIDReply, error) + // update stock by id + UpdateByID(context.Context, *UpdateStockByIDRequest) (*UpdateStockByIDReply, error) + // get stock by id + GetByID(context.Context, *GetStockByIDRequest) (*GetStockByIDReply, error) + // list of stock by query parameters + List(context.Context, *ListStockRequest) (*ListStockReply, error) + mustEmbedUnimplementedStockServer() +} + +// UnimplementedStockServer must be embedded to have forward compatible implementations. +type UnimplementedStockServer struct { +} + +func (UnimplementedStockServer) Create(context.Context, *CreateStockRequest) (*CreateStockReply, error) { + return nil, status.Errorf(codes.Unimplemented, "method Create not implemented") +} +func (UnimplementedStockServer) DeleteByID(context.Context, *DeleteStockByIDRequest) (*DeleteStockByIDReply, error) { + return nil, status.Errorf(codes.Unimplemented, "method DeleteByID not implemented") +} +func (UnimplementedStockServer) UpdateByID(context.Context, *UpdateStockByIDRequest) (*UpdateStockByIDReply, error) { + return nil, status.Errorf(codes.Unimplemented, "method UpdateByID not implemented") +} +func (UnimplementedStockServer) GetByID(context.Context, *GetStockByIDRequest) (*GetStockByIDReply, error) { + return nil, status.Errorf(codes.Unimplemented, "method GetByID not implemented") +} +func (UnimplementedStockServer) List(context.Context, *ListStockRequest) (*ListStockReply, error) { + return nil, status.Errorf(codes.Unimplemented, "method List not implemented") +} +func (UnimplementedStockServer) mustEmbedUnimplementedStockServer() {} + +// UnsafeStockServer may be embedded to opt out of forward compatibility for this service. +// Use of this interface is not recommended, as added methods to StockServer will +// result in compilation errors. 
+type UnsafeStockServer interface { + mustEmbedUnimplementedStockServer() +} + +func RegisterStockServer(s grpc.ServiceRegistrar, srv StockServer) { + s.RegisterService(&Stock_ServiceDesc, srv) +} + +func _Stock_Create_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateStockRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(StockServer).Create(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: Stock_Create_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(StockServer).Create(ctx, req.(*CreateStockRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Stock_DeleteByID_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteStockByIDRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(StockServer).DeleteByID(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: Stock_DeleteByID_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(StockServer).DeleteByID(ctx, req.(*DeleteStockByIDRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Stock_UpdateByID_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UpdateStockByIDRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(StockServer).UpdateByID(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: Stock_UpdateByID_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(StockServer).UpdateByID(ctx, req.(*UpdateStockByIDRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Stock_GetByID_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetStockByIDRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(StockServer).GetByID(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: Stock_GetByID_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(StockServer).GetByID(ctx, req.(*GetStockByIDRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Stock_List_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListStockRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(StockServer).List(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: Stock_List_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(StockServer).List(ctx, req.(*ListStockRequest)) + } + return interceptor(ctx, in, info, handler) +} + +// Stock_ServiceDesc is the grpc.ServiceDesc for Stock service. 
+// It's only intended for direct use with grpc.RegisterService, +// and not to be introspected or modified (even as a copy) +var Stock_ServiceDesc = grpc.ServiceDesc{ + ServiceName: "api.stock.v1.stock", + HandlerType: (*StockServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "Create", + Handler: _Stock_Create_Handler, + }, + { + MethodName: "DeleteByID", + Handler: _Stock_DeleteByID_Handler, + }, + { + MethodName: "UpdateByID", + Handler: _Stock_UpdateByID_Handler, + }, + { + MethodName: "GetByID", + Handler: _Stock_GetByID_Handler, + }, + { + MethodName: "List", + Handler: _Stock_List_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "api/stock/v1/stock.proto", +} diff --git a/_13_sponge-dtm-cache/grpc+http/api/stock/v1/stock_router.pb.go b/_13_sponge-dtm-cache/grpc+http/api/stock/v1/stock_router.pb.go new file mode 100644 index 0000000..ded0f0f --- /dev/null +++ b/_13_sponge-dtm-cache/grpc+http/api/stock/v1/stock_router.pb.go @@ -0,0 +1,320 @@ +// Code generated by https://github.com/zhufuyi/sponge, DO NOT EDIT. + +package v1 + +import ( + "context" + "errors" + "strings" + + "github.com/gin-gonic/gin" + "go.uber.org/zap" + + "github.com/zhufuyi/sponge/pkg/errcode" + "github.com/zhufuyi/sponge/pkg/gin/middleware" +) + +type StockLogicer interface { + Create(ctx context.Context, req *CreateStockRequest) (*CreateStockReply, error) + DeleteByID(ctx context.Context, req *DeleteStockByIDRequest) (*DeleteStockByIDReply, error) + UpdateByID(ctx context.Context, req *UpdateStockByIDRequest) (*UpdateStockByIDReply, error) + GetByID(ctx context.Context, req *GetStockByIDRequest) (*GetStockByIDReply, error) + List(ctx context.Context, req *ListStockRequest) (*ListStockReply, error) +} + +type StockOption func(*stockOptions) + +type stockOptions struct { + isFromRPC bool + responser errcode.Responser + zapLog *zap.Logger + httpErrors []*errcode.Error + rpcStatus []*errcode.RPCStatus + wrapCtxFn func(c *gin.Context) context.Context +} + +func (o *stockOptions) apply(opts ...StockOption) { + for _, opt := range opts { + opt(o) + } +} + +func WithStockHTTPResponse() StockOption { + return func(o *stockOptions) { + o.isFromRPC = false + } +} + +func WithStockRPCResponse() StockOption { + return func(o *stockOptions) { + o.isFromRPC = true + } +} + +func WithStockResponser(responser errcode.Responser) StockOption { + return func(o *stockOptions) { + o.responser = responser + } +} + +func WithStockLogger(zapLog *zap.Logger) StockOption { + return func(o *stockOptions) { + o.zapLog = zapLog + } +} + +func WithStockErrorToHTTPCode(e ...*errcode.Error) StockOption { + return func(o *stockOptions) { + o.httpErrors = e + } +} + +func WithStockRPCStatusToHTTPCode(s ...*errcode.RPCStatus) StockOption { + return func(o *stockOptions) { + o.rpcStatus = s + } +} + +func WithStockWrapCtx(wrapCtxFn func(c *gin.Context) context.Context) StockOption { + return func(o *stockOptions) { + o.wrapCtxFn = wrapCtxFn + } +} + +func RegisterStockRouter( + iRouter gin.IRouter, + groupPathMiddlewares map[string][]gin.HandlerFunc, + singlePathMiddlewares map[string][]gin.HandlerFunc, + iLogic StockLogicer, + opts ...StockOption) { + + o := &stockOptions{} + o.apply(opts...) 
+ + if o.responser == nil { + o.responser = errcode.NewResponser(o.isFromRPC, o.httpErrors, o.rpcStatus) + } + if o.zapLog == nil { + o.zapLog, _ = zap.NewProduction() + } + + r := &stockRouter{ + iRouter: iRouter, + groupPathMiddlewares: groupPathMiddlewares, + singlePathMiddlewares: singlePathMiddlewares, + iLogic: iLogic, + iResponse: o.responser, + zapLog: o.zapLog, + wrapCtxFn: o.wrapCtxFn, + } + r.register() +} + +type stockRouter struct { + iRouter gin.IRouter + groupPathMiddlewares map[string][]gin.HandlerFunc + singlePathMiddlewares map[string][]gin.HandlerFunc + iLogic StockLogicer + iResponse errcode.Responser + zapLog *zap.Logger + wrapCtxFn func(c *gin.Context) context.Context +} + +func (r *stockRouter) register() { + r.iRouter.Handle("POST", "/api/v1/stock", r.withMiddleware("POST", "/api/v1/stock", r.Create_0)...) + r.iRouter.Handle("DELETE", "/api/v1/stock/:id", r.withMiddleware("DELETE", "/api/v1/stock/:id", r.DeleteByID_0)...) + r.iRouter.Handle("PUT", "/api/v1/stock/:id", r.withMiddleware("PUT", "/api/v1/stock/:id", r.UpdateByID_0)...) + r.iRouter.Handle("GET", "/api/v1/stock/:id", r.withMiddleware("GET", "/api/v1/stock/:id", r.GetByID_0)...) + r.iRouter.Handle("POST", "/api/v1/stock/list", r.withMiddleware("POST", "/api/v1/stock/list", r.List_0)...) + +} + +func (r *stockRouter) withMiddleware(method string, path string, fn gin.HandlerFunc) []gin.HandlerFunc { + handlerFns := []gin.HandlerFunc{} + + // determine if a route group is hit or miss, left prefix rule + for groupPath, fns := range r.groupPathMiddlewares { + if groupPath == "" || groupPath == "/" { + handlerFns = append(handlerFns, fns...) + continue + } + size := len(groupPath) + if len(path) < size { + continue + } + if groupPath == path[:size] { + handlerFns = append(handlerFns, fns...) + } + } + + // determine if a single route has been hit + key := strings.ToUpper(method) + "->" + path + if fns, ok := r.singlePathMiddlewares[key]; ok { + handlerFns = append(handlerFns, fns...) 
+ } + + return append(handlerFns, fn) +} + +func (r *stockRouter) Create_0(c *gin.Context) { + req := &CreateStockRequest{} + var err error + + if err = c.ShouldBindJSON(req); err != nil { + r.zapLog.Warn("ShouldBindJSON error", zap.Error(err), middleware.GCtxRequestIDField(c)) + r.iResponse.ParamError(c, err) + return + } + + var ctx context.Context + if r.wrapCtxFn != nil { + ctx = r.wrapCtxFn(c) + } else { + ctx = middleware.WrapCtx(c) + } + + out, err := r.iLogic.Create(ctx, req) + if err != nil { + if errors.Is(err, errcode.SkipResponse) { + return + } + r.iResponse.Error(c, err) + return + } + + r.iResponse.Success(c, out) +} + +func (r *stockRouter) DeleteByID_0(c *gin.Context) { + req := &DeleteStockByIDRequest{} + var err error + + if err = c.ShouldBindUri(req); err != nil { + r.zapLog.Warn("ShouldBindUri error", zap.Error(err), middleware.GCtxRequestIDField(c)) + r.iResponse.ParamError(c, err) + return + } + + if err = c.ShouldBindQuery(req); err != nil { + r.zapLog.Warn("ShouldBindQuery error", zap.Error(err), middleware.GCtxRequestIDField(c)) + r.iResponse.ParamError(c, err) + return + } + + var ctx context.Context + if r.wrapCtxFn != nil { + ctx = r.wrapCtxFn(c) + } else { + ctx = middleware.WrapCtx(c) + } + + out, err := r.iLogic.DeleteByID(ctx, req) + if err != nil { + if errors.Is(err, errcode.SkipResponse) { + return + } + r.iResponse.Error(c, err) + return + } + + r.iResponse.Success(c, out) +} + +func (r *stockRouter) UpdateByID_0(c *gin.Context) { + req := &UpdateStockByIDRequest{} + var err error + + if err = c.ShouldBindUri(req); err != nil { + r.zapLog.Warn("ShouldBindUri error", zap.Error(err), middleware.GCtxRequestIDField(c)) + r.iResponse.ParamError(c, err) + return + } + + if err = c.ShouldBindJSON(req); err != nil { + r.zapLog.Warn("ShouldBindJSON error", zap.Error(err), middleware.GCtxRequestIDField(c)) + r.iResponse.ParamError(c, err) + return + } + + var ctx context.Context + if r.wrapCtxFn != nil { + ctx = r.wrapCtxFn(c) + } else { + ctx = middleware.WrapCtx(c) + } + + out, err := r.iLogic.UpdateByID(ctx, req) + if err != nil { + if errors.Is(err, errcode.SkipResponse) { + return + } + r.iResponse.Error(c, err) + return + } + + r.iResponse.Success(c, out) +} + +func (r *stockRouter) GetByID_0(c *gin.Context) { + req := &GetStockByIDRequest{} + var err error + + if err = c.ShouldBindUri(req); err != nil { + r.zapLog.Warn("ShouldBindUri error", zap.Error(err), middleware.GCtxRequestIDField(c)) + r.iResponse.ParamError(c, err) + return + } + + if err = c.ShouldBindQuery(req); err != nil { + r.zapLog.Warn("ShouldBindQuery error", zap.Error(err), middleware.GCtxRequestIDField(c)) + r.iResponse.ParamError(c, err) + return + } + + var ctx context.Context + if r.wrapCtxFn != nil { + ctx = r.wrapCtxFn(c) + } else { + ctx = middleware.WrapCtx(c) + } + + out, err := r.iLogic.GetByID(ctx, req) + if err != nil { + if errors.Is(err, errcode.SkipResponse) { + return + } + r.iResponse.Error(c, err) + return + } + + r.iResponse.Success(c, out) +} + +func (r *stockRouter) List_0(c *gin.Context) { + req := &ListStockRequest{} + var err error + + if err = c.ShouldBindJSON(req); err != nil { + r.zapLog.Warn("ShouldBindJSON error", zap.Error(err), middleware.GCtxRequestIDField(c)) + r.iResponse.ParamError(c, err) + return + } + + var ctx context.Context + if r.wrapCtxFn != nil { + ctx = r.wrapCtxFn(c) + } else { + ctx = middleware.WrapCtx(c) + } + + out, err := r.iLogic.List(ctx, req) + if err != nil { + if errors.Is(err, errcode.SkipResponse) { + return + } + 
r.iResponse.Error(c, err) + return + } + + r.iResponse.Success(c, out) +} diff --git a/_13_sponge-dtm-cache/grpc+http/api/stock/v1/strong.pb.go b/_13_sponge-dtm-cache/grpc+http/api/stock/v1/strong.pb.go new file mode 100644 index 0000000..98462bd --- /dev/null +++ b/_13_sponge-dtm-cache/grpc+http/api/stock/v1/strong.pb.go @@ -0,0 +1,379 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// versions: +// protoc-gen-go v1.28.0 +// protoc v4.25.2 +// source: api/stock/v1/strong.proto + +package v1 + +import ( + protoreflect "google.golang.org/protobuf/reflect/protoreflect" + protoimpl "google.golang.org/protobuf/runtime/protoimpl" + reflect "reflect" + sync "sync" +) + +const ( + // Verify that this generated code is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) + // Verify that runtime/protoimpl is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) +) + +type UpdateStrongRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Id uint64 `protobuf:"varint,1,opt,name=id,proto3" json:"id" uri:"id"` + Stock uint32 `protobuf:"varint,2,opt,name=stock,proto3" json:"stock"` // 库存数量 +} + +func (x *UpdateStrongRequest) Reset() { + *x = UpdateStrongRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_api_stock_v1_strong_proto_msgTypes[0] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *UpdateStrongRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*UpdateStrongRequest) ProtoMessage() {} + +func (x *UpdateStrongRequest) ProtoReflect() protoreflect.Message { + mi := &file_api_stock_v1_strong_proto_msgTypes[0] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use UpdateStrongRequest.ProtoReflect.Descriptor instead. +func (*UpdateStrongRequest) Descriptor() ([]byte, []int) { + return file_api_stock_v1_strong_proto_rawDescGZIP(), []int{0} +} + +func (x *UpdateStrongRequest) GetId() uint64 { + if x != nil { + return x.Id + } + return 0 +} + +func (x *UpdateStrongRequest) GetStock() uint32 { + if x != nil { + return x.Stock + } + return 0 +} + +type UpdateStrongRequestReply struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields +} + +func (x *UpdateStrongRequestReply) Reset() { + *x = UpdateStrongRequestReply{} + if protoimpl.UnsafeEnabled { + mi := &file_api_stock_v1_strong_proto_msgTypes[1] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *UpdateStrongRequestReply) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*UpdateStrongRequestReply) ProtoMessage() {} + +func (x *UpdateStrongRequestReply) ProtoReflect() protoreflect.Message { + mi := &file_api_stock_v1_strong_proto_msgTypes[1] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use UpdateStrongRequestReply.ProtoReflect.Descriptor instead. 
+func (*UpdateStrongRequestReply) Descriptor() ([]byte, []int) { + return file_api_stock_v1_strong_proto_rawDescGZIP(), []int{1} +} + +type QueryStrongRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Id uint64 `protobuf:"varint,1,opt,name=id,proto3" json:"id" uri:"id"` +} + +func (x *QueryStrongRequest) Reset() { + *x = QueryStrongRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_api_stock_v1_strong_proto_msgTypes[2] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *QueryStrongRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*QueryStrongRequest) ProtoMessage() {} + +func (x *QueryStrongRequest) ProtoReflect() protoreflect.Message { + mi := &file_api_stock_v1_strong_proto_msgTypes[2] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use QueryStrongRequest.ProtoReflect.Descriptor instead. +func (*QueryStrongRequest) Descriptor() ([]byte, []int) { + return file_api_stock_v1_strong_proto_rawDescGZIP(), []int{2} +} + +func (x *QueryStrongRequest) GetId() uint64 { + if x != nil { + return x.Id + } + return 0 +} + +type QueryStrongReply struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Stock uint32 `protobuf:"varint,1,opt,name=stock,proto3" json:"stock"` // 库存数量 +} + +func (x *QueryStrongReply) Reset() { + *x = QueryStrongReply{} + if protoimpl.UnsafeEnabled { + mi := &file_api_stock_v1_strong_proto_msgTypes[3] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *QueryStrongReply) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*QueryStrongReply) ProtoMessage() {} + +func (x *QueryStrongReply) ProtoReflect() protoreflect.Message { + mi := &file_api_stock_v1_strong_proto_msgTypes[3] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use QueryStrongReply.ProtoReflect.Descriptor instead. 
+func (*QueryStrongReply) Descriptor() ([]byte, []int) { + return file_api_stock_v1_strong_proto_rawDescGZIP(), []int{3} +} + +func (x *QueryStrongReply) GetStock() uint32 { + if x != nil { + return x.Stock + } + return 0 +} + +var File_api_stock_v1_strong_proto protoreflect.FileDescriptor + +var file_api_stock_v1_strong_proto_rawDesc = []byte{ + 0x0a, 0x19, 0x61, 0x70, 0x69, 0x2f, 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x2f, 0x76, 0x31, 0x2f, 0x73, + 0x74, 0x72, 0x6f, 0x6e, 0x67, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x0c, 0x61, 0x70, 0x69, + 0x2e, 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x2e, 0x76, 0x31, 0x1a, 0x1c, 0x67, 0x6f, 0x6f, 0x67, 0x6c, + 0x65, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x61, 0x6e, 0x6e, 0x6f, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, + 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x2d, + 0x67, 0x65, 0x6e, 0x2d, 0x6f, 0x70, 0x65, 0x6e, 0x61, 0x70, 0x69, 0x76, 0x32, 0x2f, 0x6f, 0x70, + 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2f, 0x61, 0x6e, 0x6e, 0x6f, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, + 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x13, 0x74, 0x61, 0x67, 0x67, 0x65, 0x72, 0x2f, + 0x74, 0x61, 0x67, 0x67, 0x65, 0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x17, 0x76, 0x61, + 0x6c, 0x69, 0x64, 0x61, 0x74, 0x65, 0x2f, 0x76, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x65, 0x2e, + 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, 0x5a, 0x0a, 0x13, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x53, + 0x74, 0x72, 0x6f, 0x6e, 0x67, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x24, 0x0a, 0x02, + 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x42, 0x14, 0xfa, 0x42, 0x04, 0x32, 0x02, 0x20, + 0x00, 0x9a, 0x84, 0x9e, 0x03, 0x08, 0x75, 0x72, 0x69, 0x3a, 0x22, 0x69, 0x64, 0x22, 0x52, 0x02, + 0x69, 0x64, 0x12, 0x1d, 0x0a, 0x05, 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x18, 0x02, 0x20, 0x01, 0x28, + 0x0d, 0x42, 0x07, 0xfa, 0x42, 0x04, 0x2a, 0x02, 0x20, 0x00, 0x52, 0x05, 0x73, 0x74, 0x6f, 0x63, + 0x6b, 0x22, 0x1a, 0x0a, 0x18, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x53, 0x74, 0x72, 0x6f, 0x6e, + 0x67, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0x3a, 0x0a, + 0x12, 0x51, 0x75, 0x65, 0x72, 0x79, 0x53, 0x74, 0x72, 0x6f, 0x6e, 0x67, 0x52, 0x65, 0x71, 0x75, + 0x65, 0x73, 0x74, 0x12, 0x24, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x42, + 0x14, 0xfa, 0x42, 0x04, 0x32, 0x02, 0x20, 0x00, 0x9a, 0x84, 0x9e, 0x03, 0x08, 0x75, 0x72, 0x69, + 0x3a, 0x22, 0x69, 0x64, 0x22, 0x52, 0x02, 0x69, 0x64, 0x22, 0x28, 0x0a, 0x10, 0x51, 0x75, 0x65, + 0x72, 0x79, 0x53, 0x74, 0x72, 0x6f, 0x6e, 0x67, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x12, 0x14, 0x0a, + 0x05, 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x05, 0x73, 0x74, + 0x6f, 0x63, 0x6b, 0x32, 0xf7, 0x02, 0x0a, 0x06, 0x73, 0x74, 0x72, 0x6f, 0x6e, 0x67, 0x12, 0xc8, + 0x01, 0x0a, 0x06, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x12, 0x21, 0x2e, 0x61, 0x70, 0x69, 0x2e, + 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x2e, 0x76, 0x31, 0x2e, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x53, + 0x74, 0x72, 0x6f, 0x6e, 0x67, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x26, 0x2e, 0x61, + 0x70, 0x69, 0x2e, 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x2e, 0x76, 0x31, 0x2e, 0x55, 0x70, 0x64, 0x61, + 0x74, 0x65, 0x53, 0x74, 0x72, 0x6f, 0x6e, 0x67, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x52, + 0x65, 0x70, 0x6c, 0x79, 0x22, 0x73, 0x92, 0x41, 0x4c, 0x0a, 0x14, 0x63, 0x61, 0x73, 0x65, 0x20, + 0x33, 0x3a, 0x20, 0xe5, 0xbc, 0xba, 0xe4, 0xb8, 0x80, 0xe8, 0x87, 0xb4, 0xe6, 0x80, 0xa7, 0x12, + 0x0c, 0xe6, 0x9b, 0xb4, 0xe6, 0x96, 0xb0, 0xe6, 0x95, 0xb0, 0xe6, 
0x8d, 0xae, 0x1a, 0x26, 0xe6, + 0x9b, 0xb4, 0xe6, 0x96, 0xb0, 0xe6, 0x95, 0xb0, 0xe6, 0x8d, 0xae, 0xef, 0xbc, 0x8c, 0x44, 0x42, + 0xe5, 0x92, 0x8c, 0xe7, 0xbc, 0x93, 0xe5, 0xad, 0x98, 0xe5, 0xbc, 0xba, 0xe4, 0xb8, 0x80, 0xe8, + 0x87, 0xb4, 0xe6, 0x80, 0xa7, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x1e, 0x1a, 0x19, 0x2f, 0x61, 0x70, + 0x69, 0x2f, 0x76, 0x31, 0x2f, 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x2f, 0x7b, 0x69, 0x64, 0x7d, 0x2f, + 0x73, 0x74, 0x72, 0x6f, 0x6e, 0x67, 0x3a, 0x01, 0x2a, 0x12, 0xa1, 0x01, 0x0a, 0x05, 0x51, 0x75, + 0x65, 0x72, 0x79, 0x12, 0x20, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x2e, + 0x76, 0x31, 0x2e, 0x51, 0x75, 0x65, 0x72, 0x79, 0x53, 0x74, 0x72, 0x6f, 0x6e, 0x67, 0x52, 0x65, + 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1e, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x73, 0x74, 0x6f, 0x63, + 0x6b, 0x2e, 0x76, 0x31, 0x2e, 0x51, 0x75, 0x65, 0x72, 0x79, 0x53, 0x74, 0x72, 0x6f, 0x6e, 0x67, + 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0x56, 0x92, 0x41, 0x32, 0x0a, 0x14, 0x63, 0x61, 0x73, 0x65, + 0x20, 0x33, 0x3a, 0x20, 0xe5, 0xbc, 0xba, 0xe4, 0xb8, 0x80, 0xe8, 0x87, 0xb4, 0xe6, 0x80, 0xa7, + 0x12, 0x0c, 0xe6, 0x9f, 0xa5, 0xe8, 0xaf, 0xa2, 0xe6, 0x95, 0xb0, 0xe6, 0x8d, 0xae, 0x1a, 0x0c, + 0xe6, 0x9f, 0xa5, 0xe8, 0xaf, 0xa2, 0xe6, 0x95, 0xb0, 0xe6, 0x8d, 0xae, 0x82, 0xd3, 0xe4, 0x93, + 0x02, 0x1b, 0x12, 0x19, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x76, 0x31, 0x2f, 0x73, 0x74, 0x6f, 0x63, + 0x6b, 0x2f, 0x7b, 0x69, 0x64, 0x7d, 0x2f, 0x73, 0x74, 0x72, 0x6f, 0x6e, 0x67, 0x42, 0xb4, 0x01, + 0x5a, 0x15, 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x73, 0x74, 0x6f, 0x63, + 0x6b, 0x2f, 0x76, 0x31, 0x3b, 0x76, 0x31, 0x92, 0x41, 0x99, 0x01, 0x12, 0x15, 0x0a, 0x0e, 0x73, + 0x74, 0x6f, 0x63, 0x6b, 0x20, 0x61, 0x70, 0x69, 0x20, 0x64, 0x6f, 0x63, 0x73, 0x32, 0x03, 0x32, + 0x2e, 0x30, 0x1a, 0x0e, 0x6c, 0x6f, 0x63, 0x61, 0x6c, 0x68, 0x6f, 0x73, 0x74, 0x3a, 0x38, 0x30, + 0x38, 0x30, 0x2a, 0x02, 0x01, 0x02, 0x32, 0x10, 0x61, 0x70, 0x70, 0x6c, 0x69, 0x63, 0x61, 0x74, + 0x69, 0x6f, 0x6e, 0x2f, 0x6a, 0x73, 0x6f, 0x6e, 0x3a, 0x10, 0x61, 0x70, 0x70, 0x6c, 0x69, 0x63, + 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2f, 0x6a, 0x73, 0x6f, 0x6e, 0x5a, 0x48, 0x0a, 0x46, 0x0a, 0x0a, + 0x42, 0x65, 0x61, 0x72, 0x65, 0x72, 0x41, 0x75, 0x74, 0x68, 0x12, 0x38, 0x08, 0x02, 0x12, 0x23, + 0x54, 0x79, 0x70, 0x65, 0x20, 0x42, 0x65, 0x61, 0x72, 0x65, 0x72, 0x20, 0x79, 0x6f, 0x75, 0x72, + 0x2d, 0x6a, 0x77, 0x74, 0x2d, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x20, 0x74, 0x6f, 0x20, 0x56, 0x61, + 0x6c, 0x75, 0x65, 0x1a, 0x0d, 0x41, 0x75, 0x74, 0x68, 0x6f, 0x72, 0x69, 0x7a, 0x61, 0x74, 0x69, + 0x6f, 0x6e, 0x20, 0x02, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, +} + +var ( + file_api_stock_v1_strong_proto_rawDescOnce sync.Once + file_api_stock_v1_strong_proto_rawDescData = file_api_stock_v1_strong_proto_rawDesc +) + +func file_api_stock_v1_strong_proto_rawDescGZIP() []byte { + file_api_stock_v1_strong_proto_rawDescOnce.Do(func() { + file_api_stock_v1_strong_proto_rawDescData = protoimpl.X.CompressGZIP(file_api_stock_v1_strong_proto_rawDescData) + }) + return file_api_stock_v1_strong_proto_rawDescData +} + +var file_api_stock_v1_strong_proto_msgTypes = make([]protoimpl.MessageInfo, 4) +var file_api_stock_v1_strong_proto_goTypes = []interface{}{ + (*UpdateStrongRequest)(nil), // 0: api.stock.v1.UpdateStrongRequest + (*UpdateStrongRequestReply)(nil), // 1: api.stock.v1.UpdateStrongRequestReply + (*QueryStrongRequest)(nil), // 2: api.stock.v1.QueryStrongRequest + (*QueryStrongReply)(nil), // 3: api.stock.v1.QueryStrongReply +} +var 
file_api_stock_v1_strong_proto_depIdxs = []int32{ + 0, // 0: api.stock.v1.strong.Update:input_type -> api.stock.v1.UpdateStrongRequest + 2, // 1: api.stock.v1.strong.Query:input_type -> api.stock.v1.QueryStrongRequest + 1, // 2: api.stock.v1.strong.Update:output_type -> api.stock.v1.UpdateStrongRequestReply + 3, // 3: api.stock.v1.strong.Query:output_type -> api.stock.v1.QueryStrongReply + 2, // [2:4] is the sub-list for method output_type + 0, // [0:2] is the sub-list for method input_type + 0, // [0:0] is the sub-list for extension type_name + 0, // [0:0] is the sub-list for extension extendee + 0, // [0:0] is the sub-list for field type_name +} + +func init() { file_api_stock_v1_strong_proto_init() } +func file_api_stock_v1_strong_proto_init() { + if File_api_stock_v1_strong_proto != nil { + return + } + if !protoimpl.UnsafeEnabled { + file_api_stock_v1_strong_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*UpdateStrongRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_api_stock_v1_strong_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*UpdateStrongRequestReply); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_api_stock_v1_strong_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*QueryStrongRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_api_stock_v1_strong_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*QueryStrongReply); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + } + type x struct{} + out := protoimpl.TypeBuilder{ + File: protoimpl.DescBuilder{ + GoPackagePath: reflect.TypeOf(x{}).PkgPath(), + RawDescriptor: file_api_stock_v1_strong_proto_rawDesc, + NumEnums: 0, + NumMessages: 4, + NumExtensions: 0, + NumServices: 1, + }, + GoTypes: file_api_stock_v1_strong_proto_goTypes, + DependencyIndexes: file_api_stock_v1_strong_proto_depIdxs, + MessageInfos: file_api_stock_v1_strong_proto_msgTypes, + }.Build() + File_api_stock_v1_strong_proto = out.File + file_api_stock_v1_strong_proto_rawDesc = nil + file_api_stock_v1_strong_proto_goTypes = nil + file_api_stock_v1_strong_proto_depIdxs = nil +} diff --git a/_13_sponge-dtm-cache/grpc+http/api/stock/v1/strong.pb.validate.go b/_13_sponge-dtm-cache/grpc+http/api/stock/v1/strong.pb.validate.go new file mode 100644 index 0000000..5b5ca0a --- /dev/null +++ b/_13_sponge-dtm-cache/grpc+http/api/stock/v1/strong.pb.validate.go @@ -0,0 +1,477 @@ +// Code generated by protoc-gen-validate. DO NOT EDIT. 
+// source: api/stock/v1/strong.proto + +package v1 + +import ( + "bytes" + "errors" + "fmt" + "net" + "net/mail" + "net/url" + "regexp" + "sort" + "strings" + "time" + "unicode/utf8" + + "google.golang.org/protobuf/types/known/anypb" +) + +// ensure the imports are used +var ( + _ = bytes.MinRead + _ = errors.New("") + _ = fmt.Print + _ = utf8.UTFMax + _ = (*regexp.Regexp)(nil) + _ = (*strings.Reader)(nil) + _ = net.IPv4len + _ = time.Duration(0) + _ = (*url.URL)(nil) + _ = (*mail.Address)(nil) + _ = anypb.Any{} + _ = sort.Sort +) + +// Validate checks the field values on UpdateStrongRequest with the rules +// defined in the proto definition for this message. If any rules are +// violated, the first error encountered is returned, or nil if there are no violations. +func (m *UpdateStrongRequest) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on UpdateStrongRequest with the rules +// defined in the proto definition for this message. If any rules are +// violated, the result is a list of violation errors wrapped in +// UpdateStrongRequestMultiError, or nil if none found. +func (m *UpdateStrongRequest) ValidateAll() error { + return m.validate(true) +} + +func (m *UpdateStrongRequest) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + if m.GetId() <= 0 { + err := UpdateStrongRequestValidationError{ + field: "Id", + reason: "value must be greater than 0", + } + if !all { + return err + } + errors = append(errors, err) + } + + if m.GetStock() <= 0 { + err := UpdateStrongRequestValidationError{ + field: "Stock", + reason: "value must be greater than 0", + } + if !all { + return err + } + errors = append(errors, err) + } + + if len(errors) > 0 { + return UpdateStrongRequestMultiError(errors) + } + + return nil +} + +// UpdateStrongRequestMultiError is an error wrapping multiple validation +// errors returned by UpdateStrongRequest.ValidateAll() if the designated +// constraints aren't met. +type UpdateStrongRequestMultiError []error + +// Error returns a concatenation of all the error messages it wraps. +func (m UpdateStrongRequestMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m UpdateStrongRequestMultiError) AllErrors() []error { return m } + +// UpdateStrongRequestValidationError is the validation error returned by +// UpdateStrongRequest.Validate if the designated constraints aren't met. +type UpdateStrongRequestValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e UpdateStrongRequestValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e UpdateStrongRequestValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e UpdateStrongRequestValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e UpdateStrongRequestValidationError) Key() bool { return e.key } + +// ErrorName returns error name. 
+func (e UpdateStrongRequestValidationError) ErrorName() string { + return "UpdateStrongRequestValidationError" +} + +// Error satisfies the builtin error interface +func (e UpdateStrongRequestValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sUpdateStrongRequest.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = UpdateStrongRequestValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = UpdateStrongRequestValidationError{} + +// Validate checks the field values on UpdateStrongRequestReply with the rules +// defined in the proto definition for this message. If any rules are +// violated, the first error encountered is returned, or nil if there are no violations. +func (m *UpdateStrongRequestReply) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on UpdateStrongRequestReply with the +// rules defined in the proto definition for this message. If any rules are +// violated, the result is a list of violation errors wrapped in +// UpdateStrongRequestReplyMultiError, or nil if none found. +func (m *UpdateStrongRequestReply) ValidateAll() error { + return m.validate(true) +} + +func (m *UpdateStrongRequestReply) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + if len(errors) > 0 { + return UpdateStrongRequestReplyMultiError(errors) + } + + return nil +} + +// UpdateStrongRequestReplyMultiError is an error wrapping multiple validation +// errors returned by UpdateStrongRequestReply.ValidateAll() if the designated +// constraints aren't met. +type UpdateStrongRequestReplyMultiError []error + +// Error returns a concatenation of all the error messages it wraps. +func (m UpdateStrongRequestReplyMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m UpdateStrongRequestReplyMultiError) AllErrors() []error { return m } + +// UpdateStrongRequestReplyValidationError is the validation error returned by +// UpdateStrongRequestReply.Validate if the designated constraints aren't met. +type UpdateStrongRequestReplyValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e UpdateStrongRequestReplyValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e UpdateStrongRequestReplyValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e UpdateStrongRequestReplyValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e UpdateStrongRequestReplyValidationError) Key() bool { return e.key } + +// ErrorName returns error name. 
+func (e UpdateStrongRequestReplyValidationError) ErrorName() string { + return "UpdateStrongRequestReplyValidationError" +} + +// Error satisfies the builtin error interface +func (e UpdateStrongRequestReplyValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sUpdateStrongRequestReply.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = UpdateStrongRequestReplyValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = UpdateStrongRequestReplyValidationError{} + +// Validate checks the field values on QueryStrongRequest with the rules +// defined in the proto definition for this message. If any rules are +// violated, the first error encountered is returned, or nil if there are no violations. +func (m *QueryStrongRequest) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on QueryStrongRequest with the rules +// defined in the proto definition for this message. If any rules are +// violated, the result is a list of violation errors wrapped in +// QueryStrongRequestMultiError, or nil if none found. +func (m *QueryStrongRequest) ValidateAll() error { + return m.validate(true) +} + +func (m *QueryStrongRequest) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + if m.GetId() <= 0 { + err := QueryStrongRequestValidationError{ + field: "Id", + reason: "value must be greater than 0", + } + if !all { + return err + } + errors = append(errors, err) + } + + if len(errors) > 0 { + return QueryStrongRequestMultiError(errors) + } + + return nil +} + +// QueryStrongRequestMultiError is an error wrapping multiple validation errors +// returned by QueryStrongRequest.ValidateAll() if the designated constraints +// aren't met. +type QueryStrongRequestMultiError []error + +// Error returns a concatenation of all the error messages it wraps. +func (m QueryStrongRequestMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m QueryStrongRequestMultiError) AllErrors() []error { return m } + +// QueryStrongRequestValidationError is the validation error returned by +// QueryStrongRequest.Validate if the designated constraints aren't met. +type QueryStrongRequestValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e QueryStrongRequestValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e QueryStrongRequestValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e QueryStrongRequestValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e QueryStrongRequestValidationError) Key() bool { return e.key } + +// ErrorName returns error name. 
+func (e QueryStrongRequestValidationError) ErrorName() string { + return "QueryStrongRequestValidationError" +} + +// Error satisfies the builtin error interface +func (e QueryStrongRequestValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sQueryStrongRequest.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = QueryStrongRequestValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = QueryStrongRequestValidationError{} + +// Validate checks the field values on QueryStrongReply with the rules defined +// in the proto definition for this message. If any rules are violated, the +// first error encountered is returned, or nil if there are no violations. +func (m *QueryStrongReply) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on QueryStrongReply with the rules +// defined in the proto definition for this message. If any rules are +// violated, the result is a list of violation errors wrapped in +// QueryStrongReplyMultiError, or nil if none found. +func (m *QueryStrongReply) ValidateAll() error { + return m.validate(true) +} + +func (m *QueryStrongReply) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + // no validation rules for Stock + + if len(errors) > 0 { + return QueryStrongReplyMultiError(errors) + } + + return nil +} + +// QueryStrongReplyMultiError is an error wrapping multiple validation errors +// returned by QueryStrongReply.ValidateAll() if the designated constraints +// aren't met. +type QueryStrongReplyMultiError []error + +// Error returns a concatenation of all the error messages it wraps. +func (m QueryStrongReplyMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m QueryStrongReplyMultiError) AllErrors() []error { return m } + +// QueryStrongReplyValidationError is the validation error returned by +// QueryStrongReply.Validate if the designated constraints aren't met. +type QueryStrongReplyValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e QueryStrongReplyValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e QueryStrongReplyValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e QueryStrongReplyValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e QueryStrongReplyValidationError) Key() bool { return e.key } + +// ErrorName returns error name. 
+func (e QueryStrongReplyValidationError) ErrorName() string { return "QueryStrongReplyValidationError" } + +// Error satisfies the builtin error interface +func (e QueryStrongReplyValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sQueryStrongReply.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = QueryStrongReplyValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = QueryStrongReplyValidationError{} diff --git a/_13_sponge-dtm-cache/grpc+http/api/stock/v1/strong.proto b/_13_sponge-dtm-cache/grpc+http/api/stock/v1/strong.proto new file mode 100644 index 0000000..643f4df --- /dev/null +++ b/_13_sponge-dtm-cache/grpc+http/api/stock/v1/strong.proto @@ -0,0 +1,81 @@ +syntax = "proto3"; + +package api.stock.v1; + +import "google/api/annotations.proto"; +import "protoc-gen-openapiv2/options/annotations.proto"; +import "tagger/tagger.proto"; +import "validate/validate.proto"; + +option go_package = "stock/api/stock/v1;v1"; + +// Default settings for generating swagger documents +// NOTE: because json does not support 64 bits, the int64 and uint64 types under *.swagger.json are automatically converted to string types +// Reference https://github.com/grpc-ecosystem/grpc-gateway/blob/db7fbefff7c04877cdb32e16d4a248a024428207/examples/internal/proto/examplepb/a_bit_of_everything.proto +option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_swagger) = { + host: "localhost:8080" + base_path: "" + info: { + title: "stock api docs"; + version: "2.0"; + } + schemes: HTTP; + schemes: HTTPS; + consumes: "application/json"; + produces: "application/json"; + security_definitions: { + security: { + key: "BearerAuth"; + value: { + type: TYPE_API_KEY; + in: IN_HEADER; + name: "Authorization"; + description: "Type Bearer your-jwt-token to Value"; + } + } + } +}; + +service strong{ + // Update data, with strong consistency between the DB and cache + rpc Update(UpdateStrongRequest) returns (UpdateStrongRequestReply) { + option (google.api.http) = { + put: "/api/v1/stock/{id}/strong" + body: "*" + }; + option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { + summary: "Update data", + description: "Update data, with strong consistency between the DB and cache", + tags: "case 3: strong consistency" + }; + } + + // Query + rpc Query(QueryStrongRequest) returns (QueryStrongReply) { + option (google.api.http) = { + get: "/api/v1/stock/{id}/strong" + }; + option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { + summary: "Query data", + description: "Query data", + tags: "case 3: strong consistency" + }; + } +} + +message UpdateStrongRequest { + uint64 id = 1 [(validate.rules).uint64.gt = 0, (tagger.tags) = "uri:\"id\"" ]; + uint32 stock = 2 [(validate.rules).uint32.gt = 0]; // stock quantity +} + +message UpdateStrongRequestReply { + +} + +message QueryStrongRequest { + uint64 id = 1 [(validate.rules).uint64.gt = 0, (tagger.tags) = "uri:\"id\"" ]; +} + +message QueryStrongReply { + uint32 stock = 1; // stock quantity +} diff --git a/_13_sponge-dtm-cache/grpc+http/api/stock/v1/strong_grpc.pb.go b/_13_sponge-dtm-cache/grpc+http/api/stock/v1/strong_grpc.pb.go new file mode 100644 index 0000000..2a478d4 --- /dev/null +++ b/_13_sponge-dtm-cache/grpc+http/api/stock/v1/strong_grpc.pb.go @@ -0,0 +1,150 @@ +// Code generated by protoc-gen-go-grpc. DO NOT EDIT.
+// versions: +// - protoc-gen-go-grpc v1.3.0 +// - protoc v4.25.2 +// source: api/stock/v1/strong.proto + +package v1 + +import ( + context "context" + grpc "google.golang.org/grpc" + codes "google.golang.org/grpc/codes" + status "google.golang.org/grpc/status" +) + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +// Requires gRPC-Go v1.32.0 or later. +const _ = grpc.SupportPackageIsVersion7 + +const ( + Strong_Update_FullMethodName = "/api.stock.v1.strong/Update" + Strong_Query_FullMethodName = "/api.stock.v1.strong/Query" +) + +// StrongClient is the client API for Strong service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://pkg.go.dev/google.golang.org/grpc/?tab=doc#ClientConn.NewStream. +type StrongClient interface { + // Update data, with strong consistency between the DB and cache + Update(ctx context.Context, in *UpdateStrongRequest, opts ...grpc.CallOption) (*UpdateStrongRequestReply, error) + // Query + Query(ctx context.Context, in *QueryStrongRequest, opts ...grpc.CallOption) (*QueryStrongReply, error) +} + +type strongClient struct { + cc grpc.ClientConnInterface +} + +func NewStrongClient(cc grpc.ClientConnInterface) StrongClient { + return &strongClient{cc} +} + +func (c *strongClient) Update(ctx context.Context, in *UpdateStrongRequest, opts ...grpc.CallOption) (*UpdateStrongRequestReply, error) { + out := new(UpdateStrongRequestReply) + err := c.cc.Invoke(ctx, Strong_Update_FullMethodName, in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *strongClient) Query(ctx context.Context, in *QueryStrongRequest, opts ...grpc.CallOption) (*QueryStrongReply, error) { + out := new(QueryStrongReply) + err := c.cc.Invoke(ctx, Strong_Query_FullMethodName, in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// StrongServer is the server API for Strong service. +// All implementations must embed UnimplementedStrongServer +// for forward compatibility +type StrongServer interface { + // Update data, with strong consistency between the DB and cache + Update(context.Context, *UpdateStrongRequest) (*UpdateStrongRequestReply, error) + // Query + Query(context.Context, *QueryStrongRequest) (*QueryStrongReply, error) + mustEmbedUnimplementedStrongServer() +} + +// UnimplementedStrongServer must be embedded to have forward compatible implementations. +type UnimplementedStrongServer struct { +} + +func (UnimplementedStrongServer) Update(context.Context, *UpdateStrongRequest) (*UpdateStrongRequestReply, error) { + return nil, status.Errorf(codes.Unimplemented, "method Update not implemented") +} +func (UnimplementedStrongServer) Query(context.Context, *QueryStrongRequest) (*QueryStrongReply, error) { + return nil, status.Errorf(codes.Unimplemented, "method Query not implemented") +} +func (UnimplementedStrongServer) mustEmbedUnimplementedStrongServer() {} + +// UnsafeStrongServer may be embedded to opt out of forward compatibility for this service. +// Use of this interface is not recommended, as added methods to StrongServer will +// result in compilation errors.
+type UnsafeStrongServer interface { + mustEmbedUnimplementedStrongServer() +} + +func RegisterStrongServer(s grpc.ServiceRegistrar, srv StrongServer) { + s.RegisterService(&Strong_ServiceDesc, srv) +} + +func _Strong_Update_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UpdateStrongRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(StrongServer).Update(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: Strong_Update_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(StrongServer).Update(ctx, req.(*UpdateStrongRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Strong_Query_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(QueryStrongRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(StrongServer).Query(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: Strong_Query_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(StrongServer).Query(ctx, req.(*QueryStrongRequest)) + } + return interceptor(ctx, in, info, handler) +} + +// Strong_ServiceDesc is the grpc.ServiceDesc for Strong service. +// It's only intended for direct use with grpc.RegisterService, +// and not to be introspected or modified (even as a copy) +var Strong_ServiceDesc = grpc.ServiceDesc{ + ServiceName: "api.stock.v1.strong", + HandlerType: (*StrongServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "Update", + Handler: _Strong_Update_Handler, + }, + { + MethodName: "Query", + Handler: _Strong_Query_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "api/stock/v1/strong.proto", +} diff --git a/_13_sponge-dtm-cache/grpc+http/api/stock/v1/strong_router.pb.go b/_13_sponge-dtm-cache/grpc+http/api/stock/v1/strong_router.pb.go new file mode 100644 index 0000000..210deb2 --- /dev/null +++ b/_13_sponge-dtm-cache/grpc+http/api/stock/v1/strong_router.pb.go @@ -0,0 +1,221 @@ +// Code generated by https://github.com/zhufuyi/sponge, DO NOT EDIT. 
+ +package v1 + +import ( + "context" + "errors" + "strings" + + "github.com/gin-gonic/gin" + "go.uber.org/zap" + + "github.com/zhufuyi/sponge/pkg/errcode" + "github.com/zhufuyi/sponge/pkg/gin/middleware" +) + +type StrongLogicer interface { + Update(ctx context.Context, req *UpdateStrongRequest) (*UpdateStrongRequestReply, error) + Query(ctx context.Context, req *QueryStrongRequest) (*QueryStrongReply, error) +} + +type StrongOption func(*strongOptions) + +type strongOptions struct { + isFromRPC bool + responser errcode.Responser + zapLog *zap.Logger + httpErrors []*errcode.Error + rpcStatus []*errcode.RPCStatus + wrapCtxFn func(c *gin.Context) context.Context +} + +func (o *strongOptions) apply(opts ...StrongOption) { + for _, opt := range opts { + opt(o) + } +} + +func WithStrongHTTPResponse() StrongOption { + return func(o *strongOptions) { + o.isFromRPC = false + } +} + +func WithStrongRPCResponse() StrongOption { + return func(o *strongOptions) { + o.isFromRPC = true + } +} + +func WithStrongResponser(responser errcode.Responser) StrongOption { + return func(o *strongOptions) { + o.responser = responser + } +} + +func WithStrongLogger(zapLog *zap.Logger) StrongOption { + return func(o *strongOptions) { + o.zapLog = zapLog + } +} + +func WithStrongErrorToHTTPCode(e ...*errcode.Error) StrongOption { + return func(o *strongOptions) { + o.httpErrors = e + } +} + +func WithStrongRPCStatusToHTTPCode(s ...*errcode.RPCStatus) StrongOption { + return func(o *strongOptions) { + o.rpcStatus = s + } +} + +func WithStrongWrapCtx(wrapCtxFn func(c *gin.Context) context.Context) StrongOption { + return func(o *strongOptions) { + o.wrapCtxFn = wrapCtxFn + } +} + +func RegisterStrongRouter( + iRouter gin.IRouter, + groupPathMiddlewares map[string][]gin.HandlerFunc, + singlePathMiddlewares map[string][]gin.HandlerFunc, + iLogic StrongLogicer, + opts ...StrongOption) { + + o := &strongOptions{} + o.apply(opts...) + + if o.responser == nil { + o.responser = errcode.NewResponser(o.isFromRPC, o.httpErrors, o.rpcStatus) + } + if o.zapLog == nil { + o.zapLog, _ = zap.NewProduction() + } + + r := &strongRouter{ + iRouter: iRouter, + groupPathMiddlewares: groupPathMiddlewares, + singlePathMiddlewares: singlePathMiddlewares, + iLogic: iLogic, + iResponse: o.responser, + zapLog: o.zapLog, + wrapCtxFn: o.wrapCtxFn, + } + r.register() +} + +type strongRouter struct { + iRouter gin.IRouter + groupPathMiddlewares map[string][]gin.HandlerFunc + singlePathMiddlewares map[string][]gin.HandlerFunc + iLogic StrongLogicer + iResponse errcode.Responser + zapLog *zap.Logger + wrapCtxFn func(c *gin.Context) context.Context +} + +func (r *strongRouter) register() { + r.iRouter.Handle("PUT", "/api/v1/stock/:id/strong", r.withMiddleware("PUT", "/api/v1/stock/:id/strong", r.Update_6)...) + r.iRouter.Handle("GET", "/api/v1/stock/:id/strong", r.withMiddleware("GET", "/api/v1/stock/:id/strong", r.Query_6)...) + +} + +func (r *strongRouter) withMiddleware(method string, path string, fn gin.HandlerFunc) []gin.HandlerFunc { + handlerFns := []gin.HandlerFunc{} + + // determine if a route group is hit or miss, left prefix rule + for groupPath, fns := range r.groupPathMiddlewares { + if groupPath == "" || groupPath == "/" { + handlerFns = append(handlerFns, fns...) + continue + } + size := len(groupPath) + if len(path) < size { + continue + } + if groupPath == path[:size] { + handlerFns = append(handlerFns, fns...) 
+ } + } + + // determine if a single route has been hit + key := strings.ToUpper(method) + "->" + path + if fns, ok := r.singlePathMiddlewares[key]; ok { + handlerFns = append(handlerFns, fns...) + } + + return append(handlerFns, fn) +} + +func (r *strongRouter) Update_6(c *gin.Context) { + req := &UpdateStrongRequest{} + var err error + + if err = c.ShouldBindUri(req); err != nil { + r.zapLog.Warn("ShouldBindUri error", zap.Error(err), middleware.GCtxRequestIDField(c)) + r.iResponse.ParamError(c, err) + return + } + + if err = c.ShouldBindJSON(req); err != nil { + r.zapLog.Warn("ShouldBindJSON error", zap.Error(err), middleware.GCtxRequestIDField(c)) + r.iResponse.ParamError(c, err) + return + } + + var ctx context.Context + if r.wrapCtxFn != nil { + ctx = r.wrapCtxFn(c) + } else { + ctx = middleware.WrapCtx(c) + } + + out, err := r.iLogic.Update(ctx, req) + if err != nil { + if errors.Is(err, errcode.SkipResponse) { + return + } + r.iResponse.Error(c, err) + return + } + + r.iResponse.Success(c, out) +} + +func (r *strongRouter) Query_6(c *gin.Context) { + req := &QueryStrongRequest{} + var err error + + if err = c.ShouldBindUri(req); err != nil { + r.zapLog.Warn("ShouldBindUri error", zap.Error(err), middleware.GCtxRequestIDField(c)) + r.iResponse.ParamError(c, err) + return + } + + if err = c.ShouldBindQuery(req); err != nil { + r.zapLog.Warn("ShouldBindQuery error", zap.Error(err), middleware.GCtxRequestIDField(c)) + r.iResponse.ParamError(c, err) + return + } + + var ctx context.Context + if r.wrapCtxFn != nil { + ctx = r.wrapCtxFn(c) + } else { + ctx = middleware.WrapCtx(c) + } + + out, err := r.iLogic.Query(ctx, req) + if err != nil { + if errors.Is(err, errcode.SkipResponse) { + return + } + r.iResponse.Error(c, err) + return + } + + r.iResponse.Success(c, out) +} diff --git a/_13_sponge-dtm-cache/grpc+http/api/types/types.pb.go b/_13_sponge-dtm-cache/grpc+http/api/types/types.pb.go new file mode 100644 index 0000000..5e41b19 --- /dev/null +++ b/_13_sponge-dtm-cache/grpc+http/api/types/types.pb.go @@ -0,0 +1,327 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// versions: +// protoc-gen-go v1.28.0 +// protoc v4.25.2 +// source: api/types/types.proto + +package types + +import ( + protoreflect "google.golang.org/protobuf/reflect/protoreflect" + protoimpl "google.golang.org/protobuf/runtime/protoimpl" + reflect "reflect" + sync "sync" +) + +const ( + // Verify that this generated code is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) + // Verify that runtime/protoimpl is sufficiently up-to-date. 
+ _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) +) + +type Params struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Page int32 `protobuf:"varint,1,opt,name=page,proto3" json:"page"` // page number, starting from 0 + Limit int32 `protobuf:"varint,2,opt,name=limit,proto3" json:"limit"` // number per page + Sort string `protobuf:"bytes,3,opt,name=sort,proto3" json:"sort"` // sorted fields, multi-column sorting separated by commas + Columns []*Column `protobuf:"bytes,4,rep,name=columns,proto3" json:"columns"` // query conditions +} + +func (x *Params) Reset() { + *x = Params{} + if protoimpl.UnsafeEnabled { + mi := &file_api_types_types_proto_msgTypes[0] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Params) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Params) ProtoMessage() {} + +func (x *Params) ProtoReflect() protoreflect.Message { + mi := &file_api_types_types_proto_msgTypes[0] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Params.ProtoReflect.Descriptor instead. +func (*Params) Descriptor() ([]byte, []int) { + return file_api_types_types_proto_rawDescGZIP(), []int{0} +} + +func (x *Params) GetPage() int32 { + if x != nil { + return x.Page + } + return 0 +} + +func (x *Params) GetLimit() int32 { + if x != nil { + return x.Limit + } + return 0 +} + +func (x *Params) GetSort() string { + if x != nil { + return x.Sort + } + return "" +} + +func (x *Params) GetColumns() []*Column { + if x != nil { + return x.Columns + } + return nil +} + +type Column struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name"` // column name + Exp string `protobuf:"bytes,2,opt,name=exp,proto3" json:"exp"` // expressions, which default to = when the value is null, have =, !=, >, >=, <, <=, like, in + Value string `protobuf:"bytes,3,opt,name=value,proto3" json:"value"` // column value + Logic string `protobuf:"bytes,4,opt,name=logic,proto3" json:"logic"` // logical type, defaults to and when value is null, only &(and), ||(or) +} + +func (x *Column) Reset() { + *x = Column{} + if protoimpl.UnsafeEnabled { + mi := &file_api_types_types_proto_msgTypes[1] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Column) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Column) ProtoMessage() {} + +func (x *Column) ProtoReflect() protoreflect.Message { + mi := &file_api_types_types_proto_msgTypes[1] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Column.ProtoReflect.Descriptor instead. 
+func (*Column) Descriptor() ([]byte, []int) { + return file_api_types_types_proto_rawDescGZIP(), []int{1} +} + +func (x *Column) GetName() string { + if x != nil { + return x.Name + } + return "" +} + +func (x *Column) GetExp() string { + if x != nil { + return x.Exp + } + return "" +} + +func (x *Column) GetValue() string { + if x != nil { + return x.Value + } + return "" +} + +func (x *Column) GetLogic() string { + if x != nil { + return x.Logic + } + return "" +} + +type Conditions struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Columns []*Column `protobuf:"bytes,1,rep,name=columns,proto3" json:"columns"` // query conditions +} + +func (x *Conditions) Reset() { + *x = Conditions{} + if protoimpl.UnsafeEnabled { + mi := &file_api_types_types_proto_msgTypes[2] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Conditions) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Conditions) ProtoMessage() {} + +func (x *Conditions) ProtoReflect() protoreflect.Message { + mi := &file_api_types_types_proto_msgTypes[2] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Conditions.ProtoReflect.Descriptor instead. +func (*Conditions) Descriptor() ([]byte, []int) { + return file_api_types_types_proto_rawDescGZIP(), []int{2} +} + +func (x *Conditions) GetColumns() []*Column { + if x != nil { + return x.Columns + } + return nil +} + +var File_api_types_types_proto protoreflect.FileDescriptor + +var file_api_types_types_proto_rawDesc = []byte{ + 0x0a, 0x15, 0x61, 0x70, 0x69, 0x2f, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x74, 0x79, 0x70, 0x65, + 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x09, 0x61, 0x70, 0x69, 0x2e, 0x74, 0x79, 0x70, + 0x65, 0x73, 0x22, 0x73, 0x0a, 0x06, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x12, 0x12, 0x0a, 0x04, + 0x70, 0x61, 0x67, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x05, 0x52, 0x04, 0x70, 0x61, 0x67, 0x65, + 0x12, 0x14, 0x0a, 0x05, 0x6c, 0x69, 0x6d, 0x69, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x05, 0x52, + 0x05, 0x6c, 0x69, 0x6d, 0x69, 0x74, 0x12, 0x12, 0x0a, 0x04, 0x73, 0x6f, 0x72, 0x74, 0x18, 0x03, + 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x73, 0x6f, 0x72, 0x74, 0x12, 0x2b, 0x0a, 0x07, 0x63, 0x6f, + 0x6c, 0x75, 0x6d, 0x6e, 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x11, 0x2e, 0x61, 0x70, + 0x69, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2e, 0x43, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x52, 0x07, + 0x63, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x73, 0x22, 0x5a, 0x0a, 0x06, 0x43, 0x6f, 0x6c, 0x75, 0x6d, + 0x6e, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, + 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x10, 0x0a, 0x03, 0x65, 0x78, 0x70, 0x18, 0x02, 0x20, 0x01, + 0x28, 0x09, 0x52, 0x03, 0x65, 0x78, 0x70, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, + 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x12, 0x14, 0x0a, + 0x05, 0x6c, 0x6f, 0x67, 0x69, 0x63, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x6c, 0x6f, + 0x67, 0x69, 0x63, 0x22, 0x39, 0x0a, 0x0a, 0x43, 0x6f, 0x6e, 0x64, 0x69, 0x74, 0x69, 0x6f, 0x6e, + 0x73, 0x12, 0x2b, 0x0a, 0x07, 0x63, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x73, 0x18, 0x01, 0x20, 0x03, + 0x28, 0x0b, 0x32, 0x11, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2e, 0x43, + 0x6f, 0x6c, 
0x75, 0x6d, 0x6e, 0x52, 0x07, 0x63, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x73, 0x42, 0x17, + 0x5a, 0x15, 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x74, 0x79, 0x70, 0x65, + 0x73, 0x3b, 0x74, 0x79, 0x70, 0x65, 0x73, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, +} + +var ( + file_api_types_types_proto_rawDescOnce sync.Once + file_api_types_types_proto_rawDescData = file_api_types_types_proto_rawDesc +) + +func file_api_types_types_proto_rawDescGZIP() []byte { + file_api_types_types_proto_rawDescOnce.Do(func() { + file_api_types_types_proto_rawDescData = protoimpl.X.CompressGZIP(file_api_types_types_proto_rawDescData) + }) + return file_api_types_types_proto_rawDescData +} + +var file_api_types_types_proto_msgTypes = make([]protoimpl.MessageInfo, 3) +var file_api_types_types_proto_goTypes = []interface{}{ + (*Params)(nil), // 0: api.types.Params + (*Column)(nil), // 1: api.types.Column + (*Conditions)(nil), // 2: api.types.Conditions +} +var file_api_types_types_proto_depIdxs = []int32{ + 1, // 0: api.types.Params.columns:type_name -> api.types.Column + 1, // 1: api.types.Conditions.columns:type_name -> api.types.Column + 2, // [2:2] is the sub-list for method output_type + 2, // [2:2] is the sub-list for method input_type + 2, // [2:2] is the sub-list for extension type_name + 2, // [2:2] is the sub-list for extension extendee + 0, // [0:2] is the sub-list for field type_name +} + +func init() { file_api_types_types_proto_init() } +func file_api_types_types_proto_init() { + if File_api_types_types_proto != nil { + return + } + if !protoimpl.UnsafeEnabled { + file_api_types_types_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*Params); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_api_types_types_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*Column); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_api_types_types_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*Conditions); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + } + type x struct{} + out := protoimpl.TypeBuilder{ + File: protoimpl.DescBuilder{ + GoPackagePath: reflect.TypeOf(x{}).PkgPath(), + RawDescriptor: file_api_types_types_proto_rawDesc, + NumEnums: 0, + NumMessages: 3, + NumExtensions: 0, + NumServices: 0, + }, + GoTypes: file_api_types_types_proto_goTypes, + DependencyIndexes: file_api_types_types_proto_depIdxs, + MessageInfos: file_api_types_types_proto_msgTypes, + }.Build() + File_api_types_types_proto = out.File + file_api_types_types_proto_rawDesc = nil + file_api_types_types_proto_goTypes = nil + file_api_types_types_proto_depIdxs = nil +} diff --git a/_13_sponge-dtm-cache/grpc+http/api/types/types.pb.validate.go b/_13_sponge-dtm-cache/grpc+http/api/types/types.pb.validate.go new file mode 100644 index 0000000..3ca7a60 --- /dev/null +++ b/_13_sponge-dtm-cache/grpc+http/api/types/types.pb.validate.go @@ -0,0 +1,413 @@ +// Code generated by protoc-gen-validate. DO NOT EDIT. 
+// source: api/types/types.proto + +package types + +import ( + "bytes" + "errors" + "fmt" + "net" + "net/mail" + "net/url" + "regexp" + "sort" + "strings" + "time" + "unicode/utf8" + + "google.golang.org/protobuf/types/known/anypb" +) + +// ensure the imports are used +var ( + _ = bytes.MinRead + _ = errors.New("") + _ = fmt.Print + _ = utf8.UTFMax + _ = (*regexp.Regexp)(nil) + _ = (*strings.Reader)(nil) + _ = net.IPv4len + _ = time.Duration(0) + _ = (*url.URL)(nil) + _ = (*mail.Address)(nil) + _ = anypb.Any{} + _ = sort.Sort +) + +// Validate checks the field values on Params with the rules defined in the +// proto definition for this message. If any rules are violated, the first +// error encountered is returned, or nil if there are no violations. +func (m *Params) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on Params with the rules defined in the +// proto definition for this message. If any rules are violated, the result is +// a list of violation errors wrapped in ParamsMultiError, or nil if none found. +func (m *Params) ValidateAll() error { + return m.validate(true) +} + +func (m *Params) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + // no validation rules for Page + + // no validation rules for Limit + + // no validation rules for Sort + + for idx, item := range m.GetColumns() { + _, _ = idx, item + + if all { + switch v := interface{}(item).(type) { + case interface{ ValidateAll() error }: + if err := v.ValidateAll(); err != nil { + errors = append(errors, ParamsValidationError{ + field: fmt.Sprintf("Columns[%v]", idx), + reason: "embedded message failed validation", + cause: err, + }) + } + case interface{ Validate() error }: + if err := v.Validate(); err != nil { + errors = append(errors, ParamsValidationError{ + field: fmt.Sprintf("Columns[%v]", idx), + reason: "embedded message failed validation", + cause: err, + }) + } + } + } else if v, ok := interface{}(item).(interface{ Validate() error }); ok { + if err := v.Validate(); err != nil { + return ParamsValidationError{ + field: fmt.Sprintf("Columns[%v]", idx), + reason: "embedded message failed validation", + cause: err, + } + } + } + + } + + if len(errors) > 0 { + return ParamsMultiError(errors) + } + + return nil +} + +// ParamsMultiError is an error wrapping multiple validation errors returned by +// Params.ValidateAll() if the designated constraints aren't met. +type ParamsMultiError []error + +// Error returns a concatenation of all the error messages it wraps. +func (m ParamsMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m ParamsMultiError) AllErrors() []error { return m } + +// ParamsValidationError is the validation error returned by Params.Validate if +// the designated constraints aren't met. +type ParamsValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e ParamsValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e ParamsValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e ParamsValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e ParamsValidationError) Key() bool { return e.key } + +// ErrorName returns error name. 
+func (e ParamsValidationError) ErrorName() string { return "ParamsValidationError" } + +// Error satisfies the builtin error interface +func (e ParamsValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sParams.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = ParamsValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = ParamsValidationError{} + +// Validate checks the field values on Column with the rules defined in the +// proto definition for this message. If any rules are violated, the first +// error encountered is returned, or nil if there are no violations. +func (m *Column) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on Column with the rules defined in the +// proto definition for this message. If any rules are violated, the result is +// a list of violation errors wrapped in ColumnMultiError, or nil if none found. +func (m *Column) ValidateAll() error { + return m.validate(true) +} + +func (m *Column) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + // no validation rules for Name + + // no validation rules for Exp + + // no validation rules for Value + + // no validation rules for Logic + + if len(errors) > 0 { + return ColumnMultiError(errors) + } + + return nil +} + +// ColumnMultiError is an error wrapping multiple validation errors returned by +// Column.ValidateAll() if the designated constraints aren't met. +type ColumnMultiError []error + +// Error returns a concatenation of all the error messages it wraps. +func (m ColumnMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m ColumnMultiError) AllErrors() []error { return m } + +// ColumnValidationError is the validation error returned by Column.Validate if +// the designated constraints aren't met. +type ColumnValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e ColumnValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e ColumnValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e ColumnValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e ColumnValidationError) Key() bool { return e.key } + +// ErrorName returns error name. +func (e ColumnValidationError) ErrorName() string { return "ColumnValidationError" } + +// Error satisfies the builtin error interface +func (e ColumnValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sColumn.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = ColumnValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = ColumnValidationError{} + +// Validate checks the field values on Conditions with the rules defined in the +// proto definition for this message. 
If any rules are violated, the first +// error encountered is returned, or nil if there are no violations. +func (m *Conditions) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on Conditions with the rules defined in +// the proto definition for this message. If any rules are violated, the +// result is a list of violation errors wrapped in ConditionsMultiError, or +// nil if none found. +func (m *Conditions) ValidateAll() error { + return m.validate(true) +} + +func (m *Conditions) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + for idx, item := range m.GetColumns() { + _, _ = idx, item + + if all { + switch v := interface{}(item).(type) { + case interface{ ValidateAll() error }: + if err := v.ValidateAll(); err != nil { + errors = append(errors, ConditionsValidationError{ + field: fmt.Sprintf("Columns[%v]", idx), + reason: "embedded message failed validation", + cause: err, + }) + } + case interface{ Validate() error }: + if err := v.Validate(); err != nil { + errors = append(errors, ConditionsValidationError{ + field: fmt.Sprintf("Columns[%v]", idx), + reason: "embedded message failed validation", + cause: err, + }) + } + } + } else if v, ok := interface{}(item).(interface{ Validate() error }); ok { + if err := v.Validate(); err != nil { + return ConditionsValidationError{ + field: fmt.Sprintf("Columns[%v]", idx), + reason: "embedded message failed validation", + cause: err, + } + } + } + + } + + if len(errors) > 0 { + return ConditionsMultiError(errors) + } + + return nil +} + +// ConditionsMultiError is an error wrapping multiple validation errors +// returned by Conditions.ValidateAll() if the designated constraints aren't met. +type ConditionsMultiError []error + +// Error returns a concatenation of all the error messages it wraps. +func (m ConditionsMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m ConditionsMultiError) AllErrors() []error { return m } + +// ConditionsValidationError is the validation error returned by +// Conditions.Validate if the designated constraints aren't met. +type ConditionsValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e ConditionsValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e ConditionsValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e ConditionsValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e ConditionsValidationError) Key() bool { return e.key } + +// ErrorName returns error name. 
+func (e ConditionsValidationError) ErrorName() string { return "ConditionsValidationError" } + +// Error satisfies the builtin error interface +func (e ConditionsValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sConditions.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = ConditionsValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = ConditionsValidationError{} diff --git a/_13_sponge-dtm-cache/grpc+http/api/types/types.proto b/_13_sponge-dtm-cache/grpc+http/api/types/types.proto new file mode 100644 index 0000000..1306a7f --- /dev/null +++ b/_13_sponge-dtm-cache/grpc+http/api/types/types.proto @@ -0,0 +1,23 @@ +syntax = "proto3"; + +package api.types; + +option go_package = "stock/api/types;types"; + +message Params { + int32 page = 1; // page number, starting from 0 + int32 limit = 2; // number per page + string sort = 3; // sorted fields, multi-column sorting separated by commas + repeated Column columns = 4; // query conditions +} + +message Column { + string name = 1; // column name + string exp = 2; // expressions, which default to = when the value is null, have =, !=, >, >=, <, <=, like, in + string value = 3; // column value + string logic = 4; // logical type, defaults to and when value is null, only &(and), ||(or) +} + +message Conditions { + repeated Column columns = 1; // query conditions +} diff --git a/_13_sponge-dtm-cache/grpc+http/cmd/stock/initial/close.go b/_13_sponge-dtm-cache/grpc+http/cmd/stock/initial/close.go new file mode 100644 index 0000000..9576fee --- /dev/null +++ b/_13_sponge-dtm-cache/grpc+http/cmd/stock/initial/close.go @@ -0,0 +1,48 @@ +package initial + +import ( + "context" + "time" + + "github.com/zhufuyi/sponge/pkg/app" + "github.com/zhufuyi/sponge/pkg/tracer" + + "stock/internal/config" + "stock/internal/model" + "stock/pkg/goredis" +) + +// Close releasing resources after service exit +func Close(servers []app.IServer) []app.Close { + var closes []app.Close + + // close server + for _, s := range servers { + closes = append(closes, s.Stop) + } + + // close database + closes = append(closes, func() error { + return model.CloseDB() + }) + + // close redis + if config.Get().App.CacheType == "redis" { + closes = append(closes, func() error { + return model.CloseRedis() + }) + } + closes = append(closes, func() error { + return goredis.CloseRedis() + }) + + // close tracing + if config.Get().App.EnableTrace { + closes = append(closes, func() error { + ctx, _ := context.WithTimeout(context.Background(), 2*time.Second) //nolint + return tracer.Close(ctx) + }) + } + + return closes +} diff --git a/a_micro-grpc-http-protobuf/cmd/user/initial/createService.go b/_13_sponge-dtm-cache/grpc+http/cmd/stock/initial/createService.go similarity index 98% rename from a_micro-grpc-http-protobuf/cmd/user/initial/createService.go rename to _13_sponge-dtm-cache/grpc+http/cmd/stock/initial/createService.go index f06d162..dd26d74 100644 --- a/a_micro-grpc-http-protobuf/cmd/user/initial/createService.go +++ b/_13_sponge-dtm-cache/grpc+http/cmd/stock/initial/createService.go @@ -11,8 +11,8 @@ import ( "github.com/zhufuyi/sponge/pkg/servicerd/registry/etcd" "github.com/zhufuyi/sponge/pkg/servicerd/registry/nacos" - "user/internal/config" - "user/internal/server" + "stock/internal/config" + "stock/internal/server" ) // 
CreateServices create grpc or http service diff --git a/_13_sponge-dtm-cache/grpc+http/cmd/stock/initial/initApp.go b/_13_sponge-dtm-cache/grpc+http/cmd/stock/initial/initApp.go new file mode 100644 index 0000000..b292f69 --- /dev/null +++ b/_13_sponge-dtm-cache/grpc+http/cmd/stock/initial/initApp.go @@ -0,0 +1,139 @@ +// Package initial is the package that starts the service to initialize the service, including +// the initialization configuration, service configuration, connecting to the database, and +// resource release needed when shutting down the service. +package initial + +import ( + "flag" + "fmt" + "strconv" + + "github.com/jinzhu/copier" + + "github.com/zhufuyi/sponge/pkg/conf" + "github.com/zhufuyi/sponge/pkg/logger" + "github.com/zhufuyi/sponge/pkg/nacoscli" + "github.com/zhufuyi/sponge/pkg/stat" + "github.com/zhufuyi/sponge/pkg/tracer" + + "stock/configs" + "stock/internal/config" + "stock/internal/model" + "stock/internal/rpcclient" + "stock/pkg/goredis" +) + +var ( + version string + configFile string + enableConfigCenter bool +) + +// InitApp initial app configuration +func InitApp() { + initConfig() + cfg := config.Get() + + // initializing log + _, err := logger.Init( + logger.WithLevel(cfg.Logger.Level), + logger.WithFormat(cfg.Logger.Format), + logger.WithSave( + cfg.Logger.IsSave, + //logger.WithFileName(cfg.Logger.LogFileConfig.Filename), + //logger.WithFileMaxSize(cfg.Logger.LogFileConfig.MaxSize), + //logger.WithFileMaxBackups(cfg.Logger.LogFileConfig.MaxBackups), + //logger.WithFileMaxAge(cfg.Logger.LogFileConfig.MaxAge), + //logger.WithFileIsCompression(cfg.Logger.LogFileConfig.IsCompression), + ), + ) + if err != nil { + panic(err) + } + logger.Debug(config.Show()) + logger.Info("[logger] was initialized") + + // initializing tracing + if cfg.App.EnableTrace { + tracer.InitWithConfig( + cfg.App.Name, + cfg.App.Env, + cfg.App.Version, + cfg.Jaeger.AgentHost, + strconv.Itoa(cfg.Jaeger.AgentPort), + cfg.App.TracingSamplingRate, + ) + logger.Info("[tracer] was initialized") + } + + // initializing the print system and process resources + if cfg.App.EnableStat { + stat.Init( + stat.WithLog(logger.Get()), + stat.WithAlarm(), // invalid if it is windows, the default threshold for cpu and memory is 0.8, you can modify them + ) + logger.Info("[resource statistics] was initialized") + } + + // initializing database + model.InitDB() + logger.Infof("[%s] was initialized", cfg.Database.Driver) + model.InitCache(cfg.App.CacheType) + if cfg.App.CacheType != "" { + logger.Infof("[%s] was initialized", cfg.App.CacheType) + } + err = goredis.Init(cfg.Redis.Dsn) + if err != nil { + panic(err) + } + + rpcclient.InitEndpointsForDtm() +} + +func initConfig() { + flag.StringVar(&version, "version", "", "service Version Number") + flag.BoolVar(&enableConfigCenter, "enable-cc", false, "whether to get from the configuration center, "+ + "if true, the '-c' parameter indicates the configuration center") + flag.StringVar(&configFile, "c", "", "configuration file") + flag.Parse() + + if enableConfigCenter { + // get the configuration from the configuration center (first get the nacos configuration, + // then read the service configuration according to the nacos configuration center) + if configFile == "" { + configFile = configs.Path("stock_cc.yml") + } + nacosConfig, err := config.NewCenter(configFile) + if err != nil { + panic(err) + } + appConfig := &config.Config{} + params := &nacoscli.Params{} + _ = copier.Copy(params, &nacosConfig.Nacos) + format, data, err := 
nacoscli.GetConfig(params) + if err != nil { + panic(fmt.Sprintf("connect to configuration center err, %v", err)) + } + err = conf.ParseConfigData(data, format, appConfig) + if err != nil { + panic(fmt.Sprintf("parse configuration data err, %v", err)) + } + if appConfig.App.Name == "" { + panic("read the config from center error, config data is empty") + } + config.Set(appConfig) + } else { + // get configuration from local configuration file + if configFile == "" { + configFile = configs.Path("stock.yml") + } + err := config.Init(configFile) + if err != nil { + panic("init config error: " + err.Error()) + } + } + + if version != "" { + config.Get().App.Version = version + } +} diff --git a/a_micro-grpc-http-protobuf/cmd/user/main.go b/_13_sponge-dtm-cache/grpc+http/cmd/stock/main.go similarity index 91% rename from a_micro-grpc-http-protobuf/cmd/user/main.go rename to _13_sponge-dtm-cache/grpc+http/cmd/stock/main.go index 91f1549..0a4c51e 100644 --- a/a_micro-grpc-http-protobuf/cmd/user/main.go +++ b/_13_sponge-dtm-cache/grpc+http/cmd/stock/main.go @@ -4,7 +4,7 @@ package main import ( "github.com/zhufuyi/sponge/pkg/app" - "user/cmd/user/initial" + "stock/cmd/stock/initial" ) func main() { diff --git a/_13_sponge-dtm-cache/grpc+http/configs/location.go b/_13_sponge-dtm-cache/grpc+http/configs/location.go new file mode 100644 index 0000000..6b610a6 --- /dev/null +++ b/_13_sponge-dtm-cache/grpc+http/configs/location.go @@ -0,0 +1,23 @@ +// Package configs used to locate config file. +package configs + +import ( + "path/filepath" + "runtime" +) + +var basePath string + +func init() { + _, currentFile, _, _ := runtime.Caller(0) //nolint + basePath = filepath.Dir(currentFile) +} + +// Path return absolute path +func Path(rel string) string { + if filepath.IsAbs(rel) { + return rel + } + + return filepath.Join(basePath, rel) +} diff --git a/_13_sponge-dtm-cache/grpc+http/configs/stock.yml b/_13_sponge-dtm-cache/grpc+http/configs/stock.yml new file mode 100644 index 0000000..c07b5cd --- /dev/null +++ b/_13_sponge-dtm-cache/grpc+http/configs/stock.yml @@ -0,0 +1,128 @@ +# Generate the go struct command: sponge config --server-dir=./serverDir + +# app settings +app: + name: "stock" # server name + env: "dev" # runtime environment, dev: development environment, prod: production environment, test: test environment + version: "v0.0.0" + host: "127.0.0.1" # domain or ip, for service registration + enableStat: true # whether to turn on printing statistics, true:enable, false:disable + enableMetrics: true # whether to turn on indicator collection, true:enable, false:disable + enableHTTPProfile: false # whether to turn on performance analysis, true:enable, false:disable + enableLimit: false # whether to turn on rate limiting (adaptive), true:on, false:off + enableCircuitBreaker: false # whether to turn on circuit breaker(adaptive), true:on, false:off + enableTrace: false # whether to turn on trace, true:enable, false:disable, if true jaeger configuration must be set + tracingSamplingRate: 1.0 # tracing sampling rate, between 0 and 1, 0 means no sampling, 1 means sampling all links + registryDiscoveryType: "" # registry and discovery types: consul, etcd, nacos, if empty, registration and discovery are not used + cacheType: "" # cache type, if empty, the cache is not used, support for "memory" and "redis", if set to redis, must set redis configuration + + +# http server settings +http: + port: 8080 # listen port + timeout: 0 # request timeout, unit(second), if 0 means not set, if greater than 0 means set 
timeout, if enableHTTPProfile is true, it needs to set 0 or greater than 60s + + +# grpc server settings +grpc: + port: 8282 # listen port + httpPort: 8283 # profile and metrics ports + enableToken: false # whether to enable server-side token authentication, default appID=grpc, appKey=123456 + # serverSecure parameter setting + # if type="", it means no secure connection, no need to fill in any parameters + # if type="one-way", it means server-side certification, only the fields 'certFile' and 'keyFile' should be filled in + # if type="two-way", it means both client and server side certification, fill in all fields + serverSecure: + type: "" # secures type, "", "one-way", "two-way" + caFile: "" # ca certificate file, valid only in "two-way", absolute path + certFile: "" # server side cert file, absolute path + keyFile: "" # server side key file, absolute path + + +# grpc client-side settings, support for setting up multiple grpc clients. +grpcClient: + - name: "dtmservice" # grpc service name, used for service discovery + host: "192.168.3.37" # grpc service address, used for direct connection + port: 35790 # grpc service port + registryDiscoveryType: "" # registration and discovery types: consul, etcd, nacos, if empty, connecting to server using host and port + enableLoadBalance: true + - name: "stock" # grpc service name, used for service discovery + host: "192.168.3.90" # dtm回调stock服务地址 + port: 8282 # grpc service port + timeout: 0 # request timeout, unit(second), if 0 means not set, if greater than 0 means set timeout, valid only for unary grpc type + registryDiscoveryType: "" # registration and discovery types: consul, etcd, nacos, if empty, connecting to server using host and port + enableLoadBalance: true # whether to turn on the load balancer + # clientSecure parameter setting + # if type="", it means no secure connection, no need to fill in any parameters + # if type="one-way", it means server-side certification, only the fields 'serverName' and 'certFile' should be filled in + # if type="two-way", it means both client and server side certification, fill in all fields + clientSecure: + type: "" # secures type, "", "one-way", "two-way" + serverName: "" # server name, e.g. *.foo.com + caFile: "" # client side ca file, valid only in "two-way", absolute path + certFile: "" # client side cert file, absolute path, if secureType="one-way", fill in server side cert file here + keyFile: "" # client side key file, valid only in "two-way", absolute path + clientToken: + enable: false # whether to enable token authentication + appID: "" # app id + appKey: "" # app key + + + +# logger settings +logger: + level: "info" # output log levels debug, info, warn, error, default is debug + format: "console" # output format, console or json, default is console + isSave: false # false:output to terminal, true:output to file, default is false + #logFileConfig: # Effective when isSave=true + #filename: "out.log" # File name (default is out.log) + #maxSize: 20 # Maximum file size (MB, default is 10MB) + #maxBackups: 50 # Maximum number of old files to retain (default is 100) + #maxAge: 15 # Maximum number of days to retain old files (default is 30 days) + #isCompression: true # Whether to compress/archive old files (default is false) + + +# set database configuration. Reference: https://github.com/zhufuyi/sponge/blob/main/configs/serverNameExample.yml#L87 +database: + driver: "mysql" # database driver + # mysql settings + mysql: + # dsn format, :@(:)/?[k=v& ......] 
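(Aside, not part of the patch: the database block here supplies a MySQL DSN plus pool limits. A minimal sketch of how a DSN in this format can be opened with gorm's MySQL driver, which this module already depends on; the DSN value below is a placeholder and the pool numbers simply mirror the yml fields.)

```go
package main

import (
	"time"

	"gorm.io/driver/mysql"
	"gorm.io/gorm"
)

func main() {
	// Placeholder DSN in the same "user:pass@(host:port)/db?params" form used by stock.yml.
	dsn := "root:123456@(127.0.0.1:3306)/eshop_stock?parseTime=true&loc=Local&charset=utf8mb4"

	gdb, err := gorm.Open(mysql.Open(dsn), &gorm.Config{})
	if err != nil {
		panic(err)
	}

	// maxIdleConns / maxOpenConns / connMaxLifetime from the yml map onto these database/sql knobs.
	sqlDB, err := gdb.DB()
	if err != nil {
		panic(err)
	}
	sqlDB.SetMaxIdleConns(10)
	sqlDB.SetMaxOpenConns(100)
	sqlDB.SetConnMaxLifetime(30 * time.Minute)
}
```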
+ dsn: "root:123456@(192.168.3.37:3306)/eshop_stock?parseTime=true&loc=Local&charset=utf8,utf8mb4" + enableLog: true # whether to turn on printing of all logs + maxIdleConns: 10 # set the maximum number of connections in the idle connection pool + maxOpenConns: 100 # set the maximum number of open database connections + connMaxLifetime: 30 # sets the maximum time for which the connection can be reused, in minutes + + + +# redis settings +redis: + # dsn format, [user]:@127.0.0.1:6379/[db], the default user is default, redis version 6.0 and above only supports user. + dsn: "default:123456@192.168.3.37:6379/0" + dialTimeout: 10 # connection timeout, unit(second) + readTimeout: 2 # read timeout, unit(second) + writeTimeout: 2 # write timeout, unit(second) + + +# jaeger settings +jaeger: + agentHost: "192.168.3.37" + agentPort: 6831 + + +# consul settings +consul: + addr: "192.168.3.37:8500" + + +# etcd settings +etcd: + addrs: ["192.168.3.37:2379"] + + +# nacos settings, used in service registration discovery +nacosRd: + ipAddr: "192.168.3.37" + port: 8848 + namespaceID: "3454d2b5-2455-4d0e-bf6d-e033b086bb4c" # namespace id diff --git a/a_micro-grpc-http-protobuf/configs/user_cc.yml b/_13_sponge-dtm-cache/grpc+http/configs/stock_cc.yml similarity index 92% rename from a_micro-grpc-http-protobuf/configs/user_cc.yml rename to _13_sponge-dtm-cache/grpc+http/configs/stock_cc.yml index 48d391d..6a97506 100644 --- a/a_micro-grpc-http-protobuf/configs/user_cc.yml +++ b/_13_sponge-dtm-cache/grpc+http/configs/stock_cc.yml @@ -9,5 +9,5 @@ nacos: contextPath: "/nacos" # path namespaceID: "3454d2b5-2455-4d0e-bf6d-e033b086bb4c" # namespace id group: "dev" # group name: dev, prod, test - dataID: "user.yml" # config file id + dataID: "stock.yml" # config file id format: "yaml" # configuration file type: json,yaml,toml diff --git a/a_micro-grpc-http-protobuf/deployments/binary/README.md b/_13_sponge-dtm-cache/grpc+http/deployments/binary/README.md similarity index 87% rename from a_micro-grpc-http-protobuf/deployments/binary/README.md rename to _13_sponge-dtm-cache/grpc+http/deployments/binary/README.md index a78e4b6..335cec0 100644 --- a/a_micro-grpc-http-protobuf/deployments/binary/README.md +++ b/_13_sponge-dtm-cache/grpc+http/deployments/binary/README.md @@ -3,8 +3,8 @@ copy the configuration file to the configs directory and binary file before star ``` ├── configs -│ └── user.yml -├── user +│ └── stock.yml +├── stock ├── deploy.sh └── run.sh ``` diff --git a/a_micro-grpc-http-protobuf/deployments/binary/deploy.sh b/_13_sponge-dtm-cache/grpc+http/deployments/binary/deploy.sh similarity index 97% rename from a_micro-grpc-http-protobuf/deployments/binary/deploy.sh rename to _13_sponge-dtm-cache/grpc+http/deployments/binary/deploy.sh index 9c9e6a3..f2d897e 100644 --- a/a_micro-grpc-http-protobuf/deployments/binary/deploy.sh +++ b/_13_sponge-dtm-cache/grpc+http/deployments/binary/deploy.sh @@ -1,6 +1,6 @@ #!/bin/bash -serviceName="user" +serviceName="stock" function checkResult() { result=$1 diff --git a/a_micro-grpc-http-protobuf/deployments/binary/run.sh b/_13_sponge-dtm-cache/grpc+http/deployments/binary/run.sh similarity index 97% rename from a_micro-grpc-http-protobuf/deployments/binary/run.sh rename to _13_sponge-dtm-cache/grpc+http/deployments/binary/run.sh index 3cbeeda..f2f3ba9 100644 --- a/a_micro-grpc-http-protobuf/deployments/binary/run.sh +++ b/_13_sponge-dtm-cache/grpc+http/deployments/binary/run.sh @@ -1,6 +1,6 @@ #!/bin/bash -serviceName="user" +serviceName="stock" cmdStr="./${serviceName} -c 
configs/${serviceName}.yml" chmod +x ./${serviceName} diff --git a/a_micro-grpc-http-protobuf/deployments/docker-compose/README.md b/_13_sponge-dtm-cache/grpc+http/deployments/docker-compose/README.md similarity index 84% rename from a_micro-grpc-http-protobuf/deployments/docker-compose/README.md rename to _13_sponge-dtm-cache/grpc+http/deployments/docker-compose/README.md index 30a58af..f76e87d 100644 --- a/a_micro-grpc-http-protobuf/deployments/docker-compose/README.md +++ b/_13_sponge-dtm-cache/grpc+http/deployments/docker-compose/README.md @@ -3,7 +3,7 @@ copy the configuration file to the configs directory before starting the service ``` ├── configs -│ └── user.yml +│ └── stock.yml └── docker-compose.yml ``` diff --git a/a_micro-grpc-http-protobuf/deployments/docker-compose/docker-compose.yml b/_13_sponge-dtm-cache/grpc+http/deployments/docker-compose/docker-compose.yml similarity index 82% rename from a_micro-grpc-http-protobuf/deployments/docker-compose/docker-compose.yml rename to _13_sponge-dtm-cache/grpc+http/deployments/docker-compose/docker-compose.yml index e7171e6..fed47dd 100644 --- a/a_micro-grpc-http-protobuf/deployments/docker-compose/docker-compose.yml +++ b/_13_sponge-dtm-cache/grpc+http/deployments/docker-compose/docker-compose.yml @@ -1,11 +1,11 @@ version: "3.7" services: - user: - image: edusys/user:latest - container_name: user + stock: + image: eshop/stock:latest + container_name: stock restart: always - command: ["./user", "-c", "/app/configs/user.yml"] + command: ["./stock", "-c", "/app/configs/stock.yml"] volumes: - $PWD/configs:/app/configs diff --git a/a_micro-grpc-http-protobuf/deployments/kubernetes/README.md b/_13_sponge-dtm-cache/grpc+http/deployments/kubernetes/README.md similarity index 84% rename from a_micro-grpc-http-protobuf/deployments/kubernetes/README.md rename to _13_sponge-dtm-cache/grpc+http/deployments/kubernetes/README.md index 09f6a2e..b4ddaa8 100644 --- a/a_micro-grpc-http-protobuf/deployments/kubernetes/README.md +++ b/_13_sponge-dtm-cache/grpc+http/deployments/kubernetes/README.md @@ -20,7 +20,7 @@ kubectl apply -f ./ view the start-up status. -> kubectl get all -n edusys +> kubectl get all -n eshop
@@ -28,5 +28,5 @@ simple test of http port ```bash # mapping to the http port of the service on the local port -kubectl port-forward --address=0.0.0.0 service/ 8080:8080 -n +kubectl port-forward --address=0.0.0.0 service/ 8080:8080 -n ``` diff --git a/_13_sponge-dtm-cache/grpc+http/deployments/kubernetes/eshop-namespace.yml b/_13_sponge-dtm-cache/grpc+http/deployments/kubernetes/eshop-namespace.yml new file mode 100644 index 0000000..eba474f --- /dev/null +++ b/_13_sponge-dtm-cache/grpc+http/deployments/kubernetes/eshop-namespace.yml @@ -0,0 +1,4 @@ +apiVersion: v1 +kind: Namespace +metadata: + name: eshop diff --git a/a_micro-grpc-http-protobuf/deployments/kubernetes/user-configmap.yml b/_13_sponge-dtm-cache/grpc+http/deployments/kubernetes/stock-configmap.yml similarity index 98% rename from a_micro-grpc-http-protobuf/deployments/kubernetes/user-configmap.yml rename to _13_sponge-dtm-cache/grpc+http/deployments/kubernetes/stock-configmap.yml index 3a8b129..0bea4a1 100644 --- a/a_micro-grpc-http-protobuf/deployments/kubernetes/user-configmap.yml +++ b/_13_sponge-dtm-cache/grpc+http/deployments/kubernetes/stock-configmap.yml @@ -1,15 +1,15 @@ kind: ConfigMap apiVersion: v1 metadata: - name: user-config - namespace: edusys + name: stock-config + namespace: eshop data: - user.yml: |- + stock.yml: |- # Generate the go struct command: sponge config --server-dir=./serverDir # app settings app: - name: "user" # server name + name: "stock" # server name env: "dev" # runtime environment, dev: development environment, prod: production environment, test: test environment version: "v0.0.0" host: "127.0.0.1" # domain or ip, for service registration diff --git a/a_micro-grpc-http-protobuf/deployments/kubernetes/user-deployment.yml b/_13_sponge-dtm-cache/grpc+http/deployments/kubernetes/stock-deployment.yml similarity index 82% rename from a_micro-grpc-http-protobuf/deployments/kubernetes/user-deployment.yml rename to _13_sponge-dtm-cache/grpc+http/deployments/kubernetes/stock-deployment.yml index 9e634a0..bcd1e4a 100644 --- a/a_micro-grpc-http-protobuf/deployments/kubernetes/user-deployment.yml +++ b/_13_sponge-dtm-cache/grpc+http/deployments/kubernetes/stock-deployment.yml @@ -1,25 +1,25 @@ apiVersion: apps/v1 kind: Deployment metadata: - name: user-dm - namespace: edusys + name: stock-dm + namespace: eshop spec: replicas: 1 selector: matchLabels: - app: user + app: stock template: metadata: - name: user-pod + name: stock-pod labels: - app: user + app: stock spec: containers: - - name: user - image: /edusys/user:latest + - name: stock + image: /eshop/stock:latest # If using a local image, use Never, default is Always #imagePullPolicy: Never - command: ["./user", "-c", "/app/configs/user.yml"] + command: ["./stock", "-c", "/app/configs/stock.yml"] resources: requests: cpu: 10m @@ -28,7 +28,7 @@ spec: cpu: 1000m memory: 1000Mi volumeMounts: - - name: user-vl + - name: stock-vl mountPath: /app/configs/ readOnly: true @@ -58,6 +58,6 @@ spec: imagePullSecrets: - name: docker-auth-secret volumes: - - name: user-vl + - name: stock-vl configMap: - name: user-config + name: stock-config diff --git a/a_micro-grpc-http-protobuf/deployments/kubernetes/user-svc.yml b/_13_sponge-dtm-cache/grpc+http/deployments/kubernetes/stock-svc.yml similarity index 57% rename from a_micro-grpc-http-protobuf/deployments/kubernetes/user-svc.yml rename to _13_sponge-dtm-cache/grpc+http/deployments/kubernetes/stock-svc.yml index a386090..5844086 100644 --- a/a_micro-grpc-http-protobuf/deployments/kubernetes/user-svc.yml +++ 
b/_13_sponge-dtm-cache/grpc+http/deployments/kubernetes/stock-svc.yml @@ -1,17 +1,17 @@ apiVersion: v1 kind: Service metadata: - name: user-svc - namespace: edusys + name: stock-svc + namespace: eshop spec: selector: - app: user + app: stock type: ClusterIP ports: - - name: user-svc-grpc-port + - name: stock-svc-grpc-port port: 8282 targetPort: 8282 - - name: user-svc-grpc-metrics-port + - name: stock-svc-grpc-metrics-port port: 8283 targetPort: 8283 diff --git a/_13_sponge-dtm-cache/grpc+http/docs/apis.go b/_13_sponge-dtm-cache/grpc+http/docs/apis.go new file mode 100644 index 0000000..3c749f0 --- /dev/null +++ b/_13_sponge-dtm-cache/grpc+http/docs/apis.go @@ -0,0 +1,21 @@ +package docs + +import ( + "embed" + "fmt" +) + +//go:embed apis.swagger.json +var jsonFile embed.FS + +// ApiDocs swagger json file content +var ApiDocs = []byte(``) + +func init() { + data, err := jsonFile.ReadFile("apis.swagger.json") + if err != nil { + fmt.Printf("\nReadFile error: %v\n\n", err) + return + } + ApiDocs = data +} diff --git a/_13_sponge-dtm-cache/grpc+http/docs/apis.swagger.json b/_13_sponge-dtm-cache/grpc+http/docs/apis.swagger.json new file mode 100644 index 0000000..a66fda3 --- /dev/null +++ b/_13_sponge-dtm-cache/grpc+http/docs/apis.swagger.json @@ -0,0 +1,849 @@ +{ + "swagger": "2.0", + "info": { + "title": "stock api docs", + "version": "2.0" + }, + "host": "localhost:8080", + "schemes": [ + "http", + "https" + ], + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "paths": { + "/api/v1/stock": { + "post": { + "summary": "create stock", + "description": "submit information to create stock", + "operationId": "stock_Create", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "$ref": "#/definitions/v1CreateStockReply" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/rpcStatus" + } + } + }, + "parameters": [ + { + "name": "body", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/v1CreateStockRequest" + } + } + ], + "tags": [ + "stock" + ] + } + }, + "/api/v1/stock/deleteCache": { + "post": { + "summary": "删除缓存", + "operationId": "callback_DeleteCache", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "$ref": "#/definitions/v1DeleteCacheReply" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/rpcStatus" + } + } + }, + "parameters": [ + { + "name": "body", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/v1DeleteCacheRequest" + } + } + ], + "tags": [ + "callback" + ] + } + }, + "/api/v1/stock/downgradeBranch": { + "post": { + "summary": "升降级中的强一致性分支", + "description": "升降级中的强一致性分支", + "operationId": "downgrade_DowngradeBranch", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "$ref": "#/definitions/v1DowngradeBranchReply" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/rpcStatus" + } + } + }, + "parameters": [ + { + "name": "body", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/v1DowngradeBranchRequest" + } + } + ], + "tags": [ + "case 4: 升降级中的强一致性" + ] + } + }, + "/api/v1/stock/list": { + "post": { + "summary": "list of stocks by parameters", + "description": "list of stocks by paging and conditions", + "operationId": "stock_List", + "responses": { + "200": { + "description": "A successful 
response.", + "schema": { + "$ref": "#/definitions/v1ListStockReply" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/rpcStatus" + } + } + }, + "parameters": [ + { + "name": "body", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/v1ListStockRequest" + } + } + ], + "tags": [ + "stock" + ] + } + }, + "/api/v1/stock/queryPrepared": { + "get": { + "summary": "反查数据", + "operationId": "callback_QueryPrepared", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "$ref": "#/definitions/v1QueryPreparedReply" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/rpcStatus" + } + } + }, + "tags": [ + "callback" + ] + } + }, + "/api/v1/stock/{id}": { + "get": { + "summary": "get stock detail", + "description": "get stock detail by id", + "operationId": "stock_GetByID", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "$ref": "#/definitions/v1GetStockByIDReply" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/rpcStatus" + } + } + }, + "parameters": [ + { + "name": "id", + "in": "path", + "required": true, + "type": "integer", + "format": "uint64" + } + ], + "tags": [ + "stock" + ] + }, + "delete": { + "summary": "delete stock", + "description": "delete stock by id", + "operationId": "stock_DeleteByID", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "$ref": "#/definitions/v1DeleteStockByIDReply" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/rpcStatus" + } + } + }, + "parameters": [ + { + "name": "id", + "in": "path", + "required": true, + "type": "integer", + "format": "uint64" + } + ], + "tags": [ + "stock" + ] + }, + "put": { + "summary": "update stock", + "description": "update stock by id", + "operationId": "stock_UpdateByID", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "$ref": "#/definitions/v1UpdateStockByIDReply" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/rpcStatus" + } + } + }, + "parameters": [ + { + "name": "id", + "in": "path", + "required": true, + "type": "integer", + "format": "uint64" + }, + { + "name": "body", + "in": "body", + "required": true, + "schema": { + "type": "object", + "properties": { + "productID": { + "type": "integer", + "format": "uint64" + }, + "stock": { + "type": "integer", + "format": "int64" + } + } + } + } + ], + "tags": [ + "stock" + ] + } + }, + "/api/v1/stock/{id}/atomic": { + "get": { + "summary": "查询数据", + "description": "查询数据", + "operationId": "atomic_Query", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "$ref": "#/definitions/v1QueryAtomicReply" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/rpcStatus" + } + } + }, + "parameters": [ + { + "name": "id", + "in": "path", + "required": true, + "type": "integer", + "format": "uint64" + } + ], + "tags": [ + "case 2: 原子性" + ] + }, + "put": { + "summary": "更新数据", + "description": "更新数据,DB和缓存原子性", + "operationId": "atomic_Update", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "$ref": "#/definitions/v1UpdateAtomicRequestReply" + } + }, + "default": { + "description": "An 
unexpected error response.", + "schema": { + "$ref": "#/definitions/rpcStatus" + } + } + }, + "parameters": [ + { + "name": "id", + "in": "path", + "required": true, + "type": "integer", + "format": "uint64" + }, + { + "name": "body", + "in": "body", + "required": true, + "schema": { + "type": "object", + "properties": { + "stock": { + "type": "integer", + "format": "int64" + } + } + } + } + ], + "tags": [ + "case 2: 原子性" + ] + } + }, + "/api/v1/stock/{id}/downgrade": { + "get": { + "summary": "查询数据", + "description": "查询数据", + "operationId": "downgrade_Query", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "$ref": "#/definitions/v1QueryDowngradeReply" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/rpcStatus" + } + } + }, + "parameters": [ + { + "name": "id", + "in": "path", + "required": true, + "type": "integer", + "format": "uint64" + } + ], + "tags": [ + "case 4: 升降级中的强一致性" + ] + }, + "put": { + "summary": "更新数据", + "description": "更新数据,升降级中的DB和缓存强一致性", + "operationId": "downgrade_Update", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "$ref": "#/definitions/v1UpdateDowngradeRequestReply" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/rpcStatus" + } + } + }, + "parameters": [ + { + "name": "id", + "in": "path", + "required": true, + "type": "integer", + "format": "uint64" + }, + { + "name": "body", + "in": "body", + "required": true, + "schema": { + "type": "object", + "properties": { + "stock": { + "type": "integer", + "format": "int64" + } + } + } + } + ], + "tags": [ + "case 4: 升降级中的强一致性" + ] + } + }, + "/api/v1/stock/{id}/final": { + "get": { + "summary": "查询数据", + "description": "查询数据", + "operationId": "final_Query", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "$ref": "#/definitions/v1QueryFinalReply" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/rpcStatus" + } + } + }, + "parameters": [ + { + "name": "id", + "in": "path", + "required": true, + "type": "integer", + "format": "uint64" + } + ], + "tags": [ + "case 1: 最终一致性" + ] + }, + "put": { + "summary": "更新数据", + "description": "更新数据,DB和缓存最终一致性", + "operationId": "final_Update", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "$ref": "#/definitions/v1UpdateFinalRequestReply" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/rpcStatus" + } + } + }, + "parameters": [ + { + "name": "id", + "in": "path", + "required": true, + "type": "integer", + "format": "uint64" + }, + { + "name": "body", + "in": "body", + "required": true, + "schema": { + "type": "object", + "properties": { + "stock": { + "type": "integer", + "format": "int64" + } + } + } + } + ], + "tags": [ + "case 1: 最终一致性" + ] + } + }, + "/api/v1/stock/{id}/strong": { + "get": { + "summary": "查询数据", + "description": "查询数据", + "operationId": "strong_Query", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "$ref": "#/definitions/v1QueryStrongReply" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/rpcStatus" + } + } + }, + "parameters": [ + { + "name": "id", + "in": "path", + "required": true, + "type": "integer", + "format": "uint64" + } + ], + "tags": [ + 
"case 3: 强一致性" + ] + }, + "put": { + "summary": "更新数据", + "description": "更新数据,DB和缓存强一致性", + "operationId": "strong_Update", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "$ref": "#/definitions/v1UpdateStrongRequestReply" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/rpcStatus" + } + } + }, + "parameters": [ + { + "name": "id", + "in": "path", + "required": true, + "type": "integer", + "format": "uint64" + }, + { + "name": "body", + "in": "body", + "required": true, + "schema": { + "type": "object", + "properties": { + "stock": { + "type": "integer", + "format": "int64" + } + } + } + } + ], + "tags": [ + "case 3: 强一致性" + ] + } + } + }, + "definitions": { + "protobufAny": { + "type": "object", + "properties": { + "@type": { + "type": "string" + } + }, + "additionalProperties": {} + }, + "rpcStatus": { + "type": "object", + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "details": { + "type": "array", + "items": { + "$ref": "#/definitions/protobufAny" + } + } + } + }, + "typesColumn": { + "type": "object", + "properties": { + "name": { + "type": "string" + }, + "exp": { + "type": "string" + }, + "value": { + "type": "string" + }, + "logic": { + "type": "string" + } + } + }, + "typesParams": { + "type": "object", + "properties": { + "page": { + "type": "integer", + "format": "int32" + }, + "limit": { + "type": "integer", + "format": "int32" + }, + "sort": { + "type": "string" + }, + "columns": { + "type": "array", + "items": { + "$ref": "#/definitions/typesColumn" + } + } + } + }, + "v1CreateStockReply": { + "type": "object", + "properties": { + "id": { + "type": "integer", + "format": "uint64" + } + } + }, + "v1CreateStockRequest": { + "type": "object", + "properties": { + "productID": { + "type": "integer", + "format": "uint64" + }, + "stock": { + "type": "integer", + "format": "int64" + } + } + }, + "v1DeleteCacheReply": { + "type": "object" + }, + "v1DeleteCacheRequest": { + "type": "object", + "properties": { + "key": { + "type": "string" + } + } + }, + "v1DeleteStockByIDReply": { + "type": "object" + }, + "v1DowngradeBranchReply": { + "type": "object" + }, + "v1DowngradeBranchRequest": { + "type": "object", + "properties": { + "gid": { + "type": "string" + }, + "key": { + "type": "string" + }, + "id": { + "type": "integer", + "format": "uint64" + }, + "stock": { + "type": "integer", + "format": "int64" + } + } + }, + "v1GetStockByIDReply": { + "type": "object", + "properties": { + "stock": { + "$ref": "#/definitions/v1Stock" + } + } + }, + "v1ListStockReply": { + "type": "object", + "properties": { + "total": { + "type": "integer", + "format": "int64" + }, + "stocks": { + "type": "array", + "items": { + "$ref": "#/definitions/v1Stock" + } + } + } + }, + "v1ListStockRequest": { + "type": "object", + "properties": { + "params": { + "$ref": "#/definitions/typesParams" + } + } + }, + "v1QueryAtomicReply": { + "type": "object", + "properties": { + "stock": { + "type": "integer", + "format": "int64" + } + } + }, + "v1QueryDowngradeReply": { + "type": "object", + "properties": { + "id": { + "type": "integer", + "format": "uint64" + }, + "stock": { + "type": "integer", + "format": "int64" + } + } + }, + "v1QueryFinalReply": { + "type": "object", + "properties": { + "stock": { + "type": "integer", + "format": "int64" + } + } + }, + "v1QueryPreparedReply": { + "type": "object" + }, + "v1QueryStrongReply": { + "type": 
"object", + "properties": { + "stock": { + "type": "integer", + "format": "int64" + } + } + }, + "v1Stock": { + "type": "object", + "properties": { + "id": { + "type": "integer", + "format": "uint64" + }, + "productID": { + "type": "integer", + "format": "uint64" + }, + "stock": { + "type": "integer", + "format": "int64" + }, + "createdAt": { + "type": "string" + }, + "updatedAt": { + "type": "string" + } + } + }, + "v1UpdateAtomicRequestReply": { + "type": "object" + }, + "v1UpdateDowngradeRequestReply": { + "type": "object" + }, + "v1UpdateFinalRequestReply": { + "type": "object" + }, + "v1UpdateStockByIDReply": { + "type": "object" + }, + "v1UpdateStrongRequestReply": { + "type": "object" + } + }, + "securityDefinitions": { + "BearerAuth": { + "type": "apiKey", + "description": "Type Bearer your-jwt-token to Value", + "name": "Authorization", + "in": "header" + } + } +} diff --git a/_13_sponge-dtm-cache/grpc+http/docs/gen.info b/_13_sponge-dtm-cache/grpc+http/docs/gen.info new file mode 100644 index 0000000..d34edd3 --- /dev/null +++ b/_13_sponge-dtm-cache/grpc+http/docs/gen.info @@ -0,0 +1 @@ +stock,stock,false \ No newline at end of file diff --git a/b_sponge-dtm-msg/go.mod b/_13_sponge-dtm-cache/grpc+http/go.mod similarity index 69% rename from b_sponge-dtm-msg/go.mod rename to _13_sponge-dtm-cache/grpc+http/go.mod index ffe293a..f736667 100644 --- a/b_sponge-dtm-msg/go.mod +++ b/_13_sponge-dtm-cache/grpc+http/go.mod @@ -1,26 +1,45 @@ -module transfer +module stock -go 1.19 +go 1.21 require ( + github.com/DATA-DOG/go-sqlmock v1.5.0 github.com/dtm-labs/client v1.18.7 + github.com/dtm-labs/rockscache v0.1.1 + github.com/gin-gonic/gin v1.9.1 + github.com/go-redis/redis/v8 v8.11.5 github.com/grpc-ecosystem/go-grpc-middleware v1.3.0 + github.com/jinzhu/copier v0.3.5 + github.com/redis/go-redis/v9 v9.6.1 github.com/stretchr/testify v1.9.0 - github.com/zhufuyi/dtmdriver-sponge v0.0.2 - github.com/zhufuyi/sponge v1.8.5 + github.com/zhufuyi/dtmdriver-sponge v1.0.0 + github.com/zhufuyi/sponge v1.10.1 + go.uber.org/zap v1.24.0 + golang.org/x/sync v0.8.0 google.golang.org/grpc v1.61.0 - google.golang.org/protobuf v1.32.0 + google.golang.org/protobuf v1.34.2 + gorm.io/gorm v1.25.5 ) require ( cloud.google.com/go/compute v1.23.3 // indirect cloud.google.com/go/compute/metadata v0.2.3 // indirect github.com/BurntSushi/toml v1.2.0 // indirect + github.com/KyleBanks/depth v1.2.1 // indirect github.com/Masterminds/goutils v1.1.1 // indirect github.com/Masterminds/semver/v3 v3.2.0 // indirect github.com/Masterminds/sprig/v3 v3.2.3 // indirect + github.com/PuerkitoBio/purell v1.1.1 // indirect + github.com/PuerkitoBio/urlesc v0.0.0-20170810143723-de5bf2ad4578 // indirect github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751 // indirect + github.com/alibabacloud-go/debug v0.0.0-20190504072949-9472017b5c68 // indirect + github.com/alibabacloud-go/tea v1.1.17 // indirect + github.com/alibabacloud-go/tea-utils v1.4.4 // indirect + github.com/alicebob/gopher-json v0.0.0-20200520072559-a9ecdc9d1d3a // indirect + github.com/alicebob/miniredis/v2 v2.23.0 // indirect github.com/aliyun/alibaba-cloud-sdk-go v1.61.1800 // indirect + github.com/aliyun/alibabacloud-dkms-gcs-go-sdk v0.2.2 // indirect + github.com/aliyun/alibabacloud-dkms-transfer-go-sdk v0.1.7 // indirect github.com/armon/go-metrics v0.4.1 // indirect github.com/beorn7/perks v1.0.1 // indirect github.com/bojand/ghz v0.117.0 // indirect @@ -35,6 +54,7 @@ require ( github.com/coreos/go-semver v0.3.0 // indirect 
github.com/coreos/go-systemd/v22 v22.3.2 // indirect github.com/davecgh/go-spew v1.1.1 // indirect + github.com/dgraph-io/ristretto v0.1.1 // indirect github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f // indirect github.com/dtm-labs/dtmdriver v0.0.6 // indirect github.com/dtm-labs/logger v0.0.1 // indirect @@ -45,21 +65,26 @@ require ( github.com/felixge/fgprof v0.9.3 // indirect github.com/fsnotify/fsnotify v1.5.4 // indirect github.com/gabriel-vasile/mimetype v1.4.2 // indirect + github.com/gin-contrib/cors v1.3.1 // indirect github.com/gin-contrib/sse v0.1.0 // indirect - github.com/gin-gonic/gin v1.9.1 // indirect github.com/go-logr/logr v1.4.1 // indirect github.com/go-logr/stdr v1.2.2 // indirect github.com/go-ole/go-ole v1.2.6 // indirect + github.com/go-openapi/jsonpointer v0.19.5 // indirect + github.com/go-openapi/jsonreference v0.19.6 // indirect + github.com/go-openapi/spec v0.20.4 // indirect + github.com/go-openapi/swag v0.19.15 // indirect github.com/go-playground/locales v0.14.1 // indirect github.com/go-playground/universal-translator v0.18.1 // indirect github.com/go-playground/validator/v10 v10.14.0 // indirect - github.com/go-redis/redis/v8 v8.11.5 // indirect github.com/go-resty/resty/v2 v2.7.0 // indirect + github.com/go-sql-driver/mysql v1.7.0 // indirect github.com/goccy/go-json v0.10.2 // indirect github.com/gogo/protobuf v1.3.2 // indirect github.com/golang-jwt/jwt/v5 v5.0.0 // indirect + github.com/golang/glog v1.1.2 // indirect github.com/golang/mock v1.6.0 // indirect - github.com/golang/protobuf v1.5.3 // indirect + github.com/golang/protobuf v1.5.4 // indirect github.com/golang/snappy v0.0.4 // indirect github.com/google/pprof v0.0.0-20211214055906-6f57359322fd // indirect github.com/google/uuid v1.4.0 // indirect @@ -74,17 +99,27 @@ require ( github.com/hashicorp/serf v0.10.1 // indirect github.com/huandu/xstrings v1.4.0 // indirect github.com/imdario/mergo v0.3.11 // indirect + github.com/jackc/pgpassfile v1.0.0 // indirect + github.com/jackc/pgservicefile v0.0.0-20221227161230-091c0ba34f0a // indirect + github.com/jackc/pgx/v5 v5.6.0 // indirect + github.com/jackc/puddle/v2 v2.2.1 // indirect github.com/jhump/protoreflect v1.15.1 // indirect github.com/jinzhu/configor v1.2.1 // indirect + github.com/jinzhu/inflection v1.0.0 // indirect + github.com/jinzhu/now v1.1.5 // indirect github.com/jmespath/go-jmespath v0.0.0-20180206201540-c2b33e8439af // indirect + github.com/josharian/intern v1.0.0 // indirect github.com/json-iterator/go v1.1.12 // indirect github.com/klauspost/compress v1.17.8 // indirect github.com/klauspost/cpuid/v2 v2.2.4 // indirect github.com/leodido/go-urn v1.2.4 // indirect + github.com/lithammer/shortuuid v3.0.0+incompatible // indirect github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0 // indirect github.com/magiconair/properties v1.8.6 // indirect + github.com/mailru/easyjson v0.7.6 // indirect github.com/mattn/go-colorable v0.1.13 // indirect github.com/mattn/go-isatty v0.0.19 // indirect + github.com/mattn/go-sqlite3 v1.14.17 // indirect github.com/matttproud/golang_protobuf_extensions v1.0.1 // indirect github.com/mitchellh/copystructure v1.0.0 // indirect github.com/mitchellh/go-homedir v1.1.0 // indirect @@ -93,7 +128,7 @@ require ( github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect github.com/modern-go/reflect2 v1.0.2 // indirect github.com/montanaflynn/stats v0.0.0-20171201202039-1bf9dbcd8cbe // indirect - github.com/nacos-group/nacos-sdk-go/v2 v2.1.0 // indirect + 
github.com/nacos-group/nacos-sdk-go/v2 v2.2.7 // indirect github.com/natefinch/lumberjack v2.0.0+incompatible // indirect github.com/pelletier/go-toml v1.9.5 // indirect github.com/pelletier/go-toml/v2 v2.0.8 // indirect @@ -104,7 +139,8 @@ require ( github.com/prometheus/client_model v0.4.0 // indirect github.com/prometheus/common v0.37.0 // indirect github.com/prometheus/procfs v0.8.0 // indirect - github.com/rogpeppe/go-internal v1.8.0 // indirect + github.com/redis/go-redis/extra/rediscmd/v9 v9.5.3 // indirect + github.com/redis/go-redis/extra/redisotel/v9 v9.5.3 // indirect github.com/shirou/gopsutil/v3 v3.23.8 // indirect github.com/shoenig/go-m1cpu v0.1.6 // indirect github.com/shopspring/decimal v1.2.0 // indirect @@ -114,19 +150,27 @@ require ( github.com/spf13/pflag v1.0.5 // indirect github.com/spf13/viper v1.12.0 // indirect github.com/subosito/gotenv v1.3.0 // indirect + github.com/swaggo/files v0.0.0-20220728132757-551d4a08d97a // indirect + github.com/swaggo/gin-swagger v1.5.2 // indirect + github.com/swaggo/swag v1.8.12 // indirect github.com/tklauser/go-sysconf v0.3.12 // indirect github.com/tklauser/numcpus v0.6.1 // indirect github.com/twitchyliquid64/golang-asm v0.15.1 // indirect github.com/ugorji/go/codec v1.2.11 // indirect + github.com/uptrace/opentelemetry-go-extra/otelgorm v0.2.3 // indirect + github.com/uptrace/opentelemetry-go-extra/otelsql v0.2.3 // indirect + github.com/vmihailenco/msgpack v4.0.4+incompatible // indirect github.com/xdg-go/pbkdf2 v1.0.0 // indirect github.com/xdg-go/scram v1.1.2 // indirect github.com/xdg-go/stringprep v1.0.4 // indirect github.com/youmark/pkcs8 v0.0.0-20181117223130-1be2e3e5546d // indirect + github.com/yuin/gopher-lua v0.0.0-20210529063254-f4c35e4016d9 // indirect github.com/yusufpapurcu/wmi v1.2.3 // indirect go.etcd.io/etcd/api/v3 v3.5.5 // indirect go.etcd.io/etcd/client/pkg/v3 v3.5.5 // indirect go.etcd.io/etcd/client/v3 v3.5.5 // indirect go.mongodb.org/mongo-driver v1.14.0 // indirect + go.opentelemetry.io/contrib v1.24.0 // indirect go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.49.0 // indirect go.opentelemetry.io/otel v1.24.0 // indirect go.opentelemetry.io/otel/exporters/jaeger v1.17.0 // indirect @@ -137,22 +181,24 @@ require ( go.uber.org/atomic v1.10.0 // indirect go.uber.org/goleak v1.2.1 // indirect go.uber.org/multierr v1.9.0 // indirect - go.uber.org/zap v1.24.0 // indirect golang.org/x/arch v0.3.0 // indirect - golang.org/x/crypto v0.22.0 // indirect - golang.org/x/net v0.24.0 // indirect + golang.org/x/crypto v0.26.0 // indirect + golang.org/x/net v0.25.0 // indirect golang.org/x/oauth2 v0.14.0 // indirect - golang.org/x/sync v0.7.0 // indirect - golang.org/x/sys v0.19.0 // indirect - golang.org/x/text v0.14.0 // indirect + golang.org/x/sys v0.23.0 // indirect + golang.org/x/text v0.17.0 // indirect golang.org/x/time v0.1.0 // indirect + golang.org/x/tools v0.21.1-0.20240508182429-e35e4ccd0d2d // indirect google.golang.org/appengine v1.6.8 // indirect google.golang.org/genproto v0.0.0-20231106174013-bbf56f31fb17 // indirect google.golang.org/genproto/googleapis/api v0.0.0-20231106174013-bbf56f31fb17 // indirect google.golang.org/genproto/googleapis/rpc v0.0.0-20231106174013-bbf56f31fb17 // indirect - gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c // indirect gopkg.in/ini.v1 v1.66.4 // indirect gopkg.in/natefinch/lumberjack.v2 v2.0.0 // indirect gopkg.in/yaml.v2 v2.4.0 // indirect gopkg.in/yaml.v3 v3.0.1 // indirect + gorm.io/driver/mysql v1.5.2 // indirect + 
gorm.io/driver/postgres v1.5.4 // indirect + gorm.io/driver/sqlite v1.5.4 // indirect + gorm.io/plugin/dbresolver v1.5.1 // indirect ) diff --git a/b_sponge-dtm-msg/go.sum b/_13_sponge-dtm-cache/grpc+http/go.sum similarity index 83% rename from b_sponge-dtm-msg/go.sum rename to _13_sponge-dtm-cache/grpc+http/go.sum index 90a044e..e8499fd 100644 --- a/b_sponge-dtm-msg/go.sum +++ b/_13_sponge-dtm-cache/grpc+http/go.sum @@ -44,22 +44,45 @@ github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03 github.com/BurntSushi/toml v1.2.0 h1:Rt8g24XnyGTyglgET/PRUNlrUeu9F5L+7FilkXfZgs0= github.com/BurntSushi/toml v1.2.0/go.mod h1:CxXYINrC8qIiEnFrOxCa7Jy5BFHlXnUU2pbicEuybxQ= github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo= +github.com/DATA-DOG/go-sqlmock v1.5.0 h1:Shsta01QNfFxHCfpW6YH2STWB0MudeXXEWMr20OEh60= +github.com/DATA-DOG/go-sqlmock v1.5.0/go.mod h1:f/Ixk793poVmq4qj/V1dPUg2JEAKC73Q5eFN3EC/SaM= github.com/DataDog/datadog-go v3.2.0+incompatible/go.mod h1:LButxg5PwREeZtORoXG3tL4fMGNddJ+vMq1mwgfaqoQ= +github.com/KyleBanks/depth v1.2.1 h1:5h8fQADFrWtarTdtDudMmGsC7GPbOAu6RVB3ffsVFHc= +github.com/KyleBanks/depth v1.2.1/go.mod h1:jzSb9d0L43HxTQfT+oSA1EEp2q+ne2uh6XgeJcm8brE= github.com/Masterminds/goutils v1.1.1 h1:5nUrii3FMTL5diU80unEVvNevw1nH4+ZV4DSLVJLSYI= github.com/Masterminds/goutils v1.1.1/go.mod h1:8cTjp+g8YejhMuvIA5y2vz3BpJxksy863GQaJW2MFNU= github.com/Masterminds/semver/v3 v3.2.0 h1:3MEsd0SM6jqZojhjLWWeBY+Kcjy9i6MQAeY7YgDP83g= github.com/Masterminds/semver/v3 v3.2.0/go.mod h1:qvl/7zhW3nngYb5+80sSMF+FG2BjYrf8m9wsX0PNOMQ= github.com/Masterminds/sprig/v3 v3.2.3 h1:eL2fZNezLomi0uOLqjQoN6BfsDD+fyLtgbJMAj9n6YA= github.com/Masterminds/sprig/v3 v3.2.3/go.mod h1:rXcFaZ2zZbLRJv/xSysmlgIM1u11eBaRMhvYXJNkGuM= +github.com/PuerkitoBio/purell v1.1.1 h1:WEQqlqaGbrPkxLJWfBwQmfEAE1Z7ONdDLqrN38tNFfI= +github.com/PuerkitoBio/purell v1.1.1/go.mod h1:c11w/QuzBsJSee3cPx9rAFu61PvFxuPbtSwDGJws/X0= +github.com/PuerkitoBio/urlesc v0.0.0-20170810143723-de5bf2ad4578 h1:d+Bc7a5rLufV/sSk/8dngufqelfh6jnri85riMAaF/M= +github.com/PuerkitoBio/urlesc v0.0.0-20170810143723-de5bf2ad4578/go.mod h1:uGdkoq3SwY9Y+13GIhn11/XLaGBb4BfwItxLd5jeuXE= +github.com/agiledragon/gomonkey/v2 v2.3.1/go.mod h1:ap1AmDzcVOAz1YpeJ3TCzIgstoaWLA6jbbgxfB4w2iY= github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751 h1:JYp7IbQjafoB+tBA3gMyHYHrpOtNuDiK/uB5uXxq5wM= github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= github.com/alecthomas/units v0.0.0-20190717042225-c3de453c63f4/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= github.com/alecthomas/units v0.0.0-20190924025748-f65c72e2690d/go.mod h1:rBZYJk541a8SKzHPHnH3zbiI+7dagKZ0cgpgrD7Fyho= -github.com/aliyun/alibaba-cloud-sdk-go v1.61.1704/go.mod h1:RcDobYh8k5VP6TNybz9m++gL3ijVI5wueVr0EM10VsU= +github.com/alibabacloud-go/debug v0.0.0-20190504072949-9472017b5c68 h1:NqugFkGxx1TXSh/pBcU00Y6bljgDPaFdh5MUSeJ7e50= +github.com/alibabacloud-go/debug v0.0.0-20190504072949-9472017b5c68/go.mod h1:6pb/Qy8c+lqua8cFpEy7g39NRRqOWc3rOwAy8m5Y2BY= +github.com/alibabacloud-go/tea v1.1.0/go.mod h1:IkGyUSX4Ba1V+k4pCtJUc6jDpZLFph9QMy2VUPTwukg= +github.com/alibabacloud-go/tea v1.1.17 
h1:05R5DnaJXe9sCNIe8KUgWHC/z6w/VZIwczgUwzRnul8= +github.com/alibabacloud-go/tea v1.1.17/go.mod h1:nXxjm6CIFkBhwW4FQkNrolwbfon8Svy6cujmKFUq98A= +github.com/alibabacloud-go/tea-utils v1.4.4 h1:lxCDvNCdTo9FaXKKq45+4vGETQUKNOW/qKTcX9Sk53o= +github.com/alibabacloud-go/tea-utils v1.4.4/go.mod h1:KNcT0oXlZZxOXINnZBs6YvgOd5aYp9U67G+E3R8fcQw= +github.com/alicebob/gopher-json v0.0.0-20200520072559-a9ecdc9d1d3a h1:HbKu58rmZpUGpz5+4FfNmIU+FmZg2P3Xaj2v2bfNWmk= +github.com/alicebob/gopher-json v0.0.0-20200520072559-a9ecdc9d1d3a/go.mod h1:SGnFV6hVsYE877CKEZ6tDNTjaSXYUk6QqoIK6PrAtcc= +github.com/alicebob/miniredis/v2 v2.23.0 h1:+lwAJYjvvdIVg6doFHuotFjueJ/7KY10xo/vm3X3Scw= +github.com/alicebob/miniredis/v2 v2.23.0/go.mod h1:XNqvJdQJv5mSuVMc0ynneafpnL/zv52acZ6kqeS0t88= github.com/aliyun/alibaba-cloud-sdk-go v1.61.1800 h1:ie/8RxBOfKZWcrbYSJi2Z8uX8TcOlSMwPlEJh83OeOw= github.com/aliyun/alibaba-cloud-sdk-go v1.61.1800/go.mod h1:RcDobYh8k5VP6TNybz9m++gL3ijVI5wueVr0EM10VsU= +github.com/aliyun/alibabacloud-dkms-gcs-go-sdk v0.2.2 h1:rWkH6D2XlXb/Y+tNAQROxBzp3a0p92ni+pXcaHBe/WI= +github.com/aliyun/alibabacloud-dkms-gcs-go-sdk v0.2.2/go.mod h1:GDtq+Kw+v0fO+j5BrrWiUHbBq7L+hfpzpPfXKOZMFE0= +github.com/aliyun/alibabacloud-dkms-transfer-go-sdk v0.1.7 h1:olLiPI2iM8Hqq6vKnSxpM3awCrm9/BeOgHpzQkOYnI4= +github.com/aliyun/alibabacloud-dkms-transfer-go-sdk v0.1.7/go.mod h1:oDg1j4kFxnhgftaiLJABkGeSvuEvSF5Lo6UmRAMruX4= github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY= github.com/armon/circbuf v0.0.0-20150827004946-bbbad097214e/go.mod h1:3U/XgcO3hCbHZ8TKRvWD2dDTCfh9M9ya+I9JpbB7O8o= github.com/armon/go-metrics v0.0.0-20180917152333-f0300d1749da/go.mod h1:Q73ZrmVTwzkszR9V5SSuryQ31EELlFMUz1kKyl939pY= @@ -76,6 +99,10 @@ github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6r github.com/bgentry/speakeasy v0.1.0/go.mod h1:+zsyZBPWlz7T6j88CTgSN5bM796AkVf0kBD4zp0CCIs= github.com/bojand/ghz v0.117.0 h1:dTMxg+tUcLMw8BYi7vQPjXsrM2DJ20ns53hz1am1SbQ= github.com/bojand/ghz v0.117.0/go.mod h1:MXspmKdJie7NAS0IHzqG9X5h6zO3tIRGQ6Tkt8sAwa4= +github.com/bsm/ginkgo/v2 v2.12.0 h1:Ny8MWAHyOepLGlLKYmXG4IEkioBysk6GpaRTLC8zwWs= +github.com/bsm/ginkgo/v2 v2.12.0/go.mod h1:SwYbGRRDovPVboqFv0tPTcG1sN61LM1Z4ARdbAV9g4c= +github.com/bsm/gomega v1.27.10 h1:yeMWxP2pV2fG3FgAODIY8EiRE3dy0aeFYt4l7wh6yKA= +github.com/bsm/gomega v1.27.10/go.mod h1:JyEr/xRbxbtgWNi8tIEVPUYZ5Dzef52k01W3YH0H+O0= github.com/bufbuild/protocompile v0.4.0 h1:LbFKd2XowZvQ/kajzguUp2DC9UEIQhIq77fZZlaQsNA= github.com/bufbuild/protocompile v0.4.0/go.mod h1:3v93+mbWn/v3xzN+31nwkJfrEpAUwp+BagBSZWx+TP8= github.com/buger/jsonparser v1.1.1 h1:2PnMjfWD7wBILjqQbt530v576A/cAbQvEW9gGIpYMUs= @@ -115,9 +142,15 @@ github.com/coreos/go-semver v0.3.0 h1:wkHLiw0WNATZnSG7epLsujiMCgPAc9xhjJ4tgnAxmf github.com/coreos/go-semver v0.3.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk= github.com/coreos/go-systemd/v22 v22.3.2 h1:D9/bQk5vlXQFZ6Kwuu6zaiXJ9oTPe68++AzAJc1DzSI= github.com/coreos/go-systemd/v22 v22.3.2/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSVTIJ3seZv2GcEnc= +github.com/cpuguy83/go-md2man/v2 v2.0.0-20190314233015-f79a8a8ca69d/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU= +github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= 
+github.com/dgraph-io/ristretto v0.1.1 h1:6CWw5tJNgpegArSHpNHJKldNeq03FQCwYvfMVWajOK8= +github.com/dgraph-io/ristretto v0.1.1/go.mod h1:S1GPSBCYCIhmVNfcth17y2zZtQT6wzkzgwUve0VDWWA= +github.com/dgryski/go-farm v0.0.0-20190423205320-6a90982ecee2 h1:tdlZCpZ/P9DhczCTSixgIKmwPv6+wP5DGjqLYw5SUiA= +github.com/dgryski/go-farm v0.0.0-20190423205320-6a90982ecee2/go.mod h1:SqUrOPUnsFjfmXRMNPybcSiG0BgUW2AuFH8PAnS2iTw= github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f h1:lO4WD4F/rVNCu3HqELle0jiPLLBs70cWOduZpkS1E78= github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f/go.mod h1:cuUVRXasLTGF7a8hSLbxyZXjz+1KgoB3wDUb6vlszIc= github.com/dtm-labs/client v1.18.7 h1:JOvw1loWhjY5w0gyasHs+BeEyWFBgHvSNl/MNsVQZIA= @@ -126,6 +159,8 @@ github.com/dtm-labs/dtmdriver v0.0.6 h1:Iz6xnO+hE2TKDHI2TX4BKCzMtgXYgeQFBEGvvaNh github.com/dtm-labs/dtmdriver v0.0.6/go.mod h1:V5E1uFsExb6Do32ezpB8bMX6be+izLhkcboniLP5shU= github.com/dtm-labs/logger v0.0.1 h1:187UPkYviyOXelmkbew+Q94mg/BFjxJEsHfyHawu5YQ= github.com/dtm-labs/logger v0.0.1/go.mod h1:0woMQZ6ljx9wZIl7hW8cuV2PRQmwEKxhqYtab7zVNWg= +github.com/dtm-labs/rockscache v0.1.1 h1:6S1vgaHvGqrLd8Ka4hRTKeKPV7v+tT0MSkTIX81LRyA= +github.com/dtm-labs/rockscache v0.1.1/go.mod h1:c76WX0kyIibmQ2ACxUXvDvaLykoPakivMqIxt+UzE7A= github.com/dustin/go-humanize v1.0.0/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk= github.com/dustin/go-humanize v1.0.1 h1:GzkhY7T5VNhEkwH0PVJgjz+fX1rhBrR7pRT3mDkpeCY= github.com/dustin/go-humanize v1.0.1/go.mod h1:Mu1zIs6XwVuF/gI1OepvI0qD18qycQx+mFykh5fBlto= @@ -150,6 +185,7 @@ github.com/fatih/color v1.14.1/go.mod h1:2oHN61fhTpgcxD3TSWCgKDiH1+x4OiDVVGH8Wlg github.com/felixge/fgprof v0.9.3 h1:VvyZxILNuCiUCSXtPtYmmtGvb65nqXh2QFWc0Wpf2/g= github.com/felixge/fgprof v0.9.3/go.mod h1:RdbpDgzqYVh/T9fPELJyV7EYJuHB55UTEULNun8eiPw= github.com/frankban/quicktest v1.14.3 h1:FJKSZTDHjyhriyC81FLQ0LY93eSai0ZyR/ZIkd3ZUKE= +github.com/frankban/quicktest v1.14.3/go.mod h1:mgiwOwqx65TmIk1wJ6Q7wvnVMocbUorkibMOrVTHZps= github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo= github.com/fsnotify/fsnotify v1.4.9/go.mod h1:znqG4EE+3YCdAaPaxE2ZRY/06pZUdp0tY4IgpuI1SZQ= github.com/fsnotify/fsnotify v1.5.4 h1:jRbGcIw6P2Meqdwuo0H1p6JVLbL5DHKAKlYndzMwVZI= @@ -157,8 +193,14 @@ github.com/fsnotify/fsnotify v1.5.4/go.mod h1:OVB6XrOHzAwXMpEM7uPOzcehqUV2UqJxmV github.com/gabriel-vasile/mimetype v1.4.2 h1:w5qFW6JKBz9Y393Y4q372O9A7cUSequkh1Q7OhCmWKU= github.com/gabriel-vasile/mimetype v1.4.2/go.mod h1:zApsH/mKG4w07erKIaJPFiX0Tsq9BFQgN3qGY5GnNgA= github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04= +github.com/gin-contrib/cors v1.3.1 h1:doAsuITavI4IOcd0Y19U4B+O0dNWihRyX//nn4sEmgA= +github.com/gin-contrib/cors v1.3.1/go.mod h1:jjEJ4268OPZUcU7k9Pm653S7lXUGcqMADzFA61xsmDk= +github.com/gin-contrib/gzip v0.0.6 h1:NjcunTcGAj5CO1gn4N8jHOSIeRFHIbn51z6K+xaN4d4= +github.com/gin-contrib/gzip v0.0.6/go.mod h1:QOJlmV2xmayAjkNS2Y8NQsMneuRShOU/kjovCXNuzzk= github.com/gin-contrib/sse v0.1.0 h1:Y/yl/+YNO8GZSjAhjMsSuLt29uWRFHdHYUb5lYOV9qE= github.com/gin-contrib/sse v0.1.0/go.mod h1:RHrZQHXnP2xjPF+u1gW/2HnVO7nvIa9PG3Gm+fLHvGI= +github.com/gin-gonic/gin v1.5.0/go.mod h1:Nd6IXA8m5kNZdNEHMBd93KT+mdY3+bewLgRvmCsR2Do= +github.com/gin-gonic/gin v1.8.1/go.mod h1:ji8BvRH1azfM+SYow9zQ6SZMvR8qOMZHmsCuWR9tTTk= github.com/gin-gonic/gin v1.9.1 h1:4idEAncQnU5cB7BeOkPtxjfCSye0AAm1R0RVIqJ+Jmg= github.com/gin-gonic/gin v1.9.1/go.mod h1:hPrL7YrpYKXt5YId3A/Tnip5kqbEAP+KLuI3SUcPTeU= github.com/go-gl/glfw 
v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU= @@ -179,19 +221,40 @@ github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag= github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE= github.com/go-ole/go-ole v1.2.6 h1:/Fpf6oFPoeFik9ty7siob0G6Ke8QvQEuVcuChpwXzpY= github.com/go-ole/go-ole v1.2.6/go.mod h1:pprOEPIfldk/42T2oK7lQ4v4JSDwmV0As9GaiUsvbm0= +github.com/go-openapi/jsonpointer v0.19.3/go.mod h1:Pl9vOtqEWErmShwVjC8pYs9cog34VGT37dQOVbmoatg= +github.com/go-openapi/jsonpointer v0.19.5 h1:gZr+CIYByUqjcgeLXnQu2gHYQC9o73G2XUeOFYEICuY= +github.com/go-openapi/jsonpointer v0.19.5/go.mod h1:Pl9vOtqEWErmShwVjC8pYs9cog34VGT37dQOVbmoatg= +github.com/go-openapi/jsonreference v0.19.6 h1:UBIxjkht+AWIgYzCDSv2GN+E/togfwXUJFRTWhl2Jjs= +github.com/go-openapi/jsonreference v0.19.6/go.mod h1:diGHMEHg2IqXZGKxqyvWdfWU/aim5Dprw5bqpKkTvns= +github.com/go-openapi/spec v0.20.4 h1:O8hJrt0UMnhHcluhIdUgCLRWyM2x7QkBXRvOs7m+O1M= +github.com/go-openapi/spec v0.20.4/go.mod h1:faYFR1CvsJZ0mNsmsphTMSoRrNV3TEDoAM7FOEWeq8I= +github.com/go-openapi/swag v0.19.5/go.mod h1:POnQmlKehdgb5mhVOsnJFsivZCEZ/vjK9gh66Z9tfKk= +github.com/go-openapi/swag v0.19.15 h1:D2NRCBzS9/pEY3gP9Nl8aDqGUcPFrwG2p+CNFrLyrCM= +github.com/go-openapi/swag v0.19.15/go.mod h1:QYRuS/SOXUCsnplDa677K7+DxSOj6IPNl/eQntq43wQ= +github.com/go-playground/assert/v2 v2.0.1/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4= github.com/go-playground/assert/v2 v2.2.0 h1:JvknZsQTYeFEAhQwI4qEt9cyV5ONwRHC+lYKSsYSR8s= +github.com/go-playground/assert/v2 v2.2.0/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4= +github.com/go-playground/locales v0.12.1/go.mod h1:IUMDtCfWo/w/mtMfIE/IG2K+Ey3ygWanZIBtBW0W2TM= +github.com/go-playground/locales v0.14.0/go.mod h1:sawfccIbzZTqEDETgFXqTho0QybSa7l++s0DH+LDiLs= github.com/go-playground/locales v0.14.1 h1:EWaQ/wswjilfKLTECiXz7Rh+3BjFhfDFKv/oXslEjJA= github.com/go-playground/locales v0.14.1/go.mod h1:hxrqLVvrK65+Rwrd5Fc6F2O76J/NuW9t0sjnWqG1slY= +github.com/go-playground/universal-translator v0.16.0/go.mod h1:1AnU7NaIRDWWzGEKwgtJRd2xk99HeFyHw3yid4rvQIY= +github.com/go-playground/universal-translator v0.18.0/go.mod h1:UvRDBj+xPUEGrFYl+lu/H90nyDXpg0fqeB/AQUGNTVA= github.com/go-playground/universal-translator v0.18.1 h1:Bcnm0ZwsGyWbCzImXv+pAJnYK9S473LQFuzCbDbfSFY= github.com/go-playground/universal-translator v0.18.1/go.mod h1:xekY+UJKNuX9WP91TpwSH2VMlDf28Uj24BCp08ZFTUY= +github.com/go-playground/validator/v10 v10.10.0/go.mod h1:74x4gJWsvQexRdW8Pn3dXSGrTK4nAUsbPlLADvpJkos= github.com/go-playground/validator/v10 v10.14.0 h1:vgvQWe3XCz3gIeFDm/HnTIbj6UGmg/+t63MyGU2n5js= github.com/go-playground/validator/v10 v10.14.0/go.mod h1:9iXMNT7sEkjXb0I+enO7QXmzG6QCsPWY4zveKFVRSyU= github.com/go-redis/redis/v8 v8.11.5 h1:AcZZR7igkdvfVmQTPnu9WE37LRrO/YrBH5zWyjDC0oI= github.com/go-redis/redis/v8 v8.11.5/go.mod h1:gREzHqY1hg6oD9ngVRbLStwAWKhA0FEgq8Jd4h5lpwo= github.com/go-resty/resty/v2 v2.7.0 h1:me+K9p3uhSmXtrBZ4k9jcEAfJmuC8IivWHwaLZwPrFY= github.com/go-resty/resty/v2 v2.7.0/go.mod h1:9PWDzw47qPphMRFfhsyk0NnSgvluHcljSMVIq3w7q0I= +github.com/go-sql-driver/mysql v1.6.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg= +github.com/go-sql-driver/mysql v1.7.0 h1:ueSltNNllEqE3qcWBTD0iQd3IpL/6U+mJxLkazJ7YPc= +github.com/go-sql-driver/mysql v1.7.0/go.mod h1:OXbVy3sEdcQ2Doequ6Z5BW6fXNQTmx+9S1MCJN5yJMI= github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY= github.com/go-task/slim-sprig v0.0.0-20210107165309-348f09dbbbc0/go.mod 
h1:fyg7847qk6SyHyPtNmDHnmrv/HOrqktSC+C9fM+CJOE= +github.com/goccy/go-json v0.9.7/go.mod h1:6MelG93GURQebXPDq3khkgXZkazVtN9CRI+MGFi0w8I= github.com/goccy/go-json v0.10.2 h1:CrxCmQqYDkv1z7lO7Wbh2HN93uovUHgrECaO5ZrCXAU= github.com/goccy/go-json v0.10.2/go.mod h1:6MelG93GURQebXPDq3khkgXZkazVtN9CRI+MGFi0w8I= github.com/godbus/dbus/v5 v5.0.4/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA= @@ -202,6 +265,8 @@ github.com/goji/httpauth v0.0.0-20160601135302-2da839ab0f4d/go.mod h1:nnjvkQ9ptG github.com/golang-jwt/jwt/v5 v5.0.0 h1:1n1XNM9hk7O9mnQoNBGolZvzebBQ7p93ULHRc28XJUE= github.com/golang-jwt/jwt/v5 v5.0.0/go.mod h1:pqrtFR0X4osieyHYxtmOUWsAWrfe1Q5UVIyoH402zdk= github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= +github.com/golang/glog v1.1.2 h1:DVjP2PbBOzHyzA+dn3WhHIq4NdVu3Q+pvivFICf/7fo= +github.com/golang/glog v1.1.2/go.mod h1:zR+okUeTbrL6EL3xHUDxZuEtGv04p5shwip1+mL/rLQ= github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/golang/groupcache v0.0.0-20191227052852-215e87163ea7/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= @@ -230,8 +295,8 @@ github.com/golang/protobuf v1.4.2/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw github.com/golang/protobuf v1.4.3/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk= github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= -github.com/golang/protobuf v1.5.3 h1:KhyjKVUg7Usr/dYsdSqoFveMYd5ko72D+zANwlG1mmg= -github.com/golang/protobuf v1.5.3/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= +github.com/golang/protobuf v1.5.4 h1:i7eJL8qZTpSEXOPTxNKhASYpMn+8e5Q6AdndVa1dWek= +github.com/golang/protobuf v1.5.4/go.mod h1:lnTiLA8Wa4RWRcIUkrtSVa5nRhsEGBg48fD6rSs7xps= github.com/golang/snappy v0.0.1/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= github.com/golang/snappy v0.0.4 h1:yAGX7huGHXlcLOEtBnF4w7FQwA26wojNCwOYAEhLjQM= github.com/golang/snappy v0.0.4/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= @@ -251,6 +316,7 @@ github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/ github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI= +github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs= github.com/google/martian/v3 v3.0.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0= @@ -276,6 +342,7 @@ github.com/google/uuid v1.4.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+ github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg= github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk= github.com/googleapis/google-cloud-go-testing v0.0.0-20200911160855-bcd43fbb19e8/go.mod h1:dvDLG8qkwmyD9a/MJJN3XJcT3xFxOKAvTZGvuZmac9g= +github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY= 
github.com/grpc-ecosystem/go-grpc-middleware v1.3.0 h1:+9834+KizmvFV7pXQGSXQTsaWhq2GjuNUt0aUU0YBYw= github.com/grpc-ecosystem/go-grpc-middleware v1.3.0/go.mod h1:z0ButlSOZa5vEBq9m2m2hlwIgKw+rp3sdCBRoJY+30Y= github.com/grpc-ecosystem/go-grpc-prometheus v1.2.0 h1:Ovs26xHkKqVztRpIrF/92BcuyuQ/YW4NSIpoGtfXNho= @@ -284,8 +351,10 @@ github.com/grpc-ecosystem/grpc-gateway v1.16.0/go.mod h1:BDjrQk3hbvj6Nolgz8mAMFb github.com/hashicorp/consul/api v1.19.1 h1:GLeK1WD4VIRvt4wRhQKHFudztEkRb8pDs+uRiJgNwes= github.com/hashicorp/consul/api v1.19.1/go.mod h1:jAt316eYgWGNLJtxkMQrcqRpuDE/kFJdqkEFwRXFv8U= github.com/hashicorp/consul/sdk v0.13.1 h1:EygWVWWMczTzXGpO93awkHFzfUka6hLYJ0qhETd+6lY= -github.com/hashicorp/errwrap v1.0.0 h1:hLrqtEDnRye3+sgx6z4qVLNuviH3MR5aQ0ykNJa/UYA= +github.com/hashicorp/consul/sdk v0.13.1/go.mod h1:SW/mM4LbKfqmMvcFu8v+eiQQ7oitXEFeiBe9StxERb0= github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= +github.com/hashicorp/errwrap v1.1.0 h1:OxrOeh75EUXMY8TBjag2fzXGZ40LB6IKw45YeGUDY2I= +github.com/hashicorp/errwrap v1.1.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= github.com/hashicorp/go-cleanhttp v0.5.0/go.mod h1:JpRdi6/HCYpAwUzNwuwqhbovhLtngrth3wmdIIUrZ80= github.com/hashicorp/go-cleanhttp v0.5.2 h1:035FKYIWjmULyFRBKPs8TBQoi0x6d9G4xc9neXJWAZQ= github.com/hashicorp/go-cleanhttp v0.5.2/go.mod h1:kO/YDlP8L1346E6Sodw+PrpBSV4/SoxCXGY6BqNFT48= @@ -299,16 +368,20 @@ github.com/hashicorp/go-msgpack v0.5.3/go.mod h1:ahLV/dePpqEmjfWmKiqvPkv/twdG7iP github.com/hashicorp/go-multierror v1.0.0/go.mod h1:dHtQlpGsu+cZNNAkkCN/P3hoUDHhCYQXV3UM06sGGrk= github.com/hashicorp/go-multierror v1.1.0/go.mod h1:spPvp8C1qA32ftKqdAHm4hHTbPw+vmowP0z+KUhOZdA= github.com/hashicorp/go-multierror v1.1.1 h1:H5DkEtf6CXdFp0N0Em5UCwQpXMWke8IA0+lD48awMYo= +github.com/hashicorp/go-multierror v1.1.1/go.mod h1:iw975J/qwKPdAO1clOe2L8331t/9/fmwbPZ6JB6eMoM= github.com/hashicorp/go-retryablehttp v0.5.3/go.mod h1:9B5zBasrRhHXnJnui7y6sL7es7NDiJgTc6Er0maI1Xs= github.com/hashicorp/go-rootcerts v1.0.2 h1:jzhAVGtqPKbwpyCPELlgNWhE1znq+qwJtW5Oi2viEzc= github.com/hashicorp/go-rootcerts v1.0.2/go.mod h1:pqUvnprVnM5bf7AOirdbb01K4ccR319Vf4pU3K5EGc8= github.com/hashicorp/go-sockaddr v1.0.0/go.mod h1:7Xibr9yA9JjQq1JpNB2Vw7kxv8xerXegt+ozgdvDeDU= github.com/hashicorp/go-sockaddr v1.0.2 h1:ztczhD1jLxIRjVejw8gFomI1BQZOe2WoVOu0SyteCQc= +github.com/hashicorp/go-sockaddr v1.0.2/go.mod h1:rB4wwRAUzs07qva3c5SdrY/NEtAUjGlgmH/UkBUC97A= github.com/hashicorp/go-syslog v1.0.0/go.mod h1:qPfqrKkXGihmCqbJM2mZgkZGvKG1dFdvsLplgctolz4= github.com/hashicorp/go-uuid v1.0.0/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= github.com/hashicorp/go-uuid v1.0.1/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= github.com/hashicorp/go-uuid v1.0.3 h1:2gKiV6YVmrJ1i2CKKa9obLvRieoRGviZFL26PcT/Co8= +github.com/hashicorp/go-uuid v1.0.3/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= github.com/hashicorp/go-version v1.4.0 h1:aAQzgqIrRKRa7w75CKpbBxYsmUoPjzVm1W59ca1L0J4= +github.com/hashicorp/go-version v1.4.0/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA= github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= github.com/hashicorp/golang-lru v0.5.4 h1:YDjusn29QI/Das2iO9M0BHnIbxPeyuCHsjMW+lJfyTc= @@ -330,15 +403,33 @@ github.com/ianlancetaylor/demangle v0.0.0-20200824232613-28f6c0f3b639/go.mod h1: github.com/ianlancetaylor/demangle v0.0.0-20210905161508-09a460cdf81d/go.mod 
h1:aYm2/VgdVmcIU8iMfdMvDMsRAQjcfZSKFby6HOFvi/w= github.com/imdario/mergo v0.3.11 h1:3tnifQM4i+fbajXKBHXWEH+KvNHqojZ778UH75j3bGA= github.com/imdario/mergo v0.3.11/go.mod h1:jmQim1M+e3UYxmgPu/WyfjB3N3VflVyUjjjwH0dnCYA= +github.com/jackc/pgpassfile v1.0.0 h1:/6Hmqy13Ss2zCq62VdNG8tM1wchn8zjSGOBJ6icpsIM= +github.com/jackc/pgpassfile v1.0.0/go.mod h1:CEx0iS5ambNFdcRtxPj5JhEz+xB6uRky5eyVu/W2HEg= +github.com/jackc/pgservicefile v0.0.0-20221227161230-091c0ba34f0a h1:bbPeKD0xmW/Y25WS6cokEszi5g+S0QxI/d45PkRi7Nk= +github.com/jackc/pgservicefile v0.0.0-20221227161230-091c0ba34f0a/go.mod h1:5TJZWKEWniPve33vlWYSoGYefn3gLQRzjfDlhSJ9ZKM= +github.com/jackc/pgx/v5 v5.6.0 h1:SWJzexBzPL5jb0GEsrPMLIsi/3jOo7RHlzTjcAeDrPY= +github.com/jackc/pgx/v5 v5.6.0/go.mod h1:DNZ/vlrUnhWCoFGxHAG8U2ljioxukquj7utPDgtQdTw= +github.com/jackc/puddle/v2 v2.2.1 h1:RhxXJtFG022u4ibrCSMSiu5aOq1i77R3OHKNJj77OAk= +github.com/jackc/puddle/v2 v2.2.1/go.mod h1:vriiEXHvEE654aYKXXjOvZM39qJ0q+azkZFrfEOc3H4= github.com/jhump/protoreflect v1.15.1 h1:HUMERORf3I3ZdX05WaQ6MIpd/NJ434hTp5YiKgfCL6c= github.com/jhump/protoreflect v1.15.1/go.mod h1:jD/2GMKKE6OqX8qTjhADU1e6DShO+gavG9e0Q693nKo= github.com/jinzhu/configor v1.2.1 h1:OKk9dsR8i6HPOCZR8BcMtcEImAFjIhbJFZNyn5GCZko= github.com/jinzhu/configor v1.2.1/go.mod h1:nX89/MOmDba7ZX7GCyU/VIaQ2Ar2aizBl2d3JLF/rDc= +github.com/jinzhu/copier v0.3.5 h1:GlvfUwHk62RokgqVNvYsku0TATCF7bAHVwEXoBh3iJg= +github.com/jinzhu/copier v0.3.5/go.mod h1:DfbEm0FYsaqBcKcFuvmOZb218JkPGtvSHsKg8S8hyyg= +github.com/jinzhu/inflection v1.0.0 h1:K317FqzuhWc8YvSVlFMCCUb36O/S9MCKRDI7QkRKD/E= +github.com/jinzhu/inflection v1.0.0/go.mod h1:h+uFLlag+Qp1Va5pdKtLDYj+kHp5pxUVkryuEj+Srlc= +github.com/jinzhu/now v1.1.4/go.mod h1:d3SSVoowX0Lcu0IBviAWJpolVfI5UJVZZ7cO71lE/z8= +github.com/jinzhu/now v1.1.5 h1:/o9tlHleP7gOFmsnYNz3RGnqzefHA47wQpKrrdTIwXQ= +github.com/jinzhu/now v1.1.5/go.mod h1:d3SSVoowX0Lcu0IBviAWJpolVfI5UJVZZ7cO71lE/z8= github.com/jmespath/go-jmespath v0.0.0-20180206201540-c2b33e8439af h1:pmfjZENx5imkbgOkpRUYLnmbU7UEFbjtDA2hxJ1ichM= github.com/jmespath/go-jmespath v0.0.0-20180206201540-c2b33e8439af/go.mod h1:Nht3zPeWKUH0NzdCt2Blrr5ys8VGpn0CEB0cQHVjt7k= +github.com/josharian/intern v1.0.0 h1:vlS4z54oSdjm0bgjRigI+G1HpF+tI+9rE5LLzOg8HmY= +github.com/josharian/intern v1.0.0/go.mod h1:5DoeVV0s6jJacbCEi61lwdGj/aVlrQvzHFFd8Hwg//Y= github.com/jpillora/backoff v1.0.0/go.mod h1:J/6gKK9jxlEcS3zixgDgUAsiuZ7yrSoa/FX5e0EB2j4= github.com/json-iterator/go v1.1.5/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU= github.com/json-iterator/go v1.1.6/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU= +github.com/json-iterator/go v1.1.7/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= github.com/json-iterator/go v1.1.9/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= github.com/json-iterator/go v1.1.10/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= github.com/json-iterator/go v1.1.11/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= @@ -346,6 +437,7 @@ github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnr github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo= github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU= github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk= +github.com/jtolds/gls v4.20.0+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVYBRgL+9YlvaHOwJU= github.com/julienschmidt/httprouter v1.2.0/go.mod 
h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w= github.com/julienschmidt/httprouter v1.3.0/go.mod h1:JR6WtHb+2LUe8TCKY3cZOxFyyO8IZAc4RVcycCCAKdM= github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8= @@ -363,15 +455,25 @@ github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFB github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= github.com/kr/pretty v0.2.1/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI= github.com/kr/pretty v0.3.0 h1:WgNl7dwNpEZ6jJ9k1snq4pZsg7DOEN8hP9Xw0Tsjwk0= +github.com/kr/pretty v0.3.0/go.mod h1:640gp4NfQd8pI5XOwp5fnNeVWj67G7CFk/SaSQn7NBk= github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= +github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= +github.com/leodido/go-urn v1.1.0/go.mod h1:+cyI34gQWZcE1eQU7NVgKkkzdXDQHr1dBMtdAPozLkw= +github.com/leodido/go-urn v1.2.1/go.mod h1:zt4jvISO2HfUBqxjfIshjdMTYS56ZS/qv49ictyFfxY= github.com/leodido/go-urn v1.2.4 h1:XlAE/cm/ms7TE/VMVoduSpNBoyc2dOxHs5MZSwAN63Q= github.com/leodido/go-urn v1.2.4/go.mod h1:7ZrI8mTSeBSHl/UaRyKQW1qZeMgak41ANeCNaVckg+4= +github.com/lithammer/shortuuid v3.0.0+incompatible h1:NcD0xWW/MZYXEHa6ITy6kaXN5nwm/V115vj2YXfhS0w= +github.com/lithammer/shortuuid v3.0.0+incompatible/go.mod h1:FR74pbAuElzOUuenUHTK2Tciko1/vKuIKS9dSkDrA4w= github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0 h1:6E+4a0GO5zZEnZ81pIr0yLvtUWk2if982qA3F3QD6H4= github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0/go.mod h1:zJYVVT2jmtg6P3p1VtQj7WsuWi/y4VnjVBn7F8KPB3I= github.com/magiconair/properties v1.8.6 h1:5ibWZ6iY0NctNGWo87LalDlEZ6R41TqbbDamhfG/Qzo= github.com/magiconair/properties v1.8.6/go.mod h1:y3VJvCyxH9uVvJTWEGAELF3aiYNyPKd5NZ3oSwXrF60= +github.com/mailru/easyjson v0.0.0-20190614124828-94de47d64c63/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc= +github.com/mailru/easyjson v0.0.0-20190626092158-b2ccc519800e/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc= +github.com/mailru/easyjson v0.7.6 h1:8yTIVnZgCoiM1TgqoeTl+LfU5Jg6/xL3QhGQnimLYnA= +github.com/mailru/easyjson v0.7.6/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc= github.com/mattn/go-colorable v0.0.9/go.mod h1:9vuHe8Xs5qXnSaW/c/ABM9alt+Vo+STaOChaDxuIBZU= github.com/mattn/go-colorable v0.1.4/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE= github.com/mattn/go-colorable v0.1.6/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= @@ -381,12 +483,15 @@ github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxec github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg= github.com/mattn/go-isatty v0.0.3/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4= github.com/mattn/go-isatty v0.0.8/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s= +github.com/mattn/go-isatty v0.0.9/go.mod h1:YNRxwqDuOph6SZLI9vUUz6OYw3QyUt7WiY2yME+cCiQ= github.com/mattn/go-isatty v0.0.11/go.mod h1:PhnuNfih5lzO57/f3n+odYbM4JtupLOxQOAqxQCu2WE= github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU= github.com/mattn/go-isatty v0.0.14/go.mod h1:7GGIvUiUoEMVVmxf/4nioHXj79iQHKdU27kJ6hsGG94= github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM= github.com/mattn/go-isatty v0.0.19 h1:JITubQf0MOLdlGRuRq+jtsDlekdYPia9ZFsB8h/APPA= 
github.com/mattn/go-isatty v0.0.19/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= +github.com/mattn/go-sqlite3 v1.14.17 h1:mCRHCLDUBXgpKAqIKsaAaAsrAlbkeomtRFKXh2L6YIM= +github.com/mattn/go-sqlite3 v1.14.17/go.mod h1:2eHXhiwb8IkHr+BDWZGa96P6+rkvnG63S2DGjv9HUNg= github.com/matttproud/golang_protobuf_extensions v1.0.1 h1:4hp9jkHxhMHkqkrB3Ix0jegS5sx/RkqARlsWZ6pIwiU= github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0= github.com/miekg/dns v1.1.26/go.mod h1:bPDLeHnStXmXAq1m/Ch/hvfNHr14JKNPMBo3VZKjuso= @@ -414,10 +519,11 @@ github.com/montanaflynn/stats v0.0.0-20171201202039-1bf9dbcd8cbe h1:iruDEfMl2E6f github.com/montanaflynn/stats v0.0.0-20171201202039-1bf9dbcd8cbe/go.mod h1:wL8QJuTMNUDYhXwkmfOly8iTdp5TEcJFWZD2D7SIkUc= github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U= github.com/mwitkow/go-conntrack v0.0.0-20190716064945-2f068394615f/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U= -github.com/nacos-group/nacos-sdk-go/v2 v2.1.0 h1:PxRwOzHhnK6eGGvioEGkn8s6XRXmUVuXu91i2yQcdDs= -github.com/nacos-group/nacos-sdk-go/v2 v2.1.0/go.mod h1:ys/1adWeKXXzbNWfRNbaFlX/t6HVLWdpsNDvmoWTw0g= +github.com/nacos-group/nacos-sdk-go/v2 v2.2.7 h1:wCC1f3/VzIR1WD30YKeJGZAOchYCK/35mLC8qWt6Q6o= +github.com/nacos-group/nacos-sdk-go/v2 v2.2.7/go.mod h1:VYlyDPlQchPC31PmfBustu81vsOkdpCuO5k0dRdQcFc= github.com/natefinch/lumberjack v2.0.0+incompatible h1:4QJd3OLAMgj7ph+yZTuX13Ld4UpgHp07nNdFX7mqFfM= github.com/natefinch/lumberjack v2.0.0+incompatible/go.mod h1:Wi9p2TTF5DG5oU+6YfsmYQpsTIOm0B1VNzQg9Mw6nPk= +github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno= github.com/nxadm/tail v1.4.4/go.mod h1:kenIhsEOeOJmVchQTgglprH7qJGnHDVpk1VPCcaMI8A= github.com/nxadm/tail v1.4.8 h1:nPr65rt6Y5JFSKQO7qToXr7pePgD6Gwiw05lkbyAQTE= github.com/nxadm/tail v1.4.8/go.mod h1:+ncqLTQzXmGhMZNUePPaPqPvBxHAIsmXswZKocGu+AU= @@ -433,11 +539,17 @@ github.com/onsi/gomega v1.17.0/go.mod h1:HnhC7FXeEQY45zxNK3PPoIUhzk/80Xly9PcubAl github.com/onsi/gomega v1.18.1 h1:M1GfJqGRrBrrGGsbxzV5dqM2U2ApXefZCQpkukxYRLE= github.com/onsi/gomega v1.18.1/go.mod h1:0q+aL8jAiMXy9hbwj2mr5GziHiwhAIQpFmmtT5hitRs= github.com/opentracing/opentracing-go v1.1.0/go.mod h1:UkNAQd3GIcIGf0SeVgPpRdFStlNbqXla1AfSYxPUl2o= +github.com/otiai10/copy v1.7.0/go.mod h1:rmRl6QPdJj6EiUqXQ/4Nn2lLXoNQjFCQbbNrxgc/t3U= +github.com/otiai10/curr v0.0.0-20150429015615-9b4961190c95/go.mod h1:9qAhocn7zKJG+0mI8eUu6xqkFDYS2kb2saOteoSB3cE= +github.com/otiai10/curr v1.0.0/go.mod h1:LskTG5wDwr8Rs+nNQ+1LlxRjAtTZZjtJW4rMXl6j4vs= +github.com/otiai10/mint v1.3.0/go.mod h1:F5AjcsTsWUqX+Na9fpHb52P8pcRX2CI6A3ctIT91xUo= +github.com/otiai10/mint v1.3.3/go.mod h1:/yxELlJQ0ufhjUwhshSj+wFjZ78CnZ48/1wtmBH1OTc= github.com/pascaldekloe/goe v0.0.0-20180627143212-57f6aae5913c/go.mod h1:lzWF7FIEvWOWxwDKqyGYQf6ZUaNfKdP144TG7ZOy1lc= github.com/pascaldekloe/goe v0.1.0 h1:cBOtyMzM9HTpWjXfbbunk26uA6nG3a8n06Wieeh0MwY= github.com/pascaldekloe/goe v0.1.0/go.mod h1:lzWF7FIEvWOWxwDKqyGYQf6ZUaNfKdP144TG7ZOy1lc= github.com/pelletier/go-toml v1.9.5 h1:4yBQzkHv+7BHq2PQUZF3Mx0IYxG7LsP222s7Agd3ve8= github.com/pelletier/go-toml v1.9.5/go.mod h1:u1nR/EPcESfeI/szUZKdtJ0xRNbUoANCkoOuaOx1Y+c= +github.com/pelletier/go-toml/v2 v2.0.1/go.mod h1:r9LEWfGN8R5k0VXJ+0BkIe7MYkRdwZOjgMj2KwnJFUo= github.com/pelletier/go-toml/v2 v2.0.8 h1:0ctb6s9mE31h0/lhu+J6OPmVeDxJn+kYnJc2jZR9tGQ= github.com/pelletier/go-toml/v2 v2.0.8/go.mod 
h1:vuYfssBdrU2XDZ9bYydBu6t+6a6PYNcZljzZR9VXg+4= github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA= @@ -459,7 +571,6 @@ github.com/prometheus/client_golang v1.7.1/go.mod h1:PY5Wy2awLA44sXw4AOSfFBetzPP github.com/prometheus/client_golang v1.11.0/go.mod h1:Z6t4BnS23TR94PD6BsDNk8yVqroYurpAkEiz0P2BEV0= github.com/prometheus/client_golang v1.11.1/go.mod h1:Z6t4BnS23TR94PD6BsDNk8yVqroYurpAkEiz0P2BEV0= github.com/prometheus/client_golang v1.12.1/go.mod h1:3Z9XVyYiZYEO+YQWt3RD2R3jrbd179Rt297l4aS6nDY= -github.com/prometheus/client_golang v1.12.2/go.mod h1:3Z9XVyYiZYEO+YQWt3RD2R3jrbd179Rt297l4aS6nDY= github.com/prometheus/client_golang v1.13.0 h1:b71QUfeo5M8gq2+evJdTPfZhYMAU0uKPkyPJ7TPsloU= github.com/prometheus/client_golang v1.13.0/go.mod h1:vTeo+zgvILHsnnj/39Ou/1fPN5nJFOEMgftOUOmlvYQ= github.com/prometheus/client_model v0.0.0-20180712105110-5c3871d89910/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo= @@ -483,10 +594,18 @@ github.com/prometheus/procfs v0.6.0/go.mod h1:cz+aTbrPOrUb4q7XlbU9ygM+/jj0fzG6c1 github.com/prometheus/procfs v0.7.3/go.mod h1:cz+aTbrPOrUb4q7XlbU9ygM+/jj0fzG6c1xBZuNvfVA= github.com/prometheus/procfs v0.8.0 h1:ODq8ZFEaYeCaZOJlZZdJA2AbQR98dSHSM1KW/You5mo= github.com/prometheus/procfs v0.8.0/go.mod h1:z7EfXMXOkbkqb9IINtpCn86r/to3BnA0uaxHdg830/4= +github.com/redis/go-redis/extra/rediscmd/v9 v9.5.3 h1:1/BDligzCa40GTllkDnY3Y5DTHuKCONbB2JcRyIfl20= +github.com/redis/go-redis/extra/rediscmd/v9 v9.5.3/go.mod h1:3dZmcLn3Qw6FLlWASn1g4y+YO9ycEFUOM+bhBmzLVKQ= +github.com/redis/go-redis/extra/redisotel/v9 v9.5.3 h1:kuvuJL/+MZIEdvtb/kTBRiRgYaOmx1l+lYJyVdrRUOs= +github.com/redis/go-redis/extra/redisotel/v9 v9.5.3/go.mod h1:7f/FMrf5RRRVHXgfk7CzSVzXHiWeuOQUu2bsVqWoa+g= +github.com/redis/go-redis/v9 v9.6.1 h1:HHDteefn6ZkTtY5fGUE8tj8uy85AHk6zP7CpzIAM0y4= +github.com/redis/go-redis/v9 v9.6.1/go.mod h1:0C0c6ycQsdpVNQpxb1njEQIqkx5UcsM8FJCQLgE9+RA= github.com/rogpeppe/fastuuid v1.2.0/go.mod h1:jVj6XXZzXRy/MSR5jhDC/2q6DgLz+nrA6LYCDYWNEvQ= github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= +github.com/rogpeppe/go-internal v1.6.1/go.mod h1:xXDCJY+GAPziupqXw64V24skbSoqbTEfhy4qGm1nDQc= github.com/rogpeppe/go-internal v1.8.0 h1:FCbCCtXNOY3UtUuHUYaghJg4y7Fd14rXifAYUAtL9R8= github.com/rogpeppe/go-internal v1.8.0/go.mod h1:WmiCO8CzOY8rg0OYDC4/i/2WRWAB6poM+XZ2dLUbcbE= +github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= github.com/ryanuber/columnize v0.0.0-20160712163229-9b3edd62028f/go.mod h1:sm1tb6uqfes/u+d4ooFouqFdy9/2g9QGwK3SQygK0Ts= github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529 h1:nn5Wsu0esKSJiIVhscUtVbo7ada43DJhG55ua/hjS5I= github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529/go.mod h1:DxrIzT+xaE7yg65j358z/aeFdxmN0P9QXhEzd20vsDc= @@ -498,9 +617,12 @@ github.com/shoenig/test v0.6.4 h1:kVTaSd7WLz5WZ2IaoM0RSzRsUD+m8wRR+5qvntpn4LU= github.com/shoenig/test v0.6.4/go.mod h1:byHiCGXqrVaflBLAMq/srcZIHynQPQgeyvkvXnjqq0k= github.com/shopspring/decimal v1.2.0 h1:abSATXmQEYyShuxI4/vyW3tV1MrKAJzCZ/0zLUXYbsQ= github.com/shopspring/decimal v1.2.0/go.mod h1:DKyhrW/HYNuLGql+MJL6WCR6knT2jwCFRcu2hWCYk4o= +github.com/shurcooL/sanitized_anchor_name v1.0.0/go.mod h1:1NzhyTcUVG4SuEtjjoZeVRXNmyL/1OwPU0+IJeTBvfc= github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo= github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE= github.com/sirupsen/logrus v1.6.0/go.mod 
h1:7uNnSEd1DgxDLC74fIahvMZmmYsHGZGEOFrfsX/uA88= +github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc= +github.com/smartystreets/goconvey v1.6.4/go.mod h1:syvi0/a8iFYH4r/RixwvyeAJjdLS9QV7WQ/tjFTllLA= github.com/spf13/afero v1.9.2 h1:j49Hj62F0n+DaZ1dDCvhABaPNSGNkt32oRFxI33IEMw= github.com/spf13/afero v1.9.2/go.mod h1:iUV7ddyEEZPO5gA3zD4fJt6iStLlL+Lg4m2cihcDf8Y= github.com/spf13/cast v1.3.1/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE= @@ -517,6 +639,7 @@ github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+ github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo= github.com/stretchr/objx v0.5.2 h1:xuMeJ0Sdp5ZMRXx/aWO6RZxdr3beISkG5/G/aIRr3pY= +github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/8L+MA= github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= @@ -534,6 +657,14 @@ github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsT github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= github.com/subosito/gotenv v1.3.0 h1:mjC+YW8QpAdXibNi+vNWgzmgBH4+5l5dCXv8cNysBLI= github.com/subosito/gotenv v1.3.0/go.mod h1:YzJjq/33h7nrwdY+iHMhEOEEbW0ovIz0tB6t6PwAXzs= +github.com/swaggo/files v0.0.0-20220610200504-28940afbdbfe/go.mod h1:lKJPbtWzJ9JhsTN1k1gZgleJWY/cqq0psdoMmaThG3w= +github.com/swaggo/files v0.0.0-20220728132757-551d4a08d97a h1:kAe4YSu0O0UFn1DowNo2MY5p6xzqtJ/wQ7LZynSvGaY= +github.com/swaggo/files v0.0.0-20220728132757-551d4a08d97a/go.mod h1:lKJPbtWzJ9JhsTN1k1gZgleJWY/cqq0psdoMmaThG3w= +github.com/swaggo/gin-swagger v1.5.2 h1:dj2es17EaOHoy0Owu4xn3An1mI8/xjdFyIH6KAbOdYo= +github.com/swaggo/gin-swagger v1.5.2/go.mod h1:Cbj/MlHApPOjZdf4joWFXLLgmZVPyh54GPvPPyVjVZM= +github.com/swaggo/swag v1.8.1/go.mod h1:ugemnJsPZm/kRwFUnzBlbHRd0JY9zE1M4F+uy2pAaPQ= +github.com/swaggo/swag v1.8.12 h1:pctzkNPu0AlQP2royqX3apjKCQonAnf7KGoxeO4y64w= +github.com/swaggo/swag v1.8.12/go.mod h1:lNfm6Gg+oAq3zRJQNEMBE66LIJKM44mxFqhEEgy2its= github.com/tidwall/pretty v1.0.0/go.mod h1:XNkn88O1ChpSDQmQeStsy+sBenx6DDtFZJxhVysOjyk= github.com/tklauser/go-sysconf v0.3.12 h1:0QaGUFOdQaIVdPgfITYzaTegZvdCjmYO52cSFAEVmqU= github.com/tklauser/go-sysconf v0.3.12/go.mod h1:Ho14jnntGE1fpdOqQEEaiKRpvIavV0hSfmBq8nJbHYI= @@ -542,8 +673,19 @@ github.com/tklauser/numcpus v0.6.1/go.mod h1:1XfjsgE2zo8GVw7POkMbHENHzVg3GzmoZ9f github.com/tv42/httpunix v0.0.0-20150427012821-b75d8614f926/go.mod h1:9ESjWnEqriFuLhtthL60Sar/7RFoluCcXsuvEwTV5KM= github.com/twitchyliquid64/golang-asm v0.15.1 h1:SU5vSMR7hnwNxj24w34ZyCi/FmDZTkS4MhqMhdFk5YI= github.com/twitchyliquid64/golang-asm v0.15.1/go.mod h1:a1lVb/DtPvCB8fslRZhAngC2+aY1QWCk3Cedj/Gdt08= +github.com/ugorji/go v1.1.7/go.mod h1:kZn38zHttfInRq0xu/PH0az30d+z6vm202qpg1oXVMw= +github.com/ugorji/go v1.2.7/go.mod h1:nF9osbDWLy6bDVv/Rtoh6QgnvNDpmCalQV5urGCCS6M= +github.com/ugorji/go/codec v1.1.7/go.mod h1:Ax+UKWsSmolVDwsd+7N3ZtXu+yMGCf907BLYF3GoBXY= +github.com/ugorji/go/codec v1.2.7/go.mod h1:WGN1fab3R1fzQlVQTkfxVtIBhWDRqOviHU95kRgeqEY= github.com/ugorji/go/codec v1.2.11 h1:BMaWp1Bb6fHwEtbplGBGJ498wD+LKlNSl25MjdZY4dU= github.com/ugorji/go/codec v1.2.11/go.mod 
h1:UNopzCgEMSXjBc6AOMqYvWC1ktqTAfzJZUZgYf6w6lg= +github.com/uptrace/opentelemetry-go-extra/otelgorm v0.2.3 h1:girTS67d1m8+XUJLbNBDjCSH8BtujWFoI93W1OUjFIc= +github.com/uptrace/opentelemetry-go-extra/otelgorm v0.2.3/go.mod h1:kjsn/ilDe5TABXwTy7Dg/Lfr2pRAjrCD+yPV+pbhOMY= +github.com/uptrace/opentelemetry-go-extra/otelsql v0.2.3 h1:LNi0Qa7869/loPjz2kmMvp/jwZZnMZ9scMJKhDJ1DIo= +github.com/uptrace/opentelemetry-go-extra/otelsql v0.2.3/go.mod h1:jyigonKik3C5V895QNiAGpKYKEvFuqjw9qAEZks1mUg= +github.com/urfave/cli/v2 v2.3.0/go.mod h1:LJmUH05zAU44vOAcrfzZQKsZbVcdbOG8rtL3/XcUArI= +github.com/vmihailenco/msgpack v4.0.4+incompatible h1:dSLoQfGFAo3F6OoNhwUmLwVgaUXK79GlxNBwueZn0xI= +github.com/vmihailenco/msgpack v4.0.4+incompatible/go.mod h1:fy3FlTQTDXWkZ7Bh6AcGMlsjHatGryHQYUTf1ShIgkk= github.com/xdg-go/pbkdf2 v1.0.0 h1:Su7DPu48wXMwC3bs7MCNG+z4FhcyEuz5dlvchbq0B0c= github.com/xdg-go/pbkdf2 v1.0.0/go.mod h1:jrpuAogTd400dnrH08LKmI/xc1MbPOebTwRqcT5RDeI= github.com/xdg-go/scram v1.0.2/go.mod h1:1WAq6h33pAW+iRreB34OORO2Nf7qel3VV3fjBj+hCSs= @@ -559,13 +701,16 @@ github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9de github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= +github.com/yuin/goldmark v1.4.0/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= +github.com/yuin/gopher-lua v0.0.0-20210529063254-f4c35e4016d9 h1:k/gmLsJDWwWqbLCur2yWnJzwQEKRcAHXo6seXGuSwWw= +github.com/yuin/gopher-lua v0.0.0-20210529063254-f4c35e4016d9/go.mod h1:E1AXubJBdNmFERAOucpDIxNzeGfLzg0mYh+UfMWdChA= github.com/yusufpapurcu/wmi v1.2.3 h1:E1ctvB7uKFMOJw3fdOW32DwGE9I7t++CRUEMKvFoFiw= github.com/yusufpapurcu/wmi v1.2.3/go.mod h1:SBZ9tNy3G9/m5Oi98Zks0QjeHVDvuK0qfxQmPyzfmi0= -github.com/zhufuyi/dtmdriver-sponge v0.0.2 h1:YCXEGtO+9ThVzOlbbcOUPai69CfBUcNm6KuYFGDySec= -github.com/zhufuyi/dtmdriver-sponge v0.0.2/go.mod h1:IIXUNeJis54f2CL5CNJqIpqRAdRKNpQdsA1LQMYv+vo= -github.com/zhufuyi/sponge v1.8.5 h1:8qu2E574UN6yY9HdveKyoSH1mOTLGOPbDsLPadvCZV0= -github.com/zhufuyi/sponge v1.8.5/go.mod h1:kU+gnW99OFbpT3R1LNODAmMPa3dHByUlXGQ7TSJaESw= +github.com/zhufuyi/dtmdriver-sponge v1.0.0 h1:4cMdQAQAVrGVYPt89u1/kJlF/gVUTOcJROkE03sddk0= +github.com/zhufuyi/dtmdriver-sponge v1.0.0/go.mod h1:eQz+TsQnW4BX0cuXlwbiQxwl8u19o1BnuNlntFxDmAU= +github.com/zhufuyi/sponge v1.10.1 h1:feB75axQtMJ5EkC9M6WgUC+VIqZlB+K6zmNul9xlB0c= +github.com/zhufuyi/sponge v1.10.1/go.mod h1:g6oDmwPTUrCL9+RJbSbjQEtmc0yhJ1vUD5rCdF1QNG4= go.etcd.io/etcd/api/v3 v3.5.5 h1:BX4JIbQ7hl7+jL+g+2j5UAr0o1bctCm6/Ct+ArBGkf0= go.etcd.io/etcd/api/v3 v3.5.5/go.mod h1:KFtNaxGDw4Yx/BA4iPPwevUTAuqcsPxzyX8PHydchN8= go.etcd.io/etcd/client/pkg/v3 v3.5.5 h1:9S0JUVvmrVl7wCF39iTQthdaaNIiAaQbmK75ogO6GU8= @@ -581,6 +726,8 @@ go.opencensus.io v0.22.2/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= go.opencensus.io v0.22.3/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= go.opencensus.io v0.22.4/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= go.opencensus.io v0.22.5/go.mod h1:5pWMHQbX5EPX2/62yrJeAkowc+lfs/XD7Uxpq3pI6kk= +go.opentelemetry.io/contrib v1.24.0 h1:Tfn7pP/482iIzeeba91tP52a1c1TEeqYc1saih+vBN8= +go.opentelemetry.io/contrib v1.24.0/go.mod h1:usW9bPlrjHiJFbK0a6yK/M5wNHs3nLmtrT3vzhoD3co= go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.49.0 
h1:4Pp6oUg3+e/6M4C0A/3kJ2VYa++dsWVTtGgLVj5xtHg= go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.49.0/go.mod h1:Mjt1i1INqiaoZOMGR1RIUJN+i3ChKoFRqzrRQhlkbs0= go.opentelemetry.io/otel v1.24.0 h1:0LAOdjNmQeSTzGBzduGe/rU4tZhMwL5rWgtp9Ku5Jfo= @@ -621,14 +768,16 @@ golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8U golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20190923035154-9ee001bba392/go.mod h1:/lpIB1dKB+9EgE3H3cr1v9wB50oz8l4C4h62xy7jSTY= golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20200510223506-06a226fb4e37/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto v0.0.0-20201216223049-8b5274cf687f/go.mod h1:jdWPYTVW3xRLrWPugEBEK3UY2ZEsg3UU495nc5E+M+I= golang.org/x/crypto v0.0.0-20210421170649-83a5a9bb288b/go.mod h1:T9bdIzuCu7OtxOm1hfPfRQxPLYneinmdGuTeoZ9dtd4= +golang.org/x/crypto v0.0.0-20210711020723-a769d52b0f97/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= golang.org/x/crypto v0.0.0-20211108221036-ceb1ce70b4fa/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= golang.org/x/crypto v0.3.0/go.mod h1:hebNnKkNXi2UzZN1eVRvBB7co0a+JxK6XbPiWVs/3J4= -golang.org/x/crypto v0.22.0 h1:g1v0xeRhjcugydODzvb3mEM9SQ0HGp9s/nh3COQ/C30= -golang.org/x/crypto v0.22.0/go.mod h1:vr6Su+7cTlO45qkww3VDJlzDn0ctJvRgYbC2NvXHt+M= +golang.org/x/crypto v0.26.0 h1:RrRspgV4mU+YwB4FYnuBoKsUapNIL5cohGAmSH3azsw= +golang.org/x/crypto v0.26.0/go.mod h1:GY7jblb9wI+FOo5y8/S2oY4zWP07AkOJ4+jxCqdqn54= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8= @@ -665,6 +814,8 @@ golang.org/x/mod v0.4.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.4.1/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= +golang.org/x/mod v0.17.0 h1:zY54UmvipHiNd+pm+m0x9KhZ9hl1/7QNMyxXbc6ICqA= +golang.org/x/mod v0.17.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c= golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= @@ -703,15 +854,18 @@ golang.org/x/net v0.0.0-20201224014010-6772e930b67b/go.mod h1:m0MpNAwzfU5UDzcl9v golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM= golang.org/x/net v0.0.0-20210410081132-afb366fc7cd1/go.mod h1:9tjilg8BloeKEkVJvy7fQ90B1CfIiPueXVOjqfkSzI8= +golang.org/x/net v0.0.0-20210421230115-4e50805a0758/go.mod 
h1:72T/g9IO56b78aLF+1Kcs5dz7/ng1VjMUvfKvpfy+jM= golang.org/x/net v0.0.0-20210428140749-89ef3d95e781/go.mod h1:OJAsFXCWl8Ukc7SiCT/9KSuxbyM7479/AVlXFRxuMCk= golang.org/x/net v0.0.0-20210525063256-abc453219eb5/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= +golang.org/x/net v0.0.0-20210805182204-aaa1db679c0d/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.0.0-20211029224645-99673261e6eb/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.0.0-20220127200216-cd36cc0744dd/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= golang.org/x/net v0.0.0-20220225172249-27dd8689420f/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= +golang.org/x/net v0.0.0-20220425223048-2871e0cb64e4/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= golang.org/x/net v0.2.0/go.mod h1:KqCZLdyyvdV855qA2rE3GC2aiw5xGR5TEjj8smXukLY= -golang.org/x/net v0.24.0 h1:1PcaxkF854Fu3+lvBIx5SYn9wRlBzzcnHZSiaFFAb0w= -golang.org/x/net v0.24.0/go.mod h1:2Q7sJY5mzlzWjKtYUEXSlBWCdyaioyXzRB2RtU8KVE8= +golang.org/x/net v0.25.0 h1:d/OCCoBEUq33pjydKrGQhw7IlUPI2Oylr+8qLx49kac= +golang.org/x/net v0.25.0/go.mod h1:JkAGAh7GEvH74S6FOH42FLoXpXbE/aqXSrIQjXgsiwM= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= @@ -737,13 +891,14 @@ golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJ golang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.7.0 h1:YsImfSBoP9QPYL0xyKJPq0gcaJdG3rInoqxTWbfQu9M= -golang.org/x/sync v0.7.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= +golang.org/x/sync v0.8.0 h1:3NFvSEYkUoMifnESzZl15y791HH1qU2xm6eCJU5ZPXQ= +golang.org/x/sync v0.8.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= golang.org/x/sys v0.0.0-20180823144017-11551d06cbcc/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20181116152217-5ac8a444bdc5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190204203706-41f3e6584952/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190312061237-fead79001313/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= @@ -754,6 +909,7 @@ golang.org/x/sys v0.0.0-20190507160741-ecd444e8653b/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys v0.0.0-20190606165138-5da285871e9c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys 
v0.0.0-20190624142023-c5567b49c5d0/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190726091711-fc99dfbffb4e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190813064441-fde4db37ae7a/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190904154756-749cb33beabd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190916202348-b4ddaad3f8a3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190922100055-0a153f010e69/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= @@ -794,12 +950,15 @@ golang.org/x/sys v0.0.0-20210225134936-a50acf3fe073/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys v0.0.0-20210303074136-134d130e1a04/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210330210617-4fbd30eecc44/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210403161142-5e06dd20ab57/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210420072515-93ed5bcd2bfe/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210423185535-09eb48e85fd7/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210510120138-977fb7262007/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210603081109-ebe580a85c40/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210806184541-e5e7981a1069/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210809222454-d867a43fc93e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210927094055-39ccf1dd6fa6/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20211007075335-d3039528d8ac/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20211216021012-1d35b9e2eb4e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= @@ -811,12 +970,13 @@ golang.org/x/sys v0.0.0-20220704084225-05e143d24a9e/go.mod h1:oPkhp1MJrh7nUepCBc golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220728004956-3c1f35247d10/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20221010170243-090e33056c14/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.2.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.11.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.19.0 h1:q5f1RH2jigJ1MoAWp2KTp3gm5zAGFUTarQZ5U386+4o= -golang.org/x/sys v0.19.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/sys v0.23.0 h1:YfKFowiIMvtgl1UERQoTPPToxltDeZfbj4H7dVUCwmM= +golang.org/x/sys v0.23.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/term 
v0.0.0-20201117132131-f5c789dd3221/go.mod h1:Nr5EML6q2oocZ2LXRh80K7BxOlk5/8JxuGnuhpl+muw= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= @@ -832,12 +992,11 @@ golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= golang.org/x/text v0.3.8/go.mod h1:E6s5w1FMmriuDzIBO73fBruAKo1PCIq6d2Q6DHfQ8WQ= golang.org/x/text v0.4.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= -golang.org/x/text v0.14.0 h1:ScX5w1eTa3QqT8oi6+ziP7dTV1S2+ALU0bI+0zXKWiQ= -golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= +golang.org/x/text v0.17.0 h1:XtiM5bkSOt+ewxlOE/aE/AKEHibwj/6gvWMl9Rsh0Qc= +golang.org/x/text v0.17.0/go.mod h1:BuEKDfySbSR4drPmRPG/7iBdf8hvFMuRexcpahXilzY= golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= -golang.org/x/time v0.0.0-20220722155302-e5dcc9cfc0b9/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.1.0 h1:xYY+Bajn2a7VBmTM5GikTmnK8ZuX8YgnQCqZpbBNtmA= golang.org/x/time v0.1.0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= @@ -846,6 +1005,7 @@ golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3 golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= golang.org/x/tools v0.0.0-20190312151545-0bb0c0a6e846/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= golang.org/x/tools v0.0.0-20190312170243-e65039ee4138/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190328211700-ab21143f2384/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= golang.org/x/tools v0.0.0-20190425150028-36563e24a262/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= golang.org/x/tools v0.0.0-20190506145303-2d16b83fe98c/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= @@ -895,7 +1055,10 @@ golang.org/x/tools v0.1.0/go.mod h1:xkSsbof2nBLbhDlRMhhhyNLN/zl3eTqcnHD5viDpcZ0= golang.org/x/tools v0.1.1/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= golang.org/x/tools v0.1.2/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= golang.org/x/tools v0.1.5/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= +golang.org/x/tools v0.1.7/go.mod h1:LGqMHiF4EqQNHR1JncWGqT5BVaXmza+X+BDGol+dOxo= golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= +golang.org/x/tools v0.21.1-0.20240508182429-e35e4ccd0d2d h1:vU5i/LfpvrRCpgM/VPfJLg5KjxD3E+hfT1SH+d9zLwg= +golang.org/x/tools v0.21.1-0.20240508182429-e35e4ccd0d2d/go.mod h1:aiJjzUbINMkxbQROHiO6hDPo2LHcIPhhQsa9DLh0yGk= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod 
h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= @@ -1010,16 +1173,19 @@ google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp0 google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= google.golang.org/protobuf v1.27.1/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= google.golang.org/protobuf v1.28.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= -google.golang.org/protobuf v1.32.0 h1:pPC6BG5ex8PDFnkbrGU3EixyhKcQ2aDuBS36lqK/C7I= -google.golang.org/protobuf v1.32.0/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos= +google.golang.org/protobuf v1.34.2 h1:6xV6lTsCfpGD21XK49h7MhtcApnLqkfYgPcdHftf6hg= +google.golang.org/protobuf v1.34.2/go.mod h1:qYOHts0dSfpeUzUFpOMr/WGzszTmLH+DiWniOlNbLDw= gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys= +gopkg.in/go-playground/assert.v1 v1.2.1/go.mod h1:9RXL0bg/zibRAgZUYszZSwO/z8Y/a8bDuhia5mkpMnE= +gopkg.in/go-playground/validator.v9 v9.29.1/go.mod h1:+c9/zcJMFNgbLvly1L1V+PpxWdVbfP1avr/N00E2vyQ= gopkg.in/ini.v1 v1.66.2/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= gopkg.in/ini.v1 v1.66.4 h1:SsAcf+mM7mRZo2nJNGt8mZCjG8ZRaNGMURJw7BsIST4= gopkg.in/ini.v1 v1.66.4/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= @@ -1037,9 +1203,24 @@ gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gopkg.in/yaml.v3 v3.0.0-20200615113413-eeeca48fe776/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gorm.io/driver/mysql v1.4.3/go.mod h1:sSIebwZAVPiT+27jK9HIwvsqOGKx3YMPmrA3mBJR10c= +gorm.io/driver/mysql v1.5.2 h1:QC2HRskSE75wBuOxe0+iCkyJZ+RqpudsQtqkp+IMuXs= +gorm.io/driver/mysql v1.5.2/go.mod h1:pQLhh1Ut/WUAySdTHwBpBv6+JKcj+ua4ZFx1QQTBzb8= +gorm.io/driver/postgres v1.5.4 h1:Iyrp9Meh3GmbSuyIAGyjkN+n9K+GHX9b9MqsTL4EJCo= +gorm.io/driver/postgres v1.5.4/go.mod h1:Bgo89+h0CRcdA33Y6frlaHHVuTdOf87pmyzwW9C/BH0= +gorm.io/driver/sqlite v1.5.4 h1:IqXwXi8M/ZlPzH/947tn5uik3aYQslP9BVveoax0nV0= +gorm.io/driver/sqlite v1.5.4/go.mod h1:qxAuCol+2r6PannQDpOP1FP6ag3mKi4esLnB/jHed+4= +gorm.io/gorm v1.23.8/go.mod h1:l2lP/RyAtc1ynaTjFksBde/O8v9oOGIApu2/xRitmZk= +gorm.io/gorm v1.25.2-0.20230530020048-26663ab9bf55/go.mod 
h1:L4uxeKpfBml98NYqVqwAdmV1a2nBtAec/cf3fpucW/k= +gorm.io/gorm v1.25.2/go.mod h1:L4uxeKpfBml98NYqVqwAdmV1a2nBtAec/cf3fpucW/k= +gorm.io/gorm v1.25.5 h1:zR9lOiiYf09VNh5Q1gphfyia1JpiClIWG9hQaxB/mls= +gorm.io/gorm v1.25.5/go.mod h1:hbnx/Oo0ChWMn1BIhpy1oYozzpM15i4YPuHDmfYtwg8= +gorm.io/plugin/dbresolver v1.5.1 h1:s9Dj9f7r+1rE3nx/Ywzc85nXptUEaeOO0pt27xdopM8= +gorm.io/plugin/dbresolver v1.5.1/go.mod h1:l4Cn87EHLEYuqUncpEeTC2tTJQkjngPSD+lo8hIvcT0= honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= honnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= honnef.co/go/tools v0.0.0-20190418001031-e561f6794a2a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= diff --git a/_13_sponge-dtm-cache/grpc+http/internal/cache/stock.go b/_13_sponge-dtm-cache/grpc+http/internal/cache/stock.go new file mode 100644 index 0000000..2e56567 --- /dev/null +++ b/_13_sponge-dtm-cache/grpc+http/internal/cache/stock.go @@ -0,0 +1,149 @@ +package cache + +import ( + "context" + "strings" + "time" + + "github.com/zhufuyi/sponge/pkg/cache" + "github.com/zhufuyi/sponge/pkg/encoding" + "github.com/zhufuyi/sponge/pkg/utils" + + "stock/internal/model" +) + +const ( + // cache prefix key, must end with a colon + stockCachePrefixKey = "stock:" + // StockExpireTime expire time + StockExpireTime = 5 * time.Minute +) + +var _ StockCache = (*stockCache)(nil) + +// StockCache cache interface +type StockCache interface { + Set(ctx context.Context, id uint64, data *model.Stock, duration time.Duration) error + Get(ctx context.Context, id uint64) (*model.Stock, error) + MultiGet(ctx context.Context, ids []uint64) (map[uint64]*model.Stock, error) + MultiSet(ctx context.Context, data []*model.Stock, duration time.Duration) error + Del(ctx context.Context, id uint64) error + SetCacheWithNotFound(ctx context.Context, id uint64) error +} + +// stockCache define a cache struct +type stockCache struct { + cache cache.Cache +} + +// NewStockCache new a cache +func NewStockCache(cacheType *model.CacheType) StockCache { + jsonEncoding := encoding.JSONEncoding{} + cachePrefix := "" + + cType := strings.ToLower(cacheType.CType) + switch cType { + case "redis": + c := cache.NewRedisCache(cacheType.Rdb, cachePrefix, jsonEncoding, func() interface{} { + return &model.Stock{} + }) + return &stockCache{cache: c} + case "memory": + c := cache.NewMemoryCache(cachePrefix, jsonEncoding, func() interface{} { + return &model.Stock{} + }) + return &stockCache{cache: c} + } + + return nil // no cache +} + +// GetStockCacheKey cache key +func (c *stockCache) GetStockCacheKey(id uint64) string { + return stockCachePrefixKey + utils.Uint64ToStr(id) +} + +// Set write to cache +func (c *stockCache) Set(ctx context.Context, id uint64, data *model.Stock, duration time.Duration) error { + if data == nil || id == 0 { + return nil + } + cacheKey := c.GetStockCacheKey(id) + err := c.cache.Set(ctx, cacheKey, data, duration) + if err != nil { + return err + } + return nil +} + +// Get cache value +func (c *stockCache) Get(ctx context.Context, id uint64) (*model.Stock, error) { + var data *model.Stock + cacheKey := c.GetStockCacheKey(id) + err := c.cache.Get(ctx, cacheKey, &data) + if err != nil { + return nil, err + } + return data, nil +} + +// MultiSet multiple set cache +func (c *stockCache) MultiSet(ctx context.Context, data []*model.Stock, duration time.Duration) error { + valMap := make(map[string]interface{}) + for _, v := range data { + cacheKey := 
c.GetStockCacheKey(v.ID) + valMap[cacheKey] = v + } + + err := c.cache.MultiSet(ctx, valMap, duration) + if err != nil { + return err + } + + return nil +} + +// MultiGet multiple get cache, return key in map is id value +func (c *stockCache) MultiGet(ctx context.Context, ids []uint64) (map[uint64]*model.Stock, error) { + var keys []string + for _, v := range ids { + cacheKey := c.GetStockCacheKey(v) + keys = append(keys, cacheKey) + } + + itemMap := make(map[string]*model.Stock) + err := c.cache.MultiGet(ctx, keys, itemMap) + if err != nil { + return nil, err + } + + retMap := make(map[uint64]*model.Stock) + for _, id := range ids { + val, ok := itemMap[c.GetStockCacheKey(id)] + if ok { + retMap[id] = val + } + } + + return retMap, nil +} + +// Del delete cache +func (c *stockCache) Del(ctx context.Context, id uint64) error { + cacheKey := c.GetStockCacheKey(id) + err := c.cache.Del(ctx, cacheKey) + if err != nil { + return err + } + return nil +} + +// SetCacheWithNotFound set empty cache +func (c *stockCache) SetCacheWithNotFound(ctx context.Context, id uint64) error { + cacheKey := c.GetStockCacheKey(id) + err := c.cache.SetCacheWithNotFound(ctx, cacheKey) + if err != nil { + return err + } + return nil +} diff --git a/_13_sponge-dtm-cache/grpc+http/internal/cache/stock_test.go b/_13_sponge-dtm-cache/grpc+http/internal/cache/stock_test.go new file mode 100644 index 0000000..f280608 --- /dev/null +++ b/_13_sponge-dtm-cache/grpc+http/internal/cache/stock_test.go @@ -0,0 +1,144 @@ +package cache + +import ( + "testing" + "time" + + "github.com/stretchr/testify/assert" + + "github.com/zhufuyi/sponge/pkg/gotest" + "github.com/zhufuyi/sponge/pkg/utils" + + "stock/internal/model" +) + +func newStockCache() *gotest.Cache { + record1 := &model.Stock{} + record1.ID = 1 + record2 := &model.Stock{} + record2.ID = 2 + testData := map[string]interface{}{ + utils.Uint64ToStr(record1.ID): record1, + utils.Uint64ToStr(record2.ID): record2, + } + + c := gotest.NewCache(testData) + c.ICache = NewStockCache(&model.CacheType{ + CType: "redis", + Rdb: c.RedisClient, + }) + return c +} + +func Test_stockCache_Set(t *testing.T) { + c := newStockCache() + defer c.Close() + + record := c.TestDataSlice[0].(*model.Stock) + err := c.ICache.(StockCache).Set(c.Ctx, record.ID, record, time.Hour) + if err != nil { + t.Fatal(err) + } + + // nil data + err = c.ICache.(StockCache).Set(c.Ctx, 0, nil, time.Hour) + assert.NoError(t, err) +} + +func Test_stockCache_Get(t *testing.T) { + c := newStockCache() + defer c.Close() + + record := c.TestDataSlice[0].(*model.Stock) + err := c.ICache.(StockCache).Set(c.Ctx, record.ID, record, time.Hour) + if err != nil { + t.Fatal(err) + } + + got, err := c.ICache.(StockCache).Get(c.Ctx, record.ID) + if err != nil { + t.Fatal(err) + } + assert.Equal(t, record, got) + + // zero key error + _, err = c.ICache.(StockCache).Get(c.Ctx, 0) + assert.Error(t, err) +} + +func Test_stockCache_MultiGet(t *testing.T) { + c := newStockCache() + defer c.Close() + + var testData []*model.Stock + for _, data := range c.TestDataSlice { + testData = append(testData, data.(*model.Stock)) + } + + err := c.ICache.(StockCache).MultiSet(c.Ctx, testData, time.Hour) + if err != nil { + t.Fatal(err) + } + + got, err := c.ICache.(StockCache).MultiGet(c.Ctx, c.GetIDs()) + if err != nil { + t.Fatal(err) + } + + expected := c.GetTestData() + for k, v := range expected { + assert.Equal(t, got[utils.StrToUint64(k)], v.(*model.Stock)) + } +} + +func Test_stockCache_MultiSet(t *testing.T) { + c := newStockCache() + 
defer c.Close() + + var testData []*model.Stock + for _, data := range c.TestDataSlice { + testData = append(testData, data.(*model.Stock)) + } + + err := c.ICache.(StockCache).MultiSet(c.Ctx, testData, time.Hour) + if err != nil { + t.Fatal(err) + } +} + +func Test_stockCache_Del(t *testing.T) { + c := newStockCache() + defer c.Close() + + record := c.TestDataSlice[0].(*model.Stock) + err := c.ICache.(StockCache).Del(c.Ctx, record.ID) + if err != nil { + t.Fatal(err) + } +} + +func Test_stockCache_SetCacheWithNotFound(t *testing.T) { + c := newStockCache() + defer c.Close() + + record := c.TestDataSlice[0].(*model.Stock) + err := c.ICache.(StockCache).SetCacheWithNotFound(c.Ctx, record.ID) + if err != nil { + t.Fatal(err) + } +} + +func TestNewStockCache(t *testing.T) { + c := NewStockCache(&model.CacheType{ + CType: "", + }) + assert.Nil(t, c) + c = NewStockCache(&model.CacheType{ + CType: "memory", + }) + assert.NotNil(t, c) + c = NewStockCache(&model.CacheType{ + CType: "redis", + }) + assert.NotNil(t, c) +} diff --git a/_13_sponge-dtm-cache/grpc+http/internal/config/stock.go b/_13_sponge-dtm-cache/grpc+http/internal/config/stock.go new file mode 100644 index 0000000..5f12b4b --- /dev/null +++ b/_13_sponge-dtm-cache/grpc+http/internal/config/stock.go @@ -0,0 +1,173 @@ +// code generated by https://stock + +package config + +import ( + "github.com/zhufuyi/sponge/pkg/conf" +) + +var config *Config + +func Init(configFile string, fs ...func()) error { + config = &Config{} + return conf.Parse(configFile, config, fs...) +} + +func Show(hiddenFields ...string) string { + return conf.Show(config, hiddenFields...) +} + +func Get() *Config { + if config == nil { + panic("config is nil, please call config.Init() first") + } + return config +} + +func Set(conf *Config) { + config = conf +} + +type Config struct { + App App `yaml:"app" json:"app"` + Consul Consul `yaml:"consul" json:"consul"` + Database Database `yaml:"database" json:"database"` + Etcd Etcd `yaml:"etcd" json:"etcd"` + Grpc Grpc `yaml:"grpc" json:"grpc"` + GrpcClient []GrpcClient `yaml:"grpcClient" json:"grpcClient"` + HTTP HTTP `yaml:"http" json:"http"` + Jaeger Jaeger `yaml:"jaeger" json:"jaeger"` + Logger Logger `yaml:"logger" json:"logger"` + NacosRd NacosRd `yaml:"nacosRd" json:"nacosRd"` + Redis Redis `yaml:"redis" json:"redis"` +} + +type Consul struct { + Addr string `yaml:"addr" json:"addr"` +} + +type Etcd struct { + Addrs []string `yaml:"addrs" json:"addrs"` +} + +type Jaeger struct { + AgentHost string `yaml:"agentHost" json:"agentHost"` + AgentPort int `yaml:"agentPort" json:"agentPort"` +} + +type ClientToken struct { + AppID string `yaml:"appID" json:"appID"` + AppKey string `yaml:"appKey" json:"appKey"` + Enable bool `yaml:"enable" json:"enable"` +} + +type ClientSecure struct { + CaFile string `yaml:"caFile" json:"caFile"` + CertFile string `yaml:"certFile" json:"certFile"` + KeyFile string `yaml:"keyFile" json:"keyFile"` + ServerName string `yaml:"serverName" json:"serverName"` + Type string `yaml:"type" json:"type"` +} + +type ServerSecure struct { + CaFile string `yaml:"caFile" json:"caFile"` + CertFile string `yaml:"certFile" json:"certFile"` + KeyFile string `yaml:"keyFile" json:"keyFile"` + Type string `yaml:"type" json:"type"` +} + +type App struct { + CacheType string `yaml:"cacheType" json:"cacheType"` + EnableCircuitBreaker bool `yaml:"enableCircuitBreaker" json:"enableCircuitBreaker"` + EnableHTTPProfile bool `yaml:"enableHTTPProfile" json:"enableHTTPProfile"` + EnableLimit bool `yaml:"enableLimit" 
json:"enableLimit"` + EnableMetrics bool `yaml:"enableMetrics" json:"enableMetrics"` + EnableStat bool `yaml:"enableStat" json:"enableStat"` + EnableTrace bool `yaml:"enableTrace" json:"enableTrace"` + Env string `yaml:"env" json:"env"` + Host string `yaml:"host" json:"host"` + Name string `yaml:"name" json:"name"` + RegistryDiscoveryType string `yaml:"registryDiscoveryType" json:"registryDiscoveryType"` + TracingSamplingRate float64 `yaml:"tracingSamplingRate" json:"tracingSamplingRate"` + Version string `yaml:"version" json:"version"` +} + +type GrpcClient struct { + ClientSecure ClientSecure `yaml:"clientSecure" json:"clientSecure"` + ClientToken ClientToken `yaml:"clientToken" json:"clientToken"` + EnableLoadBalance bool `yaml:"enableLoadBalance" json:"enableLoadBalance"` + Host string `yaml:"host" json:"host"` + Name string `yaml:"name" json:"name"` + Port int `yaml:"port" json:"port"` + RegistryDiscoveryType string `yaml:"registryDiscoveryType" json:"registryDiscoveryType"` + Timeout int `yaml:"timeout" json:"timeout"` +} + +type Sqlite struct { + ConnMaxLifetime int `yaml:"connMaxLifetime" json:"connMaxLifetime"` + DBFile string `yaml:"dbFile" json:"dbFile"` + EnableLog bool `yaml:"enableLog" json:"enableLog"` + MaxIdleConns int `yaml:"maxIdleConns" json:"maxIdleConns"` + MaxOpenConns int `yaml:"maxOpenConns" json:"maxOpenConns"` +} + +type Mysql struct { + ConnMaxLifetime int `yaml:"connMaxLifetime" json:"connMaxLifetime"` + Dsn string `yaml:"dsn" json:"dsn"` + EnableLog bool `yaml:"enableLog" json:"enableLog"` + MastersDsn []string `yaml:"mastersDsn" json:"mastersDsn"` + MaxIdleConns int `yaml:"maxIdleConns" json:"maxIdleConns"` + MaxOpenConns int `yaml:"maxOpenConns" json:"maxOpenConns"` + SlavesDsn []string `yaml:"slavesDsn" json:"slavesDsn"` +} + +type Postgresql struct { + ConnMaxLifetime int `yaml:"connMaxLifetime" json:"connMaxLifetime"` + Dsn string `yaml:"dsn" json:"dsn"` + EnableLog bool `yaml:"enableLog" json:"enableLog"` + MaxIdleConns int `yaml:"maxIdleConns" json:"maxIdleConns"` + MaxOpenConns int `yaml:"maxOpenConns" json:"maxOpenConns"` +} + +type Redis struct { + DialTimeout int `yaml:"dialTimeout" json:"dialTimeout"` + Dsn string `yaml:"dsn" json:"dsn"` + ReadTimeout int `yaml:"readTimeout" json:"readTimeout"` + WriteTimeout int `yaml:"writeTimeout" json:"writeTimeout"` +} + +type Database struct { + Driver string `yaml:"driver" json:"driver"` + Mongodb Mongodb `yaml:"mongodb" json:"mongodb"` + Mysql Mysql `yaml:"mysql" json:"mysql"` + Postgresql Mysql `yaml:"postgresql" json:"postgresql"` + Sqlite Sqlite `yaml:"sqlite" json:"sqlite"` +} + +type Mongodb struct { + Dsn string `yaml:"dsn" json:"dsn"` +} + +type Grpc struct { + EnableToken bool `yaml:"enableToken" json:"enableToken"` + HTTPPort int `yaml:"httpPort" json:"httpPort"` + Port int `yaml:"port" json:"port"` + ServerSecure ServerSecure `yaml:"serverSecure" json:"serverSecure"` +} + +type Logger struct { + Format string `yaml:"format" json:"format"` + IsSave bool `yaml:"isSave" json:"isSave"` + Level string `yaml:"level" json:"level"` +} + +type NacosRd struct { + IPAddr string `yaml:"ipAddr" json:"ipAddr"` + NamespaceID string `yaml:"namespaceID" json:"namespaceID"` + Port int `yaml:"port" json:"port"` +} + +type HTTP struct { + Port int `yaml:"port" json:"port"` + Timeout int `yaml:"timeout" json:"timeout"` +} diff --git a/_13_sponge-dtm-cache/grpc+http/internal/config/stock_cc.go b/_13_sponge-dtm-cache/grpc+http/internal/config/stock_cc.go new file mode 100644 index 0000000..9bf9ff0 --- /dev/null +++ 
b/_13_sponge-dtm-cache/grpc+http/internal/config/stock_cc.go @@ -0,0 +1,28 @@ +// code generated by https://stock + +package config + +import ( + "github.com/zhufuyi/sponge/pkg/conf" +) + +func NewCenter(configFile string) (*Center, error) { + nacosConf := &Center{} + err := conf.Parse(configFile, nacosConf) + return nacosConf, err +} + +type Center struct { + Nacos Nacos `yaml:"nacos" json:"nacos"` +} + +type Nacos struct { + ContextPath string `yaml:"contextPath" json:"contextPath"` + DataID string `yaml:"dataID" json:"dataID"` + Format string `yaml:"format" json:"format"` + Group string `yaml:"group" json:"group"` + IPAddr string `yaml:"ipAddr" json:"ipAddr"` + NamespaceID string `yaml:"namespaceID" json:"namespaceID"` + Port int `yaml:"port" json:"port"` + Scheme string `yaml:"scheme" json:"scheme"` +} diff --git a/a_micro-grpc-http-protobuf/internal/config/user_test.go b/_13_sponge-dtm-cache/grpc+http/internal/config/stock_test.go similarity index 83% rename from a_micro-grpc-http-protobuf/internal/config/user_test.go rename to _13_sponge-dtm-cache/grpc+http/internal/config/stock_test.go index 9385009..bba3f97 100644 --- a/a_micro-grpc-http-protobuf/internal/config/user_test.go +++ b/_13_sponge-dtm-cache/grpc+http/internal/config/stock_test.go @@ -7,11 +7,11 @@ import ( "github.com/zhufuyi/sponge/pkg/gofile" - "user/configs" + "stock/configs" ) func TestInit(t *testing.T) { - configFile := configs.Path("user.yml") + configFile := configs.Path("stock.yml") err := Init(configFile) if gofile.IsExists(configFile) { assert.NoError(t, err) @@ -24,6 +24,7 @@ func TestInit(t *testing.T) { str := Show() assert.NotEmpty(t, str) + t.Log(str) // set nil Set(nil) @@ -34,7 +35,7 @@ func TestInit(t *testing.T) { } func TestInitNacos(t *testing.T) { - configFile := configs.Path("user_cc.yml") + configFile := configs.Path("stock_cc.yml") _, err := NewCenter(configFile) if gofile.IsExists(configFile) { assert.NoError(t, err) diff --git a/_13_sponge-dtm-cache/grpc+http/internal/dao/stock.go b/_13_sponge-dtm-cache/grpc+http/internal/dao/stock.go new file mode 100644 index 0000000..80745d0 --- /dev/null +++ b/_13_sponge-dtm-cache/grpc+http/internal/dao/stock.go @@ -0,0 +1,273 @@ +package dao + +import ( + "context" + "database/sql" + "errors" + "fmt" + "github.com/zhufuyi/sponge/pkg/logger" + "time" + + "golang.org/x/sync/singleflight" + "gorm.io/gorm" + + cacheBase "github.com/zhufuyi/sponge/pkg/cache" + "github.com/zhufuyi/sponge/pkg/ggorm/query" + "github.com/zhufuyi/sponge/pkg/utils" + + "stock/internal/cache" + "stock/internal/model" +) + +var _ StockDao = (*stockDao)(nil) + +// StockDao defining the dao interface +type StockDao interface { + Create(ctx context.Context, table *model.Stock) error + DeleteByID(ctx context.Context, id uint64) error + UpdateByID(ctx context.Context, table *model.Stock) error + GetByID(ctx context.Context, id uint64) (*model.Stock, error) + GetByColumns(ctx context.Context, params *query.Params) ([]*model.Stock, int64, error) + + CreateByTx(ctx context.Context, tx *gorm.DB, table *model.Stock) (uint64, error) + DeleteByTx(ctx context.Context, tx *gorm.DB, id uint64) error + UpdateByTx(ctx context.Context, tx *gorm.DB, table *model.Stock) error +} + +type stockDao struct { + db *gorm.DB + cache cache.StockCache // if nil, the cache is not used. + sfg *singleflight.Group // if cache is nil, the sfg is not used. 
+} + +// NewStockDao creating the dao interface +func NewStockDao(db *gorm.DB, xCache cache.StockCache) StockDao { + if xCache == nil { + return &stockDao{db: db} + } + return &stockDao{ + db: db, + cache: xCache, + sfg: new(singleflight.Group), + } +} + +func (d *stockDao) deleteCache(ctx context.Context, id uint64) error { + if d.cache != nil { + return d.cache.Del(ctx, id) + } + return nil +} + +// Create a record, insert the record and the id value is written back to the table +func (d *stockDao) Create(ctx context.Context, table *model.Stock) error { + return d.db.WithContext(ctx).Create(table).Error +} + +// DeleteByID delete a record by id +func (d *stockDao) DeleteByID(ctx context.Context, id uint64) error { + err := d.db.WithContext(ctx).Where("id = ?", id).Delete(&model.Stock{}).Error + if err != nil { + return err + } + + // delete cache + _ = d.deleteCache(ctx, id) + + return nil +} + +// UpdateByID update a record by id +func (d *stockDao) UpdateByID(ctx context.Context, table *model.Stock) error { + err := d.updateDataByID(ctx, d.db, table) + + // delete cache + _ = d.deleteCache(ctx, table.ID) + + return err +} + +func (d *stockDao) updateDataByID(ctx context.Context, db *gorm.DB, table *model.Stock) error { + if table.ID < 1 { + return errors.New("id cannot be 0") + } + + update := map[string]interface{}{} + + if table.ProductID != 0 { + update["product_id"] = table.ProductID + } + if table.Stock != 0 { + update["stock"] = table.Stock + } + + return db.WithContext(ctx).Model(table).Updates(update).Error +} + +// GetByID get a record by id +func (d *stockDao) GetByID(ctx context.Context, id uint64) (*model.Stock, error) { + // no cache + if d.cache == nil { + record := &model.Stock{} + err := d.db.WithContext(ctx).Where("id = ?", id).First(record).Error + return record, err + } + + // get from cache or database + record, err := d.cache.Get(ctx, id) + if err == nil { + return record, nil + } + + if errors.Is(err, model.ErrCacheNotFound) { + // for the same id, prevent high concurrent simultaneous access to database + val, err, _ := d.sfg.Do(utils.Uint64ToStr(id), func() (interface{}, error) { //nolint + table := &model.Stock{} + err = d.db.WithContext(ctx).Where("id = ?", id).First(table).Error + if err != nil { + // if data is empty, set not found cache to prevent cache penetration, default expiration time 10 minutes + if errors.Is(err, model.ErrRecordNotFound) { + err = d.cache.SetCacheWithNotFound(ctx, id) + if err != nil { + return nil, err + } + return nil, model.ErrRecordNotFound + } + return nil, err + } + // set cache + err = d.cache.Set(ctx, id, table, cache.StockExpireTime) + if err != nil { + return nil, fmt.Errorf("cache.Set error: %v, id=%d", err, id) + } + return table, nil + }) + if err != nil { + return nil, err + } + table, ok := val.(*model.Stock) + if !ok { + return nil, model.ErrRecordNotFound + } + return table, nil + } else if errors.Is(err, cacheBase.ErrPlaceholder) { + return nil, model.ErrRecordNotFound + } + + // fail fast, if cache error return, don't request to db + return nil, err +} + +// GetByColumns get paging records by column information, +// Note: query performance degrades when table rows are very large because of the use of offset. 
+// +// params includes paging parameters and query parameters +// paging parameters (required): +// +// page: page number, starting from 0 +// limit: lines per page +// sort: sort fields, default is id backwards, you can add - sign before the field to indicate reverse order, no - sign to indicate ascending order, multiple fields separated by comma +// +// query parameters (not required): +// +// name: column name +// exp: expressions, which default is "=", support =, !=, >, >=, <, <=, like, in +// value: column value, if exp=in, multiple values are separated by commas +// logic: logical type, defaults to and when value is null, only &(and), ||(or) +// +// example: search for a male over 20 years of age +// +// params = &query.Params{ +// Page: 0, +// Limit: 20, +// Columns: []query.Column{ +// { +// Name: "age", +// Exp: ">", +// Value: 20, +// }, +// { +// Name: "gender", +// Value: "male", +// }, +// } +func (d *stockDao) GetByColumns(ctx context.Context, params *query.Params) ([]*model.Stock, int64, error) { + queryStr, args, err := params.ConvertToGormConditions() + if err != nil { + return nil, 0, errors.New("query params error: " + err.Error()) + } + + var total int64 + if params.Sort != "ignore count" { // determine if count is required + err = d.db.WithContext(ctx).Model(&model.Stock{}).Select([]string{"id"}).Where(queryStr, args...).Count(&total).Error + if err != nil { + return nil, 0, err + } + if total == 0 { + return nil, total, nil + } + } + + records := []*model.Stock{} + order, limit, offset := params.ConvertToPage() + err = d.db.WithContext(ctx).Order(order).Limit(limit).Offset(offset).Where(queryStr, args...).Find(&records).Error + if err != nil { + return nil, 0, err + } + + return records, total, err +} + +// CreateByTx create a record in the database using the provided transaction +func (d *stockDao) CreateByTx(ctx context.Context, tx *gorm.DB, table *model.Stock) (uint64, error) { + err := tx.WithContext(ctx).Create(table).Error + return table.ID, err +} + +// DeleteByTx delete a record by id in the database using the provided transaction +func (d *stockDao) DeleteByTx(ctx context.Context, tx *gorm.DB, id uint64) error { + err := tx.WithContext(ctx).Where("id = ?", id).Delete(&model.Stock{}).Error + if err != nil { + return err + } + + // delete cache + _ = d.deleteCache(ctx, id) + + return nil +} + +// UpdateByTx update a record by id in the database using the provided transaction +func (d *stockDao) UpdateByTx(ctx context.Context, tx *gorm.DB, table *model.Stock) error { + err := d.updateDataByID(ctx, tx, table) + + // delete cache + _ = d.deleteCache(ctx, table.ID) + + return err +} + +// ------------------------------------------------------------------------------------------- + +// UpdateStockInTx update the stock of a record +func UpdateStockInTx(tx *sql.Tx, table *model.Stock) error { + sqlStr := "update stock set stock=?, updated_at=? where id=?" + result, err := tx.Exec(sqlStr, table.Stock, time.Now(), table.ID) + rowCount, _ := result.RowsAffected() + logger.Info("[mysql] info", logger.String("sql", sqlStr), logger.Any("args", []interface{}{table.Stock, time.Now(), table.ID}), logger.Int64("rows", rowCount)) + return err +} + +// GetStockByID get the stock of a record by id +func GetStockByID(db *sql.DB, id uint64) (string, error) { + sqlStr := "select stock from stock where id=?" 
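+ // note: GetStockByID and UpdateStockInTx above use database/sql directly rather than gorm;
+ // in internal/service of this patch, UpdateStockInTx is invoked from the DTM barrier callback
+ // via bb.CallWithDB with a *sql.Tx, and GetStockByID is the query function passed to the
+ // rockscache Fetch call.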
+ row := db.QueryRow(sqlStr, id) + + var stock string + err := row.Scan(&stock) + if err != nil { + return "", err + } + logger.Info("[mysql] info", logger.String("sql", sqlStr), logger.Any("args", []interface{}{id})) + return stock, nil +} diff --git a/_13_sponge-dtm-cache/grpc+http/internal/dao/stock_test.go b/_13_sponge-dtm-cache/grpc+http/internal/dao/stock_test.go new file mode 100644 index 0000000..9690630 --- /dev/null +++ b/_13_sponge-dtm-cache/grpc+http/internal/dao/stock_test.go @@ -0,0 +1,232 @@ +package dao + +import ( + "context" + "testing" + + "github.com/DATA-DOG/go-sqlmock" + "github.com/stretchr/testify/assert" + "github.com/zhufuyi/sponge/pkg/ggorm/query" + "github.com/zhufuyi/sponge/pkg/gotest" + "github.com/zhufuyi/sponge/pkg/utils" + + "stock/internal/cache" + "stock/internal/model" +) + +func newStockDao() *gotest.Dao { + testData := &model.Stock{} + testData.ID = 1 + // you can set the other fields of testData here, such as: + //testData.CreatedAt = time.Now() + //testData.UpdatedAt = testData.CreatedAt + + // init mock cache + //c := gotest.NewCache(map[string]interface{}{"no cache": testData}) // to test mysql, disable caching + c := gotest.NewCache(map[string]interface{}{utils.Uint64ToStr(testData.ID): testData}) + c.ICache = cache.NewStockCache(&model.CacheType{ + CType: "redis", + Rdb: c.RedisClient, + }) + + // init mock dao + d := gotest.NewDao(c, testData) + d.IDao = NewStockDao(d.DB, c.ICache.(cache.StockCache)) + + return d +} + +func Test_stockDao_Create(t *testing.T) { + d := newStockDao() + defer d.Close() + testData := d.TestData.(*model.Stock) + + d.SQLMock.ExpectBegin() + d.SQLMock.ExpectExec("INSERT INTO .*"). + WithArgs(d.GetAnyArgs(testData)...). + WillReturnResult(sqlmock.NewResult(1, 1)) + d.SQLMock.ExpectCommit() + + err := d.IDao.(StockDao).Create(d.Ctx, testData) + if err != nil { + t.Fatal(err) + } +} + +func Test_stockDao_DeleteByID(t *testing.T) { + d := newStockDao() + defer d.Close() + testData := d.TestData.(*model.Stock) + expectedSQLForDeletion := "DELETE .*" + + d.SQLMock.ExpectBegin() + d.SQLMock.ExpectExec(expectedSQLForDeletion). + WithArgs(testData.ID). + WillReturnResult(sqlmock.NewResult(int64(testData.ID), 1)) + d.SQLMock.ExpectCommit() + + err := d.IDao.(StockDao).DeleteByID(d.Ctx, testData.ID) + if err != nil { + t.Fatal(err) + } + + // zero id error + err = d.IDao.(StockDao).DeleteByID(d.Ctx, 0) + assert.Error(t, err) +} + +func Test_stockDao_UpdateByID(t *testing.T) { + d := newStockDao() + defer d.Close() + testData := d.TestData.(*model.Stock) + + d.SQLMock.ExpectBegin() + d.SQLMock.ExpectExec("UPDATE .*"). + WithArgs(d.AnyTime, testData.ID). + WillReturnResult(sqlmock.NewResult(1, 1)) + d.SQLMock.ExpectCommit() + + err := d.IDao.(StockDao).UpdateByID(d.Ctx, testData) + if err != nil { + t.Fatal(err) + } + + // zero id error + err = d.IDao.(StockDao).UpdateByID(d.Ctx, &model.Stock{}) + assert.Error(t, err) + +} + +func Test_stockDao_GetByID(t *testing.T) { + d := newStockDao() + defer d.Close() + testData := d.TestData.(*model.Stock) + + // column names and corresponding data + rows := sqlmock.NewRows([]string{"id"}). + AddRow(testData.ID) + + d.SQLMock.ExpectQuery("SELECT .*"). + WithArgs(testData.ID). + WillReturnRows(rows) + + _, err := d.IDao.(StockDao).GetByID(d.Ctx, testData.ID) + if err != nil { + t.Fatal(err) + } + + err = d.SQLMock.ExpectationsWereMet() + if err != nil { + t.Fatal(err) + } + + // notfound error + d.SQLMock.ExpectQuery("SELECT .*"). + WithArgs(2). 
+ WillReturnRows(rows) + _, err = d.IDao.(StockDao).GetByID(d.Ctx, 2) + assert.Error(t, err) + + d.SQLMock.ExpectQuery("SELECT .*"). + WithArgs(3, 4). + WillReturnRows(rows) + _, err = d.IDao.(StockDao).GetByID(d.Ctx, 4) + assert.Error(t, err) +} + +func Test_stockDao_GetByColumns(t *testing.T) { + d := newStockDao() + defer d.Close() + testData := d.TestData.(*model.Stock) + + // column names and corresponding data + rows := sqlmock.NewRows([]string{"id"}). + AddRow(testData.ID) + + d.SQLMock.ExpectQuery("SELECT .*").WillReturnRows(rows) + + _, _, err := d.IDao.(StockDao).GetByColumns(d.Ctx, &query.Params{ + Page: 0, + Limit: 10, + Sort: "ignore count", // ignore test count(*) + }) + if err != nil { + t.Fatal(err) + } + + err = d.SQLMock.ExpectationsWereMet() + if err != nil { + t.Fatal(err) + } + + // err test + _, _, err = d.IDao.(StockDao).GetByColumns(d.Ctx, &query.Params{ + Page: 0, + Limit: 10, + Columns: []query.Column{ + { + Name: "id", + Exp: "<", + Value: 0, + }, + }, + }) + assert.Error(t, err) + + // error test + dao := &stockDao{} + _, _, err = dao.GetByColumns(context.Background(), &query.Params{Columns: []query.Column{{}}}) + t.Log(err) +} + +func Test_stockDao_CreateByTx(t *testing.T) { + d := newStockDao() + defer d.Close() + testData := d.TestData.(*model.Stock) + + d.SQLMock.ExpectBegin() + d.SQLMock.ExpectExec("INSERT INTO .*"). + WithArgs(d.GetAnyArgs(testData)...). + WillReturnResult(sqlmock.NewResult(1, 1)) + d.SQLMock.ExpectCommit() + + _, err := d.IDao.(StockDao).CreateByTx(d.Ctx, d.DB, testData) + if err != nil { + t.Fatal(err) + } +} + +func Test_stockDao_DeleteByTx(t *testing.T) { + d := newStockDao() + defer d.Close() + testData := d.TestData.(*model.Stock) + expectedSQLForDeletion := "DELETE .*" + + d.SQLMock.ExpectBegin() + d.SQLMock.ExpectExec(expectedSQLForDeletion). + WithArgs(testData.ID). + WillReturnResult(sqlmock.NewResult(int64(testData.ID), 1)) + d.SQLMock.ExpectCommit() + + err := d.IDao.(StockDao).DeleteByTx(d.Ctx, d.DB, testData.ID) + if err != nil { + t.Fatal(err) + } +} + +func Test_stockDao_UpdateByTx(t *testing.T) { + d := newStockDao() + defer d.Close() + testData := d.TestData.(*model.Stock) + + d.SQLMock.ExpectBegin() + d.SQLMock.ExpectExec("UPDATE .*"). + WithArgs(d.AnyTime, testData.ID). + WillReturnResult(sqlmock.NewResult(1, 1)) + d.SQLMock.ExpectCommit() + + err := d.IDao.(StockDao).UpdateByTx(d.Ctx, d.DB, testData) + if err != nil { + t.Fatal(err) + } +} diff --git a/_13_sponge-dtm-cache/grpc+http/internal/ecode/atomic_rpc.go b/_13_sponge-dtm-cache/grpc+http/internal/ecode/atomic_rpc.go new file mode 100644 index 0000000..3b3ab1b --- /dev/null +++ b/_13_sponge-dtm-cache/grpc+http/internal/ecode/atomic_rpc.go @@ -0,0 +1,20 @@ +// Code generated by https://github.com/zhufuyi/sponge + +package ecode + +import ( + "github.com/zhufuyi/sponge/pkg/errcode" +) + +// atomic business-level rpc error codes. +// the _atomicNO value range is 1~100, if the same error code is used, it will cause panic. 
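+// usage sketch (assumed, mirroring the call pattern used in internal/service of this patch):
+//
+//    if err != nil {
+//        return nil, ecode.StatusUpdateAtomic.ToRPCErr()
+//    }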
+var ( + _atomicNO = 72 + _atomicName = "atomic" + _atomicBaseCode = errcode.RCode(_atomicNO) + + StatusUpdateAtomic = errcode.NewRPCStatus(_atomicBaseCode+1, "failed to Update "+_atomicName) + StatusQueryAtomic = errcode.NewRPCStatus(_atomicBaseCode+2, "failed to Query "+_atomicName) + + // error codes are globally unique, adding 1 to the previous error code +) diff --git a/_13_sponge-dtm-cache/grpc+http/internal/ecode/callback_rpc.go b/_13_sponge-dtm-cache/grpc+http/internal/ecode/callback_rpc.go new file mode 100644 index 0000000..6997311 --- /dev/null +++ b/_13_sponge-dtm-cache/grpc+http/internal/ecode/callback_rpc.go @@ -0,0 +1,20 @@ +// Code generated by https://github.com/zhufuyi/sponge + +package ecode + +import ( + "github.com/zhufuyi/sponge/pkg/errcode" +) + +// callback business-level rpc error codes. +// the _callbackNO value range is 1~100, if the same error code is used, it will cause panic. +var ( + _callbackNO = 44 + _callbackName = "callback" + _callbackBaseCode = errcode.RCode(_callbackNO) + + StatusQueryPreparedCallback = errcode.NewRPCStatus(_callbackBaseCode+1, "failed to QueryPrepared "+_callbackName) + StatusDeleteCacheCallback = errcode.NewRPCStatus(_callbackBaseCode+2, "failed to DeleteCache "+_callbackName) + + // error codes are globally unique, adding 1 to the previous error code +) diff --git a/_13_sponge-dtm-cache/grpc+http/internal/ecode/downgrade_rpc.go b/_13_sponge-dtm-cache/grpc+http/internal/ecode/downgrade_rpc.go new file mode 100644 index 0000000..b1f4dcc --- /dev/null +++ b/_13_sponge-dtm-cache/grpc+http/internal/ecode/downgrade_rpc.go @@ -0,0 +1,21 @@ +// Code generated by https://github.com/zhufuyi/sponge + +package ecode + +import ( + "github.com/zhufuyi/sponge/pkg/errcode" +) + +// downgrade business-level rpc error codes. +// the _downgradeNO value range is 1~100, if the same error code is used, it will cause panic. +var ( + _downgradeNO = 74 + _downgradeName = "downgrade" + _downgradeBaseCode = errcode.RCode(_downgradeNO) + + StatusUpdateDowngrade = errcode.NewRPCStatus(_downgradeBaseCode+1, "failed to Update "+_downgradeName) + StatusQueryDowngrade = errcode.NewRPCStatus(_downgradeBaseCode+2, "failed to Query "+_downgradeName) + StatusDowngradeBranchDowngrade = errcode.NewRPCStatus(_downgradeBaseCode+3, "failed to DowngradeBranch "+_downgradeName) + + // error codes are globally unique, adding 1 to the previous error code +) diff --git a/_13_sponge-dtm-cache/grpc+http/internal/ecode/final_rpc.go b/_13_sponge-dtm-cache/grpc+http/internal/ecode/final_rpc.go new file mode 100644 index 0000000..1cc42d6 --- /dev/null +++ b/_13_sponge-dtm-cache/grpc+http/internal/ecode/final_rpc.go @@ -0,0 +1,20 @@ +// Code generated by https://github.com/zhufuyi/sponge + +package ecode + +import ( + "github.com/zhufuyi/sponge/pkg/errcode" +) + +// final business-level rpc error codes. +// the _finalNO value range is 1~100, if the same error code is used, it will cause panic. 
+var ( + _finalNO = 89 + _finalName = "final" + _finalBaseCode = errcode.RCode(_finalNO) + + StatusUpdateFinal = errcode.NewRPCStatus(_finalBaseCode+1, "failed to Update "+_finalName) + StatusQueryFinal = errcode.NewRPCStatus(_finalBaseCode+2, "failed to Query "+_finalName) + + // error codes are globally unique, adding 1 to the previous error code +) diff --git a/_13_sponge-dtm-cache/grpc+http/internal/ecode/stock_rpc.go b/_13_sponge-dtm-cache/grpc+http/internal/ecode/stock_rpc.go new file mode 100644 index 0000000..d5b1a13 --- /dev/null +++ b/_13_sponge-dtm-cache/grpc+http/internal/ecode/stock_rpc.go @@ -0,0 +1,23 @@ +// Code generated by https://github.com/zhufuyi/sponge + +package ecode + +import ( + "github.com/zhufuyi/sponge/pkg/errcode" +) + +// stock business-level rpc error codes. +// the _stockNO value range is 1~100, if the same error code is used, it will cause panic. +var ( + _stockNO = 82 + _stockName = "stock" + _stockBaseCode = errcode.RCode(_stockNO) + + StatusCreateStock = errcode.NewRPCStatus(_stockBaseCode+1, "failed to Create "+_stockName) + StatusDeleteByIDStock = errcode.NewRPCStatus(_stockBaseCode+2, "failed to DeleteByID "+_stockName) + StatusUpdateByIDStock = errcode.NewRPCStatus(_stockBaseCode+3, "failed to UpdateByID "+_stockName) + StatusGetByIDStock = errcode.NewRPCStatus(_stockBaseCode+4, "failed to GetByID "+_stockName) + StatusListStock = errcode.NewRPCStatus(_stockBaseCode+5, "failed to List "+_stockName) + + // error codes are globally unique, adding 1 to the previous error code +) diff --git a/_13_sponge-dtm-cache/grpc+http/internal/ecode/strong_rpc.go b/_13_sponge-dtm-cache/grpc+http/internal/ecode/strong_rpc.go new file mode 100644 index 0000000..6ad9768 --- /dev/null +++ b/_13_sponge-dtm-cache/grpc+http/internal/ecode/strong_rpc.go @@ -0,0 +1,20 @@ +// Code generated by https://github.com/zhufuyi/sponge + +package ecode + +import ( + "github.com/zhufuyi/sponge/pkg/errcode" +) + +// strong business-level rpc error codes. +// the _strongNO value range is 1~100, if the same error code is used, it will cause panic. 
+var (
+ _strongNO = 58
+ _strongName = "strong"
+ _strongBaseCode = errcode.RCode(_strongNO)
+
+ StatusUpdateStrong = errcode.NewRPCStatus(_strongBaseCode+1, "failed to Update "+_strongName)
+ StatusQueryStrong = errcode.NewRPCStatus(_strongBaseCode+2, "failed to Query "+_strongName)
+
+ // error codes are globally unique, adding 1 to the previous error code
+)
diff --git a/a_micro-grpc-http-protobuf/internal/ecode/systemCode_http.go b/_13_sponge-dtm-cache/grpc+http/internal/ecode/systemCode_http.go
similarity index 90%
rename from a_micro-grpc-http-protobuf/internal/ecode/systemCode_http.go
rename to _13_sponge-dtm-cache/grpc+http/internal/ecode/systemCode_http.go
index 42796fc..d5f749e 100644
--- a/a_micro-grpc-http-protobuf/internal/ecode/systemCode_http.go
+++ b/_13_sponge-dtm-cache/grpc+http/internal/ecode/systemCode_http.go
@@ -13,11 +13,12 @@ var (
 Unauthorized = errcode.Unauthorized
 InternalServerError = errcode.InternalServerError
 NotFound = errcode.NotFound
- AlreadyExists = errcode.AlreadyExists
 Timeout = errcode.Timeout
 TooManyRequests = errcode.TooManyRequests
 Forbidden = errcode.Forbidden
 LimitExceed = errcode.LimitExceed
+ Conflict = errcode.Conflict
+ TooEarly = errcode.TooEarly
 DeadlineExceeded = errcode.DeadlineExceeded
 AccessDenied = errcode.AccessDenied
@@ -34,3 +35,5 @@ var (
 Unimplemented = errcode.Unimplemented
 DataLoss = errcode.DataLoss
 )
+
+var SkipResponse = errcode.SkipResponse
diff --git a/b_sponge-dtm-msg/internal/ecode/systemCode_rpc.go b/_13_sponge-dtm-cache/grpc+http/internal/ecode/systemCode_rpc.go
similarity index 91%
rename from b_sponge-dtm-msg/internal/ecode/systemCode_rpc.go
rename to _13_sponge-dtm-cache/grpc+http/internal/ecode/systemCode_rpc.go
index e8d872b..e2e05d4 100644
--- a/b_sponge-dtm-msg/internal/ecode/systemCode_rpc.go
+++ b/_13_sponge-dtm-cache/grpc+http/internal/ecode/systemCode_rpc.go
@@ -31,9 +31,13 @@ var (
 StatusLimitExceed = errcode.StatusLimitExceed
 StatusMethodNotAllowed = errcode.StatusMethodNotAllowed
 StatusAccessDenied = errcode.StatusAccessDenied
+ //StatusConflict = errcode.StatusConflict
 )
 // Any kev-value
 func Any(key string, val interface{}) errcode.Detail {
 return errcode.Any(key, val)
 }
+
+// StatusSkipResponse is only used for grpc-gateway
+var StatusSkipResponse = errcode.SkipResponse
diff --git a/_13_sponge-dtm-cache/grpc+http/internal/handler/atomic.go b/_13_sponge-dtm-cache/grpc+http/internal/handler/atomic.go
new file mode 100644
index 0000000..f260b9f
--- /dev/null
+++ b/_13_sponge-dtm-cache/grpc+http/internal/handler/atomic.go
@@ -0,0 +1,35 @@
+// Code generated by https://github.com/zhufuyi/sponge
+
+package handler
+
+import (
+ "context"
+
+ stockV1 "stock/api/stock/v1"
+ "stock/internal/service"
+)
+
+var _ stockV1.AtomicLogicer = (*atomicHandler)(nil)
+
+type atomicHandler struct {
+ server stockV1.AtomicServer
+}
+
+// NewAtomicHandler create a handler
+func NewAtomicHandler() stockV1.AtomicLogicer {
+ return &atomicHandler{
+ server: service.NewAtomicServer(),
+ }
+}
+
+// Update updates the data while keeping the DB and cache operations atomic. Even if the redis process crashes right after the DB is updated, the cache will still be updated once the redis service restarts.
+func (h *atomicHandler) Update(ctx context.Context, req *stockV1.UpdateAtomicRequest) (*stockV1.UpdateAtomicRequestReply, error) {
+
+ return h.server.Update(ctx, req)
+}
+
+// Query queries the data
+func (h *atomicHandler) Query(ctx context.Context, req *stockV1.QueryAtomicRequest) (*stockV1.QueryAtomicReply, error) {
+
+ return h.server.Query(ctx, req)
+}
diff --git a/_13_sponge-dtm-cache/grpc+http/internal/handler/callback.go b/_13_sponge-dtm-cache/grpc+http/internal/handler/callback.go
new file mode 100644
index 0000000..6f01c83
--- /dev/null
+++ b/_13_sponge-dtm-cache/grpc+http/internal/handler/callback.go
@@ -0,0 +1,36 @@
+// Code generated by https://github.com/zhufuyi/sponge
+
+package handler
+
+import (
+ "context"
+
+ "github.com/zhufuyi/sponge/pkg/gin/middleware"
+ stockV1 "stock/api/stock/v1"
+ "stock/internal/service"
+)
+
+var _ stockV1.CallbackLogicer = (*callbackHandler)(nil)
+
+type callbackHandler struct {
+ server stockV1.CallbackServer
+}
+
+// NewCallbackHandler create a handler
+func NewCallbackHandler() stockV1.CallbackLogicer {
+ return &callbackHandler{
+ server: service.NewCallbackServer(),
+ }
+}
+
+// QueryPrepared back-queries the data
+func (h *callbackHandler) QueryPrepared(ctx context.Context, req *stockV1.QueryPreparedRequest) (*stockV1.QueryPreparedReply, error) {
+ _, ctx = middleware.AdaptCtx(ctx)
+ return h.server.QueryPrepared(ctx, req)
+}
+
+// DeleteCache deletes the cache
+func (h *callbackHandler) DeleteCache(ctx context.Context, req *stockV1.DeleteCacheRequest) (*stockV1.DeleteCacheReply, error) {
+
+ return h.server.DeleteCache(ctx, req)
+}
diff --git a/_13_sponge-dtm-cache/grpc+http/internal/handler/downgrade.go b/_13_sponge-dtm-cache/grpc+http/internal/handler/downgrade.go
new file mode 100644
index 0000000..b238b25
--- /dev/null
+++ b/_13_sponge-dtm-cache/grpc+http/internal/handler/downgrade.go
@@ -0,0 +1,42 @@
+// Code generated by https://github.com/zhufuyi/sponge
+
+package handler
+
+import (
+ "context"
+
+ "github.com/zhufuyi/sponge/pkg/gin/middleware"
+ stockV1 "stock/api/stock/v1"
+ "stock/internal/service"
+)
+
+var _ stockV1.DowngradeLogicer = (*downgradeHandler)(nil)
+
+type downgradeHandler struct {
+ server stockV1.DowngradeServer
+}
+
+// NewDowngradeHandler create a handler
+func NewDowngradeHandler() stockV1.DowngradeLogicer {
+ return &downgradeHandler{
+ server: service.NewDowngradeServer(),
+ }
+}
+
+// Update updates the data, keeping the DB and cache strongly consistent while the consistency level can be upgraded or downgraded
+func (h *downgradeHandler) Update(ctx context.Context, req *stockV1.UpdateDowngradeRequest) (*stockV1.UpdateDowngradeRequestReply, error) {
+
+ return h.server.Update(ctx, req)
+}
+
+// Query queries the data
+func (h *downgradeHandler) Query(ctx context.Context, req *stockV1.QueryDowngradeRequest) (*stockV1.QueryDowngradeReply, error) {
+
+ return h.server.Query(ctx, req)
+}
+
+// DowngradeBranch is the strong-consistency branch used when upgrading or downgrading
+func (h *downgradeHandler) DowngradeBranch(ctx context.Context, req *stockV1.DowngradeBranchRequest) (*stockV1.DowngradeBranchReply, error) {
+ _, ctx = middleware.AdaptCtx(ctx)
+ return h.server.DowngradeBranch(ctx, req)
+}
diff --git a/_13_sponge-dtm-cache/grpc+http/internal/handler/final.go b/_13_sponge-dtm-cache/grpc+http/internal/handler/final.go
new file mode 100644
index 0000000..1f33d34
--- /dev/null
+++ b/_13_sponge-dtm-cache/grpc+http/internal/handler/final.go
@@ -0,0 +1,35 @@
+// Code generated by https://github.com/zhufuyi/sponge
+
+package handler
+
+import (
+ "context"
+
+ stockV1 "stock/api/stock/v1"
+ "stock/internal/service"
+)
+
+var _ stockV1.FinalLogicer = (*finalHandler)(nil)
+
+type finalHandler struct {
+ server stockV1.FinalServer
+}
+
+// NewFinalHandler create a handler
+func NewFinalHandler() stockV1.FinalLogicer {
+ return &finalHandler{
+ server: service.NewFinalServer(),
+ }
+}
+
+// Update updates the data, with eventual consistency between the DB and cache
+func (h *finalHandler) Update(ctx context.Context, req *stockV1.UpdateFinalRequest) (*stockV1.UpdateFinalRequestReply, error) {
+
+ return h.server.Update(ctx, req)
+}
+
+// Query queries the data
+func (h *finalHandler) Query(ctx context.Context, req *stockV1.QueryFinalRequest) (*stockV1.QueryFinalReply, error) {
+
+ return h.server.Query(ctx, req)
+}
diff --git a/_13_sponge-dtm-cache/grpc+http/internal/handler/stock.go b/_13_sponge-dtm-cache/grpc+http/internal/handler/stock.go
new file mode 100644
index 0000000..d06c2fa
--- /dev/null
+++ b/_13_sponge-dtm-cache/grpc+http/internal/handler/stock.go
@@ -0,0 +1,46 @@
+package handler
+
+import (
+ "context"
+
+ stockV1 "stock/api/stock/v1"
+ "stock/internal/service"
+)
+
+var _ stockV1.StockLogicer = (*stockHandler)(nil)
+
+type stockHandler struct {
+ server stockV1.StockServer
+}
+
+// NewStockHandler create a handler
+func NewStockHandler() stockV1.StockLogicer {
+ return &stockHandler{
+ server: service.NewStockServer(),
+ }
+}
+
+// Create a record
+func (h *stockHandler) Create(ctx context.Context, req *stockV1.CreateStockRequest) (*stockV1.CreateStockReply, error) {
+ return h.server.Create(ctx, req)
+}
+
+// DeleteByID delete a record by id
+func (h *stockHandler) DeleteByID(ctx context.Context, req *stockV1.DeleteStockByIDRequest) (*stockV1.DeleteStockByIDReply, error) {
+ return h.server.DeleteByID(ctx, req)
+}
+
+// UpdateByID update a record by id
+func (h *stockHandler) UpdateByID(ctx context.Context, req *stockV1.UpdateStockByIDRequest) (*stockV1.UpdateStockByIDReply, error) {
+ return h.server.UpdateByID(ctx, req)
+}
+
+// GetByID get a record by id
+func (h *stockHandler) GetByID(ctx context.Context, req *stockV1.GetStockByIDRequest) (*stockV1.GetStockByIDReply, error) {
+ return h.server.GetByID(ctx, req)
+}
+
+// List of records by query parameters
+func (h *stockHandler) List(ctx context.Context, req *stockV1.ListStockRequest) (*stockV1.ListStockReply, error) {
+ return h.server.List(ctx, req)
+}
diff --git a/_13_sponge-dtm-cache/grpc+http/internal/handler/strong.go b/_13_sponge-dtm-cache/grpc+http/internal/handler/strong.go
new file mode 100644
index 0000000..2e47d27
--- /dev/null
+++ b/_13_sponge-dtm-cache/grpc+http/internal/handler/strong.go
@@ -0,0 +1,35 @@
+// Code generated by https://github.com/zhufuyi/sponge
+
+package handler
+
+import (
+ "context"
+
+ stockV1 "stock/api/stock/v1"
+ "stock/internal/service"
+)
+
+var _ stockV1.StrongLogicer = (*strongHandler)(nil)
+
+type strongHandler struct {
+ server stockV1.StrongServer
+}
+
+// NewStrongHandler create a handler
+func NewStrongHandler() stockV1.StrongLogicer {
+ return &strongHandler{
+ server: service.NewStrongServer(),
+ }
+}
+
+// Update updates the data, with strong consistency between the DB and cache
+func (h *strongHandler) Update(ctx context.Context, req *stockV1.UpdateStrongRequest) (*stockV1.UpdateStrongRequestReply, error) {
+
+ return h.server.Update(ctx, req)
+}
+
+// Query queries the data
+func (h *strongHandler) Query(ctx context.Context, req *stockV1.QueryStrongRequest) (*stockV1.QueryStrongReply, error) {
+
+ return h.server.Query(ctx, req)
+}
diff --git a/_13_sponge-dtm-cache/grpc+http/internal/model/init.go b/_13_sponge-dtm-cache/grpc+http/internal/model/init.go
new file mode 100644
index 0000000..7087975
--- /dev/null
+++ b/_13_sponge-dtm-cache/grpc+http/internal/model/init.go
@@ -0,0 +1,234 @@
+// Package model initializes the database driver and defines the data structures corresponding to the tables.
+package model + +import ( + "database/sql" + "strings" + "sync" + "time" + + "github.com/dtm-labs/rockscache" + "github.com/redis/go-redis/v9" + "gorm.io/gorm" + + "github.com/zhufuyi/sponge/pkg/ggorm" + "github.com/zhufuyi/sponge/pkg/goredis" + "github.com/zhufuyi/sponge/pkg/logger" + "github.com/zhufuyi/sponge/pkg/utils" + + "stock/internal/config" +) + +var ( + // ErrCacheNotFound No hit cache + ErrCacheNotFound = redis.Nil + + // ErrRecordNotFound no records found + ErrRecordNotFound = gorm.ErrRecordNotFound +) + +var ( + db *gorm.DB + sdb *sql.DB + once1 sync.Once + + redisCli *redis.Client + once2 sync.Once + + cacheType *CacheType + once3 sync.Once +) + +// CacheType cache type +type CacheType struct { + CType string // cache type memory or redis + Rdb *redis.Client // if CType=redis, Rdb cannot be empty +} + +// InitCache initial cache +func InitCache(cType string) { + cacheType = &CacheType{ + CType: cType, + } + + if cType == "redis" { + cacheType.Rdb = GetRedisCli() + } +} + +// GetCacheType get cacheType +func GetCacheType() *CacheType { + if cacheType == nil { + once3.Do(func() { + InitCache(config.Get().App.CacheType) + }) + } + + return cacheType +} + +// InitRedis connect redis +func InitRedis() { + opts := []goredis.Option{ + goredis.WithDialTimeout(time.Duration(config.Get().Redis.DialTimeout) * time.Second), + goredis.WithReadTimeout(time.Duration(config.Get().Redis.ReadTimeout) * time.Second), + goredis.WithWriteTimeout(time.Duration(config.Get().Redis.WriteTimeout) * time.Second), + } + if config.Get().App.EnableTrace { + opts = append(opts, goredis.WithEnableTrace()) + } + + var err error + redisCli, err = goredis.Init(config.Get().Redis.Dsn, opts...) + if err != nil { + panic("goredis.Init error: " + err.Error()) + } +} + +// GetRedisCli get redis client +func GetRedisCli() *redis.Client { + if redisCli == nil { + once2.Do(func() { + InitRedis() + }) + } + + return redisCli +} + +// CloseRedis close redis +func CloseRedis() error { + if redisCli == nil { + return nil + } + + err := redisCli.Close() + if err != nil && err.Error() != redis.ErrClosed.Error() { + return err + } + + return nil +} + +// ------------------------------------------------------------------------------------------ + +// InitDB connect database +func InitDB() { + switch strings.ToLower(config.Get().Database.Driver) { + case ggorm.DBDriverMysql, ggorm.DBDriverTidb: + InitMysql() + default: + panic("InitDB error, unsupported database driver: " + config.Get().Database.Driver) + } +} + +// InitMysql connect mysql +func InitMysql() { + opts := []ggorm.Option{ + ggorm.WithMaxIdleConns(config.Get().Database.Mysql.MaxIdleConns), + ggorm.WithMaxOpenConns(config.Get().Database.Mysql.MaxOpenConns), + ggorm.WithConnMaxLifetime(time.Duration(config.Get().Database.Mysql.ConnMaxLifetime) * time.Minute), + } + if config.Get().Database.Mysql.EnableLog { + opts = append(opts, + ggorm.WithLogging(logger.Get()), + ggorm.WithLogRequestIDKey("request_id"), + ) + } + + if config.Get().App.EnableTrace { + opts = append(opts, ggorm.WithEnableTrace()) + } + + // setting mysql slave and master dsn addresses, + // if there is no read/write separation, you can comment out the following piece of code + //opts = append(opts, ggorm.WithRWSeparation( + // config.Get().Database.Mysql.SlavesDsn, + // config.Get().Database.Mysql.MastersDsn..., + //)) + + // add custom gorm plugin + //opts = append(opts, ggorm.WithGormPlugin(yourPlugin)) + + var dsn = utils.AdaptiveMysqlDsn(config.Get().Database.Mysql.Dsn) + var err error + db, 
err = ggorm.InitMysql(dsn, opts...) + if err != nil { + panic("InitMysql error: " + err.Error()) + } + sdb, err = db.DB() + if err != nil { + panic("InitMysql error: " + err.Error()) + } +} + +// GetDB get gorm db +func GetDB() *gorm.DB { + if db == nil { + once1.Do(func() { + InitDB() + }) + } + + return db +} + +// GetSDB get sql db +func GetSDB() *sql.DB { + if sdb == nil { + once1.Do(func() { + InitDB() + }) + } + + return sdb +} + +// CloseDB close db +func CloseDB() error { + return ggorm.CloseDB(db) +} + +// ------------------------------------------------------------------------------------------ + +var ( + cacheClient *rockscache.Client + cacheClientOnce sync.Once + + strongCacheClient *rockscache.Client + strongCacheClientOnce sync.Once +) + +// InitRockscache initial rockscache +func InitRockscache() { + cacheClientOnce.Do(func() { + rdb := GetRedisCli() + cacheClient = rockscache.NewClient(rdb, rockscache.NewDefaultOptions()) + }) +} + +// GetRockscacheClient get rockscache client +func GetRockscacheClient() *rockscache.Client { + if cacheClient == nil { + InitRockscache() + } + return cacheClient +} + +// InitStrongRockscache initial rockscache +func InitStrongRockscache() { + strongCacheClientOnce.Do(func() { + rdb := GetRedisCli() + options := rockscache.NewDefaultOptions() + options.StrongConsistency = true // enable strong consistency + strongCacheClient = rockscache.NewClient(rdb, options) + }) +} + +// GetStrongRockscacheClient get strong rockscache client +func GetStrongRockscacheClient() *rockscache.Client { + if strongCacheClient == nil { + InitStrongRockscache() + } + return strongCacheClient +} diff --git a/_13_sponge-dtm-cache/grpc+http/internal/model/stock.go b/_13_sponge-dtm-cache/grpc+http/internal/model/stock.go new file mode 100644 index 0000000..c815d6e --- /dev/null +++ b/_13_sponge-dtm-cache/grpc+http/internal/model/stock.go @@ -0,0 +1,19 @@ +package model + +import ( + "time" +) + +type Stock struct { + ID uint64 `gorm:"column:id;type:bigint(20) unsigned;primary_key;AUTO_INCREMENT" json:"id"` + ProductID uint64 `gorm:"column:product_id;type:bigint(20) unsigned;NOT NULL" json:"productId"` // 商品id + Stock uint `gorm:"column:stock;type:int(11) unsigned;NOT NULL" json:"stock"` // 库存 + CreatedAt *time.Time `gorm:"column:created_at;type:datetime" json:"createdAt"` + UpdatedAt *time.Time `gorm:"column:updated_at;type:datetime" json:"updatedAt"` + DeletedAt *time.Time `gorm:"column:deleted_at;type:datetime" json:"deletedAt"` +} + +// TableName table name +func (m *Stock) TableName() string { + return "stock" +} diff --git a/_13_sponge-dtm-cache/grpc+http/internal/routers/atomic_router.go b/_13_sponge-dtm-cache/grpc+http/internal/routers/atomic_router.go new file mode 100644 index 0000000..116a425 --- /dev/null +++ b/_13_sponge-dtm-cache/grpc+http/internal/routers/atomic_router.go @@ -0,0 +1,68 @@ +// Code generated by https://github.com/zhufuyi/sponge + +package routers + +import ( + "context" + + "github.com/gin-gonic/gin" + "google.golang.org/grpc/metadata" + + "github.com/zhufuyi/sponge/pkg/gin/middleware" + "github.com/zhufuyi/sponge/pkg/logger" + + stockV1 "stock/api/stock/v1" + "stock/internal/handler" +) + +func init() { + allMiddlewareFns = append(allMiddlewareFns, func(c *middlewareConfig) { + atomicMiddlewares(c) + }) + + allRouteFns = append(allRouteFns, + func(r *gin.Engine, groupPathMiddlewares map[string][]gin.HandlerFunc, singlePathMiddlewares map[string][]gin.HandlerFunc) { + atomicRouter(r, groupPathMiddlewares, singlePathMiddlewares, 
handler.NewAtomicHandler()) + }) +} + +func atomicRouter( + r *gin.Engine, + groupPathMiddlewares map[string][]gin.HandlerFunc, + singlePathMiddlewares map[string][]gin.HandlerFunc, + iService stockV1.AtomicLogicer) { + ctxFn := func(c *gin.Context) context.Context { + md := metadata.New(map[string]string{ + middleware.ContextRequestIDKey: middleware.GCtxRequestID(c), + }) + return metadata.NewIncomingContext(c.Request.Context(), md) + } + stockV1.RegisterAtomicRouter( + r, + groupPathMiddlewares, + singlePathMiddlewares, + iService, + stockV1.WithAtomicLogger(logger.Get()), + stockV1.WithAtomicRPCResponse(), + stockV1.WithAtomicWrapCtx(ctxFn), + stockV1.WithAtomicErrorToHTTPCode( + // Set some error codes to standard http return codes, + // by default there is already ecode.InternalServerError and ecode.ServiceUnavailable + // example: + // ecode.Forbidden, ecode.LimitExceed, + ), + ) +} + +// you can set the middleware of a route group, or set the middleware of a single route, +// or you can mix them, pay attention to the duplication of middleware when mixing them, +// it is recommended to set the middleware of a single route in preference +func atomicMiddlewares(c *middlewareConfig) { + // set up group route middleware, group path is left prefix rules, + // if the left prefix is hit, the middleware will take effect, e.g. group route is /api/v1, route /api/v1/atomic/:id will take effect + // c.setGroupPath("/api/v1/atomic", middleware.Auth()) + + // set up single route middleware, just uncomment the code and fill in the middlewares, nothing else needs to be changed + //c.setSinglePath("PUT", "/api/v1/stock/:id/atomic", middleware.Auth()) + //c.setSinglePath("GET", "/api/v1/stock/:id/atomic", middleware.Auth()) +} diff --git a/_13_sponge-dtm-cache/grpc+http/internal/routers/callback_router.go b/_13_sponge-dtm-cache/grpc+http/internal/routers/callback_router.go new file mode 100644 index 0000000..0108423 --- /dev/null +++ b/_13_sponge-dtm-cache/grpc+http/internal/routers/callback_router.go @@ -0,0 +1,68 @@ +// Code generated by https://github.com/zhufuyi/sponge + +package routers + +import ( + "context" + + "github.com/gin-gonic/gin" + "google.golang.org/grpc/metadata" + + "github.com/zhufuyi/sponge/pkg/gin/middleware" + "github.com/zhufuyi/sponge/pkg/logger" + + stockV1 "stock/api/stock/v1" + "stock/internal/handler" +) + +func init() { + allMiddlewareFns = append(allMiddlewareFns, func(c *middlewareConfig) { + callbackMiddlewares(c) + }) + + allRouteFns = append(allRouteFns, + func(r *gin.Engine, groupPathMiddlewares map[string][]gin.HandlerFunc, singlePathMiddlewares map[string][]gin.HandlerFunc) { + callbackRouter(r, groupPathMiddlewares, singlePathMiddlewares, handler.NewCallbackHandler()) + }) +} + +func callbackRouter( + r *gin.Engine, + groupPathMiddlewares map[string][]gin.HandlerFunc, + singlePathMiddlewares map[string][]gin.HandlerFunc, + iService stockV1.CallbackLogicer) { + ctxFn := func(c *gin.Context) context.Context { + md := metadata.New(map[string]string{ + middleware.ContextRequestIDKey: middleware.GCtxRequestID(c), + }) + return metadata.NewIncomingContext(c.Request.Context(), md) + } + stockV1.RegisterCallbackRouter( + r, + groupPathMiddlewares, + singlePathMiddlewares, + iService, + stockV1.WithCallbackLogger(logger.Get()), + stockV1.WithCallbackRPCResponse(), + stockV1.WithCallbackWrapCtx(ctxFn), + stockV1.WithCallbackErrorToHTTPCode( + // Set some error codes to standard http return codes, + // by default there is already ecode.InternalServerError and 
ecode.ServiceUnavailable + // example: + // ecode.Forbidden, ecode.LimitExceed, + ), + ) +} + +// you can set the middleware of a route group, or set the middleware of a single route, +// or you can mix them, pay attention to the duplication of middleware when mixing them, +// it is recommended to set the middleware of a single route in preference +func callbackMiddlewares(c *middlewareConfig) { + // set up group route middleware, group path is left prefix rules, + // if the left prefix is hit, the middleware will take effect, e.g. group route is /api/v1, route /api/v1/callback/:id will take effect + // c.setGroupPath("/api/v1/callback", middleware.Auth()) + + // set up single route middleware, just uncomment the code and fill in the middlewares, nothing else needs to be changed + //c.setSinglePath("GET", "/api/v1/stock/queryPrepared", middleware.Auth()) + //c.setSinglePath("POST", "/api/v1/stock/deleteCache", middleware.Auth()) +} diff --git a/_13_sponge-dtm-cache/grpc+http/internal/routers/downgrade_router.go b/_13_sponge-dtm-cache/grpc+http/internal/routers/downgrade_router.go new file mode 100644 index 0000000..a5a3a3c --- /dev/null +++ b/_13_sponge-dtm-cache/grpc+http/internal/routers/downgrade_router.go @@ -0,0 +1,69 @@ +// Code generated by https://github.com/zhufuyi/sponge + +package routers + +import ( + "context" + + "github.com/gin-gonic/gin" + "google.golang.org/grpc/metadata" + + "github.com/zhufuyi/sponge/pkg/gin/middleware" + "github.com/zhufuyi/sponge/pkg/logger" + + stockV1 "stock/api/stock/v1" + "stock/internal/handler" +) + +func init() { + allMiddlewareFns = append(allMiddlewareFns, func(c *middlewareConfig) { + downgradeMiddlewares(c) + }) + + allRouteFns = append(allRouteFns, + func(r *gin.Engine, groupPathMiddlewares map[string][]gin.HandlerFunc, singlePathMiddlewares map[string][]gin.HandlerFunc) { + downgradeRouter(r, groupPathMiddlewares, singlePathMiddlewares, handler.NewDowngradeHandler()) + }) +} + +func downgradeRouter( + r *gin.Engine, + groupPathMiddlewares map[string][]gin.HandlerFunc, + singlePathMiddlewares map[string][]gin.HandlerFunc, + iService stockV1.DowngradeLogicer) { + ctxFn := func(c *gin.Context) context.Context { + md := metadata.New(map[string]string{ + middleware.ContextRequestIDKey: middleware.GCtxRequestID(c), + }) + return metadata.NewIncomingContext(c.Request.Context(), md) + } + stockV1.RegisterDowngradeRouter( + r, + groupPathMiddlewares, + singlePathMiddlewares, + iService, + stockV1.WithDowngradeLogger(logger.Get()), + stockV1.WithDowngradeRPCResponse(), + stockV1.WithDowngradeWrapCtx(ctxFn), + stockV1.WithDowngradeErrorToHTTPCode( + // Set some error codes to standard http return codes, + // by default there is already ecode.InternalServerError and ecode.ServiceUnavailable + // example: + // ecode.Forbidden, ecode.LimitExceed, + ), + ) +} + +// you can set the middleware of a route group, or set the middleware of a single route, +// or you can mix them, pay attention to the duplication of middleware when mixing them, +// it is recommended to set the middleware of a single route in preference +func downgradeMiddlewares(c *middlewareConfig) { + // set up group route middleware, group path is left prefix rules, + // if the left prefix is hit, the middleware will take effect, e.g. 
group route is /api/v1, route /api/v1/downgrade/:id will take effect + // c.setGroupPath("/api/v1/downgrade", middleware.Auth()) + + // set up single route middleware, just uncomment the code and fill in the middlewares, nothing else needs to be changed + //c.setSinglePath("PUT", "/api/v1/stock/:id/downgrade", middleware.Auth()) + //c.setSinglePath("GET", "/api/v1/stock/:id/downgrade", middleware.Auth()) + //c.setSinglePath("POST", "/api/v1/stock/downgradeBranch", middleware.Auth()) +} diff --git a/a_micro-grpc-http-protobuf/internal/routers/user_router.go b/_13_sponge-dtm-cache/grpc+http/internal/routers/final_router.go similarity index 66% rename from a_micro-grpc-http-protobuf/internal/routers/user_router.go rename to _13_sponge-dtm-cache/grpc+http/internal/routers/final_router.go index bd929f8..bffcd3a 100644 --- a/a_micro-grpc-http-protobuf/internal/routers/user_router.go +++ b/_13_sponge-dtm-cache/grpc+http/internal/routers/final_router.go @@ -11,41 +11,41 @@ import ( "github.com/zhufuyi/sponge/pkg/gin/middleware" "github.com/zhufuyi/sponge/pkg/logger" - userV1 "user/api/user/v1" - "user/internal/handler" + stockV1 "stock/api/stock/v1" + "stock/internal/handler" ) func init() { allMiddlewareFns = append(allMiddlewareFns, func(c *middlewareConfig) { - userMiddlewares(c) + finalMiddlewares(c) }) allRouteFns = append(allRouteFns, func(r *gin.Engine, groupPathMiddlewares map[string][]gin.HandlerFunc, singlePathMiddlewares map[string][]gin.HandlerFunc) { - userRouter(r, groupPathMiddlewares, singlePathMiddlewares, handler.NewUserHandler()) + finalRouter(r, groupPathMiddlewares, singlePathMiddlewares, handler.NewFinalHandler()) }) } -func userRouter( +func finalRouter( r *gin.Engine, groupPathMiddlewares map[string][]gin.HandlerFunc, singlePathMiddlewares map[string][]gin.HandlerFunc, - iService userV1.UserLogicer) { + iService stockV1.FinalLogicer) { ctxFn := func(c *gin.Context) context.Context { md := metadata.New(map[string]string{ middleware.ContextRequestIDKey: middleware.GCtxRequestID(c), }) return metadata.NewIncomingContext(c.Request.Context(), md) } - userV1.RegisterUserRouter( + stockV1.RegisterFinalRouter( r, groupPathMiddlewares, singlePathMiddlewares, iService, - userV1.WithUserLogger(logger.Get()), - userV1.WithUserRPCResponse(), - userV1.WithUserWrapCtx(ctxFn), - userV1.WithUserErrorToHTTPCode( + stockV1.WithFinalLogger(logger.Get()), + stockV1.WithFinalRPCResponse(), + stockV1.WithFinalWrapCtx(ctxFn), + stockV1.WithFinalErrorToHTTPCode( // Set some error codes to standard http return codes, // by default there is already ecode.InternalServerError and ecode.ServiceUnavailable // example: @@ -57,14 +57,12 @@ func userRouter( // you can set the middleware of a route group, or set the middleware of a single route, // or you can mix them, pay attention to the duplication of middleware when mixing them, // it is recommended to set the middleware of a single route in preference -func userMiddlewares(c *middlewareConfig) { +func finalMiddlewares(c *middlewareConfig) { // set up group route middleware, group path is left prefix rules, - // if the left prefix is hit, the middleware will take effect, e.g. group route is /api/v1, route /api/v1/user/:id will take effect - // c.setGroupPath("/api/v1/user", middleware.Auth()) + // if the left prefix is hit, the middleware will take effect, e.g. 
group route is /api/v1, route /api/v1/final/:id will take effect + // c.setGroupPath("/api/v1/final", middleware.Auth()) // set up single route middleware, just uncomment the code and fill in the middlewares, nothing else needs to be changed - //c.setSinglePath("POST", "/api/v1/auth/register", middleware.Auth()) - //c.setSinglePath("POST", "/api/v1/auth/login", middleware.Auth()) - //c.setSinglePath("POST", "/api/v1/auth/logout", middleware.Auth()) - //c.setSinglePath("POST", "/api/v1/changePassword", middleware.Auth()) + //c.setSinglePath("PUT", "/api/v1/stock/:id/final", middleware.Auth()) + //c.setSinglePath("GET", "/api/v1/stock/:id/final", middleware.Auth()) } diff --git a/a_micro-grpc-http-protobuf/internal/routers/routers.go b/_13_sponge-dtm-cache/grpc+http/internal/routers/routers.go similarity index 99% rename from a_micro-grpc-http-protobuf/internal/routers/routers.go rename to _13_sponge-dtm-cache/grpc+http/internal/routers/routers.go index 5a7d692..59494cb 100644 --- a/a_micro-grpc-http-protobuf/internal/routers/routers.go +++ b/_13_sponge-dtm-cache/grpc+http/internal/routers/routers.go @@ -18,8 +18,8 @@ import ( "github.com/zhufuyi/sponge/pkg/jwt" "github.com/zhufuyi/sponge/pkg/logger" - "user/docs" - "user/internal/config" + "stock/docs" + "stock/internal/config" ) type routeFns = []func(r *gin.Engine, groupPathMiddlewares map[string][]gin.HandlerFunc, singlePathMiddlewares map[string][]gin.HandlerFunc) diff --git a/_13_sponge-dtm-cache/grpc+http/internal/routers/stock_router.go b/_13_sponge-dtm-cache/grpc+http/internal/routers/stock_router.go new file mode 100644 index 0000000..b738e99 --- /dev/null +++ b/_13_sponge-dtm-cache/grpc+http/internal/routers/stock_router.go @@ -0,0 +1,71 @@ +// Code generated by https://github.com/zhufuyi/sponge + +package routers + +import ( + "context" + + "github.com/gin-gonic/gin" + "google.golang.org/grpc/metadata" + + "github.com/zhufuyi/sponge/pkg/gin/middleware" + "github.com/zhufuyi/sponge/pkg/logger" + + stockV1 "stock/api/stock/v1" + "stock/internal/handler" +) + +func init() { + allMiddlewareFns = append(allMiddlewareFns, func(c *middlewareConfig) { + stockMiddlewares(c) + }) + + allRouteFns = append(allRouteFns, + func(r *gin.Engine, groupPathMiddlewares map[string][]gin.HandlerFunc, singlePathMiddlewares map[string][]gin.HandlerFunc) { + stockRouter(r, groupPathMiddlewares, singlePathMiddlewares, handler.NewStockHandler()) + }) +} + +func stockRouter( + r *gin.Engine, + groupPathMiddlewares map[string][]gin.HandlerFunc, + singlePathMiddlewares map[string][]gin.HandlerFunc, + iService stockV1.StockLogicer) { + ctxFn := func(c *gin.Context) context.Context { + md := metadata.New(map[string]string{ + middleware.ContextRequestIDKey: middleware.GCtxRequestID(c), + }) + return metadata.NewIncomingContext(c.Request.Context(), md) + } + stockV1.RegisterStockRouter( + r, + groupPathMiddlewares, + singlePathMiddlewares, + iService, + stockV1.WithStockLogger(logger.Get()), + stockV1.WithStockRPCResponse(), + stockV1.WithStockWrapCtx(ctxFn), + stockV1.WithStockErrorToHTTPCode( + // Set some error codes to standard http return codes, + // by default there is already ecode.InternalServerError and ecode.ServiceUnavailable + // example: + // ecode.Forbidden, ecode.LimitExceed, + ), + ) +} + +// you can set the middleware of a route group, or set the middleware of a single route, +// or you can mix them, pay attention to the duplication of middleware when mixing them, +// it is recommended to set the middleware of a single route in preference +func 
stockMiddlewares(c *middlewareConfig) { + // set up group route middleware, group path is left prefix rules, + // if the left prefix is hit, the middleware will take effect, e.g. group route is /api/v1, route /api/v1/stock/:id will take effect + // c.setGroupPath("/api/v1/stock", middleware.Auth()) + + // set up single route middleware, just uncomment the code and fill in the middlewares, nothing else needs to be changed + //c.setSinglePath("POST", "/api/v1/stock", middleware.Auth()) + //c.setSinglePath("DELETE", "/api/v1/stock/:id", middleware.Auth()) + //c.setSinglePath("PUT", "/api/v1/stock/:id", middleware.Auth()) + //c.setSinglePath("GET", "/api/v1/stock/:id", middleware.Auth()) + //c.setSinglePath("POST", "/api/v1/stock/list", middleware.Auth()) +} diff --git a/_13_sponge-dtm-cache/grpc+http/internal/routers/strong_router.go b/_13_sponge-dtm-cache/grpc+http/internal/routers/strong_router.go new file mode 100644 index 0000000..58ad50c --- /dev/null +++ b/_13_sponge-dtm-cache/grpc+http/internal/routers/strong_router.go @@ -0,0 +1,68 @@ +// Code generated by https://github.com/zhufuyi/sponge + +package routers + +import ( + "context" + + "github.com/gin-gonic/gin" + "google.golang.org/grpc/metadata" + + "github.com/zhufuyi/sponge/pkg/gin/middleware" + "github.com/zhufuyi/sponge/pkg/logger" + + stockV1 "stock/api/stock/v1" + "stock/internal/handler" +) + +func init() { + allMiddlewareFns = append(allMiddlewareFns, func(c *middlewareConfig) { + strongMiddlewares(c) + }) + + allRouteFns = append(allRouteFns, + func(r *gin.Engine, groupPathMiddlewares map[string][]gin.HandlerFunc, singlePathMiddlewares map[string][]gin.HandlerFunc) { + strongRouter(r, groupPathMiddlewares, singlePathMiddlewares, handler.NewStrongHandler()) + }) +} + +func strongRouter( + r *gin.Engine, + groupPathMiddlewares map[string][]gin.HandlerFunc, + singlePathMiddlewares map[string][]gin.HandlerFunc, + iService stockV1.StrongLogicer) { + ctxFn := func(c *gin.Context) context.Context { + md := metadata.New(map[string]string{ + middleware.ContextRequestIDKey: middleware.GCtxRequestID(c), + }) + return metadata.NewIncomingContext(c.Request.Context(), md) + } + stockV1.RegisterStrongRouter( + r, + groupPathMiddlewares, + singlePathMiddlewares, + iService, + stockV1.WithStrongLogger(logger.Get()), + stockV1.WithStrongRPCResponse(), + stockV1.WithStrongWrapCtx(ctxFn), + stockV1.WithStrongErrorToHTTPCode( + // Set some error codes to standard http return codes, + // by default there is already ecode.InternalServerError and ecode.ServiceUnavailable + // example: + // ecode.Forbidden, ecode.LimitExceed, + ), + ) +} + +// you can set the middleware of a route group, or set the middleware of a single route, +// or you can mix them, pay attention to the duplication of middleware when mixing them, +// it is recommended to set the middleware of a single route in preference +func strongMiddlewares(c *middlewareConfig) { + // set up group route middleware, group path is left prefix rules, + // if the left prefix is hit, the middleware will take effect, e.g. 
group route is /api/v1, route /api/v1/strong/:id will take effect + // c.setGroupPath("/api/v1/strong", middleware.Auth()) + + // set up single route middleware, just uncomment the code and fill in the middlewares, nothing else needs to be changed + //c.setSinglePath("PUT", "/api/v1/stock/:id/strong", middleware.Auth()) + //c.setSinglePath("GET", "/api/v1/stock/:id/strong", middleware.Auth()) +} diff --git a/b_sponge-dtm-msg/internal/rpcclient/dtmservice.go b/_13_sponge-dtm-cache/grpc+http/internal/rpcclient/dtmservice.go similarity index 95% rename from b_sponge-dtm-msg/internal/rpcclient/dtmservice.go rename to _13_sponge-dtm-cache/grpc+http/internal/rpcclient/dtmservice.go index 1df1e40..e421123 100644 --- a/b_sponge-dtm-msg/internal/rpcclient/dtmservice.go +++ b/_13_sponge-dtm-cache/grpc+http/internal/rpcclient/dtmservice.go @@ -19,7 +19,7 @@ import ( "github.com/zhufuyi/sponge/pkg/servicerd/registry/etcd" "github.com/zhufuyi/sponge/pkg/servicerd/registry/nacos" - "transfer/internal/config" + "stock/internal/config" ) var ( @@ -41,7 +41,7 @@ func InitDtmServerResolver() { } if grpcClientCfg.Name == "" { panic(fmt.Sprintf("not found grpc service name '%v' in configuration file(yaml), "+ - "please add gprc service configuration in the configuration file(yaml) under the field grpcClient.", serverName)) + "please add gprc service configuration in the configuration file(yaml) under the field grpcClient", serverName)) } var ( @@ -95,7 +95,7 @@ func InitDtmServerResolver() { builder := discovery.NewBuilder(iDiscovery, discovery.WithInsecure(true), discovery.DisableDebugLog()) resolver.Register(builder) } else { - logger.Infof("using address connect directly, endpoint = %s", dtmServerEndPoint) + logger.Infof("using IP address connect directly, endpoint = %s", dtmServerEndPoint) } } diff --git a/_13_sponge-dtm-cache/grpc+http/internal/rpcclient/endpointForDtm.go b/_13_sponge-dtm-cache/grpc+http/internal/rpcclient/endpointForDtm.go new file mode 100644 index 0000000..a14ec32 --- /dev/null +++ b/_13_sponge-dtm-cache/grpc+http/internal/rpcclient/endpointForDtm.go @@ -0,0 +1,74 @@ +package rpcclient + +import ( + "fmt" + "strings" + "sync" + + "github.com/zhufuyi/sponge/pkg/logger" + + "stock/internal/config" +) + +var ( + stockEndPoint string + stockOnce sync.Once +) + +// get endpoint for dtm service +func getEndpoint(serverName string) (string, error) { + cfg := config.Get() + + var grpcClientCfg config.GrpcClient + for _, cli := range cfg.GrpcClient { + if strings.EqualFold(cli.Name, serverName) { + grpcClientCfg = cli + break + } + } + if grpcClientCfg.Name == "" { + return "", fmt.Errorf("not found grpc service name '%v' in configuration file(yaml), "+ + "please add gprc service configuration in the configuration file(yaml) under the field grpcClient", serverName) + } + + var endpoint string + var isUseDiscover bool + switch grpcClientCfg.RegistryDiscoveryType { + case "consul", "etcd": + endpoint = "discovery:///" + grpcClientCfg.Name + isUseDiscover = true + case "nacos": + endpoint = "discovery:///" + grpcClientCfg.Name + ".grpc" + isUseDiscover = true + default: + endpoint = fmt.Sprintf("%s:%d", grpcClientCfg.Host, grpcClientCfg.Port) + } + + if isUseDiscover { + logger.Infof("[dtm] connects to the [%s] service through service discovery, type = %s, endpoint = %s", serverName, grpcClientCfg.RegistryDiscoveryType, endpoint) + } else { + logger.Infof("[dtm] connects to the [%s] service through IP address, endpoint = %s", serverName, endpoint) + } + + return endpoint, nil +} + +// 
InitEndpointsForDtm init endpoints for dtm service +func InitEndpointsForDtm() { + GetStockEndpoint() +} + +// GetStockEndpoint get stock endpoint +func GetStockEndpoint() string { + if stockEndPoint == "" { + stockOnce.Do(func() { + endpoint, err := getEndpoint("stock") + if err != nil { + panic(err) + } + stockEndPoint = endpoint + }) + } + + return stockEndPoint +} diff --git a/a_micro-grpc-http-protobuf/internal/server/grpc.go b/_13_sponge-dtm-cache/grpc+http/internal/server/grpc.go similarity index 99% rename from a_micro-grpc-http-protobuf/internal/server/grpc.go rename to _13_sponge-dtm-cache/grpc+http/internal/server/grpc.go index 9172c94..d1d2b1a 100644 --- a/a_micro-grpc-http-protobuf/internal/server/grpc.go +++ b/_13_sponge-dtm-cache/grpc+http/internal/server/grpc.go @@ -22,9 +22,9 @@ import ( "github.com/zhufuyi/sponge/pkg/prof" "github.com/zhufuyi/sponge/pkg/servicerd/registry" - "user/internal/config" - "user/internal/ecode" - "user/internal/service" + "stock/internal/config" + "stock/internal/ecode" + "stock/internal/service" ) var _ app.IServer = (*grpcServer)(nil) diff --git a/_13_sponge-dtm-cache/grpc+http/internal/server/grpc_option.go b/_13_sponge-dtm-cache/grpc+http/internal/server/grpc_option.go new file mode 100644 index 0000000..02c37d7 --- /dev/null +++ b/_13_sponge-dtm-cache/grpc+http/internal/server/grpc_option.go @@ -0,0 +1,34 @@ +package server + +import ( + "github.com/zhufuyi/sponge/pkg/servicerd/registry" +) + +// GrpcOption grpc settings +type GrpcOption func(*grpcOptions) + +type grpcOptions struct { + instance *registry.ServiceInstance + iRegistry registry.Registry +} + +func defaultGrpcOptions() *grpcOptions { + return &grpcOptions{ + instance: nil, + iRegistry: nil, + } +} + +func (o *grpcOptions) apply(opts ...GrpcOption) { + for _, opt := range opts { + opt(o) + } +} + +// WithGrpcRegistry registration services +func WithGrpcRegistry(iRegistry registry.Registry, instance *registry.ServiceInstance) GrpcOption { + return func(o *grpcOptions) { + o.iRegistry = iRegistry + o.instance = instance + } +} diff --git a/a_micro-grpc-http-protobuf/internal/server/http.go b/_13_sponge-dtm-cache/grpc+http/internal/server/http.go similarity index 98% rename from a_micro-grpc-http-protobuf/internal/server/http.go rename to _13_sponge-dtm-cache/grpc+http/internal/server/http.go index 62eee0b..8151dd4 100644 --- a/a_micro-grpc-http-protobuf/internal/server/http.go +++ b/_13_sponge-dtm-cache/grpc+http/internal/server/http.go @@ -11,7 +11,7 @@ import ( "github.com/zhufuyi/sponge/pkg/app" "github.com/zhufuyi/sponge/pkg/servicerd/registry" - "user/internal/routers" + "stock/internal/routers" ) var _ app.IServer = (*httpServer)(nil) diff --git a/_13_sponge-dtm-cache/grpc+http/internal/server/http_option.go b/_13_sponge-dtm-cache/grpc+http/internal/server/http_option.go new file mode 100644 index 0000000..3704401 --- /dev/null +++ b/_13_sponge-dtm-cache/grpc+http/internal/server/http_option.go @@ -0,0 +1,43 @@ +package server + +import ( + "github.com/zhufuyi/sponge/pkg/servicerd/registry" +) + +// HTTPOption setting up http +type HTTPOption func(*httpOptions) + +type httpOptions struct { + isProd bool + instance *registry.ServiceInstance + iRegistry registry.Registry +} + +func defaultHTTPOptions() *httpOptions { + return &httpOptions{ + isProd: false, + instance: nil, + iRegistry: nil, + } +} + +func (o *httpOptions) apply(opts ...HTTPOption) { + for _, opt := range opts { + opt(o) + } +} + +// WithHTTPIsProd setting up production environment markers +func 
WithHTTPIsProd(isProd bool) HTTPOption { + return func(o *httpOptions) { + o.isProd = isProd + } +} + +// WithHTTPRegistry registration services +func WithHTTPRegistry(iRegistry registry.Registry, instance *registry.ServiceInstance) HTTPOption { + return func(o *httpOptions) { + o.iRegistry = iRegistry + o.instance = instance + } +} diff --git a/_13_sponge-dtm-cache/grpc+http/internal/service/atomic.go b/_13_sponge-dtm-cache/grpc+http/internal/service/atomic.go new file mode 100644 index 0000000..ad220f9 --- /dev/null +++ b/_13_sponge-dtm-cache/grpc+http/internal/service/atomic.go @@ -0,0 +1,116 @@ +// Code generated by https://github.com/zhufuyi/sponge + +package service + +import ( + "context" + "database/sql" + "errors" + "time" + + "github.com/dtm-labs/client/dtmcli" + "github.com/dtm-labs/client/dtmgrpc" + "github.com/dtm-labs/rockscache" + "google.golang.org/grpc" + + "github.com/zhufuyi/sponge/pkg/grpc/interceptor" + "github.com/zhufuyi/sponge/pkg/logger" + "github.com/zhufuyi/sponge/pkg/utils" + + stockV1 "stock/api/stock/v1" + "stock/internal/dao" + "stock/internal/ecode" + "stock/internal/model" + "stock/internal/rpcclient" +) + +func init() { + registerFns = append(registerFns, func(server *grpc.Server) { + stockV1.RegisterAtomicServer(server, NewAtomicServer()) + }) +} + +var _ stockV1.AtomicServer = (*atomic)(nil) + +type atomic struct { + stockV1.UnimplementedAtomicServer + + db *sql.DB + cacheClient *rockscache.Client +} + +// NewAtomicServer create a server +func NewAtomicServer() stockV1.AtomicServer { + return &atomic{ + db: model.GetSDB(), + cacheClient: model.GetRockscacheClient(), + } +} + +// Update 更新数据,保证DB与缓存操作的原子性。 +func (s *atomic) Update(ctx context.Context, req *stockV1.UpdateAtomicRequest) (*stockV1.UpdateAtomicRequestReply, error) { + err := req.Validate() + if err != nil { + logger.Warn("req.Validate error", logger.Err(err), logger.Any("req", req), interceptor.ServerCtxRequestIDField(ctx)) + return nil, ecode.StatusInvalidParams.ToRPCErr() + } + ctx = interceptor.WrapServerCtx(ctx) + + gid := newGid() + deleteCacheURL := rpcclient.GetStockEndpoint() + stockV1.Callback_DeleteCache_FullMethodName + queryPreparedURL := rpcclient.GetStockEndpoint() + stockV1.Callback_QueryPrepared_FullMethodName + deleteCacheReq := &stockV1.DeleteCacheRequest{ + Key: getStockCacheKey(req.Id), + } + stockObj := &model.Stock{ + ID: req.Id, + Stock: uint(req.Stock), + } + headers := map[string]string{interceptor.ContextRequestIDKey: interceptor.ServerCtxRequestID(ctx)} + + // 创建二阶段消息事务 + msg := dtmgrpc.NewMsgGrpc(rpcclient.GetDtmEndpoint(), gid, dtmgrpc.WithBranchHeaders(headers)) + msg.Add(deleteCacheURL, deleteCacheReq) + msg.TimeoutToFail = 3 + err = msg.DoAndSubmit(queryPreparedURL, func(bb *dtmcli.BranchBarrier) error { + return bb.CallWithDB(s.db, func(tx *sql.Tx) error { + return dao.UpdateStockInTx(tx, stockObj) + }) + }) + if err != nil { + logger.Warn("msg.DoAndSubmit error", logger.Err(err), logger.Any("req", req), interceptor.ServerCtxRequestIDField(ctx)) + return nil, adaptErr(err) + } + + logger.Info("更新数据,DB与缓存操作的原子性", logger.Any("dtm gid", gid)) + + return &stockV1.UpdateAtomicRequestReply{}, nil +} + +// Query 查询 +func (s *atomic) Query(ctx context.Context, req *stockV1.QueryAtomicRequest) (*stockV1.QueryAtomicReply, error) { + err := req.Validate() + if err != nil { + logger.Warn("req.Validate error", logger.Err(err), logger.Any("req", req), interceptor.ServerCtxRequestIDField(ctx)) + return nil, ecode.StatusInvalidParams.Err() + } + ctx = 
interceptor.WrapServerCtx(ctx) + + key := getStockCacheKey(req.Id) + queryFn := func() (string, error) { + return dao.GetStockByID(s.db, req.Id) + } + + value, err := s.cacheClient.Fetch(key, 300*time.Second, queryFn) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, ecode.StatusNotFound.Err() + } + logger.Warn("s.strongCacheClient.Fetch error", logger.Err(err), logger.String("key", key), interceptor.ServerCtxRequestIDField(ctx)) + return nil, adaptErr(err) + } + + return &stockV1.QueryAtomicReply{ + Stock: utils.StrToUint32(value), + }, nil +} diff --git a/_13_sponge-dtm-cache/grpc+http/internal/service/atomic_client_test.go b/_13_sponge-dtm-cache/grpc+http/internal/service/atomic_client_test.go new file mode 100644 index 0000000..2e066b1 --- /dev/null +++ b/_13_sponge-dtm-cache/grpc+http/internal/service/atomic_client_test.go @@ -0,0 +1,145 @@ +// Code generated by https://github.com/zhufuyi/sponge +// Test_service_atomic_methods is used to test the atomic api +// Test_service_atomic_benchmark is used to performance test the atomic api + +package service + +import ( + "context" + "encoding/json" + "fmt" + "testing" + "time" + + "github.com/zhufuyi/sponge/pkg/grpc/benchmark" + + stockV1 "stock/api/stock/v1" + "stock/configs" + "stock/internal/config" +) + +// Test service atomic api via grpc client +func Test_service_atomic_methods(t *testing.T) { + conn := getRPCClientConnForTest() + cli := stockV1.NewAtomicClient(conn) + ctx, _ := context.WithTimeout(context.Background(), time.Second*30) + + tests := []struct { + name string + fn func() (interface{}, error) + wantErr bool + }{ + + { + name: "Update", + fn: func() (interface{}, error) { + // todo type in the parameters before testing + req := &stockV1.UpdateAtomicRequest{ + Id: 1, + Stock: 100, // 库存数量 + } + + return cli.Update(ctx, req) + }, + wantErr: false, + }, + + { + name: "Query", + fn: func() (interface{}, error) { + // todo type in the parameters before testing + req := &stockV1.QueryAtomicRequest{ + Id: 1, + } + + return cli.Query(ctx, req) + }, + wantErr: false, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got, err := tt.fn() + if (err != nil) != tt.wantErr { + t.Errorf("test '%s' error = %v, wantErr %v", tt.name, err, tt.wantErr) + return + } + data, _ := json.MarshalIndent(got, "", " ") + fmt.Println(string(data)) + }) + } +} + +// performance test service atomic api, copy the report to +// the browser to view when the pressure test is finished. +func Test_service_atomic_benchmark(t *testing.T) { + err := config.Init(configs.Path("stock.yml")) + if err != nil { + panic(err) + } + + grpcClientCfg := getGRPCClientCfg() + host := fmt.Sprintf("%s:%d", grpcClientCfg.Host, grpcClientCfg.Port) + protoFile := configs.Path("../api/stock/v1/atomic.proto") + // If third-party dependencies are missing during the press test, + // copy them to the project's third_party directory. 
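
The Update method in atomic.go above is the heart of this example: the stock row is updated inside a DTM branch barrier while the cache-delete call rides on a two-phase message, so the database write and the cache invalidation either both take effect or are both abandoned, and QueryPrepared is the back-query DTM uses when the outcome of the prepare is uncertain. The condensed sketch below shows the same flow in isolation; it is not part of the generated service, and the package name, helper name, endpoints and SQL are placeholders for illustration only.

    package dtmsketch

    import (
    	"database/sql"
    	"fmt"

    	"github.com/dtm-labs/client/dtmcli"
    	"github.com/dtm-labs/client/dtmgrpc"

    	stockV1 "stock/api/stock/v1"
    )

    // updateStockAtomically updates a stock row and invalidates its cache entry
    // atomically via a dtm two-phase message (a sketch with assumed endpoints).
    func updateStockAtomically(db *sql.DB, gid string, id uint64, stock uint32) error {
    	dtmServer := "discovery:///dtm_service" // assumption: dtm endpoint from the registry
    	stockServer := "discovery:///stock"     // assumption: this service's own grpc endpoint

    	msg := dtmgrpc.NewMsgGrpc(dtmServer, gid)
    	msg.Add(stockServer+stockV1.Callback_DeleteCache_FullMethodName,
    		&stockV1.DeleteCacheRequest{Key: fmt.Sprintf("stock:%d", id)}) // branch: invalidate the cache entry
    	msg.TimeoutToFail = 3 // treat an unconfirmed prepare as failed after 3 seconds

    	// Run the local DB update inside the barrier, then submit the global transaction;
    	// dtm calls QueryPrepared later if it never learns whether the barrier committed.
    	return msg.DoAndSubmit(stockServer+stockV1.Callback_QueryPrepared_FullMethodName,
    		func(bb *dtmcli.BranchBarrier) error {
    			return bb.CallWithDB(db, func(tx *sql.Tx) error {
    				_, err := tx.Exec("UPDATE stock SET stock = ? WHERE id = ?", stock, id)
    				return err
    			})
    		})
    }

The variants in this patch differ only in the knobs: atomic.go sets TimeoutToFail, while final.go sets WaitResult = true instead, which makes DoAndSubmit return only after the whole global transaction has finished.
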
+ dependentProtoFilePath := []string{ + configs.Path("../third_party"), // third_party directory + configs.Path(".."), // Previous level of third_party + } + + tests := []struct { + name string + fn func() error + wantErr bool + }{ + + { + name: "Update", + fn: func() error { + // todo type in the parameters before benchmark testing + message := &stockV1.UpdateAtomicRequest{ + Id: 0, + Stock: 0, // 库存数量 + } + total := 1000 // total number of requests + + b, err := benchmark.New(host, protoFile, "Update", message, dependentProtoFilePath, total) + if err != nil { + return err + } + return b.Run() + }, + wantErr: false, + }, + + { + name: "Query", + fn: func() error { + // todo type in the parameters before benchmark testing + message := &stockV1.QueryAtomicRequest{ + Id: 0, + } + total := 1000 // total number of requests + + b, err := benchmark.New(host, protoFile, "Query", message, dependentProtoFilePath, total) + if err != nil { + return err + } + return b.Run() + }, + wantErr: false, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + err := tt.fn() + if (err != nil) != tt.wantErr { + t.Errorf("test '%s' error = %v, wantErr %v", tt.name, err, tt.wantErr) + return + } + }) + } +} diff --git a/_13_sponge-dtm-cache/grpc+http/internal/service/callback.go b/_13_sponge-dtm-cache/grpc+http/internal/service/callback.go new file mode 100644 index 0000000..cdd6676 --- /dev/null +++ b/_13_sponge-dtm-cache/grpc+http/internal/service/callback.go @@ -0,0 +1,101 @@ +// Code generated by https://github.com/zhufuyi/sponge + +package service + +import ( + "context" + "database/sql" + "errors" + "github.com/dtm-labs/client/dtmcli" + "github.com/dtm-labs/client/dtmgrpc" + "github.com/dtm-labs/rockscache" + "github.com/go-redis/redis/v8" + "google.golang.org/grpc" + "stock/pkg/goredis" + "strconv" + + "github.com/zhufuyi/sponge/pkg/grpc/interceptor" + "github.com/zhufuyi/sponge/pkg/krand" + "github.com/zhufuyi/sponge/pkg/logger" + + stockV1 "stock/api/stock/v1" + "stock/internal/ecode" + "stock/internal/model" +) + +func init() { + registerFns = append(registerFns, func(server *grpc.Server) { + stockV1.RegisterCallbackServer(server, NewCallbackServer()) + }) +} + +var _ stockV1.CallbackServer = (*callback)(nil) + +type callback struct { + stockV1.UnimplementedCallbackServer + + db *sql.DB + rdb *redis.Client + cacheClient *rockscache.Client +} + +// NewCallbackServer create a server +func NewCallbackServer() stockV1.CallbackServer { + return &callback{ + db: model.GetSDB(), + rdb: goredis.GetRedisCli(), + cacheClient: model.GetRockscacheClient(), + } +} + +// QueryPrepared 反查数据,注:这是dtm回调,只返回nil、codes.Internal、codes.Aborted错误码 +func (s *callback) QueryPrepared(ctx context.Context, req *stockV1.QueryPreparedRequest) (*stockV1.QueryPreparedReply, error) { + ctx = interceptor.WrapServerCtx(ctx) + + bb, err := dtmgrpc.BarrierFromGrpc(ctx) + if err != nil { + logger.Warn("BarrierFromQuery error", logger.Err(err), interceptor.ServerCtxRequestIDField(ctx)) + return nil, adaptErr(err) + } + + err = bb.RedisQueryPrepared(s.rdb, 7*86400) + if err != nil { + logger.Warn("RedisQueryPrepared error", logger.Err(err), interceptor.ServerCtxRequestIDField(ctx)) + return nil, adaptErr(err) + } + + return &stockV1.QueryPreparedReply{}, adaptErr(err) +} + +// DeleteCache 删除缓存,注:这是dtm回调,只返回nil、codes.Internal、codes.Aborted错误码 +func (s *callback) DeleteCache(ctx context.Context, req *stockV1.DeleteCacheRequest) (*stockV1.DeleteCacheReply, error) { + err := req.Validate() + if err != nil { + 
logger.Warn("req.Validate error", logger.Err(err), logger.Any("req", req), interceptor.ServerCtxRequestIDField(ctx)) + return nil, adaptErr(err) + } + + err = s.cacheClient.TagAsDeleted(req.Key) + + return &stockV1.DeleteCacheReply{}, err +} + +func adaptErr(err error) error { + if err == nil { + return nil + } + + if errors.Is(err, dtmcli.ErrFailure) { + return ecode.StatusAborted.ToRPCErr() + } + + return ecode.StatusInternalServerError.ToRPCErr() +} + +func newGid() string { + return krand.NewSeriesID() +} + +func getStockCacheKey(id uint64) string { + return "stock:" + strconv.FormatUint(id, 10) +} diff --git a/_13_sponge-dtm-cache/grpc+http/internal/service/callback_client_test.go b/_13_sponge-dtm-cache/grpc+http/internal/service/callback_client_test.go new file mode 100644 index 0000000..1620d38 --- /dev/null +++ b/_13_sponge-dtm-cache/grpc+http/internal/service/callback_client_test.go @@ -0,0 +1,139 @@ +// Code generated by https://github.com/zhufuyi/sponge +// Test_service_callback_methods is used to test the callback api +// Test_service_callback_benchmark is used to performance test the callback api + +package service + +import ( + "context" + "encoding/json" + "fmt" + "testing" + "time" + + "github.com/zhufuyi/sponge/pkg/grpc/benchmark" + + stockV1 "stock/api/stock/v1" + "stock/configs" + "stock/internal/config" +) + +// Test service callback api via grpc client +func Test_service_callback_methods(t *testing.T) { + conn := getRPCClientConnForTest() + cli := stockV1.NewCallbackClient(conn) + ctx, _ := context.WithTimeout(context.Background(), time.Second*30) + + tests := []struct { + name string + fn func() (interface{}, error) + wantErr bool + }{ + + { + name: "QueryPrepared", + fn: func() (interface{}, error) { + // todo type in the parameters before testing + req := &stockV1.QueryPreparedRequest{} + + return cli.QueryPrepared(ctx, req) + }, + wantErr: false, + }, + + { + name: "DeleteCache", + fn: func() (interface{}, error) { + // todo type in the parameters before testing + req := &stockV1.DeleteCacheRequest{ + Key: "", + } + + return cli.DeleteCache(ctx, req) + }, + wantErr: false, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got, err := tt.fn() + if (err != nil) != tt.wantErr { + t.Errorf("test '%s' error = %v, wantErr %v", tt.name, err, tt.wantErr) + return + } + data, _ := json.MarshalIndent(got, "", " ") + fmt.Println(string(data)) + }) + } +} + +// performance test service callback api, copy the report to +// the browser to view when the pressure test is finished. +func Test_service_callback_benchmark(t *testing.T) { + err := config.Init(configs.Path("stock.yml")) + if err != nil { + panic(err) + } + + grpcClientCfg := getGRPCClientCfg() + host := fmt.Sprintf("%s:%d", grpcClientCfg.Host, grpcClientCfg.Port) + protoFile := configs.Path("../api/stock/v1/callback.proto") + // If third-party dependencies are missing during the press test, + // copy them to the project's third_party directory. 
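
Both callbacks above are invoked by DTM itself, so they follow DTM's error contract rather than the service's normal API codes: nil means the branch succeeded, codes.Aborted (what adaptErr returns for dtmcli.ErrFailure) marks a business failure that must not be retried, and anything else surfaces as codes.Internal and is retried. RedisQueryPrepared keeps the barrier record in Redis (the 7*86400 argument is its expiry in seconds) because the cache-delete branch has no database table of its own to back-query. The sketch below shows how a branch signals a business failure from inside the barrier; the function, table and column names are illustrative assumptions, not from the patch.

    package dtmsketch

    import (
    	"database/sql"

    	"github.com/dtm-labs/client/dtmcli"
    )

    // deductStock is a barrier-protected branch that distinguishes a business
    // failure (no retry) from an internal error (retried by dtm). A sketch only.
    func deductStock(bb *dtmcli.BranchBarrier, db *sql.DB, id uint64, n uint) error {
    	return bb.CallWithDB(db, func(tx *sql.Tx) error {
    		res, err := tx.Exec("UPDATE stock SET stock = stock - ? WHERE id = ? AND stock >= ?", n, id, n)
    		if err != nil {
    			return err // unexpected error: handlers above map this to codes.Internal, dtm retries
    		}
    		if rows, _ := res.RowsAffected(); rows == 0 {
    			return dtmcli.ErrFailure // business failure: handlers above map this to codes.Aborted, no retry
    		}
    		return nil
    	})
    }
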
+ dependentProtoFilePath := []string{ + configs.Path("../third_party"), // third_party directory + configs.Path(".."), // Previous level of third_party + } + + tests := []struct { + name string + fn func() error + wantErr bool + }{ + + { + name: "QueryPrepared", + fn: func() error { + // todo type in the parameters before benchmark testing + message := &stockV1.QueryPreparedRequest{} + total := 1000 // total number of requests + + b, err := benchmark.New(host, protoFile, "QueryPrepared", message, dependentProtoFilePath, total) + if err != nil { + return err + } + return b.Run() + }, + wantErr: false, + }, + + { + name: "DeleteCache", + fn: func() error { + // todo type in the parameters before benchmark testing + message := &stockV1.DeleteCacheRequest{ + Key: "", + } + total := 1000 // total number of requests + + b, err := benchmark.New(host, protoFile, "DeleteCache", message, dependentProtoFilePath, total) + if err != nil { + return err + } + return b.Run() + }, + wantErr: false, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + err := tt.fn() + if (err != nil) != tt.wantErr { + t.Errorf("test '%s' error = %v, wantErr %v", tt.name, err, tt.wantErr) + return + } + }) + } +} diff --git a/_13_sponge-dtm-cache/grpc+http/internal/service/downgrade.go b/_13_sponge-dtm-cache/grpc+http/internal/service/downgrade.go new file mode 100644 index 0000000..e400860 --- /dev/null +++ b/_13_sponge-dtm-cache/grpc+http/internal/service/downgrade.go @@ -0,0 +1,157 @@ +// Code generated by https://github.com/zhufuyi/sponge + +package service + +import ( + "context" + "database/sql" + "errors" + "github.com/dtm-labs/client/dtmgrpc" + "github.com/dtm-labs/rockscache" + "github.com/zhufuyi/sponge/pkg/grpc/interceptor" + "github.com/zhufuyi/sponge/pkg/logger" + "github.com/zhufuyi/sponge/pkg/utils" + "stock/internal/dao" + "stock/internal/ecode" + "stock/internal/model" + "stock/internal/rpcclient" + "time" + + "google.golang.org/grpc" + + //"github.com/zhufuyi/sponge/pkg/grpc/interceptor" + //"github.com/zhufuyi/sponge/pkg/logger" + + stockV1 "stock/api/stock/v1" + //"stock/internal/cache" + //"stock/internal/dao" + //"stock/internal/ecode" + //"stock/internal/model" +) + +func init() { + registerFns = append(registerFns, func(server *grpc.Server) { + stockV1.RegisterDowngradeServer(server, NewDowngradeServer()) + }) +} + +var _ stockV1.DowngradeServer = (*downgrade)(nil) + +type downgrade struct { + stockV1.UnimplementedDowngradeServer + + db *sql.DB + strongCacheClient *rockscache.Client +} + +// NewDowngradeServer create a server +func NewDowngradeServer() stockV1.DowngradeServer { + return &downgrade{ + db: model.GetSDB(), + strongCacheClient: model.GetStrongRockscacheClient(), + } +} + +// Update 更新数据,升降级中的DB和缓存强一致性 +func (s *downgrade) Update(ctx context.Context, req *stockV1.UpdateDowngradeRequest) (*stockV1.UpdateDowngradeRequestReply, error) { + err := req.Validate() + if err != nil { + logger.Warn("req.Validate error", logger.Err(err), logger.Any("req", req), interceptor.ServerCtxRequestIDField(ctx)) + return nil, ecode.StatusInvalidParams.Err() + } + ctx = interceptor.WrapServerCtx(ctx) + + gid := newGid() + downgradeBranchURL := rpcclient.GetStockEndpoint() + stockV1.Downgrade_DowngradeBranch_FullMethodName + downgradeBranchBody := &stockV1.DowngradeBranchRequest{ + Gid: gid, + Key: getStockCacheKey(req.Id), + Id: req.Id, + Stock: req.Stock, + } + headers := map[string]string{interceptor.ContextRequestIDKey: interceptor.ServerCtxRequestID(ctx)} + + saga := 
dtmgrpc.NewSagaGrpc(rpcclient.GetDtmEndpoint(), gid, dtmgrpc.WithBranchHeaders(headers)) + saga.Add(downgradeBranchURL, "", downgradeBranchBody) + saga.RetryInterval = 3 + err = saga.Submit() + if err != nil { + logger.Warn("saga.Submit error", logger.Err(err), logger.String("dtm gid", gid), interceptor.ServerCtxRequestIDField(ctx)) + return nil, adaptErr(err) + } + + logger.Info("更新数据,升降级中的DB和缓存强一致性", logger.Any("dtm gid", gid)) + + return &stockV1.UpdateDowngradeRequestReply{}, nil +} + +// Query 查询 +func (s *downgrade) Query(ctx context.Context, req *stockV1.QueryDowngradeRequest) (*stockV1.QueryDowngradeReply, error) { + err := req.Validate() + if err != nil { + logger.Warn("req.Validate error", logger.Err(err), logger.Any("req", req), interceptor.ServerCtxRequestIDField(ctx)) + return nil, ecode.StatusInvalidParams.Err() + } + ctx = interceptor.WrapServerCtx(ctx) + + key := getStockCacheKey(req.Id) + queryFn := func() (string, error) { + return dao.GetStockByID(s.db, req.Id) + } + + value, err := s.strongCacheClient.Fetch(key, 300*time.Second, queryFn) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, ecode.StatusNotFound.Err() + } + logger.Warn("strongCacheClient.Fetch error", logger.Err(err), logger.String("key", key), interceptor.ServerCtxRequestIDField(ctx)) + return nil, adaptErr(err) + } + + return &stockV1.QueryDowngradeReply{ + Stock: utils.StrToUint32(value), + }, nil +} + +// DowngradeBranch 升降级中的强一致性分支,注:这是dtm回调,只返回nil、codes.Internal、codes.Aborted错误码 +func (s *downgrade) DowngradeBranch(ctx context.Context, req *stockV1.DowngradeBranchRequest) (*stockV1.DowngradeBranchReply, error) { + err := req.Validate() + if err != nil { + logger.Warn("req.Validate error", logger.Err(err), logger.Any("req", req), interceptor.ServerCtxRequestIDField(ctx)) + return nil, adaptErr(err) + } + update := &model.Stock{ + ID: req.Id, + Stock: uint(req.Stock), + } + + ctx, _ = context.WithTimeout(ctx, 15*time.Second) + + err = s.strongCacheClient.LockForUpdate(ctx, req.Key, req.Gid) + if err != nil { + logger.Warn("s.strongCacheClient.LockForUpdate", logger.Err(err), logger.String("key", req.Key), logger.String("gid", req.Gid), interceptor.ServerCtxRequestIDField(ctx)) + return nil, adaptErr(err) + } + + bb, err := dtmgrpc.BarrierFromGrpc(ctx) + if err != nil { + return nil, adaptErr(err) + } + err = bb.CallWithDB(s.db, func(tx *sql.Tx) error { + // if business failed, user should return error dtmcli.ErrFailure + // other error will be retried + return dao.UpdateStockInTx(tx, update) + }) + if err != nil { + logger.Warn("dao.UpdateStockInTx", logger.Err(err), logger.String("key", req.Key), logger.String("gid", req.Gid), interceptor.ServerCtxRequestIDField(ctx)) + return nil, adaptErr(err) + } + + err = s.strongCacheClient.UnlockForUpdate(ctx, req.Key, req.Gid) + if err != nil { + logger.Warn("s.strongCacheClient.UnlockForUpdate", logger.Err(err), logger.String("key", req.Key), logger.String("gid", req.Gid), interceptor.ServerCtxRequestIDField(ctx)) + return nil, adaptErr(err) + } + + return &stockV1.DowngradeBranchReply{}, nil +} diff --git a/a_micro-grpc-http-protobuf/internal/service/user_client_test.go b/_13_sponge-dtm-cache/grpc+http/internal/service/downgrade_client_test.go similarity index 52% rename from a_micro-grpc-http-protobuf/internal/service/user_client_test.go rename to _13_sponge-dtm-cache/grpc+http/internal/service/downgrade_client_test.go index 44b01cf..7f2ed71 100644 --- a/a_micro-grpc-http-protobuf/internal/service/user_client_test.go +++ 
b/_13_sponge-dtm-cache/grpc+http/internal/service/downgrade_client_test.go @@ -1,6 +1,6 @@ // Code generated by https://github.com/zhufuyi/sponge -// Test_service_user_methods is used to test the user api -// Test_service_user_benchmark is used to performance test the user api +// Test_service_downgrade_methods is used to test the downgrade api +// Test_service_downgrade_benchmark is used to performance test the downgrade api package service @@ -13,15 +13,15 @@ import ( "github.com/zhufuyi/sponge/pkg/grpc/benchmark" - userV1 "user/api/user/v1" - "user/configs" - "user/internal/config" + stockV1 "stock/api/stock/v1" + "stock/configs" + "stock/internal/config" ) -// Test service user api via grpc client -func Test_service_user_methods(t *testing.T) { +// Test service downgrade api via grpc client +func Test_service_downgrade_methods(t *testing.T) { conn := getRPCClientConnForTest() - cli := userV1.NewUserClient(conn) + cli := stockV1.NewDowngradeClient(conn) ctx, _ := context.WithTimeout(context.Background(), time.Second*30) tests := []struct { @@ -31,57 +31,44 @@ func Test_service_user_methods(t *testing.T) { }{ { - name: "Register", + name: "Update", fn: func() (interface{}, error) { // todo type in the parameters before testing - req := &userV1.RegisterRequest{ - Email: "", - Password: "", + req := &stockV1.UpdateDowngradeRequest{ + Id: 0, + Stock: 0, // 库存数量 } - return cli.Register(ctx, req) + return cli.Update(ctx, req) }, wantErr: false, }, { - name: "Login", + name: "Query", fn: func() (interface{}, error) { // todo type in the parameters before testing - req := &userV1.LoginRequest{ - Email: "foo@bar.com", - Password: "123456", + req := &stockV1.QueryDowngradeRequest{ + Id: 0, } - return cli.Login(ctx, req) + return cli.Query(ctx, req) }, wantErr: false, }, { - name: "Logout", + name: "DowngradeBranch", fn: func() (interface{}, error) { // todo type in the parameters before testing - req := &userV1.LogoutRequest{ + req := &stockV1.DowngradeBranchRequest{ + Gid: "", // dtm gid + Key: "", // 缓存key Id: 0, - Token: "", - } - - return cli.Logout(ctx, req) - }, - wantErr: false, - }, - - { - name: "ChangePassword", - fn: func() (interface{}, error) { - // todo type in the parameters before testing - req := &userV1.ChangePasswordRequest{ - Id: 0, - Password: "", + Stock: 0, // 库存数量 } - return cli.ChangePassword(ctx, req) + return cli.DowngradeBranch(ctx, req) }, wantErr: false, }, @@ -100,19 +87,17 @@ func Test_service_user_methods(t *testing.T) { } } -// performance test service user api, copy the report to +// performance test service downgrade api, copy the report to // the browser to view when the pressure test is finished. -func Test_service_user_benchmark(t *testing.T) { - err := config.Init(configs.Path("user.yml")) +func Test_service_downgrade_benchmark(t *testing.T) { + err := config.Init(configs.Path("stock.yml")) if err != nil { panic(err) } - if len(config.Get().GrpcClient) == 0 { - t.Error("grpcClient is not set in user.yml") - return - } - host := fmt.Sprintf("%s:%d", config.Get().GrpcClient[0].Host, config.Get().GrpcClient[0].Port) - protoFile := configs.Path("../api/user/v1/user.proto") + + grpcClientCfg := getGRPCClientCfg() + host := fmt.Sprintf("%s:%d", grpcClientCfg.Host, grpcClientCfg.Port) + protoFile := configs.Path("../api/stock/v1/downgrade.proto") // If third-party dependencies are missing during the press test, // copy them to the project's third_party directory. 
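
downgrade.go above takes a different route from the message-based handlers: Update only registers a one-branch saga and returns, while the real work happens in DowngradeBranch, which takes a per-key lock owned by the dtm gid through the strong-consistency rockscache client (LockForUpdate/UnlockForUpdate), runs the barrier-protected DB update, and reports business failures as dtmcli.ErrFailure so dtm aborts instead of retrying. The sketch below shows just the saga-submission side; the package name, helper name and endpoints are illustrative assumptions.

    package dtmsketch

    import (
    	"github.com/dtm-labs/client/dtmgrpc"

    	stockV1 "stock/api/stock/v1"
    )

    // submitDowngradeSaga fires the one-branch saga used by the downgrade flow
    // (a sketch; dtmServer and stockServer are placeholder endpoints).
    func submitDowngradeSaga(gid string, body *stockV1.DowngradeBranchRequest) error {
    	dtmServer := "discovery:///dtm_service" // assumption
    	stockServer := "discovery:///stock"     // assumption

    	saga := dtmgrpc.NewSagaGrpc(dtmServer, gid)
    	saga.Add(stockServer+stockV1.Downgrade_DowngradeBranch_FullMethodName, "", body) // forward action only, no compensation branch
    	saga.RetryInterval = 3                                                           // seconds between retries while the branch keeps failing with codes.Internal
    	return saga.Submit()
    }
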
dependentProtoFilePath := []string{ @@ -127,16 +112,16 @@ func Test_service_user_benchmark(t *testing.T) { }{ { - name: "Register", + name: "Update", fn: func() error { // todo type in the parameters before benchmark testing - message := &userV1.RegisterRequest{ - Email: "", - Password: "", + message := &stockV1.UpdateDowngradeRequest{ + Id: 0, + Stock: 0, // 库存数量 } total := 1000 // total number of requests - b, err := benchmark.New(host, protoFile, "Register", message, dependentProtoFilePath, total) + b, err := benchmark.New(host, protoFile, "Update", message, dependentProtoFilePath, total) if err != nil { return err } @@ -146,16 +131,15 @@ func Test_service_user_benchmark(t *testing.T) { }, { - name: "Login", + name: "Query", fn: func() error { // todo type in the parameters before benchmark testing - message := &userV1.LoginRequest{ - Email: "", - Password: "", + message := &stockV1.QueryDowngradeRequest{ + Id: 0, } total := 1000 // total number of requests - b, err := benchmark.New(host, protoFile, "Login", message, dependentProtoFilePath, total) + b, err := benchmark.New(host, protoFile, "Query", message, dependentProtoFilePath, total) if err != nil { return err } @@ -165,35 +149,18 @@ func Test_service_user_benchmark(t *testing.T) { }, { - name: "Logout", + name: "DowngradeBranch", fn: func() error { // todo type in the parameters before benchmark testing - message := &userV1.LogoutRequest{ + message := &stockV1.DowngradeBranchRequest{ + Gid: "", // dtm gid + Key: "", // 缓存key Id: 0, - Token: "", - } - total := 1000 // total number of requests - - b, err := benchmark.New(host, protoFile, "Logout", message, dependentProtoFilePath, total) - if err != nil { - return err - } - return b.Run() - }, - wantErr: false, - }, - - { - name: "ChangePassword", - fn: func() error { - // todo type in the parameters before benchmark testing - message := &userV1.ChangePasswordRequest{ - Id: 0, - Password: "", + Stock: 0, // 库存数量 } total := 1000 // total number of requests - b, err := benchmark.New(host, protoFile, "ChangePassword", message, dependentProtoFilePath, total) + b, err := benchmark.New(host, protoFile, "DowngradeBranch", message, dependentProtoFilePath, total) if err != nil { return err } diff --git a/_13_sponge-dtm-cache/grpc+http/internal/service/final.go b/_13_sponge-dtm-cache/grpc+http/internal/service/final.go new file mode 100644 index 0000000..5362317 --- /dev/null +++ b/_13_sponge-dtm-cache/grpc+http/internal/service/final.go @@ -0,0 +1,116 @@ +// Code generated by https://github.com/zhufuyi/sponge + +package service + +import ( + "context" + "database/sql" + "errors" + "time" + + "github.com/dtm-labs/client/dtmcli" + "github.com/dtm-labs/client/dtmgrpc" + "github.com/dtm-labs/rockscache" + "google.golang.org/grpc" + + "github.com/zhufuyi/sponge/pkg/grpc/interceptor" + "github.com/zhufuyi/sponge/pkg/logger" + "github.com/zhufuyi/sponge/pkg/utils" + + stockV1 "stock/api/stock/v1" + "stock/internal/dao" + "stock/internal/ecode" + "stock/internal/model" + "stock/internal/rpcclient" +) + +func init() { + registerFns = append(registerFns, func(server *grpc.Server) { + stockV1.RegisterFinalServer(server, NewFinalServer()) + }) +} + +var _ stockV1.FinalServer = (*final)(nil) + +type final struct { + stockV1.UnimplementedFinalServer + + db *sql.DB + cacheClient *rockscache.Client +} + +// NewFinalServer create a server +func NewFinalServer() stockV1.FinalServer { + return &final{ + db: model.GetSDB(), + cacheClient: model.GetRockscacheClient(), + } +} + +// Update 更新数据,DB和缓存最终一致性 +func (s 
*final) Update(ctx context.Context, req *stockV1.UpdateFinalRequest) (*stockV1.UpdateFinalRequestReply, error) { + err := req.Validate() + if err != nil { + logger.Warn("req.Validate error", logger.Err(err), logger.Any("req", req), interceptor.ServerCtxRequestIDField(ctx)) + return nil, ecode.StatusInvalidParams.Err() + } + ctx = interceptor.WrapServerCtx(ctx) + + gid := newGid() + deleteCacheURL := rpcclient.GetStockEndpoint() + stockV1.Callback_DeleteCache_FullMethodName + queryPreparedURL := rpcclient.GetStockEndpoint() + stockV1.Callback_QueryPrepared_FullMethodName + deleteCacheReq := &stockV1.DeleteCacheRequest{ + Key: getStockCacheKey(req.Id), + } + stockObj := &model.Stock{ + ID: req.Id, + Stock: uint(req.Stock), + } + headers := map[string]string{interceptor.ContextRequestIDKey: interceptor.ServerCtxRequestID(ctx)} + + // 创建二阶段消息事务 + msg := dtmgrpc.NewMsgGrpc(rpcclient.GetDtmEndpoint(), gid, dtmgrpc.WithBranchHeaders(headers)) + msg.Add(deleteCacheURL, deleteCacheReq) + msg.WaitResult = true // when return success, the global transaction has finished + err = msg.DoAndSubmit(queryPreparedURL, func(bb *dtmcli.BranchBarrier) error { + return bb.CallWithDB(s.db, func(tx *sql.Tx) error { + return dao.UpdateStockInTx(tx, stockObj) + }) + }) + if err != nil { + logger.Warn("msg.DoAndSubmit error", logger.Err(err), logger.Any("req", req), interceptor.ServerCtxRequestIDField(ctx)) + return nil, adaptErr(err) + } + + logger.Info("更新数据,DB和缓存最终一致性", logger.Any("dtm gid", gid)) + + return &stockV1.UpdateFinalRequestReply{}, nil +} + +// Query 查询 +func (s *final) Query(ctx context.Context, req *stockV1.QueryFinalRequest) (*stockV1.QueryFinalReply, error) { + err := req.Validate() + if err != nil { + logger.Warn("req.Validate error", logger.Err(err), logger.Any("req", req), interceptor.ServerCtxRequestIDField(ctx)) + return nil, ecode.StatusInvalidParams.Err() + } + ctx = interceptor.WrapServerCtx(ctx) + + key := getStockCacheKey(req.Id) + queryFn := func() (string, error) { + return dao.GetStockByID(s.db, req.Id) + } + + value, err := s.cacheClient.Fetch(key, 300*time.Second, queryFn) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, ecode.StatusNotFound.Err() + } + logger.Warn("s.strongCacheClient.Fetch error", logger.Err(err), logger.String("key", key), interceptor.ServerCtxRequestIDField(ctx)) + return nil, adaptErr(err) + } + + return &stockV1.QueryFinalReply{ + Stock: utils.StrToUint32(value), + }, nil +} diff --git a/_13_sponge-dtm-cache/grpc+http/internal/service/final_client_test.go b/_13_sponge-dtm-cache/grpc+http/internal/service/final_client_test.go new file mode 100644 index 0000000..fe53033 --- /dev/null +++ b/_13_sponge-dtm-cache/grpc+http/internal/service/final_client_test.go @@ -0,0 +1,145 @@ +// Code generated by https://github.com/zhufuyi/sponge +// Test_service_final_methods is used to test the final api +// Test_service_final_benchmark is used to performance test the final api + +package service + +import ( + "context" + "encoding/json" + "fmt" + "testing" + "time" + + "github.com/zhufuyi/sponge/pkg/grpc/benchmark" + + stockV1 "stock/api/stock/v1" + "stock/configs" + "stock/internal/config" +) + +// Test service final api via grpc client +func Test_service_final_methods(t *testing.T) { + conn := getRPCClientConnForTest() + cli := stockV1.NewFinalClient(conn) + ctx, _ := context.WithTimeout(context.Background(), time.Second*30) + + tests := []struct { + name string + fn func() (interface{}, error) + wantErr bool + }{ + + { + name: "Update", + fn: func() 
(interface{}, error) { + // todo type in the parameters before testing + req := &stockV1.UpdateFinalRequest{ + Id: 0, + Stock: 0, // 库存数量 + } + + return cli.Update(ctx, req) + }, + wantErr: false, + }, + + { + name: "Query", + fn: func() (interface{}, error) { + // todo type in the parameters before testing + req := &stockV1.QueryFinalRequest{ + Id: 0, + } + + return cli.Query(ctx, req) + }, + wantErr: false, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got, err := tt.fn() + if (err != nil) != tt.wantErr { + t.Errorf("test '%s' error = %v, wantErr %v", tt.name, err, tt.wantErr) + return + } + data, _ := json.MarshalIndent(got, "", " ") + fmt.Println(string(data)) + }) + } +} + +// performance test service final api, copy the report to +// the browser to view when the pressure test is finished. +func Test_service_final_benchmark(t *testing.T) { + err := config.Init(configs.Path("stock.yml")) + if err != nil { + panic(err) + } + + grpcClientCfg := getGRPCClientCfg() + host := fmt.Sprintf("%s:%d", grpcClientCfg.Host, grpcClientCfg.Port) + protoFile := configs.Path("../api/stock/v1/final.proto") + // If third-party dependencies are missing during the press test, + // copy them to the project's third_party directory. + dependentProtoFilePath := []string{ + configs.Path("../third_party"), // third_party directory + configs.Path(".."), // Previous level of third_party + } + + tests := []struct { + name string + fn func() error + wantErr bool + }{ + + { + name: "Update", + fn: func() error { + // todo type in the parameters before benchmark testing + message := &stockV1.UpdateFinalRequest{ + Id: 0, + Stock: 0, // 库存数量 + } + total := 1000 // total number of requests + + b, err := benchmark.New(host, protoFile, "Update", message, dependentProtoFilePath, total) + if err != nil { + return err + } + return b.Run() + }, + wantErr: false, + }, + + { + name: "Query", + fn: func() error { + // todo type in the parameters before benchmark testing + message := &stockV1.QueryFinalRequest{ + Id: 0, + } + total := 1000 // total number of requests + + b, err := benchmark.New(host, protoFile, "Query", message, dependentProtoFilePath, total) + if err != nil { + return err + } + return b.Run() + }, + wantErr: false, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + err := tt.fn() + if (err != nil) != tt.wantErr { + t.Errorf("test '%s' error = %v, wantErr %v", tt.name, err, tt.wantErr) + return + } + }) + } +} diff --git a/_13_sponge-dtm-cache/grpc+http/internal/service/service.go b/_13_sponge-dtm-cache/grpc+http/internal/service/service.go new file mode 100644 index 0000000..83c3ce5 --- /dev/null +++ b/_13_sponge-dtm-cache/grpc+http/internal/service/service.go @@ -0,0 +1,22 @@ +// Package service A grpc server-side or client-side package that handles business logic. 
+package service + +import ( + "google.golang.org/grpc" + "google.golang.org/grpc/health" + healthPB "google.golang.org/grpc/health/grpc_health_v1" +) + +var ( + // registerFns collection of registration methods + registerFns []func(server *grpc.Server) +) + +// RegisterAllService register all services to the service +func RegisterAllService(server *grpc.Server) { + healthPB.RegisterHealthServer(server, health.NewServer()) // Register for Health Screening + + for _, fn := range registerFns { + fn(server) + } +} diff --git a/a_micro-grpc-http-protobuf/internal/service/service_test.go b/_13_sponge-dtm-cache/grpc+http/internal/service/service_test.go similarity index 76% rename from a_micro-grpc-http-protobuf/internal/service/service_test.go rename to _13_sponge-dtm-cache/grpc+http/internal/service/service_test.go index 097b83a..dbce1df 100644 --- a/a_micro-grpc-http-protobuf/internal/service/service_test.go +++ b/_13_sponge-dtm-cache/grpc+http/internal/service/service_test.go @@ -20,8 +20,8 @@ import ( "github.com/zhufuyi/sponge/pkg/servicerd/registry/nacos" "github.com/zhufuyi/sponge/pkg/utils" - "user/configs" - "user/internal/config" + "stock/configs" + "stock/internal/config" ) var ioEOF = io.EOF @@ -37,32 +37,11 @@ func TestRegisterAllService(t *testing.T) { // The default is to connect to the local grpc server, if you want to connect to a remote grpc server, // pass in the parameter grpcClient. func getRPCClientConnForTest(grpcClient ...config.GrpcClient) *grpc.ClientConn { - err := config.Init(configs.Path("user.yml")) + err := config.Init(configs.Path("stock.yml")) if err != nil { panic(err) } - var grpcClientCfg config.GrpcClient - - if len(grpcClient) == 0 { - // the default priority is to use the GrpcClient's 0th grpc server address - if len(config.Get().GrpcClient) > 0 { - grpcClientCfg = config.Get().GrpcClient[0] - } else { - grpcClientCfg = config.GrpcClient{ - Host: config.Get().App.Host, - Port: config.Get().Grpc.Port, - // If RegistryDiscoveryType is not empty, service discovery is used, and Host and Port values are invalid - RegistryDiscoveryType: config.Get().App.RegistryDiscoveryType, // supports consul, etcd and nacos - Name: config.Get().App.Name, - } - if grpcClientCfg.RegistryDiscoveryType != "" { - grpcClientCfg.EnableLoadBalance = true - } - } - } else { - // custom config - grpcClientCfg = grpcClient[0] - } + grpcClientCfg := getGRPCClientCfg(grpcClient...) 
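
service.go above is the registration hub: each service file appends a registration function to registerFns from its init(), and RegisterAllService wires those registrations plus the standard gRPC health service into a single server. A minimal sketch of how that entry point is typically used follows; the listener address and error handling are illustrative (the real wiring lives in internal/server/grpc.go), and 8282 simply matches the gRPC port exposed by the Dockerfiles later in this patch.

    package main

    import (
    	"log"
    	"net"

    	"google.golang.org/grpc"

    	"stock/internal/service"
    )

    func main() {
    	lis, err := net.Listen("tcp", ":8282") // assumption: grpc port from configs/stock.yml
    	if err != nil {
    		log.Fatal(err)
    	}

    	srv := grpc.NewServer()
    	service.RegisterAllService(srv) // health server plus every init()-registered service

    	if err := srv.Serve(lis); err != nil {
    		log.Fatal(err)
    	}
    }
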
var cliOptions []grpccli.Option @@ -156,3 +135,39 @@ func getRPCClientConnForTest(grpcClient ...config.GrpcClient) *grpc.ClientConn { return conn } + +func getGRPCClientCfg(grpcClient ...config.GrpcClient) config.GrpcClient { + var grpcClientCfg config.GrpcClient + + // custom config + if len(grpcClient) > 0 { + // parameter config, highest priority + grpcClientCfg = grpcClient[0] + } else { + // grpcClient config in the yaml file, second priority + if len(config.Get().GrpcClient) > 0 { + for _, v := range config.Get().GrpcClient { + if v.Name == config.Get().App.Name { // match the current app name + grpcClientCfg = v + break + } + } + } + } + + // if there is no custom configuration, use the default configuration + if grpcClientCfg.Name == "" { + grpcClientCfg = config.GrpcClient{ + Host: config.Get().App.Host, + Port: config.Get().Grpc.Port, + // If RegistryDiscoveryType is not empty, service discovery is used, and Host and Port values are invalid + RegistryDiscoveryType: config.Get().App.RegistryDiscoveryType, // supports consul, etcd and nacos + Name: config.Get().App.Name, + } + if grpcClientCfg.RegistryDiscoveryType != "" { + grpcClientCfg.EnableLoadBalance = true + } + } + + return grpcClientCfg +} diff --git a/_13_sponge-dtm-cache/grpc+http/internal/service/stock.go b/_13_sponge-dtm-cache/grpc+http/internal/service/stock.go new file mode 100644 index 0000000..1170b97 --- /dev/null +++ b/_13_sponge-dtm-cache/grpc+http/internal/service/stock.go @@ -0,0 +1,200 @@ +package service + +import ( + "context" + "errors" + "strings" + "time" + + "github.com/jinzhu/copier" + "google.golang.org/grpc" + + "github.com/zhufuyi/sponge/pkg/ggorm/query" + "github.com/zhufuyi/sponge/pkg/grpc/interceptor" + "github.com/zhufuyi/sponge/pkg/logger" + "github.com/zhufuyi/sponge/pkg/utils" + + stockV1 "stock/api/stock/v1" + "stock/internal/cache" + "stock/internal/dao" + "stock/internal/ecode" + "stock/internal/model" +) + +func init() { + registerFns = append(registerFns, func(server *grpc.Server) { + stockV1.RegisterStockServer(server, NewStockServer()) // register service to the rpc service + }) +} + +var _ stockV1.StockServer = (*stock)(nil) +var _ time.Time + +type stock struct { + stockV1.UnimplementedStockServer + + iDao dao.StockDao +} + +// NewStockServer create a new service +func NewStockServer() stockV1.StockServer { + return &stock{ + iDao: dao.NewStockDao( + model.GetDB(), + cache.NewStockCache(model.GetCacheType()), + ), + } +} + +// Create a record +func (s *stock) Create(ctx context.Context, req *stockV1.CreateStockRequest) (*stockV1.CreateStockReply, error) { + err := req.Validate() + if err != nil { + logger.Warn("req.Validate error", logger.Err(err), logger.Any("req", req), interceptor.ServerCtxRequestIDField(ctx)) + return nil, ecode.StatusInvalidParams.Err() + } + ctx = interceptor.WrapServerCtx(ctx) + + record := &model.Stock{} + err = copier.Copy(record, req) + if err != nil { + return nil, ecode.StatusCreateStock.Err() + } + // Note: if copier.Copy cannot assign a value to a field, add it here + + err = s.iDao.Create(ctx, record) + if err != nil { + logger.Error("Create error", logger.Err(err), logger.Any("stock", record), interceptor.ServerCtxRequestIDField(ctx)) + return nil, ecode.StatusInternalServerError.ToRPCErr() + } + + return &stockV1.CreateStockReply{Id: record.ID}, nil +} + +// DeleteByID delete a record by id +func (s *stock) DeleteByID(ctx context.Context, req *stockV1.DeleteStockByIDRequest) (*stockV1.DeleteStockByIDReply, error) { + err := req.Validate() + if err != 
nil { + logger.Warn("req.Validate error", logger.Err(err), logger.Any("req", req), interceptor.ServerCtxRequestIDField(ctx)) + return nil, ecode.StatusInvalidParams.Err() + } + ctx = interceptor.WrapServerCtx(ctx) + + err = s.iDao.DeleteByID(ctx, req.Id) + if err != nil { + logger.Error("DeleteByID error", logger.Err(err), logger.Any("id", req.Id), interceptor.ServerCtxRequestIDField(ctx)) + return nil, ecode.StatusInternalServerError.ToRPCErr() + } + + return &stockV1.DeleteStockByIDReply{}, nil +} + +// UpdateByID update a record by id +func (s *stock) UpdateByID(ctx context.Context, req *stockV1.UpdateStockByIDRequest) (*stockV1.UpdateStockByIDReply, error) { + err := req.Validate() + if err != nil { + logger.Warn("req.Validate error", logger.Err(err), logger.Any("req", req), interceptor.ServerCtxRequestIDField(ctx)) + return nil, ecode.StatusInvalidParams.Err() + } + ctx = interceptor.WrapServerCtx(ctx) + + record := &model.Stock{} + err = copier.Copy(record, req) + if err != nil { + return nil, ecode.StatusUpdateByIDStock.Err() + } + // Note: if copier.Copy cannot assign a value to a field, add it here + record.ID = req.Id + + err = s.iDao.UpdateByID(ctx, record) + if err != nil { + logger.Error("UpdateByID error", logger.Err(err), logger.Any("stock", record), interceptor.ServerCtxRequestIDField(ctx)) + return nil, ecode.StatusInternalServerError.ToRPCErr() + } + + return &stockV1.UpdateStockByIDReply{}, nil +} + +// GetByID get a record by id +func (s *stock) GetByID(ctx context.Context, req *stockV1.GetStockByIDRequest) (*stockV1.GetStockByIDReply, error) { + err := req.Validate() + if err != nil { + logger.Warn("req.Validate error", logger.Err(err), logger.Any("req", req), interceptor.ServerCtxRequestIDField(ctx)) + return nil, ecode.StatusInvalidParams.Err() + } + ctx = interceptor.WrapServerCtx(ctx) + + record, err := s.iDao.GetByID(ctx, req.Id) + if err != nil { + if errors.Is(err, model.ErrRecordNotFound) { + logger.Warn("GetByID error", logger.Err(err), logger.Any("id", req.Id), interceptor.ServerCtxRequestIDField(ctx)) + return nil, ecode.StatusNotFound.Err() + } + logger.Error("GetByID error", logger.Err(err), logger.Any("id", req.Id), interceptor.ServerCtxRequestIDField(ctx)) + return nil, ecode.StatusInternalServerError.ToRPCErr() + } + + data, err := convertStock(record) + if err != nil { + logger.Warn("convertStock error", logger.Err(err), logger.Any("stock", record), interceptor.ServerCtxRequestIDField(ctx)) + return nil, ecode.StatusGetByIDStock.Err() + } + + return &stockV1.GetStockByIDReply{Stock: data}, nil +} + +// List of records by query parameters +func (s *stock) List(ctx context.Context, req *stockV1.ListStockRequest) (*stockV1.ListStockReply, error) { + err := req.Validate() + if err != nil { + logger.Warn("req.Validate error", logger.Err(err), logger.Any("req", req), interceptor.ServerCtxRequestIDField(ctx)) + return nil, ecode.StatusInvalidParams.Err() + } + ctx = interceptor.WrapServerCtx(ctx) + + params := &query.Params{} + err = copier.Copy(params, req.Params) + if err != nil { + return nil, ecode.StatusListStock.Err() + } + // Note: if copier.Copy cannot assign a value to a field, add it here + + records, total, err := s.iDao.GetByColumns(ctx, params) + if err != nil { + if strings.Contains(err.Error(), "query params error:") { + logger.Warn("GetByColumns error", logger.Err(err), logger.Any("params", params), interceptor.ServerCtxRequestIDField(ctx)) + return nil, ecode.StatusInvalidParams.Err() + } + logger.Error("GetByColumns error", logger.Err(err), 
logger.Any("params", params), interceptor.ServerCtxRequestIDField(ctx)) + return nil, ecode.StatusInternalServerError.ToRPCErr() + } + + stocks := []*stockV1.Stock{} + for _, record := range records { + data, err := convertStock(record) + if err != nil { + logger.Warn("convertStock error", logger.Err(err), logger.Any("id", record.ID), interceptor.ServerCtxRequestIDField(ctx)) + continue + } + stocks = append(stocks, data) + } + + return &stockV1.ListStockReply{ + Total: total, + Stocks: stocks, + }, nil +} + +func convertStock(record *model.Stock) (*stockV1.Stock, error) { + value := &stockV1.Stock{} + err := copier.Copy(value, record) + if err != nil { + return nil, err + } + // Note: if copier.Copy cannot assign a value to a field, add it here, e.g. CreatedAt, UpdatedAt + value.Id = record.ID + value.CreatedAt = utils.FormatDateTimeRFC3339(*record.CreatedAt) + value.UpdatedAt = utils.FormatDateTimeRFC3339(*record.UpdatedAt) + + return value, nil +} diff --git a/_13_sponge-dtm-cache/grpc+http/internal/service/stock_client_test.go b/_13_sponge-dtm-cache/grpc+http/internal/service/stock_client_test.go new file mode 100644 index 0000000..eb9806b --- /dev/null +++ b/_13_sponge-dtm-cache/grpc+http/internal/service/stock_client_test.go @@ -0,0 +1,242 @@ +// Code generated by https://github.com/zhufuyi/sponge +// Test_service_stock_methods is used to test the stock api +// Test_service_stock_benchmark is used to performance test the stock api + +package service + +import ( + "context" + "encoding/json" + "fmt" + "testing" + "time" + + "github.com/zhufuyi/sponge/pkg/grpc/benchmark" + + stockV1 "stock/api/stock/v1" + "stock/configs" + "stock/internal/config" +) + +// Test service stock api via grpc client +func Test_service_stock_methods(t *testing.T) { + conn := getRPCClientConnForTest() + cli := stockV1.NewStockClient(conn) + ctx, _ := context.WithTimeout(context.Background(), time.Second*30) + + tests := []struct { + name string + fn func() (interface{}, error) + wantErr bool + }{ + + { + name: "Create", + fn: func() (interface{}, error) { + // todo type in the parameters before testing + req := &stockV1.CreateStockRequest{ + ProductID: 0, // 商品id + Stock: 0, // 库存 + } + + return cli.Create(ctx, req) + }, + wantErr: false, + }, + + { + name: "DeleteByID", + fn: func() (interface{}, error) { + // todo type in the parameters before testing + req := &stockV1.DeleteStockByIDRequest{ + Id: 0, + } + + return cli.DeleteByID(ctx, req) + }, + wantErr: false, + }, + + { + name: "UpdateByID", + fn: func() (interface{}, error) { + // todo type in the parameters before testing + req := &stockV1.UpdateStockByIDRequest{ + Id: 0, + ProductID: 0, // 商品id + Stock: 0, // 库存 + } + + return cli.UpdateByID(ctx, req) + }, + wantErr: false, + }, + + { + name: "GetByID", + fn: func() (interface{}, error) { + // todo type in the parameters before testing + req := &stockV1.GetStockByIDRequest{ + Id: 0, + } + + return cli.GetByID(ctx, req) + }, + wantErr: false, + }, + + { + name: "List", + fn: func() (interface{}, error) { + // todo type in the parameters before testing + req := &stockV1.ListStockRequest{ + Params: nil, + } + + return cli.List(ctx, req) + }, + wantErr: false, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got, err := tt.fn() + if (err != nil) != tt.wantErr { + t.Errorf("test '%s' error = %v, wantErr %v", tt.name, err, tt.wantErr) + return + } + data, _ := json.MarshalIndent(got, "", " ") + fmt.Println(string(data)) + }) + } +} + +// performance test service stock api, 
copy the report to +// the browser to view when the pressure test is finished. +func Test_service_stock_benchmark(t *testing.T) { + err := config.Init(configs.Path("stock.yml")) + if err != nil { + panic(err) + } + + grpcClientCfg := getGRPCClientCfg() + host := fmt.Sprintf("%s:%d", grpcClientCfg.Host, grpcClientCfg.Port) + protoFile := configs.Path("../api/stock/v1/stock.proto") + // If third-party dependencies are missing during the press test, + // copy them to the project's third_party directory. + dependentProtoFilePath := []string{ + configs.Path("../third_party"), // third_party directory + configs.Path(".."), // Previous level of third_party + } + + tests := []struct { + name string + fn func() error + wantErr bool + }{ + + { + name: "Create", + fn: func() error { + // todo type in the parameters before benchmark testing + message := &stockV1.CreateStockRequest{ + ProductID: 0, // 商品id + Stock: 0, // 库存 + } + total := 1000 // total number of requests + + b, err := benchmark.New(host, protoFile, "Create", message, dependentProtoFilePath, total) + if err != nil { + return err + } + return b.Run() + }, + wantErr: false, + }, + + { + name: "DeleteByID", + fn: func() error { + // todo type in the parameters before benchmark testing + message := &stockV1.DeleteStockByIDRequest{ + Id: 0, + } + total := 1000 // total number of requests + + b, err := benchmark.New(host, protoFile, "DeleteByID", message, dependentProtoFilePath, total) + if err != nil { + return err + } + return b.Run() + }, + wantErr: false, + }, + + { + name: "UpdateByID", + fn: func() error { + // todo type in the parameters before benchmark testing + message := &stockV1.UpdateStockByIDRequest{ + Id: 0, + ProductID: 0, // 商品id + Stock: 0, // 库存 + } + total := 1000 // total number of requests + + b, err := benchmark.New(host, protoFile, "UpdateByID", message, dependentProtoFilePath, total) + if err != nil { + return err + } + return b.Run() + }, + wantErr: false, + }, + + { + name: "GetByID", + fn: func() error { + // todo type in the parameters before benchmark testing + message := &stockV1.GetStockByIDRequest{ + Id: 0, + } + total := 1000 // total number of requests + + b, err := benchmark.New(host, protoFile, "GetByID", message, dependentProtoFilePath, total) + if err != nil { + return err + } + return b.Run() + }, + wantErr: false, + }, + + { + name: "List", + fn: func() error { + // todo type in the parameters before benchmark testing + message := &stockV1.ListStockRequest{ + Params: nil, + } + total := 1000 // total number of requests + + b, err := benchmark.New(host, protoFile, "List", message, dependentProtoFilePath, total) + if err != nil { + return err + } + return b.Run() + }, + wantErr: false, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + err := tt.fn() + if (err != nil) != tt.wantErr { + t.Errorf("test '%s' error = %v, wantErr %v", tt.name, err, tt.wantErr) + return + } + }) + } +} diff --git a/_13_sponge-dtm-cache/grpc+http/internal/service/strong.go b/_13_sponge-dtm-cache/grpc+http/internal/service/strong.go new file mode 100644 index 0000000..c7724c8 --- /dev/null +++ b/_13_sponge-dtm-cache/grpc+http/internal/service/strong.go @@ -0,0 +1,116 @@ +// Code generated by https://github.com/zhufuyi/sponge + +package service + +import ( + "context" + "database/sql" + "errors" + "time" + + "github.com/dtm-labs/client/dtmcli" + "github.com/dtm-labs/client/dtmgrpc" + "github.com/dtm-labs/rockscache" + "google.golang.org/grpc" + + "github.com/zhufuyi/sponge/pkg/grpc/interceptor" + 
"github.com/zhufuyi/sponge/pkg/logger" + "github.com/zhufuyi/sponge/pkg/utils" + + stockV1 "stock/api/stock/v1" + "stock/internal/dao" + "stock/internal/ecode" + "stock/internal/model" + "stock/internal/rpcclient" +) + +func init() { + registerFns = append(registerFns, func(server *grpc.Server) { + stockV1.RegisterStrongServer(server, NewStrongServer()) + }) +} + +var _ stockV1.StrongServer = (*strong)(nil) + +type strong struct { + stockV1.UnimplementedStrongServer + + db *sql.DB + strongCacheClient *rockscache.Client +} + +// NewStrongServer create a server +func NewStrongServer() stockV1.StrongServer { + return &strong{ + db: model.GetSDB(), + strongCacheClient: model.GetStrongRockscacheClient(), + } +} + +// Update 更新数据,DB和缓存强一致性 +func (s *strong) Update(ctx context.Context, req *stockV1.UpdateStrongRequest) (*stockV1.UpdateStrongRequestReply, error) { + err := req.Validate() + if err != nil { + logger.Warn("req.Validate error", logger.Err(err), logger.Any("req", req), interceptor.ServerCtxRequestIDField(ctx)) + return nil, ecode.StatusInvalidParams.Err() + } + ctx = interceptor.WrapServerCtx(ctx) + + gid := newGid() + queryPreparedURL := rpcclient.GetStockEndpoint() + stockV1.Callback_QueryPrepared_FullMethodName + deleteCacheURL := rpcclient.GetStockEndpoint() + stockV1.Callback_DeleteCache_FullMethodName + deleteCacheReq := &stockV1.DeleteCacheRequest{ + Key: getStockCacheKey(req.Id), + } + stockObj := &model.Stock{ + ID: req.Id, + Stock: uint(req.Stock), + } + headers := map[string]string{interceptor.ContextRequestIDKey: interceptor.ServerCtxRequestID(ctx)} + + // 创建二阶段消息事务 + msg := dtmgrpc.NewMsgGrpc(rpcclient.GetDtmEndpoint(), gid, dtmgrpc.WithBranchHeaders(headers)) + msg.Add(deleteCacheURL, deleteCacheReq) + msg.WaitResult = false // when return success, the global transaction has finished + err = msg.DoAndSubmit(queryPreparedURL, func(bb *dtmcli.BranchBarrier) error { + return bb.CallWithDB(s.db, func(tx *sql.Tx) error { + return dao.UpdateStockInTx(tx, stockObj) + }) + }) + if err != nil { + logger.Warn("DoAndSubmit error", logger.Err(err), logger.Any("req", req), interceptor.ServerCtxRequestIDField(ctx)) + return nil, adaptErr(err) + } + + logger.Info("更新数据,DB和缓存强一致性", logger.Any("dtm gid", gid)) + + return &stockV1.UpdateStrongRequestReply{}, nil +} + +// Query 查询 +func (s *strong) Query(ctx context.Context, req *stockV1.QueryStrongRequest) (*stockV1.QueryStrongReply, error) { + err := req.Validate() + if err != nil { + logger.Warn("req.Validate error", logger.Err(err), logger.Any("req", req), interceptor.ServerCtxRequestIDField(ctx)) + return nil, ecode.StatusInvalidParams.Err() + } + ctx = interceptor.WrapServerCtx(ctx) + + key := getStockCacheKey(req.Id) + queryFn := func() (string, error) { + return dao.GetStockByID(s.db, req.Id) + } + + value, err := s.strongCacheClient.Fetch(key, 300*time.Second, queryFn) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, ecode.StatusNotFound.Err() + } + logger.Warn("s.strongCacheClient.Fetch error", logger.Err(err), logger.String("key", key), interceptor.ServerCtxRequestIDField(ctx)) + return nil, adaptErr(err) + } + + return &stockV1.QueryStrongReply{ + Stock: utils.StrToUint32(value), + }, err +} diff --git a/_13_sponge-dtm-cache/grpc+http/internal/service/strong_client_test.go b/_13_sponge-dtm-cache/grpc+http/internal/service/strong_client_test.go new file mode 100644 index 0000000..cd83c1a --- /dev/null +++ b/_13_sponge-dtm-cache/grpc+http/internal/service/strong_client_test.go @@ -0,0 +1,145 @@ +// Code 
generated by https://github.com/zhufuyi/sponge +// Test_service_strong_methods is used to test the strong api +// Test_service_strong_benchmark is used to performance test the strong api + +package service + +import ( + "context" + "encoding/json" + "fmt" + "testing" + "time" + + "github.com/zhufuyi/sponge/pkg/grpc/benchmark" + + stockV1 "stock/api/stock/v1" + "stock/configs" + "stock/internal/config" +) + +// Test service strong api via grpc client +func Test_service_strong_methods(t *testing.T) { + conn := getRPCClientConnForTest() + cli := stockV1.NewStrongClient(conn) + ctx, _ := context.WithTimeout(context.Background(), time.Second*30) + + tests := []struct { + name string + fn func() (interface{}, error) + wantErr bool + }{ + + { + name: "Update", + fn: func() (interface{}, error) { + // todo type in the parameters before testing + req := &stockV1.UpdateStrongRequest{ + Id: 0, + Stock: 0, // 库存数量 + } + + return cli.Update(ctx, req) + }, + wantErr: false, + }, + + { + name: "Query", + fn: func() (interface{}, error) { + // todo type in the parameters before testing + req := &stockV1.QueryStrongRequest{ + Id: 0, + } + + return cli.Query(ctx, req) + }, + wantErr: false, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got, err := tt.fn() + if (err != nil) != tt.wantErr { + t.Errorf("test '%s' error = %v, wantErr %v", tt.name, err, tt.wantErr) + return + } + data, _ := json.MarshalIndent(got, "", " ") + fmt.Println(string(data)) + }) + } +} + +// performance test service strong api, copy the report to +// the browser to view when the pressure test is finished. +func Test_service_strong_benchmark(t *testing.T) { + err := config.Init(configs.Path("stock.yml")) + if err != nil { + panic(err) + } + + grpcClientCfg := getGRPCClientCfg() + host := fmt.Sprintf("%s:%d", grpcClientCfg.Host, grpcClientCfg.Port) + protoFile := configs.Path("../api/stock/v1/strong.proto") + // If third-party dependencies are missing during the press test, + // copy them to the project's third_party directory. 
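
strong.go and the other Query handlers all read through rockscache: Fetch takes a cache key, a 300-second TTL and a query function, fills the cache from the database on a miss, and the DeleteCache callback invalidates entries with TagAsDeleted, a delayed-delete tag that forces the next Fetch to reload. The sketch below pairs the two calls outside the service for clarity; the key, SQL and the commented strong-consistency option are assumptions for illustration.

    package dtmsketch

    import (
    	"database/sql"
    	"time"

    	"github.com/dtm-labs/rockscache"
    	"github.com/go-redis/redis/v8"
    )

    // readThenInvalidate shows the rockscache read-through plus delayed-delete pair
    // used by this service (a sketch with a placeholder key and SQL).
    func readThenInvalidate(rdb *redis.Client, db *sql.DB) (string, error) {
    	rc := rockscache.NewClient(rdb, rockscache.NewDefaultOptions())
    	// rc.Options.StrongConsistency = true // assumption: the switch behind the "strong" client

    	value, err := rc.Fetch("stock:1", 300*time.Second, func() (string, error) {
    		var stock string
    		err := db.QueryRow("SELECT stock FROM stock WHERE id = ?", 1).Scan(&stock)
    		return stock, err
    	})
    	if err != nil {
    		return "", err
    	}

    	// Invalidate: the next Fetch observes the tag and reloads from the DB.
    	return value, rc.TagAsDeleted("stock:1")
    }

The model package presumably returns two such clients, GetRockscacheClient and GetStrongRockscacheClient, differing only in this strong-consistency switch; only the strong one exposes the LockForUpdate/UnlockForUpdate flow used by downgrade.go.
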
+ dependentProtoFilePath := []string{ + configs.Path("../third_party"), // third_party directory + configs.Path(".."), // Previous level of third_party + } + + tests := []struct { + name string + fn func() error + wantErr bool + }{ + + { + name: "Update", + fn: func() error { + // todo type in the parameters before benchmark testing + message := &stockV1.UpdateStrongRequest{ + Id: 0, + Stock: 0, // 库存数量 + } + total := 1000 // total number of requests + + b, err := benchmark.New(host, protoFile, "Update", message, dependentProtoFilePath, total) + if err != nil { + return err + } + return b.Run() + }, + wantErr: false, + }, + + { + name: "Query", + fn: func() error { + // todo type in the parameters before benchmark testing + message := &stockV1.QueryStrongRequest{ + Id: 0, + } + total := 1000 // total number of requests + + b, err := benchmark.New(host, protoFile, "Query", message, dependentProtoFilePath, total) + if err != nil { + return err + } + return b.Run() + }, + wantErr: false, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + err := tt.fn() + if (err != nil) != tt.wantErr { + t.Errorf("test '%s' error = %v, wantErr %v", tt.name, err, tt.wantErr) + return + } + }) + } +} diff --git a/_13_sponge-dtm-cache/grpc+http/pkg/goredis/goredis.go b/_13_sponge-dtm-cache/grpc+http/pkg/goredis/goredis.go new file mode 100644 index 0000000..42c1733 --- /dev/null +++ b/_13_sponge-dtm-cache/grpc+http/pkg/goredis/goredis.go @@ -0,0 +1,75 @@ +package goredis + +import ( + "context" + "errors" + "strings" + "time" + + "github.com/go-redis/redis/v8" +) + +var ( + redisCli *redis.Client + + // ErrCacheNotFound No hit cache + ErrCacheNotFound = redis.Nil +) + +// Init connecting to redis +// dsn supported formats. +// (1) no password, no db: localhost:6379 +// (2) with password and db: :@localhost:6379/2 +// (3) redis://default:123456@localhost:6379/0?max_retries=3 +// for more parameters see the redis source code for the setupConnParams function +func Init(dsn string) error { + opt, err := getRedisOptions(dsn) + if err != nil { + return err + } + + redisCli = redis.NewClient(opt) + + ctx, _ := context.WithTimeout(context.Background(), 15*time.Second) //nolint + err = redisCli.Ping(ctx).Err() + + return nil +} + +func getRedisOptions(dsn string) (*redis.Options, error) { + dsn = strings.ReplaceAll(dsn, " ", "") + if len(dsn) > 8 { + if !strings.Contains(dsn[len(dsn)-3:], "/") { + dsn += "/0" // use db 0 by default + } + + if dsn[:8] != "redis://" && dsn[:9] != "rediss://" { + dsn = "redis://" + dsn + } + } + + return redis.ParseURL(dsn) +} + +// GetRedisCli get redis client +func GetRedisCli() *redis.Client { + if redisCli == nil { + panic("redis client not initialized, call InitRedis first") + } + + return redisCli +} + +// CloseRedis close redis +func CloseRedis() error { + if redisCli == nil { + return nil + } + + err := redisCli.Close() + if err != nil && errors.Is(err, redis.ErrClosed) { + return err + } + + return nil +} diff --git a/a_micro-grpc-http-protobuf/scripts/binary-package.sh b/_13_sponge-dtm-cache/grpc+http/scripts/binary-package.sh similarity index 97% rename from a_micro-grpc-http-protobuf/scripts/binary-package.sh rename to _13_sponge-dtm-cache/grpc+http/scripts/binary-package.sh index 2467449..33e8e25 100644 --- a/a_micro-grpc-http-protobuf/scripts/binary-package.sh +++ b/_13_sponge-dtm-cache/grpc+http/scripts/binary-package.sh @@ -1,6 +1,6 @@ #!/bin/bash -serviceName="user" +serviceName="stock" mkdir -p ${serviceName}-binary/configs diff --git 
a/a_micro-grpc-http-protobuf/scripts/build/Dockerfile b/_13_sponge-dtm-cache/grpc+http/scripts/build/Dockerfile similarity index 65% rename from a_micro-grpc-http-protobuf/scripts/build/Dockerfile rename to _13_sponge-dtm-cache/grpc+http/scripts/build/Dockerfile index 6faf068..935789f 100644 --- a/a_micro-grpc-http-protobuf/scripts/build/Dockerfile +++ b/_13_sponge-dtm-cache/grpc+http/scripts/build/Dockerfile @@ -12,8 +12,8 @@ COPY grpc_health_probe /bin/grpc_health_probe RUN chmod +x /bin/grpc_health_probe COPY configs/ /app/configs/ -COPY user /app/user -RUN chmod +x /app/user +COPY stock /app/stock +RUN chmod +x /app/stock # grpc and http port EXPOSE 8282 8283 @@ -21,6 +21,6 @@ EXPOSE 8282 8283 WORKDIR /app -CMD ["./user", "-c", "configs/user.yml"] -# if you use the Configuration Center, user.yml is changed to the Configuration Center configuration. -#CMD ["./user", "-c", "configs/user.yml", "-enable-cc"] +CMD ["./stock", "-c", "configs/stock.yml"] +# if you use the Configuration Center, stock.yml is changed to the Configuration Center configuration. +#CMD ["./stock", "-c", "configs/stock.yml", "-enable-cc"] diff --git a/a_micro-grpc-http-protobuf/scripts/build/Dockerfile_build b/_13_sponge-dtm-cache/grpc+http/scripts/build/Dockerfile_build similarity index 63% rename from a_micro-grpc-http-protobuf/scripts/build/Dockerfile_build rename to _13_sponge-dtm-cache/grpc+http/scripts/build/Dockerfile_build index de417df..058aa5e 100644 --- a/a_micro-grpc-http-protobuf/scripts/build/Dockerfile_build +++ b/_13_sponge-dtm-cache/grpc+http/scripts/build/Dockerfile_build @@ -1,13 +1,13 @@ -# Need to package the code first `tar zcf user.tar.gz $(ls)` and move it to the same directory as Dokerfile +# Need to package the code first `tar zcf stock.tar.gz $(ls)` and move it to the same directory as Dokerfile # Compile the go code, you can specify the golang version FROM golang:1.21-alpine as build -COPY . /go/src/user -WORKDIR /go/src/user -RUN tar zxf user.tar.gz +COPY . /go/src/stock +WORKDIR /go/src/stock +RUN tar zxf stock.tar.gz RUN go env -w GOPROXY=https://goproxy.cn,direct RUN go mod download -RUN CGO_ENABLED=0 GOOS=linux GOARCH=amd64 go build -o /user cmd/user/main.go +RUN CGO_ENABLED=0 GOOS=linux GOARCH=amd64 go build -o /stock cmd/stock/main.go # install grpc-health-probe, for health check of grpc service RUN go install github.com/grpc-ecosystem/grpc-health-probe@v0.4.12 @@ -17,7 +17,7 @@ RUN cd $GOPATH/pkg/mod/github.com/grpc-ecosystem/grpc-health-probe@v0.4.12 \ # compressing binary files #cd / -#upx -9 user +#upx -9 stock #upx -9 grpc_health_probe @@ -33,8 +33,8 @@ RUN apk add tzdata \ # add grpc_health_probe for health check of grpc services COPY --from=build /grpc_health_probe /bin/grpc_health_probe -COPY --from=build /user /app/user -COPY --from=build /go/src/user/configs/user.yml /app/configs/user.yml +COPY --from=build /stock /app/stock +COPY --from=build /go/src/stock/configs/stock.yml /app/configs/stock.yml # grpc and http port EXPOSE 8282 8283 @@ -42,6 +42,6 @@ EXPOSE 8282 8283 WORKDIR /app -CMD ["./user", "-c", "configs/user.yml"] -# if you use the Configuration Center, user.yml is changed to the Configuration Center configuration. -#CMD ["./user", "-c", "configs/user.yml", "-enable-cc"] +CMD ["./stock", "-c", "configs/stock.yml"] +# if you use the Configuration Center, stock.yml is changed to the Configuration Center configuration. 
+#CMD ["./stock", "-c", "configs/stock.yml", "-enable-cc"] diff --git a/a_micro-grpc-http-protobuf/scripts/build/Dockerfile_test b/_13_sponge-dtm-cache/grpc+http/scripts/build/Dockerfile_test similarity index 58% rename from a_micro-grpc-http-protobuf/scripts/build/Dockerfile_test rename to _13_sponge-dtm-cache/grpc+http/scripts/build/Dockerfile_test index 06a0404..c4f380f 100644 --- a/a_micro-grpc-http-protobuf/scripts/build/Dockerfile_test +++ b/_13_sponge-dtm-cache/grpc+http/scripts/build/Dockerfile_test @@ -1,4 +1,4 @@ -# Need to package the code first `tar zcf user.tar.gz $(ls)` and move it to the same directory as Dokerfile +# Need to package the code first `tar zcf stock.tar.gz $(ls)` and move it to the same directory as Dokerfile # rpc server source code, used to test rpc methods FROM golang:1.21-alpine MAINTAINER zhufuyi "g.zhufuyi@gmail.com" @@ -6,11 +6,11 @@ MAINTAINER zhufuyi "g.zhufuyi@gmail.com" # go test dependency packages RUN apk add bash alpine-sdk build-base gcc -COPY . /go/src/user -WORKDIR /go/src/user -RUN tar zxf user.tar.gz +COPY . /go/src/stock +WORKDIR /go/src/stock +RUN tar zxf stock.tar.gz RUN go env -w GOPROXY=https://goproxy.cn,direct RUN go mod download -RUN rm -f user.tar.gz +RUN rm -f stock.tar.gz CMD ["sleep","86400"] diff --git a/_13_sponge-dtm-cache/grpc+http/scripts/build/README.md b/_13_sponge-dtm-cache/grpc+http/scripts/build/README.md new file mode 100644 index 0000000..ba0f3e8 --- /dev/null +++ b/_13_sponge-dtm-cache/grpc+http/scripts/build/README.md @@ -0,0 +1,4 @@ + +- `Dockerfile`: build the image by directly copying the compiled binaries, fast build speed. +- `Dockerfile_build`: two-stage build of the image, slower build speed, you can specify the golang version. +- `Dockerfile_test`: container for testing rpc services. diff --git a/a_micro-grpc-http-protobuf/scripts/deploy-binary.sh b/_13_sponge-dtm-cache/grpc+http/scripts/deploy-binary.sh similarity index 96% rename from a_micro-grpc-http-protobuf/scripts/deploy-binary.sh rename to _13_sponge-dtm-cache/grpc+http/scripts/deploy-binary.sh index e31b058..4d3cf46 100644 --- a/a_micro-grpc-http-protobuf/scripts/deploy-binary.sh +++ b/_13_sponge-dtm-cache/grpc+http/scripts/deploy-binary.sh @@ -1,6 +1,6 @@ #!/usr/bin/expect -set serviceName "user" +set serviceName "stock" # parameters set username [lindex $argv 0] diff --git a/a_micro-grpc-http-protobuf/scripts/deploy-docker.sh b/_13_sponge-dtm-cache/grpc+http/scripts/deploy-docker.sh similarity index 84% rename from a_micro-grpc-http-protobuf/scripts/deploy-docker.sh rename to _13_sponge-dtm-cache/grpc+http/scripts/deploy-docker.sh index 0175fb3..ef8f27a 100644 --- a/a_micro-grpc-http-protobuf/scripts/deploy-docker.sh +++ b/_13_sponge-dtm-cache/grpc+http/scripts/deploy-docker.sh @@ -10,8 +10,8 @@ function checkResult() { } mkdir -p ${dockerComposeFilePath}/configs -if [ ! -f "${dockerComposeFilePath}/configs/user.yml" ];then - cp configs/user.yml ${dockerComposeFilePath}/configs +if [ ! 
-f "${dockerComposeFilePath}/configs/stock.yml" ];then + cp configs/stock.yml ${dockerComposeFilePath}/configs fi # shellcheck disable=SC2164 diff --git a/a_micro-grpc-http-protobuf/scripts/deploy-k8s.sh b/_13_sponge-dtm-cache/grpc+http/scripts/deploy-k8s.sh similarity index 96% rename from a_micro-grpc-http-protobuf/scripts/deploy-k8s.sh rename to _13_sponge-dtm-cache/grpc+http/scripts/deploy-k8s.sh index 179ed4b..8a84082 100644 --- a/a_micro-grpc-http-protobuf/scripts/deploy-k8s.sh +++ b/_13_sponge-dtm-cache/grpc+http/scripts/deploy-k8s.sh @@ -1,6 +1,6 @@ #!/bin/bash -SERVER_NAME="user" +SERVER_NAME="stock" DEPLOY_FILE="deployments/kubernetes/${SERVER_NAME}-deployment.yml" function checkResult() { diff --git a/a_micro-grpc-http-protobuf/scripts/image-build-local.sh b/_13_sponge-dtm-cache/grpc+http/scripts/image-build-local.sh similarity index 97% rename from a_micro-grpc-http-protobuf/scripts/image-build-local.sh rename to _13_sponge-dtm-cache/grpc+http/scripts/image-build-local.sh index 70893c3..1bce8fd 100644 --- a/a_micro-grpc-http-protobuf/scripts/image-build-local.sh +++ b/_13_sponge-dtm-cache/grpc+http/scripts/image-build-local.sh @@ -3,9 +3,9 @@ # build the image for local docker, using the binaries, if you want to reduce the size of the image, # use upx to compress the binaries before building the image. -serverName="user" +serverName="stock" # image name of the service, prohibit uppercase letters in names. -IMAGE_NAME="edusys/user" +IMAGE_NAME="eshop/stock" # Dockerfile file directory DOCKERFILE_PATH="scripts/build" DOCKERFILE="${DOCKERFILE_PATH}/Dockerfile" diff --git a/a_micro-grpc-http-protobuf/scripts/image-build.sh b/_13_sponge-dtm-cache/grpc+http/scripts/image-build.sh similarity index 98% rename from a_micro-grpc-http-protobuf/scripts/image-build.sh rename to _13_sponge-dtm-cache/grpc+http/scripts/image-build.sh index 1db685e..9890b02 100644 --- a/a_micro-grpc-http-protobuf/scripts/image-build.sh +++ b/_13_sponge-dtm-cache/grpc+http/scripts/image-build.sh @@ -3,9 +3,9 @@ # build the docker image using the binaries, if you want to reduce the size of the image, # use upx to compress the binaries before building the image. -serverName="user" +serverName="stock" # image name of the service, prohibit uppercase letters in names. -IMAGE_NAME="edusys/user" +IMAGE_NAME="eshop/stock" # Dockerfile file directory DOCKERFILE_PATH="scripts/build" DOCKERFILE="${DOCKERFILE_PATH}/Dockerfile" diff --git a/a_micro-grpc-http-protobuf/scripts/image-build2.sh b/_13_sponge-dtm-cache/grpc+http/scripts/image-build2.sh similarity index 96% rename from a_micro-grpc-http-protobuf/scripts/image-build2.sh rename to _13_sponge-dtm-cache/grpc+http/scripts/image-build2.sh index c8fbcab..53f78aa 100644 --- a/a_micro-grpc-http-protobuf/scripts/image-build2.sh +++ b/_13_sponge-dtm-cache/grpc+http/scripts/image-build2.sh @@ -2,9 +2,9 @@ # two-stage build docker image -serverName="user" +serverName="stock" # image name of the service, prohibit uppercase letters in names. 
-IMAGE_NAME="edusys/user" +IMAGE_NAME="eshop/stock" # Dockerfile file directory DOCKERFILE_PATH="scripts/build" DOCKERFILE="${DOCKERFILE_PATH}/Dockerfile_build" diff --git a/a_micro-grpc-http-protobuf/scripts/image-push.sh b/_13_sponge-dtm-cache/grpc+http/scripts/image-push.sh similarity index 98% rename from a_micro-grpc-http-protobuf/scripts/image-push.sh rename to _13_sponge-dtm-cache/grpc+http/scripts/image-push.sh index 8b4792c..72e37d0 100644 --- a/a_micro-grpc-http-protobuf/scripts/image-push.sh +++ b/_13_sponge-dtm-cache/grpc+http/scripts/image-push.sh @@ -1,7 +1,7 @@ #!/bin/bash # image name, prohibit uppercase letters in names. -IMAGE_NAME="edusys/user" +IMAGE_NAME="eshop/stock" # image repo address, passed in via the first parameter REPO_HOST=$1 diff --git a/a_micro-grpc-http-protobuf/scripts/image-rpc-test.sh b/_13_sponge-dtm-cache/grpc+http/scripts/image-rpc-test.sh similarity index 95% rename from a_micro-grpc-http-protobuf/scripts/image-rpc-test.sh rename to _13_sponge-dtm-cache/grpc+http/scripts/image-rpc-test.sh index 290f41c..b2e4e52 100644 --- a/a_micro-grpc-http-protobuf/scripts/image-rpc-test.sh +++ b/_13_sponge-dtm-cache/grpc+http/scripts/image-rpc-test.sh @@ -2,9 +2,9 @@ # build rpc service test image -serverName="user" +serverName="stock" # image name of the service, prohibit uppercase letters in names. -IMAGE_NAME="edusys/user.rpc-test" +IMAGE_NAME="eshop/stock.rpc-test" # Dockerfile file directory DOCKERFILE_PATH="scripts/build" DOCKERFILE="${DOCKERFILE_PATH}/Dockerfile_test" diff --git a/a_micro-grpc-http-protobuf/scripts/patch.sh b/_13_sponge-dtm-cache/grpc+http/scripts/patch.sh similarity index 100% rename from a_micro-grpc-http-protobuf/scripts/patch.sh rename to _13_sponge-dtm-cache/grpc+http/scripts/patch.sh diff --git a/a_micro-grpc-http-protobuf/scripts/proto-doc.sh b/_13_sponge-dtm-cache/grpc+http/scripts/proto-doc.sh similarity index 100% rename from a_micro-grpc-http-protobuf/scripts/proto-doc.sh rename to _13_sponge-dtm-cache/grpc+http/scripts/proto-doc.sh diff --git a/a_micro-grpc-http-protobuf/scripts/protoc.sh b/_13_sponge-dtm-cache/grpc+http/scripts/protoc.sh similarity index 97% rename from a_micro-grpc-http-protobuf/scripts/protoc.sh rename to _13_sponge-dtm-cache/grpc+http/scripts/protoc.sh index 3dabfe4..bcc1c8c 100644 --- a/a_micro-grpc-http-protobuf/scripts/protoc.sh +++ b/_13_sponge-dtm-cache/grpc+http/scripts/protoc.sh @@ -131,9 +131,9 @@ function generateByAllProto(){ } function generateBySpecifiedProto(){ - # get the proto file of the user server + # get the proto file of the stock server allProtoFiles="" - listProtoFiles ${protoBasePath}/user + listProtoFiles ${protoBasePath}/stock cd .. 
specifiedProtoFiles="" getSpecifiedProtoFiles @@ -210,9 +210,6 @@ generateByAllProto # generate pb.go by specified proto files generateBySpecifiedProto -# check and add the special_types.go file -sponge patch add-special-types - # delete unused packages in pb.go handlePbGoFiles $protoBasePath diff --git a/a_micro-grpc-http-protobuf/scripts/run-nohup.sh b/_13_sponge-dtm-cache/grpc+http/scripts/run-nohup.sh similarity index 96% rename from a_micro-grpc-http-protobuf/scripts/run-nohup.sh rename to _13_sponge-dtm-cache/grpc+http/scripts/run-nohup.sh index 0bd31c3..d0b97e2 100644 --- a/a_micro-grpc-http-protobuf/scripts/run-nohup.sh +++ b/_13_sponge-dtm-cache/grpc+http/scripts/run-nohup.sh @@ -1,9 +1,9 @@ #!/bin/bash # chkconfig: - 85 15 -# description: user +# description: stock -serverName="user" +serverName="stock" cmdStr="cmd/${serverName}/${serverName}" diff --git a/a_micro-grpc-http-protobuf/scripts/run.sh b/_13_sponge-dtm-cache/grpc+http/scripts/run.sh similarity index 94% rename from a_micro-grpc-http-protobuf/scripts/run.sh rename to _13_sponge-dtm-cache/grpc+http/scripts/run.sh index 89d42b6..ff1d61e 100644 --- a/a_micro-grpc-http-protobuf/scripts/run.sh +++ b/_13_sponge-dtm-cache/grpc+http/scripts/run.sh @@ -1,6 +1,6 @@ #!/bin/bash -serverName="user" +serverName="stock" binaryFile="cmd/${serverName}/${serverName}" diff --git a/a_micro-grpc-http-protobuf/scripts/swag-docs.sh b/_13_sponge-dtm-cache/grpc+http/scripts/swag-docs.sh similarity index 80% rename from a_micro-grpc-http-protobuf/scripts/swag-docs.sh rename to _13_sponge-dtm-cache/grpc+http/scripts/swag-docs.sh index 7271bb5..5154b70 100644 --- a/a_micro-grpc-http-protobuf/scripts/swag-docs.sh +++ b/_13_sponge-dtm-cache/grpc+http/scripts/swag-docs.sh @@ -11,14 +11,14 @@ function checkResult() { # change host addr if [ "X${HOST_ADDR}" = "X" ];then - HOST_ADDR=$(cat cmd/user/main.go | grep "@host" | awk '{print $3}') + HOST_ADDR=$(cat cmd/stock/main.go | grep "@host" | awk '{print $3}') HOST_ADDR=$(echo ${HOST_ADDR} | cut -d ':' -f 1) else - sed -i "s/@host .*:8080/@host ${HOST_ADDR}:8080/g" cmd/user/main.go + sed -i "s/@host .*:8080/@host ${HOST_ADDR}:8080/g" cmd/stock/main.go fi # generate api docs -swag init -g cmd/user/main.go +swag init -g cmd/stock/main.go checkResult $? 
# modify duplicate numbers and error codes diff --git a/b_sponge-dtm-msg/third_party/gogo/protobuf/gogoproto/gogo.proto b/_13_sponge-dtm-cache/grpc+http/third_party/gogo/protobuf/gogoproto/gogo.proto similarity index 100% rename from b_sponge-dtm-msg/third_party/gogo/protobuf/gogoproto/gogo.proto rename to _13_sponge-dtm-cache/grpc+http/third_party/gogo/protobuf/gogoproto/gogo.proto diff --git a/b_sponge-dtm-msg/third_party/google/api/BUILD.bazel b/_13_sponge-dtm-cache/grpc+http/third_party/google/api/BUILD.bazel similarity index 100% rename from b_sponge-dtm-msg/third_party/google/api/BUILD.bazel rename to _13_sponge-dtm-cache/grpc+http/third_party/google/api/BUILD.bazel diff --git a/b_sponge-dtm-msg/third_party/google/api/README.md b/_13_sponge-dtm-cache/grpc+http/third_party/google/api/README.md similarity index 100% rename from b_sponge-dtm-msg/third_party/google/api/README.md rename to _13_sponge-dtm-cache/grpc+http/third_party/google/api/README.md diff --git a/b_sponge-dtm-msg/third_party/google/api/annotations.proto b/_13_sponge-dtm-cache/grpc+http/third_party/google/api/annotations.proto similarity index 100% rename from b_sponge-dtm-msg/third_party/google/api/annotations.proto rename to _13_sponge-dtm-cache/grpc+http/third_party/google/api/annotations.proto diff --git a/b_sponge-dtm-msg/third_party/google/api/auth.proto b/_13_sponge-dtm-cache/grpc+http/third_party/google/api/auth.proto similarity index 100% rename from b_sponge-dtm-msg/third_party/google/api/auth.proto rename to _13_sponge-dtm-cache/grpc+http/third_party/google/api/auth.proto diff --git a/b_sponge-dtm-msg/third_party/google/api/backend.proto b/_13_sponge-dtm-cache/grpc+http/third_party/google/api/backend.proto similarity index 100% rename from b_sponge-dtm-msg/third_party/google/api/backend.proto rename to _13_sponge-dtm-cache/grpc+http/third_party/google/api/backend.proto diff --git a/b_sponge-dtm-msg/third_party/google/api/billing.proto b/_13_sponge-dtm-cache/grpc+http/third_party/google/api/billing.proto similarity index 100% rename from b_sponge-dtm-msg/third_party/google/api/billing.proto rename to _13_sponge-dtm-cache/grpc+http/third_party/google/api/billing.proto diff --git a/b_sponge-dtm-msg/third_party/google/api/client.proto b/_13_sponge-dtm-cache/grpc+http/third_party/google/api/client.proto similarity index 100% rename from b_sponge-dtm-msg/third_party/google/api/client.proto rename to _13_sponge-dtm-cache/grpc+http/third_party/google/api/client.proto diff --git a/b_sponge-dtm-msg/third_party/google/api/config_change.proto b/_13_sponge-dtm-cache/grpc+http/third_party/google/api/config_change.proto similarity index 100% rename from b_sponge-dtm-msg/third_party/google/api/config_change.proto rename to _13_sponge-dtm-cache/grpc+http/third_party/google/api/config_change.proto diff --git a/b_sponge-dtm-msg/third_party/google/api/consumer.proto b/_13_sponge-dtm-cache/grpc+http/third_party/google/api/consumer.proto similarity index 100% rename from b_sponge-dtm-msg/third_party/google/api/consumer.proto rename to _13_sponge-dtm-cache/grpc+http/third_party/google/api/consumer.proto diff --git a/b_sponge-dtm-msg/third_party/google/api/context.proto b/_13_sponge-dtm-cache/grpc+http/third_party/google/api/context.proto similarity index 100% rename from b_sponge-dtm-msg/third_party/google/api/context.proto rename to _13_sponge-dtm-cache/grpc+http/third_party/google/api/context.proto diff --git a/b_sponge-dtm-msg/third_party/google/api/control.proto 
b/_13_sponge-dtm-cache/grpc+http/third_party/google/api/control.proto similarity index 100% rename from b_sponge-dtm-msg/third_party/google/api/control.proto rename to _13_sponge-dtm-cache/grpc+http/third_party/google/api/control.proto diff --git a/b_sponge-dtm-msg/third_party/google/api/distribution.proto b/_13_sponge-dtm-cache/grpc+http/third_party/google/api/distribution.proto similarity index 100% rename from b_sponge-dtm-msg/third_party/google/api/distribution.proto rename to _13_sponge-dtm-cache/grpc+http/third_party/google/api/distribution.proto diff --git a/b_sponge-dtm-msg/third_party/google/api/documentation.proto b/_13_sponge-dtm-cache/grpc+http/third_party/google/api/documentation.proto similarity index 100% rename from b_sponge-dtm-msg/third_party/google/api/documentation.proto rename to _13_sponge-dtm-cache/grpc+http/third_party/google/api/documentation.proto diff --git a/b_sponge-dtm-msg/third_party/google/api/endpoint.proto b/_13_sponge-dtm-cache/grpc+http/third_party/google/api/endpoint.proto similarity index 100% rename from b_sponge-dtm-msg/third_party/google/api/endpoint.proto rename to _13_sponge-dtm-cache/grpc+http/third_party/google/api/endpoint.proto diff --git a/b_sponge-dtm-msg/third_party/google/api/expr/BUILD.bazel b/_13_sponge-dtm-cache/grpc+http/third_party/google/api/expr/BUILD.bazel similarity index 100% rename from b_sponge-dtm-msg/third_party/google/api/expr/BUILD.bazel rename to _13_sponge-dtm-cache/grpc+http/third_party/google/api/expr/BUILD.bazel diff --git a/b_sponge-dtm-msg/third_party/google/api/expr/cel.yaml b/_13_sponge-dtm-cache/grpc+http/third_party/google/api/expr/cel.yaml similarity index 100% rename from b_sponge-dtm-msg/third_party/google/api/expr/cel.yaml rename to _13_sponge-dtm-cache/grpc+http/third_party/google/api/expr/cel.yaml diff --git a/b_sponge-dtm-msg/third_party/google/api/expr/v1alpha1/BUILD.bazel b/_13_sponge-dtm-cache/grpc+http/third_party/google/api/expr/v1alpha1/BUILD.bazel similarity index 100% rename from b_sponge-dtm-msg/third_party/google/api/expr/v1alpha1/BUILD.bazel rename to _13_sponge-dtm-cache/grpc+http/third_party/google/api/expr/v1alpha1/BUILD.bazel diff --git a/b_sponge-dtm-msg/third_party/google/api/expr/v1alpha1/checked.proto b/_13_sponge-dtm-cache/grpc+http/third_party/google/api/expr/v1alpha1/checked.proto similarity index 100% rename from b_sponge-dtm-msg/third_party/google/api/expr/v1alpha1/checked.proto rename to _13_sponge-dtm-cache/grpc+http/third_party/google/api/expr/v1alpha1/checked.proto diff --git a/b_sponge-dtm-msg/third_party/google/api/expr/v1alpha1/conformance_service.proto b/_13_sponge-dtm-cache/grpc+http/third_party/google/api/expr/v1alpha1/conformance_service.proto similarity index 100% rename from b_sponge-dtm-msg/third_party/google/api/expr/v1alpha1/conformance_service.proto rename to _13_sponge-dtm-cache/grpc+http/third_party/google/api/expr/v1alpha1/conformance_service.proto diff --git a/b_sponge-dtm-msg/third_party/google/api/expr/v1alpha1/eval.proto b/_13_sponge-dtm-cache/grpc+http/third_party/google/api/expr/v1alpha1/eval.proto similarity index 100% rename from b_sponge-dtm-msg/third_party/google/api/expr/v1alpha1/eval.proto rename to _13_sponge-dtm-cache/grpc+http/third_party/google/api/expr/v1alpha1/eval.proto diff --git a/b_sponge-dtm-msg/third_party/google/api/expr/v1alpha1/explain.proto b/_13_sponge-dtm-cache/grpc+http/third_party/google/api/expr/v1alpha1/explain.proto similarity index 100% rename from b_sponge-dtm-msg/third_party/google/api/expr/v1alpha1/explain.proto rename to 
_13_sponge-dtm-cache/grpc+http/third_party/google/api/expr/v1alpha1/explain.proto diff --git a/b_sponge-dtm-msg/third_party/google/api/expr/v1alpha1/syntax.proto b/_13_sponge-dtm-cache/grpc+http/third_party/google/api/expr/v1alpha1/syntax.proto similarity index 100% rename from b_sponge-dtm-msg/third_party/google/api/expr/v1alpha1/syntax.proto rename to _13_sponge-dtm-cache/grpc+http/third_party/google/api/expr/v1alpha1/syntax.proto diff --git a/b_sponge-dtm-msg/third_party/google/api/expr/v1alpha1/value.proto b/_13_sponge-dtm-cache/grpc+http/third_party/google/api/expr/v1alpha1/value.proto similarity index 100% rename from b_sponge-dtm-msg/third_party/google/api/expr/v1alpha1/value.proto rename to _13_sponge-dtm-cache/grpc+http/third_party/google/api/expr/v1alpha1/value.proto diff --git a/b_sponge-dtm-msg/third_party/google/api/expr/v1beta1/BUILD.bazel b/_13_sponge-dtm-cache/grpc+http/third_party/google/api/expr/v1beta1/BUILD.bazel similarity index 100% rename from b_sponge-dtm-msg/third_party/google/api/expr/v1beta1/BUILD.bazel rename to _13_sponge-dtm-cache/grpc+http/third_party/google/api/expr/v1beta1/BUILD.bazel diff --git a/b_sponge-dtm-msg/third_party/google/api/expr/v1beta1/decl.proto b/_13_sponge-dtm-cache/grpc+http/third_party/google/api/expr/v1beta1/decl.proto similarity index 100% rename from b_sponge-dtm-msg/third_party/google/api/expr/v1beta1/decl.proto rename to _13_sponge-dtm-cache/grpc+http/third_party/google/api/expr/v1beta1/decl.proto diff --git a/b_sponge-dtm-msg/third_party/google/api/expr/v1beta1/eval.proto b/_13_sponge-dtm-cache/grpc+http/third_party/google/api/expr/v1beta1/eval.proto similarity index 100% rename from b_sponge-dtm-msg/third_party/google/api/expr/v1beta1/eval.proto rename to _13_sponge-dtm-cache/grpc+http/third_party/google/api/expr/v1beta1/eval.proto diff --git a/b_sponge-dtm-msg/third_party/google/api/expr/v1beta1/expr.proto b/_13_sponge-dtm-cache/grpc+http/third_party/google/api/expr/v1beta1/expr.proto similarity index 100% rename from b_sponge-dtm-msg/third_party/google/api/expr/v1beta1/expr.proto rename to _13_sponge-dtm-cache/grpc+http/third_party/google/api/expr/v1beta1/expr.proto diff --git a/b_sponge-dtm-msg/third_party/google/api/expr/v1beta1/source.proto b/_13_sponge-dtm-cache/grpc+http/third_party/google/api/expr/v1beta1/source.proto similarity index 100% rename from b_sponge-dtm-msg/third_party/google/api/expr/v1beta1/source.proto rename to _13_sponge-dtm-cache/grpc+http/third_party/google/api/expr/v1beta1/source.proto diff --git a/b_sponge-dtm-msg/third_party/google/api/expr/v1beta1/value.proto b/_13_sponge-dtm-cache/grpc+http/third_party/google/api/expr/v1beta1/value.proto similarity index 100% rename from b_sponge-dtm-msg/third_party/google/api/expr/v1beta1/value.proto rename to _13_sponge-dtm-cache/grpc+http/third_party/google/api/expr/v1beta1/value.proto diff --git a/b_sponge-dtm-msg/third_party/google/api/field_behavior.proto b/_13_sponge-dtm-cache/grpc+http/third_party/google/api/field_behavior.proto similarity index 100% rename from b_sponge-dtm-msg/third_party/google/api/field_behavior.proto rename to _13_sponge-dtm-cache/grpc+http/third_party/google/api/field_behavior.proto diff --git a/b_sponge-dtm-msg/third_party/google/api/http.proto b/_13_sponge-dtm-cache/grpc+http/third_party/google/api/http.proto similarity index 100% rename from b_sponge-dtm-msg/third_party/google/api/http.proto rename to _13_sponge-dtm-cache/grpc+http/third_party/google/api/http.proto diff --git a/b_sponge-dtm-msg/third_party/google/api/httpbody.proto 
b/_13_sponge-dtm-cache/grpc+http/third_party/google/api/httpbody.proto similarity index 100% rename from b_sponge-dtm-msg/third_party/google/api/httpbody.proto rename to _13_sponge-dtm-cache/grpc+http/third_party/google/api/httpbody.proto diff --git a/b_sponge-dtm-msg/third_party/google/api/label.proto b/_13_sponge-dtm-cache/grpc+http/third_party/google/api/label.proto similarity index 100% rename from b_sponge-dtm-msg/third_party/google/api/label.proto rename to _13_sponge-dtm-cache/grpc+http/third_party/google/api/label.proto diff --git a/b_sponge-dtm-msg/third_party/google/api/launch_stage.proto b/_13_sponge-dtm-cache/grpc+http/third_party/google/api/launch_stage.proto similarity index 100% rename from b_sponge-dtm-msg/third_party/google/api/launch_stage.proto rename to _13_sponge-dtm-cache/grpc+http/third_party/google/api/launch_stage.proto diff --git a/b_sponge-dtm-msg/third_party/google/api/log.proto b/_13_sponge-dtm-cache/grpc+http/third_party/google/api/log.proto similarity index 100% rename from b_sponge-dtm-msg/third_party/google/api/log.proto rename to _13_sponge-dtm-cache/grpc+http/third_party/google/api/log.proto diff --git a/b_sponge-dtm-msg/third_party/google/api/logging.proto b/_13_sponge-dtm-cache/grpc+http/third_party/google/api/logging.proto similarity index 100% rename from b_sponge-dtm-msg/third_party/google/api/logging.proto rename to _13_sponge-dtm-cache/grpc+http/third_party/google/api/logging.proto diff --git a/b_sponge-dtm-msg/third_party/google/api/metric.proto b/_13_sponge-dtm-cache/grpc+http/third_party/google/api/metric.proto similarity index 100% rename from b_sponge-dtm-msg/third_party/google/api/metric.proto rename to _13_sponge-dtm-cache/grpc+http/third_party/google/api/metric.proto diff --git a/b_sponge-dtm-msg/third_party/google/api/monitored_resource.proto b/_13_sponge-dtm-cache/grpc+http/third_party/google/api/monitored_resource.proto similarity index 100% rename from b_sponge-dtm-msg/third_party/google/api/monitored_resource.proto rename to _13_sponge-dtm-cache/grpc+http/third_party/google/api/monitored_resource.proto diff --git a/b_sponge-dtm-msg/third_party/google/api/monitoring.proto b/_13_sponge-dtm-cache/grpc+http/third_party/google/api/monitoring.proto similarity index 100% rename from b_sponge-dtm-msg/third_party/google/api/monitoring.proto rename to _13_sponge-dtm-cache/grpc+http/third_party/google/api/monitoring.proto diff --git a/b_sponge-dtm-msg/third_party/google/api/quota.proto b/_13_sponge-dtm-cache/grpc+http/third_party/google/api/quota.proto similarity index 100% rename from b_sponge-dtm-msg/third_party/google/api/quota.proto rename to _13_sponge-dtm-cache/grpc+http/third_party/google/api/quota.proto diff --git a/b_sponge-dtm-msg/third_party/google/api/resource.proto b/_13_sponge-dtm-cache/grpc+http/third_party/google/api/resource.proto similarity index 100% rename from b_sponge-dtm-msg/third_party/google/api/resource.proto rename to _13_sponge-dtm-cache/grpc+http/third_party/google/api/resource.proto diff --git a/b_sponge-dtm-msg/third_party/google/api/service.proto b/_13_sponge-dtm-cache/grpc+http/third_party/google/api/service.proto similarity index 100% rename from b_sponge-dtm-msg/third_party/google/api/service.proto rename to _13_sponge-dtm-cache/grpc+http/third_party/google/api/service.proto diff --git a/b_sponge-dtm-msg/third_party/google/api/serviceconfig.yaml b/_13_sponge-dtm-cache/grpc+http/third_party/google/api/serviceconfig.yaml similarity index 100% rename from b_sponge-dtm-msg/third_party/google/api/serviceconfig.yaml 
rename to _13_sponge-dtm-cache/grpc+http/third_party/google/api/serviceconfig.yaml diff --git a/b_sponge-dtm-msg/third_party/google/api/servicecontrol/BUILD.bazel b/_13_sponge-dtm-cache/grpc+http/third_party/google/api/servicecontrol/BUILD.bazel similarity index 100% rename from b_sponge-dtm-msg/third_party/google/api/servicecontrol/BUILD.bazel rename to _13_sponge-dtm-cache/grpc+http/third_party/google/api/servicecontrol/BUILD.bazel diff --git a/b_sponge-dtm-msg/third_party/google/api/servicecontrol/README.md b/_13_sponge-dtm-cache/grpc+http/third_party/google/api/servicecontrol/README.md similarity index 100% rename from b_sponge-dtm-msg/third_party/google/api/servicecontrol/README.md rename to _13_sponge-dtm-cache/grpc+http/third_party/google/api/servicecontrol/README.md diff --git a/b_sponge-dtm-msg/third_party/google/api/servicecontrol/v1/BUILD.bazel b/_13_sponge-dtm-cache/grpc+http/third_party/google/api/servicecontrol/v1/BUILD.bazel similarity index 100% rename from b_sponge-dtm-msg/third_party/google/api/servicecontrol/v1/BUILD.bazel rename to _13_sponge-dtm-cache/grpc+http/third_party/google/api/servicecontrol/v1/BUILD.bazel diff --git a/b_sponge-dtm-msg/third_party/google/api/servicecontrol/v1/check_error.proto b/_13_sponge-dtm-cache/grpc+http/third_party/google/api/servicecontrol/v1/check_error.proto similarity index 100% rename from b_sponge-dtm-msg/third_party/google/api/servicecontrol/v1/check_error.proto rename to _13_sponge-dtm-cache/grpc+http/third_party/google/api/servicecontrol/v1/check_error.proto diff --git a/b_sponge-dtm-msg/third_party/google/api/servicecontrol/v1/distribution.proto b/_13_sponge-dtm-cache/grpc+http/third_party/google/api/servicecontrol/v1/distribution.proto similarity index 100% rename from b_sponge-dtm-msg/third_party/google/api/servicecontrol/v1/distribution.proto rename to _13_sponge-dtm-cache/grpc+http/third_party/google/api/servicecontrol/v1/distribution.proto diff --git a/b_sponge-dtm-msg/third_party/google/api/servicecontrol/v1/http_request.proto b/_13_sponge-dtm-cache/grpc+http/third_party/google/api/servicecontrol/v1/http_request.proto similarity index 100% rename from b_sponge-dtm-msg/third_party/google/api/servicecontrol/v1/http_request.proto rename to _13_sponge-dtm-cache/grpc+http/third_party/google/api/servicecontrol/v1/http_request.proto diff --git a/b_sponge-dtm-msg/third_party/google/api/servicecontrol/v1/log_entry.proto b/_13_sponge-dtm-cache/grpc+http/third_party/google/api/servicecontrol/v1/log_entry.proto similarity index 100% rename from b_sponge-dtm-msg/third_party/google/api/servicecontrol/v1/log_entry.proto rename to _13_sponge-dtm-cache/grpc+http/third_party/google/api/servicecontrol/v1/log_entry.proto diff --git a/b_sponge-dtm-msg/third_party/google/api/servicecontrol/v1/metric_value.proto b/_13_sponge-dtm-cache/grpc+http/third_party/google/api/servicecontrol/v1/metric_value.proto similarity index 100% rename from b_sponge-dtm-msg/third_party/google/api/servicecontrol/v1/metric_value.proto rename to _13_sponge-dtm-cache/grpc+http/third_party/google/api/servicecontrol/v1/metric_value.proto diff --git a/b_sponge-dtm-msg/third_party/google/api/servicecontrol/v1/operation.proto b/_13_sponge-dtm-cache/grpc+http/third_party/google/api/servicecontrol/v1/operation.proto similarity index 100% rename from b_sponge-dtm-msg/third_party/google/api/servicecontrol/v1/operation.proto rename to _13_sponge-dtm-cache/grpc+http/third_party/google/api/servicecontrol/v1/operation.proto diff --git 
a/b_sponge-dtm-msg/third_party/google/api/servicecontrol/v1/quota_controller.proto b/_13_sponge-dtm-cache/grpc+http/third_party/google/api/servicecontrol/v1/quota_controller.proto similarity index 100% rename from b_sponge-dtm-msg/third_party/google/api/servicecontrol/v1/quota_controller.proto rename to _13_sponge-dtm-cache/grpc+http/third_party/google/api/servicecontrol/v1/quota_controller.proto diff --git a/b_sponge-dtm-msg/third_party/google/api/servicecontrol/v1/service_controller.proto b/_13_sponge-dtm-cache/grpc+http/third_party/google/api/servicecontrol/v1/service_controller.proto similarity index 100% rename from b_sponge-dtm-msg/third_party/google/api/servicecontrol/v1/service_controller.proto rename to _13_sponge-dtm-cache/grpc+http/third_party/google/api/servicecontrol/v1/service_controller.proto diff --git a/b_sponge-dtm-msg/third_party/google/api/servicecontrol/v1/servicecontrol.yaml b/_13_sponge-dtm-cache/grpc+http/third_party/google/api/servicecontrol/v1/servicecontrol.yaml similarity index 100% rename from b_sponge-dtm-msg/third_party/google/api/servicecontrol/v1/servicecontrol.yaml rename to _13_sponge-dtm-cache/grpc+http/third_party/google/api/servicecontrol/v1/servicecontrol.yaml diff --git a/b_sponge-dtm-msg/third_party/google/api/servicemanagement/BUILD.bazel b/_13_sponge-dtm-cache/grpc+http/third_party/google/api/servicemanagement/BUILD.bazel similarity index 100% rename from b_sponge-dtm-msg/third_party/google/api/servicemanagement/BUILD.bazel rename to _13_sponge-dtm-cache/grpc+http/third_party/google/api/servicemanagement/BUILD.bazel diff --git a/b_sponge-dtm-msg/third_party/google/api/servicemanagement/README.md b/_13_sponge-dtm-cache/grpc+http/third_party/google/api/servicemanagement/README.md similarity index 100% rename from b_sponge-dtm-msg/third_party/google/api/servicemanagement/README.md rename to _13_sponge-dtm-cache/grpc+http/third_party/google/api/servicemanagement/README.md diff --git a/b_sponge-dtm-msg/third_party/google/api/servicemanagement/v1/BUILD.bazel b/_13_sponge-dtm-cache/grpc+http/third_party/google/api/servicemanagement/v1/BUILD.bazel similarity index 100% rename from b_sponge-dtm-msg/third_party/google/api/servicemanagement/v1/BUILD.bazel rename to _13_sponge-dtm-cache/grpc+http/third_party/google/api/servicemanagement/v1/BUILD.bazel diff --git a/b_sponge-dtm-msg/third_party/google/api/servicemanagement/v1/resources.proto b/_13_sponge-dtm-cache/grpc+http/third_party/google/api/servicemanagement/v1/resources.proto similarity index 100% rename from b_sponge-dtm-msg/third_party/google/api/servicemanagement/v1/resources.proto rename to _13_sponge-dtm-cache/grpc+http/third_party/google/api/servicemanagement/v1/resources.proto diff --git a/b_sponge-dtm-msg/third_party/google/api/servicemanagement/v1/servicemanagement_gapic.legacy.yaml b/_13_sponge-dtm-cache/grpc+http/third_party/google/api/servicemanagement/v1/servicemanagement_gapic.legacy.yaml similarity index 100% rename from b_sponge-dtm-msg/third_party/google/api/servicemanagement/v1/servicemanagement_gapic.legacy.yaml rename to _13_sponge-dtm-cache/grpc+http/third_party/google/api/servicemanagement/v1/servicemanagement_gapic.legacy.yaml diff --git a/b_sponge-dtm-msg/third_party/google/api/servicemanagement/v1/servicemanagement_gapic.yaml b/_13_sponge-dtm-cache/grpc+http/third_party/google/api/servicemanagement/v1/servicemanagement_gapic.yaml similarity index 100% rename from b_sponge-dtm-msg/third_party/google/api/servicemanagement/v1/servicemanagement_gapic.yaml rename to 
_13_sponge-dtm-cache/grpc+http/third_party/google/api/servicemanagement/v1/servicemanagement_gapic.yaml diff --git a/b_sponge-dtm-msg/third_party/google/api/servicemanagement/v1/servicemanagement_grpc_service_config.json b/_13_sponge-dtm-cache/grpc+http/third_party/google/api/servicemanagement/v1/servicemanagement_grpc_service_config.json similarity index 100% rename from b_sponge-dtm-msg/third_party/google/api/servicemanagement/v1/servicemanagement_grpc_service_config.json rename to _13_sponge-dtm-cache/grpc+http/third_party/google/api/servicemanagement/v1/servicemanagement_grpc_service_config.json diff --git a/b_sponge-dtm-msg/third_party/google/api/servicemanagement/v1/servicemanagement_v1.yaml b/_13_sponge-dtm-cache/grpc+http/third_party/google/api/servicemanagement/v1/servicemanagement_v1.yaml similarity index 100% rename from b_sponge-dtm-msg/third_party/google/api/servicemanagement/v1/servicemanagement_v1.yaml rename to _13_sponge-dtm-cache/grpc+http/third_party/google/api/servicemanagement/v1/servicemanagement_v1.yaml diff --git a/b_sponge-dtm-msg/third_party/google/api/servicemanagement/v1/servicemanager.proto b/_13_sponge-dtm-cache/grpc+http/third_party/google/api/servicemanagement/v1/servicemanager.proto similarity index 100% rename from b_sponge-dtm-msg/third_party/google/api/servicemanagement/v1/servicemanager.proto rename to _13_sponge-dtm-cache/grpc+http/third_party/google/api/servicemanagement/v1/servicemanager.proto diff --git a/b_sponge-dtm-msg/third_party/google/api/source_info.proto b/_13_sponge-dtm-cache/grpc+http/third_party/google/api/source_info.proto similarity index 100% rename from b_sponge-dtm-msg/third_party/google/api/source_info.proto rename to _13_sponge-dtm-cache/grpc+http/third_party/google/api/source_info.proto diff --git a/b_sponge-dtm-msg/third_party/google/api/system_parameter.proto b/_13_sponge-dtm-cache/grpc+http/third_party/google/api/system_parameter.proto similarity index 100% rename from b_sponge-dtm-msg/third_party/google/api/system_parameter.proto rename to _13_sponge-dtm-cache/grpc+http/third_party/google/api/system_parameter.proto diff --git a/b_sponge-dtm-msg/third_party/google/api/usage.proto b/_13_sponge-dtm-cache/grpc+http/third_party/google/api/usage.proto similarity index 100% rename from b_sponge-dtm-msg/third_party/google/api/usage.proto rename to _13_sponge-dtm-cache/grpc+http/third_party/google/api/usage.proto diff --git a/b_sponge-dtm-msg/third_party/google/protobuf/annotations.proto b/_13_sponge-dtm-cache/grpc+http/third_party/google/protobuf/annotations.proto similarity index 100% rename from b_sponge-dtm-msg/third_party/google/protobuf/annotations.proto rename to _13_sponge-dtm-cache/grpc+http/third_party/google/protobuf/annotations.proto diff --git a/b_sponge-dtm-msg/third_party/google/protobuf/any.proto b/_13_sponge-dtm-cache/grpc+http/third_party/google/protobuf/any.proto similarity index 100% rename from b_sponge-dtm-msg/third_party/google/protobuf/any.proto rename to _13_sponge-dtm-cache/grpc+http/third_party/google/protobuf/any.proto diff --git a/b_sponge-dtm-msg/third_party/google/protobuf/api.proto b/_13_sponge-dtm-cache/grpc+http/third_party/google/protobuf/api.proto similarity index 100% rename from b_sponge-dtm-msg/third_party/google/protobuf/api.proto rename to _13_sponge-dtm-cache/grpc+http/third_party/google/protobuf/api.proto diff --git a/b_sponge-dtm-msg/third_party/google/protobuf/compiler/plugin.proto b/_13_sponge-dtm-cache/grpc+http/third_party/google/protobuf/compiler/plugin.proto similarity index 100% rename 
from b_sponge-dtm-msg/third_party/google/protobuf/compiler/plugin.proto rename to _13_sponge-dtm-cache/grpc+http/third_party/google/protobuf/compiler/plugin.proto diff --git a/b_sponge-dtm-msg/third_party/google/protobuf/descriptor.proto b/_13_sponge-dtm-cache/grpc+http/third_party/google/protobuf/descriptor.proto similarity index 100% rename from b_sponge-dtm-msg/third_party/google/protobuf/descriptor.proto rename to _13_sponge-dtm-cache/grpc+http/third_party/google/protobuf/descriptor.proto diff --git a/b_sponge-dtm-msg/third_party/google/protobuf/duration.proto b/_13_sponge-dtm-cache/grpc+http/third_party/google/protobuf/duration.proto similarity index 100% rename from b_sponge-dtm-msg/third_party/google/protobuf/duration.proto rename to _13_sponge-dtm-cache/grpc+http/third_party/google/protobuf/duration.proto diff --git a/b_sponge-dtm-msg/third_party/google/protobuf/empty.proto b/_13_sponge-dtm-cache/grpc+http/third_party/google/protobuf/empty.proto similarity index 100% rename from b_sponge-dtm-msg/third_party/google/protobuf/empty.proto rename to _13_sponge-dtm-cache/grpc+http/third_party/google/protobuf/empty.proto diff --git a/b_sponge-dtm-msg/third_party/google/protobuf/field_mask.proto b/_13_sponge-dtm-cache/grpc+http/third_party/google/protobuf/field_mask.proto similarity index 100% rename from b_sponge-dtm-msg/third_party/google/protobuf/field_mask.proto rename to _13_sponge-dtm-cache/grpc+http/third_party/google/protobuf/field_mask.proto diff --git a/b_sponge-dtm-msg/third_party/google/protobuf/source_context.proto b/_13_sponge-dtm-cache/grpc+http/third_party/google/protobuf/source_context.proto similarity index 100% rename from b_sponge-dtm-msg/third_party/google/protobuf/source_context.proto rename to _13_sponge-dtm-cache/grpc+http/third_party/google/protobuf/source_context.proto diff --git a/b_sponge-dtm-msg/third_party/google/protobuf/struct.proto b/_13_sponge-dtm-cache/grpc+http/third_party/google/protobuf/struct.proto similarity index 100% rename from b_sponge-dtm-msg/third_party/google/protobuf/struct.proto rename to _13_sponge-dtm-cache/grpc+http/third_party/google/protobuf/struct.proto diff --git a/b_sponge-dtm-msg/third_party/google/protobuf/timestamp.proto b/_13_sponge-dtm-cache/grpc+http/third_party/google/protobuf/timestamp.proto similarity index 100% rename from b_sponge-dtm-msg/third_party/google/protobuf/timestamp.proto rename to _13_sponge-dtm-cache/grpc+http/third_party/google/protobuf/timestamp.proto diff --git a/b_sponge-dtm-msg/third_party/google/protobuf/type.proto b/_13_sponge-dtm-cache/grpc+http/third_party/google/protobuf/type.proto similarity index 100% rename from b_sponge-dtm-msg/third_party/google/protobuf/type.proto rename to _13_sponge-dtm-cache/grpc+http/third_party/google/protobuf/type.proto diff --git a/b_sponge-dtm-msg/third_party/google/protobuf/wrappers.proto b/_13_sponge-dtm-cache/grpc+http/third_party/google/protobuf/wrappers.proto similarity index 100% rename from b_sponge-dtm-msg/third_party/google/protobuf/wrappers.proto rename to _13_sponge-dtm-cache/grpc+http/third_party/google/protobuf/wrappers.proto diff --git a/b_sponge-dtm-msg/third_party/protoc-gen-openapiv2/options/annotations.proto b/_13_sponge-dtm-cache/grpc+http/third_party/protoc-gen-openapiv2/options/annotations.proto similarity index 100% rename from b_sponge-dtm-msg/third_party/protoc-gen-openapiv2/options/annotations.proto rename to _13_sponge-dtm-cache/grpc+http/third_party/protoc-gen-openapiv2/options/annotations.proto diff --git 
a/b_sponge-dtm-msg/third_party/protoc-gen-openapiv2/options/openapiv2.proto b/_13_sponge-dtm-cache/grpc+http/third_party/protoc-gen-openapiv2/options/openapiv2.proto similarity index 86% rename from b_sponge-dtm-msg/third_party/protoc-gen-openapiv2/options/openapiv2.proto rename to _13_sponge-dtm-cache/grpc+http/third_party/protoc-gen-openapiv2/options/openapiv2.proto index eb8f06e..9a17f02 100644 --- a/b_sponge-dtm-msg/third_party/protoc-gen-openapiv2/options/openapiv2.proto +++ b/_13_sponge-dtm-cache/grpc+http/third_party/protoc-gen-openapiv2/options/openapiv2.proto @@ -26,7 +26,7 @@ enum Scheme { // info: { // title: "Echo API"; // version: "1.0"; -// description: "; +// description: ""; // contact: { // name: "gRPC-Gateway project"; // url: "https://github.com/grpc-ecosystem/grpc-gateway"; @@ -34,7 +34,7 @@ enum Scheme { // }; // license: { // name: "BSD 3-Clause License"; -// url: "https://github.com/grpc-ecosystem/grpc-gateway/blob/master/LICENSE.txt"; +// url: "https://github.com/grpc-ecosystem/grpc-gateway/blob/main/LICENSE"; // }; // }; // schemes: HTTPS; @@ -92,12 +92,14 @@ message Swagger { // (that is, there is a logical OR between the security requirements). // Individual operations can override this definition. repeated SecurityRequirement security = 12; - // field 13 is reserved for 'tags', which are supposed to be exposed as and - // customizable as proto services. TODO(ivucica): add processing of proto - // service objects into OpenAPI v2 Tag objects. - reserved 13; + // A list of tags for API documentation control. Tags can be used for logical + // grouping of operations by resources or any other qualifier. + repeated Tag tags = 13; // Additional external documentation. ExternalDocumentation external_docs = 14; + // Custom properties that start with "x-" such as "x-foo" used to describe + // extra functionality that is not covered by the standard OpenAPI Specification. + // See: https://swagger.io/docs/specification/2-0/swagger-extensions/ map extensions = 15; } @@ -169,7 +171,55 @@ message Operation { // definition overrides any declared top-level security. To remove a top-level // security declaration, an empty array can be used. repeated SecurityRequirement security = 12; + // Custom properties that start with "x-" such as "x-foo" used to describe + // extra functionality that is not covered by the standard OpenAPI Specification. + // See: https://swagger.io/docs/specification/2-0/swagger-extensions/ map extensions = 13; + // Custom parameters such as HTTP request headers. + // See: https://swagger.io/docs/specification/2-0/describing-parameters/ + // and https://swagger.io/specification/v2/#parameter-object. + Parameters parameters = 14; +} + +// `Parameters` is a representation of OpenAPI v2 specification's parameters object. +// Note: This technically breaks compatibility with the OpenAPI 2 definition structure as we only +// allow header parameters to be set here since we do not want users specifying custom non-header +// parameters beyond those inferred from the Protobuf schema. +// See: https://swagger.io/specification/v2/#parameter-object +message Parameters { + // `Headers` is one or more HTTP header parameter. + // See: https://swagger.io/docs/specification/2-0/describing-parameters/#header-parameters + repeated HeaderParameter headers = 1; +} + +// `HeaderParameter` a HTTP header parameter. +// See: https://swagger.io/specification/v2/#parameter-object +message HeaderParameter { + // `Type` is a a supported HTTP header type. 
+ // See https://swagger.io/specification/v2/#parameterType. + enum Type { + UNKNOWN = 0; + STRING = 1; + NUMBER = 2; + INTEGER = 3; + BOOLEAN = 4; + } + + // `Name` is the header name. + string name = 1; + // `Description` is a short description of the header. + string description = 2; + // `Type` is the type of the object. The value MUST be one of "string", "number", "integer", or "boolean". The "array" type is not supported. + // See: https://swagger.io/specification/v2/#parameterType. + Type type = 3; + // `Format` The extending format for the previously mentioned type. + string format = 4; + // `Required` indicates if the header is optional + bool required = 5; + // field 6 is reserved for 'items', but in OpenAPI-specific way. + reserved 6; + // field 7 is reserved `Collection Format`. Determines the format of the array if type array is used. + reserved 7; } // `Header` is a representation of OpenAPI v2 specification's Header object. @@ -235,6 +285,9 @@ message Response { // `Examples` gives per-mimetype response examples. // See: https://github.com/OAI/OpenAPI-Specification/blob/3.0.0/versions/2.0.md#example-object map examples = 4; + // Custom properties that start with "x-" such as "x-foo" used to describe + // extra functionality that is not covered by the standard OpenAPI Specification. + // See: https://swagger.io/docs/specification/2-0/swagger-extensions/ map extensions = 5; } @@ -248,7 +301,7 @@ message Response { // info: { // title: "Echo API"; // version: "1.0"; -// description: "; +// description: ""; // contact: { // name: "gRPC-Gateway project"; // url: "https://github.com/grpc-ecosystem/grpc-gateway"; @@ -256,7 +309,7 @@ message Response { // }; // license: { // name: "BSD 3-Clause License"; -// url: "https://github.com/grpc-ecosystem/grpc-gateway/blob/master/LICENSE.txt"; +// url: "https://github.com/grpc-ecosystem/grpc-gateway/blob/main/LICENSE"; // }; // }; // ... @@ -277,6 +330,9 @@ message Info { // Provides the version of the application API (not to be confused // with the specification version). string version = 6; + // Custom properties that start with "x-" such as "x-foo" used to describe + // extra functionality that is not covered by the standard OpenAPI Specification. + // See: https://swagger.io/docs/specification/2-0/swagger-extensions/ map extensions = 7; } @@ -321,7 +377,7 @@ message Contact { // ... // license: { // name: "BSD 3-Clause License"; -// url: "https://github.com/grpc-ecosystem/grpc-gateway/blob/master/LICENSE.txt"; +// url: "https://github.com/grpc-ecosystem/grpc-gateway/blob/main/LICENSE"; // }; // ... // }; @@ -518,6 +574,9 @@ message JSONSchema { // for overlapping paths. string path_param_name = 47; } + // Custom properties that start with "x-" such as "x-foo" used to describe + // extra functionality that is not covered by the standard OpenAPI Specification. + // See: https://swagger.io/docs/specification/2-0/swagger-extensions/ map extensions = 48; } @@ -526,20 +585,19 @@ message JSONSchema { // See: https://github.com/OAI/OpenAPI-Specification/blob/3.0.0/versions/2.0.md#tagObject // message Tag { - // field 1 is reserved for 'name'. In our generator, this is (to be) extracted - // from the name of proto service, and thus not exposed to the user, as - // changing tag object's name would break the link to the references to the - // tag in individual operation specifications. - // - // TODO(ivucica): Add 'name' property. Use it to allow override of the name of + // The name of the tag. 
Use it to allow override of the name of a // global Tag object, then use that name to reference the tag throughout the // OpenAPI file. - reserved 1; + string name = 1; // A short description for the tag. GFM syntax can be used for rich text // representation. string description = 2; // Additional external documentation for this tag. ExternalDocumentation external_docs = 3; + // Custom properties that start with "x-" such as "x-foo" used to describe + // extra functionality that is not covered by the standard OpenAPI Specification. + // See: https://swagger.io/docs/specification/2-0/swagger-extensions/ + map extensions = 4; } // `SecurityDefinitions` is a representation of OpenAPI v2 specification's @@ -619,6 +677,9 @@ message SecurityScheme { // The available scopes for the OAuth2 security scheme. // Valid for oauth2. Scopes scopes = 8; + // Custom properties that start with "x-" such as "x-foo" used to describe + // extra functionality that is not covered by the standard OpenAPI Specification. + // See: https://swagger.io/docs/specification/2-0/swagger-extensions/ map extensions = 9; } diff --git a/b_sponge-dtm-msg/third_party/tagger/tagger.proto b/_13_sponge-dtm-cache/grpc+http/third_party/tagger/tagger.proto similarity index 100% rename from b_sponge-dtm-msg/third_party/tagger/tagger.proto rename to _13_sponge-dtm-cache/grpc+http/third_party/tagger/tagger.proto diff --git a/b_sponge-dtm-msg/third_party/validate/validate.proto b/_13_sponge-dtm-cache/grpc+http/third_party/validate/validate.proto similarity index 100% rename from b_sponge-dtm-msg/third_party/validate/validate.proto rename to _13_sponge-dtm-cache/grpc+http/third_party/validate/validate.proto diff --git a/_13_sponge-dtm-cache/http/.gitignore b/_13_sponge-dtm-cache/http/.gitignore new file mode 100644 index 0000000..f91912d --- /dev/null +++ b/_13_sponge-dtm-cache/http/.gitignore @@ -0,0 +1,26 @@ +# Binaries for programs and plugins +*.exe +*.exe~ +*.dll +*.so +*.dylib +*.log + +# Test binary, built with `go test -c` +*.test + +# Output of the go coverage tool, specifically when used with LiteIDE +*.out + +# Dependency directories (remove the comment below to include it) +vendor/ +dist/ + +# idea +.idea +*.iml +*.ipr +*.iws + +cmd/stock/stock + diff --git a/_13_sponge-dtm-cache/http/.golangci.yml b/_13_sponge-dtm-cache/http/.golangci.yml new file mode 100644 index 0000000..3994c5b --- /dev/null +++ b/_13_sponge-dtm-cache/http/.golangci.yml @@ -0,0 +1,342 @@ +# This file configures stock. + +run: + # timeout for analysis, e.g. 30s, 5m, default is 1m + timeout: 10m + # default concurrency is available CPU number + concurrency: 4 + # include test files or not, default is true + tests: false + # which dirs to skip: issues from them won't be reported; + # can use regexp here: generated.*, regexp is applied on full path; + # default value is empty list, but default dirs are skipped independently + # from this option's value (see skip-dirs-use-default). + skip-dirs: + - docs + - api + # which files to skip: they will be analyzed, but issues from them + # won't be reported. Default value is empty list, but there is + # no need to include all autogenerated files, we confidently recognize + # autogenerated files. If it's not please let us know. + skip-files: + - _test.go + + # exit code when at least one issue was found, default is 1 + issues-exit-code: 1 + + # list of build tags, all linters use it. Default is empty list. + build-tags: + - mytag + + # default is true. 
Enables skipping of directories: + # vendor$, third_party$, testdata$, examples$, Godeps$, builtin$ + skip-dirs-use-default: true + + +linters: + # please, do not use `enable-all`: it's deprecated and will be removed soon. + # inverted configuration with `enable-all` and `disable` is not scalable during updates of golangci-lint + disable-all: true + enable: + - revive + - goimports + - gofmt + - unused + #- depguard + - dogsled + - errcheck + #- gochecknoinits + - goconst + - gocyclo + - gosimple + - govet + - lll + - misspell + - typecheck + - unconvert + - whitespace + - staticcheck + #- bodyclose + #- dupl + #- goprintffuncname + #- gosec + #- unparam + #- ineffassign + + +linters-settings: + revive: + rules: + - name: argument-limit + arguments: [ 8 ] + - name: atomic + - name: bare-return + - name: blank-imports + - name: bool-literal-in-expr + - name: call-to-gc + - name: confusing-naming + - name: confusing-results + - name: constant-logical-expr + - name: context-as-argument + - name: context-keys-type + - name: deep-exit + - name: defer + - name: dot-imports + - name: duplicated-imports + - name: early-return + - name: empty-block + #- name: empty-lines + - name: error-naming + - name: error-return + - name: error-strings + - name: errorf + - name: function-result-limit + arguments: [ 3 ] + - name: identical-branches + - name: if-return + - name: import-shadowing + - name: increment-decrement + - name: indent-error-flow + - name: modifies-parameter + - name: modifies-value-receiver + - name: package-comments + - name: range + - name: range-val-address + - name: range-val-in-closure + - name: receiver-naming + - name: redefines-builtin-id + - name: string-of-int + - name: struct-tag + - name: superfluous-else + - name: time-naming + - name: unconditional-recursion + - name: unexported-naming + - name: unnecessary-stmt + - name: unreachable-code + - name: unused-parameter + - name: var-declaration + - name: var-naming + - name: waitgroup-by-value + + dogsled: + # checks assignments with too many blank identifiers; default is 2 + max-blank-identifiers: 2 + + dupl: + # tokens count to trigger issue, 150 by default + threshold: 100 + + errcheck: + # report about not checking of errors in type assertions: `a := b.(MyStruct)`; + # default is false: such cases aren't reported by default. + check-type-assertions: false + + # report about assignment of errors to blank identifier: `num, _ := strconv.Atoi(numStr)`; + # default is false: such cases aren't reported by default. + check-blank: false + + # [deprecated] comma-separated list of pairs of the form pkg:regex + # the regex is used to ignore names within pkg. (default "fmt:.*"). 
+ # see https://github.com/kisielk/errcheck#the-deprecated-method for details + ignore: fmt:.*,io/ioutil:^Read.* + + # path to a file containing a list of functions to exclude from checking + # see https://github.com/kisielk/errcheck#excluding-functions for details + # exclude: /path/to/file.txt + funlen: + lines: 60 + statements: 40 + + gocognit: + # minimal code complexity to report, 30 by default (but we recommend 10-20) + min-complexity: 10 + + goconst: + # minimal length of string constant, 3 by default + min-len: 4 + # minimal occurrences count to trigger, 3 by default + min-occurrences: 4 + + gocyclo: + # minimal code complexity to report, 30 by default (but we recommend 10-20) + min-complexity: 20 + + godox: + # report any comments starting with keywords, this is useful for TODO or FIXME comments that + # might be left in the code accidentally and should be resolved before merging + keywords: # default keywords are TODO, BUG, and FIXME, these can be overwritten by this setting + - NOTE + - OPTIMIZE # marks code that should be optimized before merging + - HACK # marks hack-arounds that should be removed before merging + + gofmt: + # simplify code: gofmt with `-s` option, true by default + simplify: true + + goimports: + # put imports beginning with prefix after 3rd-party packages; + # it's a comma-separated list of prefixes + local-prefixes: stock + + gomnd: + settings: + mnd: + # the list of enabled checks, see https://github.com/tommy-muehle/go-mnd/#checks for description. + checks: argument,case,condition,operation,return,assign + + govet: + # report about shadowed variables + check-shadowing: true + + # settings per analyzer + settings: + printf: # analyzer name, run `go tool vet help` to see all analyzers + funcs: # run `go tool vet help printf` to see available settings for `printf` analyzer + - (github.com/golangci/golangci-lint/pkg/logutils.Log).Infof + - (github.com/golangci/golangci-lint/pkg/logutils.Log).Warnf + - (github.com/golangci/golangci-lint/pkg/logutils.Log).Errorf + - (github.com/golangci/golangci-lint/pkg/logutils.Log).Fatalf + + # enable or disable analyzers by name + enable: + - atomicalign + enable-all: false + disable: + - shadow + disable-all: false + + depguard: + list-type: blacklist + include-go-root: false + #packages: + # - github.com/user/name + #packages-with-error-message: + # specify an error message to output when a blacklisted package is used + # - github.com/user/name: "logging is allowed only by logutils.Log" + + lll: + # max line length, lines longer will be reported. Default is 120. + # '\t' is counted as 1 character by default, and can be changed with the tab-width option + line-length: 200 + # tab width in spaces. Default to 1. + tab-width: 1 + + maligned: + # print struct with more effective memory layout or not, false by default + suggest-new: true + + misspell: + # Correct spellings using locale preferences for US or UK. + # Default is to use a neutral variety of English. + # Setting locale to US will correct the British spelling of 'colour' to 'color'. + locale: US + ignore-words: + - someword + + nakedret: + # make an issue if func has more lines of code than this setting and it has naked returns; default is 30 + max-func-lines: 30 + + prealloc: + # XXX: we don't recommend using this linter before doing performance profiling. + # For most programs usage of prealloc will be a premature optimization. + + # Report preallocation suggestions only on simple loops that have no returns/breaks/continues/gotos in them. + # True by default. 
+ simple: true + range-loops: true # Report preallocation suggestions on range loops, true by default + for-loops: false # Report preallocation suggestions on for loops, false by default + + #rowserrcheck: + # packages: + # - github.com/user/name + + unparam: + # Inspect exported functions, default is false. Set to true if no external program/library imports your code. + # XXX: if you enable this setting, unparam will report a lot of false-positives in text editors: + # if it's called for subdir of a project it can't find external interfaces. All text editor integrations + # with golangci-lint call it on a directory with the changed file. + check-exported: false + + unused: + # treat code as a program (not a library) and report unused exported identifiers; default is false. + # XXX: if you enable this setting, unused will report a lot of false-positives in text editors: + # if it's called for subdir of a project it can't find funcs usages. All text editor integrations + # with golangci-lint call it on a directory with the changed file. + check-exported: false + + whitespace: + multi-if: false # Enforces newlines (or comments) after every multi-line if statement + multi-func: false # Enforces newlines (or comments) after every multi-line function signature + + wsl: + # If true append is only allowed to be cuddled if appending value is + # matching variables, fields or types on line above. Default is true. + strict-append: true + # Allow calls and assignments to be cuddled as long as the lines have any + # matching variables, fields or types. Default is true. + allow-assign-and-call: true + # Allow multiline assignments to be cuddled. Default is true. + allow-multiline-assign: true + # Allow declarations (var) to be cuddled. + allow-cuddle-declarations: false + # Allow trailing comments in ending of blocks + allow-trailing-comment: false + # Force newlines in end of case at this limit (0 = never). + force-case-trailing-whitespace: 0 + +issues: + # List of regexps of issue texts to exclude, empty list by default. + # But independently from this option we use default exclude patterns, + # it can be disabled by `exclude-use-default: false`. To list all + # excluded by default patterns execute `golangci-lint run --help` + exclude: + - abcdef + + # Excluding configuration per-path, per-linter, per-text and per-source + exclude-rules: + # Exclude some linters from running on tests files. + - path: _test\.go + linters: + - gocyclo + - errcheck + - dupl + - gosec + + # Exclude known linters from partially hard-vendored code, + # which is impossible to exclude via "nolint" comments. + - path: internal/hmac/ + text: "weak cryptographic primitive" + linters: + - gosec + + # Exclude lll issues for long lines with go:generate + - linters: + - lll + source: "^//go:generate " + + # Independently from option `exclude` we use default exclude patterns, + # it can be disabled by this option. To list all + # excluded by default patterns execute `golangci-lint run --help`. + # Default value for this option is true. + exclude-use-default: false + + # Maximum issues count per one linter. Set to 0 to disable. Default is 50. + max-issues-per-linter: 0 + + # Maximum count of issues with the same text. Set to 0 to disable. Default is 3. + max-same-issues: 0 + + # Show only new issues: if there are unstaged changes or untracked files, + # only those changes are analyzed, else only changes in HEAD~ are analyzed. + # It's a super-useful option for integration of golangci-lint into existing + # large codebase. 
It's not practical to fix all existing issues at the moment + # of integration: much better don't allow issues in new code. + # Default is false. + new: false + + # Show only new issues created after git revision `REV` + new-from-rev: "" + +service: + golangci-lint-version: 1.48.0 # use the fixed version to not introduce new linters unexpectedly diff --git a/_13_sponge-dtm-cache/http/Jenkinsfile b/_13_sponge-dtm-cache/http/Jenkinsfile new file mode 100644 index 0000000..cc76915 --- /dev/null +++ b/_13_sponge-dtm-cache/http/Jenkinsfile @@ -0,0 +1,200 @@ +pipeline { + agent any + + stages { + stage("Check Build Branch") { + steps { + echo "Checking build branch in progress ......" + script { + if (env.GIT_BRANCH ==~ /^v([0-9])+\.([0-9])+\.([0-9])+.*/) { + echo "building production environment, tag=${env.GIT_BRANCH}" + } else if (env.GIT_BRANCH ==~ /^test-([0-9])+\.([0-9])+\.([0-9])+.*/) { + echo "building test environment, tag=${env.GIT_BRANCH}" + } else if (env.GIT_BRANCH ==~ /(origin\/develop)/) { + echo "building development environment, /origin/develop" + } else { + echo "The build branch ${env.GIT_BRANCH} is not legal, allowing to build the development environment branch (/origin/develop), the test environment branch (e.g. test-1.0.0), and the production environment branch (e.g. v1.0.0)" + sh 'exit 1' + } + } + echo "Check build branch complete." + } + } + + stage("Check Code") { + steps { + echo "Checking code in progress ......" + sh 'make ci-lint' + echo "Check code complete." + } + } + + stage("Unit Testing") { + steps { + echo "Unit testing in progress ......" + sh 'make test' + echo "Unit testing complete." + } + } + + stage("Compile Code") { + steps { + echo "Compiling code in progress ......" + sh 'make build' + echo "compile code complete." + } + } + + stage("Build Image") { + steps { + echo "building image in progress ......" + script { + registryHost="" + tagName="" + if (env.GIT_BRANCH ==~ /^v([0-9])+\.([0-9])+\.([0-9])+.*/) { + if (env.PROD_REPO_HOST == null) { + echo "The value of environment variable PROD_REPO_HOST is empty, please set the value of PROD_REPO_HOST in [Jenkins Management] --> [System Settings] --> [Environment Variables]." + sh 'exit 1' + } + echo "Use the production environment image repository ${env.PROD_REPO_HOST}" + registryHost=env.PROD_REPO_HOST + tagName=env.GIT_BRANCH + } + else if (env.GIT_BRANCH ==~ /^test-([0-9])+\.([0-9])+\.([0-9])+.*/) { + if (env.TEST_REPO_HOST == null) { + echo "The value of environment variable TEST_REPO_HOST is empty, please set the value of TEST_REPO_HOST in [Jenkins Management] --> [System Settings] --> [Environment Variables]." + sh 'exit 1' + } + echo "Use the test environment image repository ${env.TEST_REPO_HOST}" + registryHost=env.TEST_REPO_HOST + tagName=env.GIT_BRANCH + } + else { + if (env.DEV_REPO_HOST == null) { + echo "The value of environment variable DEV_REPO_HOST is empty, please set the value of DEV_REPO_HOST in [Jenkins Management] --> [System Settings] --> [Environment Variables]." + sh 'exit 1' + } + echo "Using the development environment ${env.DEV_REPO_HOST}" + registryHost=env.DEV_REPO_HOST + } + sh "make image-build REPO_HOST=$registryHost TAG=$tagName" + } + echo "Build image complete" + } + } + + stage("Push Image") { + steps { + echo "pushing image in progress ......" 
+ script { + registryHost="" + tagName="" + if (env.GIT_BRANCH ==~ /^v([0-9])+\.([0-9])+\.([0-9])+.*/) { + if (env.PROD_REPO_HOST == null) { + echo "The value of environment variable PROD_REPO_HOST is empty, please set the value of PROD_REPO_HOST in [Jenkins Management] --> [System Settings] --> [Environment Variables]." + sh 'exit 1' + } + echo "Use the production environment image repository ${env.PROD_REPO_HOST}" + registryHost=env.PROD_REPO_HOST + tagName=env.GIT_BRANCH + } + else if (env.GIT_BRANCH ==~ /^test-([0-9])+\.([0-9])+\.([0-9])+.*/) { + if (env.TEST_REPO_HOST == null) { + echo "The value of environment variable TEST_REPO_HOST is empty, please set the value of TEST_REPO_HOST in [Jenkins Management] --> [System Settings] --> [Environment Variables]." + sh 'exit 1' + } + echo "Use the test environment image repository ${env.TEST_REPO_HOST}" + registryHost=env.TEST_REPO_HOST + tagName=env.GIT_BRANCH + } + else { + if (env.DEV_REPO_HOST == null) { + echo "The value of environment variable DEV_REPO_HOST is empty, please set the value of DEV_REPO_HOST in [Jenkins Management] --> [System Settings] --> [Environment Variables]." + sh 'exit 1' + } + echo "Using the development environment ${env.DEV_REPO_HOST}" + registryHost=env.DEV_REPO_HOST + } + sh "make image-push REPO_HOST=$registryHost TAG=$tagName" + } + echo "push image complete, clear image complete." + } + } + + stage("Deploy to k8s") { + when { expression { return env.GIT_BRANCH ==~ /(origin\/staging|origin\/develop)/ } } + steps { + echo "Deploying to k8s in progress ......" + sh 'make deploy-k8s' + echo "Deploy to k8s complete." + } + } + } + + post { + always { + echo 'One way or another, I have finished' + echo sh(returnStdout: true, script: 'env') + deleteDir() /* clean up our workspace */ + } + success { + SendDingding("success") + //SendEmail("success") + echo 'structure success' + } + failure { + SendDingding("failure") + //SendEmail("failure") + echo 'structure failure' + } + } +} + +// Notifications using dingding +void SendDingding(res) +{ + // Fill in the corresponding cell phone number and specify a person to be notified in the pinned group + tel_num="xxxxxxxxxxx" + dingding_url="https://oapi.dingtalk.com/robot/send\\?access_token\\=your dingding robot token" + + branchName="" + if (env.GIT_BRANCH ==~ /^v([0-9])+\.([0-9])+\.([0-9])+.*/) { + branchName="${env.SERVER_PLATFORM} production environment, tag=${env.GIT_BRANCH}, ${env.JOB_NAME}" + } + else if (env.GIT_BRANCH ==~ /^test-([0-9])+\.([0-9])+\.([0-9])+.*/){ + branchName="${env.SERVER_PLATFORM} test environment, tag=${env.GIT_BRANCH}, ${env.JOB_NAME}" + } + else { + branchName="${env.SERVER_PLATFORM} develop environment, branch=${env.GIT_BRANCH}, ${env.JOB_NAME}" + } + + json_msg="" + if( res == "success" ) { + json_msg='{\\"msgtype\\":\\"text\\",\\"text\\":{\\"content\\":\\"@' + tel_num +' [OK] ' + "${branchName} ${env.BUILD_NUMBER}th " + 'build success. \\"},\\"at\\":{\\"atMobiles\\":[\\"' + tel_num + '\\"],\\"isAtAll\\":false}}' + } + else { + json_msg='{\\"msgtype\\":\\"text\\",\\"text\\":{\\"content\\":\\"@' + tel_num +' [cry] ' + "${branchName} ${env.BUILD_NUMBER}th " + 'build failed, please deal with it promptly! 
\\"},\\"at\\":{\\"atMobiles\\":[\\"' + tel_num + '\\"],\\"isAtAll\\":false}}' + } + + post_header="Content-Type:application/json;charset=utf-8" + sh_cmd="curl -X POST " + dingding_url + " -H " + "\'" + post_header + "\'" + " -d " + "\"" + json_msg + "\"" + sh sh_cmd +} + +// Notifications using email +void SendEmail(res) +{ + emailAddr="xxx@xxx.com" + if( res == "success" ) + { + mail to: emailAddr, + subject: "Build Success: ${currentBuild.fullDisplayName}", + body: "\nJob name: ${env.JOB_NAME} ${env.BUILD_NUMBER}th build. \n\n For more information, please see: ${env.BUILD_URL}" + } + else + { + mail to: emailAddr, + subject: "Build Failed: ${currentBuild.fullDisplayName}", + body: "\nJob name: ${env.JOB_NAME} ${env.BUILD_NUMBER}th build. \n\n For more information, please see: ${env.BUILD_URL}" + } +} diff --git a/a_micro-grpc-http-protobuf/Makefile b/_13_sponge-dtm-cache/http/Makefile similarity index 51% rename from a_micro-grpc-http-protobuf/Makefile rename to _13_sponge-dtm-cache/http/Makefile index 1920402..5cf4b55 100644 --- a/a_micro-grpc-http-protobuf/Makefile +++ b/_13_sponge-dtm-cache/http/Makefile @@ -1,54 +1,44 @@ SHELL := /bin/bash -PROJECT_NAME := "user" +PROJECT_NAME := "stock" PKG := "$(PROJECT_NAME)" PKG_LIST := $(shell go list ${PKG}/... | grep -v /vendor/ | grep -v /api/) -.PHONY: mod -# maintaining module dependencies -mod: - go mod tidy - - -.PHONY: fmt -# go format *.go files -fmt: - gofmt -s -w . - .PHONY: ci-lint -# check code formatting, naming conventions, security, maintainability, etc. the rules in the .golangci.yml file -ci-lint: fmt +# Check code formatting, naming conventions, security, maintainability, etc. the rules in the .golangci.yml file +ci-lint: + @gofmt -s -w . golangci-lint run ./... .PHONY: test -# go test *_test.go files, the parameter -count=1 means that caching is disabled +# Test *_test.go files, the parameter -count=1 means that caching is disabled test: go test -count=1 -short ${PKG_LIST} .PHONY: cover -# generate test coverage +# Generate test coverage cover: go test -short -coverprofile=cover.out -covermode=atomic ${PKG_LIST} go tool cover -html=cover.out .PHONY: graph -# generate interactive visual function dependency graphs +# Generate interactive visual function dependency graphs graph: @echo "generating graph ......" - @cp -f cmd/user/main.go . - go-callvis -skipbrowser -format=svg -nostd -file=user user - @rm -f main.go user.gv + @cp -f cmd/stock/main.go . + go-callvis -skipbrowser -format=svg -nostd -file=stock stock + @rm -f main.go stock.gv .PHONY: proto -# generate *.go and template code by proto files, if you do not refer to the proto file, the default is all the proto files in the api directory. you can specify the proto file, multiple files are separated by commas, e.g. make proto FILES=api/user/v1/user.proto, only for ⓶ Microservices created based on sql, ⓷ Web services created based on protobuf, ⓸ Microservices created based on protobuf, ⓹ grpc gateway service created based on protobuf, ⓺ Create grpc+http service based on protobuf +# Generate *.go and template code by proto files, the default is all the proto files in the api directory. you can specify the proto file, multiple files are separated by commas, e.g. 
make proto FILES=api/user/v1/user.proto proto: @bash scripts/protoc.sh $(FILES) go mod tidy @@ -56,122 +46,128 @@ proto: .PHONY: proto-doc -# generate doc from *.proto files +# Generate doc from *.proto files proto-doc: @bash scripts/proto-doc.sh .PHONY: build -# build user for linux amd64 binary +# Build stock for linux amd64 binary build: - @echo "building 'user', linux binary file will output to 'cmd/user'" - @cd cmd/user && CGO_ENABLED=0 GOOS=linux GOARCH=amd64 go build + @echo "building 'stock', linux binary file will output to 'cmd/stock'" + @cd cmd/stock && CGO_ENABLED=0 GOOS=linux GOARCH=amd64 go build .PHONY: run -# run service +# Build and run service run: @bash scripts/run.sh .PHONY: run-nohup -# run service with nohup in local, if you want to stop the server, pass the parameter stop, e.g. make run-nohup CMD=stop +# Run service with nohup in local, if you want to stop the server, pass the parameter stop, e.g. make run-nohup CMD=stop run-nohup: @bash scripts/run-nohup.sh $(CMD) .PHONY: run-docker -# run service in local docker, if you want to update the service, run the make run-docker command again. +# Run service in local docker, if you want to update the service, run the make run-docker command again. run-docker: image-build-local @bash scripts/deploy-docker.sh .PHONY: binary-package -# packaged binary files +# Packaged binary files binary-package: build @bash scripts/binary-package.sh .PHONY: deploy-binary -# deploy binary to remote linux server, e.g. make deploy-binary USER=root PWD=123456 IP=192.168.1.10 +# Deploy binary to remote linux server, e.g. make deploy-binary USER=root PWD=123456 IP=192.168.1.10 deploy-binary: binary-package @expect scripts/deploy-binary.sh $(USER) $(PWD) $(IP) .PHONY: image-build-local -# build image for local docker, tag=latest, use binary files to build +# Build image for local docker, tag=latest, use binary files to build image-build-local: build @bash scripts/image-build-local.sh .PHONY: image-build -# build image for remote repositories, use binary files to build, e.g. make image-build REPO_HOST=addr TAG=latest +# Build image for remote repositories, use binary files to build, e.g. make image-build REPO_HOST=addr TAG=latest image-build: @bash scripts/image-build.sh $(REPO_HOST) $(TAG) .PHONY: image-build2 -# build image for remote repositories, phase II build, e.g. make image-build2 REPO_HOST=addr TAG=latest +# Build image for remote repositories, phase II build, e.g. make image-build2 REPO_HOST=addr TAG=latest image-build2: @bash scripts/image-build2.sh $(REPO_HOST) $(TAG) .PHONY: image-push -# push docker image to remote repositories, e.g. make image-push REPO_HOST=addr TAG=latest +# Push docker image to remote repositories, e.g. make image-push REPO_HOST=addr TAG=latest image-push: @bash scripts/image-push.sh $(REPO_HOST) $(TAG) .PHONY: deploy-k8s -# deploy service to k8s +# Deploy service to k8s deploy-k8s: @bash scripts/deploy-k8s.sh .PHONY: image-build-rpc-test -# build grpc test image for remote repositories, e.g. make image-build-rpc-test REPO_HOST=addr TAG=latest +# Build grpc test image for remote repositories, e.g. make image-build-rpc-test REPO_HOST=addr TAG=latest image-build-rpc-test: @bash scripts/image-rpc-test.sh $(REPO_HOST) $(TAG) .PHONY: patch -# patch some dependent code, such as types.proto, mysql initialization code. e.g. make patch TYPE=types-pb , make patch TYPE=init-your_db_driver, replace "your_db_driver" with mysql, mongodb, postgresql, tidb, sqlite +# Patch some dependent code, e.g. 
make patch TYPE=types-pb , make patch TYPE=init-<your_db_driver>, your_db_driver is mysql, mongodb, postgresql, tidb, sqlite, for example: make patch TYPE=init-mysql patch: + @bash scripts/patch.sh $(TYPE) .PHONY: copy-proto -# copy proto file from the grpc server directory, multiple directories or proto files separated by commas. copy all proto files, e.g. make copy-proto SERVER=yourServerDir, copy specified proto files, e.g. make copy-proto SERVER=yourServerDir PROTO_FILE=yourProtoFile1,yourProtoFile2 +# Copy proto file from the grpc server directory, multiple directories or proto files separated by commas. Default is to copy all proto files, e.g. make copy-proto SERVER=yourServerDir, copy specified proto files, e.g. make copy-proto SERVER=yourServerDir PROTO_FILE=yourProtoFile1,yourProtoFile2 copy-proto: @sponge patch copy-proto --server-dir=$(SERVER) --proto-file=$(PROTO_FILE) +.PHONY: modify-proto-pkg-name +# Modify the 'package' and 'go_package' names of all proto files in the 'api' directory. +modify-proto-pkg-name: + @sponge patch modify-proto-package --dir=api --server-dir=. + + .PHONY: update-config -# update internal/config code base on yaml file +# Update internal/config code based on the yaml file update-config: @sponge config --server-dir=. .PHONY: clean -# clean binary file, cover.out, template file +# Clean binary file, cover.out, template file clean: - @rm -vrf cmd/user/user* + @rm -vrf cmd/stock/stock* @rm -vrf cover.out - @rm -vrf main.go user.gv + @rm -vrf main.go stock.gv @rm -vrf internal/ecode/*.go.gen* @rm -vrf internal/routers/*.go.gen* @rm -vrf internal/handler/*.go.gen* @rm -vrf internal/service/*.go.gen* - @rm -rf user-binary.tar.gz + @rm -rf stock-binary.tar.gz @echo "clean finished" -# show help +# Show help help: @echo '' @echo 'Usage:' - @echo ' make [target]' + @echo ' make <target>' @echo '' @echo 'Targets:' @awk '/^[a-zA-Z\-_0-9]+:/ { \ diff --git a/_13_sponge-dtm-cache/http/README.md b/_13_sponge-dtm-cache/http/README.md new file mode 100644 index 0000000..dca2982 --- /dev/null +++ b/_13_sponge-dtm-cache/http/README.md @@ -0,0 +1,69 @@ +## Cache Consistency + +Using a web service created by [Sponge](https://github.com/zhufuyi/sponge) combined with [DTM](https://github.com/dtm-labs/dtm) and [RocksCache](https://github.com/dtm-labs/rockscache), this example demonstrates cache consistency (with Redis and MySQL) including `eventual consistency`, `atomicity`, `strong consistency`, and `strong consistency during downgrade and upgrade`. + +
+ +### Quick Start + +- Start the Redis service. +- Start the MySQL service and import the [stock.sql](test/stock.sql) file into the database. +- Download the [DTM](https://github.com/dtm-labs/dtm/releases/tag/v1.18.0) executable, modify the default DTM configuration to use Redis, then start the DTM service with: `dtm -c conf.yml`. +- Clone the project code locally and modify the IP addresses in the MySQL, Redis, and DTM configurations in [config.yml](configs/stock.yml) (replace the default IP addresses 192.168.3.37 and 192.168.3.90). + +Compile and start the service: + +```bash +make run +``` + +Open [http://localhost:8080/apis/swagger/index.html](http://localhost:8080/apis/swagger/index.html) in your browser to test the four different cache consistency approaches. + +![cache-http-pb-swagger](https://raw.githubusercontent.com/zhufuyi/sponge_examples/main/assets/cache-http-pb-swagger.png) + +#### Eventual Consistency + +The "mark as deleted" strategy for the cache resolves the inconsistency between the database and the cache that merely deleting the cache cannot, ensuring eventual consistency even under extreme conditions. + +**Example code: [final.go](internal/handler/final.go).** + +
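+
+Below is a minimal, illustrative sketch of this "mark as deleted" flow. It is not the code in final.go; the cache-key scheme, table layout, and helper names are assumptions. It assumes a RocksCache client: the write path updates MySQL and then tags the cached key as deleted, and the read path lazily repopulates the key through `Fetch`.
+
+```go
+package handler
+
+import (
+	"database/sql"
+	"fmt"
+	"time"
+
+	"github.com/dtm-labs/rockscache"
+)
+
+const stockExpire = time.Hour
+
+// stockKey is an assumed cache-key scheme, for demonstration only.
+func stockKey(id uint64) string { return fmt.Sprintf("stock:%d", id) }
+
+// UpdateStockFinal updates the database first, then marks the cached value as deleted.
+// As long as the delete is retried until it succeeds, readers converge on the new
+// value, which is the eventual-consistency guarantee described above.
+func UpdateStockFinal(db *sql.DB, rc *rockscache.Client, id uint64, stock uint32) error {
+	if _, err := db.Exec("UPDATE stock SET stock=? WHERE id=?", stock, id); err != nil {
+		return err
+	}
+	// "mark as deleted" instead of overwriting: the next reader recomputes the value
+	return rc.TagAsDeleted(stockKey(id))
+}
+
+// GetStockFinal reads through the cache; on a miss or a deleted tag, the callback
+// reloads the row from the database and the result is cached again.
+func GetStockFinal(db *sql.DB, rc *rockscache.Client, id uint64) (string, error) {
+	return rc.Fetch(stockKey(id), stockExpire, func() (string, error) {
+		var n uint32
+		err := db.QueryRow("SELECT stock FROM stock WHERE id=?", id).Scan(&n)
+		return fmt.Sprintf("%d", n), err
+	})
+}
+```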
+ +#### Atomicity + +This approach ensures that, even if the process crashes partway through, the database update and the cache update either both take effect or both fail; it is also simpler than alternatives such as local message tables, transactional messages, or binlog listeners. + +**Example code: [atomic.go](internal/handler/atomic.go).** + +
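+
+A rough sketch of this pattern with DTM's two-phase message is shown below. It is not the code in atomic.go; the DTM address, business address, request shape, and the standalone `dtmcli` client package are assumptions, while the `deleteCache` and `queryPrepared` callbacks are the ones declared in api/stock/v1/callback.proto. The database update and the message barrier commit in one local transaction, and DTM then guarantees the cache-delete branch is eventually executed.
+
+```go
+package handler
+
+import (
+	"database/sql"
+	"fmt"
+
+	"github.com/dtm-labs/dtmcli"
+)
+
+const (
+	dtmServer  = "http://localhost:36789/api/dtmsvr" // assumed default DTM HTTP address
+	busiServer = "http://localhost:8080/api/v1/stock"
+)
+
+// UpdateStockAtomic binds "update the DB row" and "delete the cache key" into one
+// two-phase message: either both eventually happen, or neither does, even if this
+// process crashes between the two steps.
+func UpdateStockAtomic(db *sql.DB, id uint64, stock uint32) error {
+	key := fmt.Sprintf("stock:%d", id)
+	msg := dtmcli.NewMsg(dtmServer, dtmcli.MustGenGid(dtmServer)).
+		// second phase: DTM calls back POST /api/v1/stock/deleteCache
+		Add(busiServer+"/deleteCache", map[string]string{"key": key})
+	// first phase: run the DB update inside the barrier transaction; DTM later calls
+	// GET /api/v1/stock/queryPrepared to decide whether the message must be delivered
+	return msg.DoAndSubmitDB(busiServer+"/queryPrepared", db, func(tx *sql.Tx) error {
+		_, err := tx.Exec("UPDATE stock SET stock=? WHERE id=?", stock, id)
+		return err
+	})
+}
+```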
+ +#### Strong Consistency + +The prerequisite for strong consistency is that "all data reads must be from the cache". For both the database and Redis, if all reads are provided only by the cache, strong consistency can be achieved easily. The `Fetch` function in RocksCache offers strongly consistent cache reads: instead of returning outdated data, it waits synchronously for the latest result. + +For example, in a recharge scenario, after a user successfully recharges, if the user queries the business result (by checking whether the two-phase global transaction status has succeeded), the system will inform the user of the incomplete status until the global transaction completes, even if the database has been updated. + +Strong consistency comes with a cost, mainly performance degradation. Compared to eventual consistency, strong consistency in data reads requires waiting for the latest results, which increases response latency, and may also involve waiting for results from other processes, which consumes resources. + +**Example code: [strong.go](internal/handler/strong.go).** + +
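+
+Strong consistency is a RocksCache option. A small sketch follows; it is not the code in strong.go, the Redis address is a placeholder, and the go-redis v9 client is an assumption. With `StrongConsistency` enabled, `Fetch` does not serve a value that has been tagged as deleted and instead waits for the fresh value.
+
+```go
+package handler
+
+import (
+	"time"
+
+	"github.com/dtm-labs/rockscache"
+	"github.com/redis/go-redis/v9"
+)
+
+// newStrongCache builds a cache client whose reads are strongly consistent:
+// Fetch waits for the latest value rather than serving a possibly stale one.
+func newStrongCache() *rockscache.Client {
+	rdb := redis.NewClient(&redis.Options{Addr: "127.0.0.1:6379"}) // placeholder address
+	opts := rockscache.NewDefaultOptions()
+	opts.StrongConsistency = true
+	return rockscache.NewClient(rdb, opts)
+}
+
+// ReadStockStrong is the "all reads go through the cache" read path: even right after
+// an update it returns the latest value, at the cost of waiting for it to be recomputed.
+func ReadStockStrong(rc *rockscache.Client, key string, loadFromDB func() (string, error)) (string, error) {
+	return rc.Fetch(key, time.Hour, loadFromDB)
+}
+```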
+ +#### Strong Consistency During Downgrade and Upgrade + +Downgrade refers to reading data from the database when the cache is faulty, while upgrade refers to reading data from the cache after the cache recovers. During the short time window of downgrading and upgrading, strong consistency can still be maintained. + +- Use DTM's Saga mode to update data, ensuring atomicity of the three operations: locking the cache, updating the database, and deleting the cache. +- After updating the database but before updating the cache, the system can inform the user that the business is complete (unlike the earlier strong consistency scenario, this condition is relaxed). +- In strong consistency access mode, queries will wait for the data update result. +- During downgrade, first disable cache reads and wait until no read operations access the cache, then disable cache deletion. +- During upgrade, first enable cache deletion to ensure all database updates are reflected in the cache, then enable cache reads. + +**Example code: [downgrade.go](internal/handler/downgrade.go).** + +
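+
+A sketch of the two switches involved is shown below. It is not the code in downgrade.go; in a real deployment the two flags would come from configuration so that every instance flips them in the order described above, and the names here are illustrative.
+
+```go
+package handler
+
+import (
+	"github.com/dtm-labs/rockscache"
+	"github.com/redis/go-redis/v9"
+)
+
+// newCacheClient rebuilds the cache client for the current downgrade/upgrade stage.
+// Downgrade: set disableRead=true first (reads fall back to the database), wait until
+// no reads hit Redis, then set disableDelete=true. Upgrade: re-enable delete first,
+// so every database update is reflected in the cache again, then re-enable reads.
+func newCacheClient(rdb *redis.Client, disableRead, disableDelete bool) *rockscache.Client {
+	opts := rockscache.NewDefaultOptions()
+	opts.DisableCacheRead = disableRead
+	opts.DisableCacheDelete = disableDelete
+	return rockscache.NewClient(rdb, opts)
+}
+```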
+ +Reference: + +- https://dtm.pub/app/cache.html +- https://github.com/dtm-labs/dtm-cases/tree/main/cache diff --git a/_13_sponge-dtm-cache/http/api/stock/v1/atomic.pb.go b/_13_sponge-dtm-cache/http/api/stock/v1/atomic.pb.go new file mode 100644 index 0000000..009de1e --- /dev/null +++ b/_13_sponge-dtm-cache/http/api/stock/v1/atomic.pb.go @@ -0,0 +1,379 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// versions: +// protoc-gen-go v1.28.0 +// protoc v4.25.2 +// source: api/stock/v1/atomic.proto + +package v1 + +import ( + protoreflect "google.golang.org/protobuf/reflect/protoreflect" + protoimpl "google.golang.org/protobuf/runtime/protoimpl" + reflect "reflect" + sync "sync" +) + +const ( + // Verify that this generated code is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) + // Verify that runtime/protoimpl is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) +) + +type UpdateAtomicRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Id uint64 `protobuf:"varint,1,opt,name=id,proto3" json:"id" uri:"id"` + Stock uint32 `protobuf:"varint,2,opt,name=stock,proto3" json:"stock"` // 库存数量 +} + +func (x *UpdateAtomicRequest) Reset() { + *x = UpdateAtomicRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_api_stock_v1_atomic_proto_msgTypes[0] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *UpdateAtomicRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*UpdateAtomicRequest) ProtoMessage() {} + +func (x *UpdateAtomicRequest) ProtoReflect() protoreflect.Message { + mi := &file_api_stock_v1_atomic_proto_msgTypes[0] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use UpdateAtomicRequest.ProtoReflect.Descriptor instead. +func (*UpdateAtomicRequest) Descriptor() ([]byte, []int) { + return file_api_stock_v1_atomic_proto_rawDescGZIP(), []int{0} +} + +func (x *UpdateAtomicRequest) GetId() uint64 { + if x != nil { + return x.Id + } + return 0 +} + +func (x *UpdateAtomicRequest) GetStock() uint32 { + if x != nil { + return x.Stock + } + return 0 +} + +type UpdateAtomicRequestReply struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields +} + +func (x *UpdateAtomicRequestReply) Reset() { + *x = UpdateAtomicRequestReply{} + if protoimpl.UnsafeEnabled { + mi := &file_api_stock_v1_atomic_proto_msgTypes[1] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *UpdateAtomicRequestReply) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*UpdateAtomicRequestReply) ProtoMessage() {} + +func (x *UpdateAtomicRequestReply) ProtoReflect() protoreflect.Message { + mi := &file_api_stock_v1_atomic_proto_msgTypes[1] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use UpdateAtomicRequestReply.ProtoReflect.Descriptor instead. 
+func (*UpdateAtomicRequestReply) Descriptor() ([]byte, []int) { + return file_api_stock_v1_atomic_proto_rawDescGZIP(), []int{1} +} + +type QueryAtomicRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Id uint64 `protobuf:"varint,1,opt,name=id,proto3" json:"id" uri:"id"` +} + +func (x *QueryAtomicRequest) Reset() { + *x = QueryAtomicRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_api_stock_v1_atomic_proto_msgTypes[2] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *QueryAtomicRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*QueryAtomicRequest) ProtoMessage() {} + +func (x *QueryAtomicRequest) ProtoReflect() protoreflect.Message { + mi := &file_api_stock_v1_atomic_proto_msgTypes[2] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use QueryAtomicRequest.ProtoReflect.Descriptor instead. +func (*QueryAtomicRequest) Descriptor() ([]byte, []int) { + return file_api_stock_v1_atomic_proto_rawDescGZIP(), []int{2} +} + +func (x *QueryAtomicRequest) GetId() uint64 { + if x != nil { + return x.Id + } + return 0 +} + +type QueryAtomicReply struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Stock uint32 `protobuf:"varint,1,opt,name=stock,proto3" json:"stock"` // 库存数量 +} + +func (x *QueryAtomicReply) Reset() { + *x = QueryAtomicReply{} + if protoimpl.UnsafeEnabled { + mi := &file_api_stock_v1_atomic_proto_msgTypes[3] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *QueryAtomicReply) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*QueryAtomicReply) ProtoMessage() {} + +func (x *QueryAtomicReply) ProtoReflect() protoreflect.Message { + mi := &file_api_stock_v1_atomic_proto_msgTypes[3] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use QueryAtomicReply.ProtoReflect.Descriptor instead. 
+func (*QueryAtomicReply) Descriptor() ([]byte, []int) { + return file_api_stock_v1_atomic_proto_rawDescGZIP(), []int{3} +} + +func (x *QueryAtomicReply) GetStock() uint32 { + if x != nil { + return x.Stock + } + return 0 +} + +var File_api_stock_v1_atomic_proto protoreflect.FileDescriptor + +var file_api_stock_v1_atomic_proto_rawDesc = []byte{ + 0x0a, 0x19, 0x61, 0x70, 0x69, 0x2f, 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x2f, 0x76, 0x31, 0x2f, 0x61, + 0x74, 0x6f, 0x6d, 0x69, 0x63, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x0c, 0x61, 0x70, 0x69, + 0x2e, 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x2e, 0x76, 0x31, 0x1a, 0x1c, 0x67, 0x6f, 0x6f, 0x67, 0x6c, + 0x65, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x61, 0x6e, 0x6e, 0x6f, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, + 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x2d, + 0x67, 0x65, 0x6e, 0x2d, 0x6f, 0x70, 0x65, 0x6e, 0x61, 0x70, 0x69, 0x76, 0x32, 0x2f, 0x6f, 0x70, + 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2f, 0x61, 0x6e, 0x6e, 0x6f, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, + 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x13, 0x74, 0x61, 0x67, 0x67, 0x65, 0x72, 0x2f, + 0x74, 0x61, 0x67, 0x67, 0x65, 0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x17, 0x76, 0x61, + 0x6c, 0x69, 0x64, 0x61, 0x74, 0x65, 0x2f, 0x76, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x65, 0x2e, + 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, 0x5a, 0x0a, 0x13, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x41, + 0x74, 0x6f, 0x6d, 0x69, 0x63, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x24, 0x0a, 0x02, + 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x42, 0x14, 0xfa, 0x42, 0x04, 0x32, 0x02, 0x20, + 0x00, 0x9a, 0x84, 0x9e, 0x03, 0x08, 0x75, 0x72, 0x69, 0x3a, 0x22, 0x69, 0x64, 0x22, 0x52, 0x02, + 0x69, 0x64, 0x12, 0x1d, 0x0a, 0x05, 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x18, 0x02, 0x20, 0x01, 0x28, + 0x0d, 0x42, 0x07, 0xfa, 0x42, 0x04, 0x2a, 0x02, 0x20, 0x00, 0x52, 0x05, 0x73, 0x74, 0x6f, 0x63, + 0x6b, 0x22, 0x1a, 0x0a, 0x18, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x41, 0x74, 0x6f, 0x6d, 0x69, + 0x63, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0x3a, 0x0a, + 0x12, 0x51, 0x75, 0x65, 0x72, 0x79, 0x41, 0x74, 0x6f, 0x6d, 0x69, 0x63, 0x52, 0x65, 0x71, 0x75, + 0x65, 0x73, 0x74, 0x12, 0x24, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x42, + 0x14, 0xfa, 0x42, 0x04, 0x32, 0x02, 0x20, 0x00, 0x9a, 0x84, 0x9e, 0x03, 0x08, 0x75, 0x72, 0x69, + 0x3a, 0x22, 0x69, 0x64, 0x22, 0x52, 0x02, 0x69, 0x64, 0x22, 0x28, 0x0a, 0x10, 0x51, 0x75, 0x65, + 0x72, 0x79, 0x41, 0x74, 0x6f, 0x6d, 0x69, 0x63, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x12, 0x14, 0x0a, + 0x05, 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x05, 0x73, 0x74, + 0x6f, 0x63, 0x6b, 0x32, 0xee, 0x02, 0x0a, 0x06, 0x61, 0x74, 0x6f, 0x6d, 0x69, 0x63, 0x12, 0xc2, + 0x01, 0x0a, 0x06, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x12, 0x21, 0x2e, 0x61, 0x70, 0x69, 0x2e, + 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x2e, 0x76, 0x31, 0x2e, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x41, + 0x74, 0x6f, 0x6d, 0x69, 0x63, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x26, 0x2e, 0x61, + 0x70, 0x69, 0x2e, 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x2e, 0x76, 0x31, 0x2e, 0x55, 0x70, 0x64, 0x61, + 0x74, 0x65, 0x41, 0x74, 0x6f, 0x6d, 0x69, 0x63, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x52, + 0x65, 0x70, 0x6c, 0x79, 0x22, 0x6d, 0x92, 0x41, 0x46, 0x0a, 0x11, 0x63, 0x61, 0x73, 0x65, 0x20, + 0x32, 0x3a, 0x20, 0xe5, 0x8e, 0x9f, 0xe5, 0xad, 0x90, 0xe6, 0x80, 0xa7, 0x12, 0x0c, 0xe6, 0x9b, + 0xb4, 0xe6, 0x96, 0xb0, 0xe6, 0x95, 0xb0, 0xe6, 0x8d, 0xae, 0x1a, 
0x23, 0xe6, 0x9b, 0xb4, 0xe6, + 0x96, 0xb0, 0xe6, 0x95, 0xb0, 0xe6, 0x8d, 0xae, 0xef, 0xbc, 0x8c, 0x44, 0x42, 0xe5, 0x92, 0x8c, + 0xe7, 0xbc, 0x93, 0xe5, 0xad, 0x98, 0xe5, 0x8e, 0x9f, 0xe5, 0xad, 0x90, 0xe6, 0x80, 0xa7, 0x82, + 0xd3, 0xe4, 0x93, 0x02, 0x1e, 0x1a, 0x19, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x76, 0x31, 0x2f, 0x73, + 0x74, 0x6f, 0x63, 0x6b, 0x2f, 0x7b, 0x69, 0x64, 0x7d, 0x2f, 0x61, 0x74, 0x6f, 0x6d, 0x69, 0x63, + 0x3a, 0x01, 0x2a, 0x12, 0x9e, 0x01, 0x0a, 0x05, 0x51, 0x75, 0x65, 0x72, 0x79, 0x12, 0x20, 0x2e, + 0x61, 0x70, 0x69, 0x2e, 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x2e, 0x76, 0x31, 0x2e, 0x51, 0x75, 0x65, + 0x72, 0x79, 0x41, 0x74, 0x6f, 0x6d, 0x69, 0x63, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, + 0x1e, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x2e, 0x76, 0x31, 0x2e, 0x51, + 0x75, 0x65, 0x72, 0x79, 0x41, 0x74, 0x6f, 0x6d, 0x69, 0x63, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, + 0x53, 0x92, 0x41, 0x2f, 0x0a, 0x11, 0x63, 0x61, 0x73, 0x65, 0x20, 0x32, 0x3a, 0x20, 0xe5, 0x8e, + 0x9f, 0xe5, 0xad, 0x90, 0xe6, 0x80, 0xa7, 0x12, 0x0c, 0xe6, 0x9f, 0xa5, 0xe8, 0xaf, 0xa2, 0xe6, + 0x95, 0xb0, 0xe6, 0x8d, 0xae, 0x1a, 0x0c, 0xe6, 0x9f, 0xa5, 0xe8, 0xaf, 0xa2, 0xe6, 0x95, 0xb0, + 0xe6, 0x8d, 0xae, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x1b, 0x12, 0x19, 0x2f, 0x61, 0x70, 0x69, 0x2f, + 0x76, 0x31, 0x2f, 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x2f, 0x7b, 0x69, 0x64, 0x7d, 0x2f, 0x61, 0x74, + 0x6f, 0x6d, 0x69, 0x63, 0x42, 0xb4, 0x01, 0x5a, 0x15, 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x2f, 0x61, + 0x70, 0x69, 0x2f, 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x2f, 0x76, 0x31, 0x3b, 0x76, 0x31, 0x92, 0x41, + 0x99, 0x01, 0x12, 0x15, 0x0a, 0x0e, 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x20, 0x61, 0x70, 0x69, 0x20, + 0x64, 0x6f, 0x63, 0x73, 0x32, 0x03, 0x32, 0x2e, 0x30, 0x1a, 0x0e, 0x6c, 0x6f, 0x63, 0x61, 0x6c, + 0x68, 0x6f, 0x73, 0x74, 0x3a, 0x38, 0x30, 0x38, 0x30, 0x2a, 0x02, 0x01, 0x02, 0x32, 0x10, 0x61, + 0x70, 0x70, 0x6c, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2f, 0x6a, 0x73, 0x6f, 0x6e, 0x3a, + 0x10, 0x61, 0x70, 0x70, 0x6c, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2f, 0x6a, 0x73, 0x6f, + 0x6e, 0x5a, 0x48, 0x0a, 0x46, 0x0a, 0x0a, 0x42, 0x65, 0x61, 0x72, 0x65, 0x72, 0x41, 0x75, 0x74, + 0x68, 0x12, 0x38, 0x08, 0x02, 0x12, 0x23, 0x54, 0x79, 0x70, 0x65, 0x20, 0x42, 0x65, 0x61, 0x72, + 0x65, 0x72, 0x20, 0x79, 0x6f, 0x75, 0x72, 0x2d, 0x6a, 0x77, 0x74, 0x2d, 0x74, 0x6f, 0x6b, 0x65, + 0x6e, 0x20, 0x74, 0x6f, 0x20, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x1a, 0x0d, 0x41, 0x75, 0x74, 0x68, + 0x6f, 0x72, 0x69, 0x7a, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x02, 0x62, 0x06, 0x70, 0x72, 0x6f, + 0x74, 0x6f, 0x33, +} + +var ( + file_api_stock_v1_atomic_proto_rawDescOnce sync.Once + file_api_stock_v1_atomic_proto_rawDescData = file_api_stock_v1_atomic_proto_rawDesc +) + +func file_api_stock_v1_atomic_proto_rawDescGZIP() []byte { + file_api_stock_v1_atomic_proto_rawDescOnce.Do(func() { + file_api_stock_v1_atomic_proto_rawDescData = protoimpl.X.CompressGZIP(file_api_stock_v1_atomic_proto_rawDescData) + }) + return file_api_stock_v1_atomic_proto_rawDescData +} + +var file_api_stock_v1_atomic_proto_msgTypes = make([]protoimpl.MessageInfo, 4) +var file_api_stock_v1_atomic_proto_goTypes = []interface{}{ + (*UpdateAtomicRequest)(nil), // 0: api.stock.v1.UpdateAtomicRequest + (*UpdateAtomicRequestReply)(nil), // 1: api.stock.v1.UpdateAtomicRequestReply + (*QueryAtomicRequest)(nil), // 2: api.stock.v1.QueryAtomicRequest + (*QueryAtomicReply)(nil), // 3: api.stock.v1.QueryAtomicReply +} +var file_api_stock_v1_atomic_proto_depIdxs = []int32{ + 0, // 0: 
api.stock.v1.atomic.Update:input_type -> api.stock.v1.UpdateAtomicRequest + 2, // 1: api.stock.v1.atomic.Query:input_type -> api.stock.v1.QueryAtomicRequest + 1, // 2: api.stock.v1.atomic.Update:output_type -> api.stock.v1.UpdateAtomicRequestReply + 3, // 3: api.stock.v1.atomic.Query:output_type -> api.stock.v1.QueryAtomicReply + 2, // [2:4] is the sub-list for method output_type + 0, // [0:2] is the sub-list for method input_type + 0, // [0:0] is the sub-list for extension type_name + 0, // [0:0] is the sub-list for extension extendee + 0, // [0:0] is the sub-list for field type_name +} + +func init() { file_api_stock_v1_atomic_proto_init() } +func file_api_stock_v1_atomic_proto_init() { + if File_api_stock_v1_atomic_proto != nil { + return + } + if !protoimpl.UnsafeEnabled { + file_api_stock_v1_atomic_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*UpdateAtomicRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_api_stock_v1_atomic_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*UpdateAtomicRequestReply); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_api_stock_v1_atomic_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*QueryAtomicRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_api_stock_v1_atomic_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*QueryAtomicReply); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + } + type x struct{} + out := protoimpl.TypeBuilder{ + File: protoimpl.DescBuilder{ + GoPackagePath: reflect.TypeOf(x{}).PkgPath(), + RawDescriptor: file_api_stock_v1_atomic_proto_rawDesc, + NumEnums: 0, + NumMessages: 4, + NumExtensions: 0, + NumServices: 1, + }, + GoTypes: file_api_stock_v1_atomic_proto_goTypes, + DependencyIndexes: file_api_stock_v1_atomic_proto_depIdxs, + MessageInfos: file_api_stock_v1_atomic_proto_msgTypes, + }.Build() + File_api_stock_v1_atomic_proto = out.File + file_api_stock_v1_atomic_proto_rawDesc = nil + file_api_stock_v1_atomic_proto_goTypes = nil + file_api_stock_v1_atomic_proto_depIdxs = nil +} diff --git a/_13_sponge-dtm-cache/http/api/stock/v1/atomic.pb.validate.go b/_13_sponge-dtm-cache/http/api/stock/v1/atomic.pb.validate.go new file mode 100644 index 0000000..d43026a --- /dev/null +++ b/_13_sponge-dtm-cache/http/api/stock/v1/atomic.pb.validate.go @@ -0,0 +1,477 @@ +// Code generated by protoc-gen-validate. DO NOT EDIT. +// source: api/stock/v1/atomic.proto + +package v1 + +import ( + "bytes" + "errors" + "fmt" + "net" + "net/mail" + "net/url" + "regexp" + "sort" + "strings" + "time" + "unicode/utf8" + + "google.golang.org/protobuf/types/known/anypb" +) + +// ensure the imports are used +var ( + _ = bytes.MinRead + _ = errors.New("") + _ = fmt.Print + _ = utf8.UTFMax + _ = (*regexp.Regexp)(nil) + _ = (*strings.Reader)(nil) + _ = net.IPv4len + _ = time.Duration(0) + _ = (*url.URL)(nil) + _ = (*mail.Address)(nil) + _ = anypb.Any{} + _ = sort.Sort +) + +// Validate checks the field values on UpdateAtomicRequest with the rules +// defined in the proto definition for this message. 
If any rules are +// violated, the first error encountered is returned, or nil if there are no violations. +func (m *UpdateAtomicRequest) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on UpdateAtomicRequest with the rules +// defined in the proto definition for this message. If any rules are +// violated, the result is a list of violation errors wrapped in +// UpdateAtomicRequestMultiError, or nil if none found. +func (m *UpdateAtomicRequest) ValidateAll() error { + return m.validate(true) +} + +func (m *UpdateAtomicRequest) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + if m.GetId() <= 0 { + err := UpdateAtomicRequestValidationError{ + field: "Id", + reason: "value must be greater than 0", + } + if !all { + return err + } + errors = append(errors, err) + } + + if m.GetStock() <= 0 { + err := UpdateAtomicRequestValidationError{ + field: "Stock", + reason: "value must be greater than 0", + } + if !all { + return err + } + errors = append(errors, err) + } + + if len(errors) > 0 { + return UpdateAtomicRequestMultiError(errors) + } + + return nil +} + +// UpdateAtomicRequestMultiError is an error wrapping multiple validation +// errors returned by UpdateAtomicRequest.ValidateAll() if the designated +// constraints aren't met. +type UpdateAtomicRequestMultiError []error + +// Error returns a concatenation of all the error messages it wraps. +func (m UpdateAtomicRequestMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m UpdateAtomicRequestMultiError) AllErrors() []error { return m } + +// UpdateAtomicRequestValidationError is the validation error returned by +// UpdateAtomicRequest.Validate if the designated constraints aren't met. +type UpdateAtomicRequestValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e UpdateAtomicRequestValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e UpdateAtomicRequestValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e UpdateAtomicRequestValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e UpdateAtomicRequestValidationError) Key() bool { return e.key } + +// ErrorName returns error name. +func (e UpdateAtomicRequestValidationError) ErrorName() string { + return "UpdateAtomicRequestValidationError" +} + +// Error satisfies the builtin error interface +func (e UpdateAtomicRequestValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sUpdateAtomicRequest.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = UpdateAtomicRequestValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = UpdateAtomicRequestValidationError{} + +// Validate checks the field values on UpdateAtomicRequestReply with the rules +// defined in the proto definition for this message. If any rules are +// violated, the first error encountered is returned, or nil if there are no violations. 
+func (m *UpdateAtomicRequestReply) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on UpdateAtomicRequestReply with the +// rules defined in the proto definition for this message. If any rules are +// violated, the result is a list of violation errors wrapped in +// UpdateAtomicRequestReplyMultiError, or nil if none found. +func (m *UpdateAtomicRequestReply) ValidateAll() error { + return m.validate(true) +} + +func (m *UpdateAtomicRequestReply) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + if len(errors) > 0 { + return UpdateAtomicRequestReplyMultiError(errors) + } + + return nil +} + +// UpdateAtomicRequestReplyMultiError is an error wrapping multiple validation +// errors returned by UpdateAtomicRequestReply.ValidateAll() if the designated +// constraints aren't met. +type UpdateAtomicRequestReplyMultiError []error + +// Error returns a concatenation of all the error messages it wraps. +func (m UpdateAtomicRequestReplyMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m UpdateAtomicRequestReplyMultiError) AllErrors() []error { return m } + +// UpdateAtomicRequestReplyValidationError is the validation error returned by +// UpdateAtomicRequestReply.Validate if the designated constraints aren't met. +type UpdateAtomicRequestReplyValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e UpdateAtomicRequestReplyValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e UpdateAtomicRequestReplyValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e UpdateAtomicRequestReplyValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e UpdateAtomicRequestReplyValidationError) Key() bool { return e.key } + +// ErrorName returns error name. +func (e UpdateAtomicRequestReplyValidationError) ErrorName() string { + return "UpdateAtomicRequestReplyValidationError" +} + +// Error satisfies the builtin error interface +func (e UpdateAtomicRequestReplyValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sUpdateAtomicRequestReply.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = UpdateAtomicRequestReplyValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = UpdateAtomicRequestReplyValidationError{} + +// Validate checks the field values on QueryAtomicRequest with the rules +// defined in the proto definition for this message. If any rules are +// violated, the first error encountered is returned, or nil if there are no violations. +func (m *QueryAtomicRequest) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on QueryAtomicRequest with the rules +// defined in the proto definition for this message. If any rules are +// violated, the result is a list of violation errors wrapped in +// QueryAtomicRequestMultiError, or nil if none found. 
+func (m *QueryAtomicRequest) ValidateAll() error { + return m.validate(true) +} + +func (m *QueryAtomicRequest) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + if m.GetId() <= 0 { + err := QueryAtomicRequestValidationError{ + field: "Id", + reason: "value must be greater than 0", + } + if !all { + return err + } + errors = append(errors, err) + } + + if len(errors) > 0 { + return QueryAtomicRequestMultiError(errors) + } + + return nil +} + +// QueryAtomicRequestMultiError is an error wrapping multiple validation errors +// returned by QueryAtomicRequest.ValidateAll() if the designated constraints +// aren't met. +type QueryAtomicRequestMultiError []error + +// Error returns a concatenation of all the error messages it wraps. +func (m QueryAtomicRequestMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m QueryAtomicRequestMultiError) AllErrors() []error { return m } + +// QueryAtomicRequestValidationError is the validation error returned by +// QueryAtomicRequest.Validate if the designated constraints aren't met. +type QueryAtomicRequestValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e QueryAtomicRequestValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e QueryAtomicRequestValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e QueryAtomicRequestValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e QueryAtomicRequestValidationError) Key() bool { return e.key } + +// ErrorName returns error name. +func (e QueryAtomicRequestValidationError) ErrorName() string { + return "QueryAtomicRequestValidationError" +} + +// Error satisfies the builtin error interface +func (e QueryAtomicRequestValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sQueryAtomicRequest.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = QueryAtomicRequestValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = QueryAtomicRequestValidationError{} + +// Validate checks the field values on QueryAtomicReply with the rules defined +// in the proto definition for this message. If any rules are violated, the +// first error encountered is returned, or nil if there are no violations. +func (m *QueryAtomicReply) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on QueryAtomicReply with the rules +// defined in the proto definition for this message. If any rules are +// violated, the result is a list of violation errors wrapped in +// QueryAtomicReplyMultiError, or nil if none found. 
+func (m *QueryAtomicReply) ValidateAll() error { + return m.validate(true) +} + +func (m *QueryAtomicReply) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + // no validation rules for Stock + + if len(errors) > 0 { + return QueryAtomicReplyMultiError(errors) + } + + return nil +} + +// QueryAtomicReplyMultiError is an error wrapping multiple validation errors +// returned by QueryAtomicReply.ValidateAll() if the designated constraints +// aren't met. +type QueryAtomicReplyMultiError []error + +// Error returns a concatenation of all the error messages it wraps. +func (m QueryAtomicReplyMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m QueryAtomicReplyMultiError) AllErrors() []error { return m } + +// QueryAtomicReplyValidationError is the validation error returned by +// QueryAtomicReply.Validate if the designated constraints aren't met. +type QueryAtomicReplyValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e QueryAtomicReplyValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e QueryAtomicReplyValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e QueryAtomicReplyValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e QueryAtomicReplyValidationError) Key() bool { return e.key } + +// ErrorName returns error name. +func (e QueryAtomicReplyValidationError) ErrorName() string { return "QueryAtomicReplyValidationError" } + +// Error satisfies the builtin error interface +func (e QueryAtomicReplyValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sQueryAtomicReply.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = QueryAtomicReplyValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = QueryAtomicReplyValidationError{} diff --git a/_13_sponge-dtm-cache/http/api/stock/v1/atomic.proto b/_13_sponge-dtm-cache/http/api/stock/v1/atomic.proto new file mode 100644 index 0000000..9867bc7 --- /dev/null +++ b/_13_sponge-dtm-cache/http/api/stock/v1/atomic.proto @@ -0,0 +1,82 @@ +syntax = "proto3"; + +package api.stock.v1; + +import "google/api/annotations.proto"; +import "protoc-gen-openapiv2/options/annotations.proto"; +import "tagger/tagger.proto"; +import "validate/validate.proto"; + +option go_package = "stock/api/stock/v1;v1"; + +// Default settings for generating swagger documents +// NOTE: because json does not support 64 bits, the int64 and uint64 types under *.swagger.json are automatically converted to string types +// Reference https://github.com/grpc-ecosystem/grpc-gateway/blob/db7fbefff7c04877cdb32e16d4a248a024428207/examples/internal/proto/examplepb/a_bit_of_everything.proto +option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_swagger) = { + host: "localhost:8080" + base_path: "" + info: { + title: "stock api docs"; + version: "2.0"; + } + schemes: HTTP; + schemes: HTTPS; + consumes: "application/json"; + produces: "application/json"; + security_definitions: { + security: { + key: "BearerAuth"; + value: { + 
type: TYPE_API_KEY; + in: IN_HEADER; + name: "Authorization"; + description: "Type Bearer your-jwt-token to Value"; + } + } + } +}; + +// 使用dtm+rockscache实现原子性更新,比本地消息表、事务消息、binlog监听的这些架构更加简单 +service atomic{ + // 更新数据,保证DB与缓存操作的原子性。 + rpc Update(UpdateAtomicRequest) returns (UpdateAtomicRequestReply) { + option (google.api.http) = { + put: "/api/v1/stock/{id}/atomic" + body: "*" + }; + option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { + summary: "更新数据", + description: "更新数据,DB和缓存原子性", + tags: "case 2: 原子性" + }; + } + + // 查询 + rpc Query(QueryAtomicRequest) returns (QueryAtomicReply) { + option (google.api.http) = { + get: "/api/v1/stock/{id}/atomic" + }; + option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { + summary: "查询数据", + description: "查询数据", + tags: "case 2: 原子性" + }; + } +} + +message UpdateAtomicRequest { + uint64 id = 1 [(validate.rules).uint64.gt = 0, (tagger.tags) = "uri:\"id\"" ]; + uint32 stock = 2 [(validate.rules).uint32.gt = 0]; // 库存数量 +} + +message UpdateAtomicRequestReply { + +} + +message QueryAtomicRequest { + uint64 id =1 [(validate.rules).uint64.gt = 0, (tagger.tags) = "uri:\"id\"" ]; +} + +message QueryAtomicReply { + uint32 stock = 1; // 库存数量 +} diff --git a/_13_sponge-dtm-cache/http/api/stock/v1/atomic_router.pb.go b/_13_sponge-dtm-cache/http/api/stock/v1/atomic_router.pb.go new file mode 100644 index 0000000..e551613 --- /dev/null +++ b/_13_sponge-dtm-cache/http/api/stock/v1/atomic_router.pb.go @@ -0,0 +1,221 @@ +// Code generated by https://github.com/zhufuyi/sponge, DO NOT EDIT. + +package v1 + +import ( + "context" + "errors" + "strings" + + "github.com/gin-gonic/gin" + "go.uber.org/zap" + + "github.com/zhufuyi/sponge/pkg/errcode" + "github.com/zhufuyi/sponge/pkg/gin/middleware" +) + +type AtomicLogicer interface { + Update(ctx context.Context, req *UpdateAtomicRequest) (*UpdateAtomicRequestReply, error) + Query(ctx context.Context, req *QueryAtomicRequest) (*QueryAtomicReply, error) +} + +type AtomicOption func(*atomicOptions) + +type atomicOptions struct { + isFromRPC bool + responser errcode.Responser + zapLog *zap.Logger + httpErrors []*errcode.Error + rpcStatus []*errcode.RPCStatus + wrapCtxFn func(c *gin.Context) context.Context +} + +func (o *atomicOptions) apply(opts ...AtomicOption) { + for _, opt := range opts { + opt(o) + } +} + +func WithAtomicHTTPResponse() AtomicOption { + return func(o *atomicOptions) { + o.isFromRPC = false + } +} + +func WithAtomicRPCResponse() AtomicOption { + return func(o *atomicOptions) { + o.isFromRPC = true + } +} + +func WithAtomicResponser(responser errcode.Responser) AtomicOption { + return func(o *atomicOptions) { + o.responser = responser + } +} + +func WithAtomicLogger(zapLog *zap.Logger) AtomicOption { + return func(o *atomicOptions) { + o.zapLog = zapLog + } +} + +func WithAtomicErrorToHTTPCode(e ...*errcode.Error) AtomicOption { + return func(o *atomicOptions) { + o.httpErrors = e + } +} + +func WithAtomicRPCStatusToHTTPCode(s ...*errcode.RPCStatus) AtomicOption { + return func(o *atomicOptions) { + o.rpcStatus = s + } +} + +func WithAtomicWrapCtx(wrapCtxFn func(c *gin.Context) context.Context) AtomicOption { + return func(o *atomicOptions) { + o.wrapCtxFn = wrapCtxFn + } +} + +func RegisterAtomicRouter( + iRouter gin.IRouter, + groupPathMiddlewares map[string][]gin.HandlerFunc, + singlePathMiddlewares map[string][]gin.HandlerFunc, + iLogic AtomicLogicer, + opts ...AtomicOption) { + + o := &atomicOptions{} + o.apply(opts...) 
+ + if o.responser == nil { + o.responser = errcode.NewResponser(o.isFromRPC, o.httpErrors, o.rpcStatus) + } + if o.zapLog == nil { + o.zapLog, _ = zap.NewProduction() + } + + r := &atomicRouter{ + iRouter: iRouter, + groupPathMiddlewares: groupPathMiddlewares, + singlePathMiddlewares: singlePathMiddlewares, + iLogic: iLogic, + iResponse: o.responser, + zapLog: o.zapLog, + wrapCtxFn: o.wrapCtxFn, + } + r.register() +} + +type atomicRouter struct { + iRouter gin.IRouter + groupPathMiddlewares map[string][]gin.HandlerFunc + singlePathMiddlewares map[string][]gin.HandlerFunc + iLogic AtomicLogicer + iResponse errcode.Responser + zapLog *zap.Logger + wrapCtxFn func(c *gin.Context) context.Context +} + +func (r *atomicRouter) register() { + r.iRouter.Handle("PUT", "/api/v1/stock/:id/atomic", r.withMiddleware("PUT", "/api/v1/stock/:id/atomic", r.Update_0)...) + r.iRouter.Handle("GET", "/api/v1/stock/:id/atomic", r.withMiddleware("GET", "/api/v1/stock/:id/atomic", r.Query_0)...) + +} + +func (r *atomicRouter) withMiddleware(method string, path string, fn gin.HandlerFunc) []gin.HandlerFunc { + handlerFns := []gin.HandlerFunc{} + + // determine if a route group is hit or miss, left prefix rule + for groupPath, fns := range r.groupPathMiddlewares { + if groupPath == "" || groupPath == "/" { + handlerFns = append(handlerFns, fns...) + continue + } + size := len(groupPath) + if len(path) < size { + continue + } + if groupPath == path[:size] { + handlerFns = append(handlerFns, fns...) + } + } + + // determine if a single route has been hit + key := strings.ToUpper(method) + "->" + path + if fns, ok := r.singlePathMiddlewares[key]; ok { + handlerFns = append(handlerFns, fns...) + } + + return append(handlerFns, fn) +} + +func (r *atomicRouter) Update_0(c *gin.Context) { + req := &UpdateAtomicRequest{} + var err error + + if err = c.ShouldBindUri(req); err != nil { + r.zapLog.Warn("ShouldBindUri error", zap.Error(err), middleware.GCtxRequestIDField(c)) + r.iResponse.ParamError(c, err) + return + } + + if err = c.ShouldBindJSON(req); err != nil { + r.zapLog.Warn("ShouldBindJSON error", zap.Error(err), middleware.GCtxRequestIDField(c)) + r.iResponse.ParamError(c, err) + return + } + + var ctx context.Context + if r.wrapCtxFn != nil { + ctx = r.wrapCtxFn(c) + } else { + ctx = middleware.WrapCtx(c) + } + + out, err := r.iLogic.Update(ctx, req) + if err != nil { + if errors.Is(err, errcode.SkipResponse) { + return + } + r.iResponse.Error(c, err) + return + } + + r.iResponse.Success(c, out) +} + +func (r *atomicRouter) Query_0(c *gin.Context) { + req := &QueryAtomicRequest{} + var err error + + if err = c.ShouldBindUri(req); err != nil { + r.zapLog.Warn("ShouldBindUri error", zap.Error(err), middleware.GCtxRequestIDField(c)) + r.iResponse.ParamError(c, err) + return + } + + if err = c.ShouldBindQuery(req); err != nil { + r.zapLog.Warn("ShouldBindQuery error", zap.Error(err), middleware.GCtxRequestIDField(c)) + r.iResponse.ParamError(c, err) + return + } + + var ctx context.Context + if r.wrapCtxFn != nil { + ctx = r.wrapCtxFn(c) + } else { + ctx = middleware.WrapCtx(c) + } + + out, err := r.iLogic.Query(ctx, req) + if err != nil { + if errors.Is(err, errcode.SkipResponse) { + return + } + r.iResponse.Error(c, err) + return + } + + r.iResponse.Success(c, out) +} diff --git a/_13_sponge-dtm-cache/http/api/stock/v1/callback.pb.go b/_13_sponge-dtm-cache/http/api/stock/v1/callback.pb.go new file mode 100644 index 0000000..6745cfd --- /dev/null +++ b/_13_sponge-dtm-cache/http/api/stock/v1/callback.pb.go @@ -0,0 
+1,325 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// versions: +// protoc-gen-go v1.28.0 +// protoc v4.25.2 +// source: api/stock/v1/callback.proto + +package v1 + +import ( + protoreflect "google.golang.org/protobuf/reflect/protoreflect" + protoimpl "google.golang.org/protobuf/runtime/protoimpl" + reflect "reflect" + sync "sync" +) + +const ( + // Verify that this generated code is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) + // Verify that runtime/protoimpl is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) +) + +type QueryPreparedRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields +} + +func (x *QueryPreparedRequest) Reset() { + *x = QueryPreparedRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_api_stock_v1_callback_proto_msgTypes[0] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *QueryPreparedRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*QueryPreparedRequest) ProtoMessage() {} + +func (x *QueryPreparedRequest) ProtoReflect() protoreflect.Message { + mi := &file_api_stock_v1_callback_proto_msgTypes[0] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use QueryPreparedRequest.ProtoReflect.Descriptor instead. +func (*QueryPreparedRequest) Descriptor() ([]byte, []int) { + return file_api_stock_v1_callback_proto_rawDescGZIP(), []int{0} +} + +type QueryPreparedReply struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields +} + +func (x *QueryPreparedReply) Reset() { + *x = QueryPreparedReply{} + if protoimpl.UnsafeEnabled { + mi := &file_api_stock_v1_callback_proto_msgTypes[1] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *QueryPreparedReply) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*QueryPreparedReply) ProtoMessage() {} + +func (x *QueryPreparedReply) ProtoReflect() protoreflect.Message { + mi := &file_api_stock_v1_callback_proto_msgTypes[1] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use QueryPreparedReply.ProtoReflect.Descriptor instead. 
+func (*QueryPreparedReply) Descriptor() ([]byte, []int) { + return file_api_stock_v1_callback_proto_rawDescGZIP(), []int{1} +} + +type DeleteCacheRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Key string `protobuf:"bytes,1,opt,name=key,proto3" json:"key"` +} + +func (x *DeleteCacheRequest) Reset() { + *x = DeleteCacheRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_api_stock_v1_callback_proto_msgTypes[2] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *DeleteCacheRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*DeleteCacheRequest) ProtoMessage() {} + +func (x *DeleteCacheRequest) ProtoReflect() protoreflect.Message { + mi := &file_api_stock_v1_callback_proto_msgTypes[2] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use DeleteCacheRequest.ProtoReflect.Descriptor instead. +func (*DeleteCacheRequest) Descriptor() ([]byte, []int) { + return file_api_stock_v1_callback_proto_rawDescGZIP(), []int{2} +} + +func (x *DeleteCacheRequest) GetKey() string { + if x != nil { + return x.Key + } + return "" +} + +type DeleteCacheReply struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields +} + +func (x *DeleteCacheReply) Reset() { + *x = DeleteCacheReply{} + if protoimpl.UnsafeEnabled { + mi := &file_api_stock_v1_callback_proto_msgTypes[3] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *DeleteCacheReply) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*DeleteCacheReply) ProtoMessage() {} + +func (x *DeleteCacheReply) ProtoReflect() protoreflect.Message { + mi := &file_api_stock_v1_callback_proto_msgTypes[3] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use DeleteCacheReply.ProtoReflect.Descriptor instead. 
+func (*DeleteCacheReply) Descriptor() ([]byte, []int) { + return file_api_stock_v1_callback_proto_rawDescGZIP(), []int{3} +} + +var File_api_stock_v1_callback_proto protoreflect.FileDescriptor + +var file_api_stock_v1_callback_proto_rawDesc = []byte{ + 0x0a, 0x1b, 0x61, 0x70, 0x69, 0x2f, 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x2f, 0x76, 0x31, 0x2f, 0x63, + 0x61, 0x6c, 0x6c, 0x62, 0x61, 0x63, 0x6b, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x0c, 0x61, + 0x70, 0x69, 0x2e, 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x2e, 0x76, 0x31, 0x1a, 0x1c, 0x67, 0x6f, 0x6f, + 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x61, 0x6e, 0x6e, 0x6f, 0x74, 0x61, 0x74, 0x69, + 0x6f, 0x6e, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x17, 0x76, 0x61, 0x6c, 0x69, 0x64, + 0x61, 0x74, 0x65, 0x2f, 0x76, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x65, 0x2e, 0x70, 0x72, 0x6f, + 0x74, 0x6f, 0x22, 0x16, 0x0a, 0x14, 0x51, 0x75, 0x65, 0x72, 0x79, 0x50, 0x72, 0x65, 0x70, 0x61, + 0x72, 0x65, 0x64, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x22, 0x14, 0x0a, 0x12, 0x51, 0x75, + 0x65, 0x72, 0x79, 0x50, 0x72, 0x65, 0x70, 0x61, 0x72, 0x65, 0x64, 0x52, 0x65, 0x70, 0x6c, 0x79, + 0x22, 0x2f, 0x0a, 0x12, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x43, 0x61, 0x63, 0x68, 0x65, 0x52, + 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x19, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, + 0x01, 0x28, 0x09, 0x42, 0x07, 0xfa, 0x42, 0x04, 0x72, 0x02, 0x10, 0x01, 0x52, 0x03, 0x6b, 0x65, + 0x79, 0x22, 0x12, 0x0a, 0x10, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x43, 0x61, 0x63, 0x68, 0x65, + 0x52, 0x65, 0x70, 0x6c, 0x79, 0x32, 0x85, 0x02, 0x0a, 0x08, 0x63, 0x61, 0x6c, 0x6c, 0x62, 0x61, + 0x63, 0x6b, 0x12, 0x81, 0x01, 0x0a, 0x0d, 0x51, 0x75, 0x65, 0x72, 0x79, 0x50, 0x72, 0x65, 0x70, + 0x61, 0x72, 0x65, 0x64, 0x12, 0x22, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x73, 0x74, 0x6f, 0x63, 0x6b, + 0x2e, 0x76, 0x31, 0x2e, 0x51, 0x75, 0x65, 0x72, 0x79, 0x50, 0x72, 0x65, 0x70, 0x61, 0x72, 0x65, + 0x64, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x20, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x73, + 0x74, 0x6f, 0x63, 0x6b, 0x2e, 0x76, 0x31, 0x2e, 0x51, 0x75, 0x65, 0x72, 0x79, 0x50, 0x72, 0x65, + 0x70, 0x61, 0x72, 0x65, 0x64, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0x2a, 0x82, 0xd3, 0xe4, 0x93, + 0x02, 0x24, 0x0a, 0x05, 0x5b, 0x63, 0x74, 0x78, 0x5d, 0x12, 0x1b, 0x2f, 0x61, 0x70, 0x69, 0x2f, + 0x76, 0x31, 0x2f, 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x2f, 0x71, 0x75, 0x65, 0x72, 0x79, 0x50, 0x72, + 0x65, 0x70, 0x61, 0x72, 0x65, 0x64, 0x12, 0x75, 0x0a, 0x0b, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, + 0x43, 0x61, 0x63, 0x68, 0x65, 0x12, 0x20, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x73, 0x74, 0x6f, 0x63, + 0x6b, 0x2e, 0x76, 0x31, 0x2e, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x43, 0x61, 0x63, 0x68, 0x65, + 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1e, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x73, 0x74, + 0x6f, 0x63, 0x6b, 0x2e, 0x76, 0x31, 0x2e, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x43, 0x61, 0x63, + 0x68, 0x65, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0x24, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x1e, 0x22, + 0x19, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x76, 0x31, 0x2f, 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x2f, 0x64, + 0x65, 0x6c, 0x65, 0x74, 0x65, 0x43, 0x61, 0x63, 0x68, 0x65, 0x3a, 0x01, 0x2a, 0x42, 0x17, 0x5a, + 0x15, 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x73, 0x74, 0x6f, 0x63, 0x6b, + 0x2f, 0x76, 0x31, 0x3b, 0x76, 0x31, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, +} + +var ( + file_api_stock_v1_callback_proto_rawDescOnce sync.Once + file_api_stock_v1_callback_proto_rawDescData = file_api_stock_v1_callback_proto_rawDesc +) + +func 
file_api_stock_v1_callback_proto_rawDescGZIP() []byte { + file_api_stock_v1_callback_proto_rawDescOnce.Do(func() { + file_api_stock_v1_callback_proto_rawDescData = protoimpl.X.CompressGZIP(file_api_stock_v1_callback_proto_rawDescData) + }) + return file_api_stock_v1_callback_proto_rawDescData +} + +var file_api_stock_v1_callback_proto_msgTypes = make([]protoimpl.MessageInfo, 4) +var file_api_stock_v1_callback_proto_goTypes = []interface{}{ + (*QueryPreparedRequest)(nil), // 0: api.stock.v1.QueryPreparedRequest + (*QueryPreparedReply)(nil), // 1: api.stock.v1.QueryPreparedReply + (*DeleteCacheRequest)(nil), // 2: api.stock.v1.DeleteCacheRequest + (*DeleteCacheReply)(nil), // 3: api.stock.v1.DeleteCacheReply +} +var file_api_stock_v1_callback_proto_depIdxs = []int32{ + 0, // 0: api.stock.v1.callback.QueryPrepared:input_type -> api.stock.v1.QueryPreparedRequest + 2, // 1: api.stock.v1.callback.DeleteCache:input_type -> api.stock.v1.DeleteCacheRequest + 1, // 2: api.stock.v1.callback.QueryPrepared:output_type -> api.stock.v1.QueryPreparedReply + 3, // 3: api.stock.v1.callback.DeleteCache:output_type -> api.stock.v1.DeleteCacheReply + 2, // [2:4] is the sub-list for method output_type + 0, // [0:2] is the sub-list for method input_type + 0, // [0:0] is the sub-list for extension type_name + 0, // [0:0] is the sub-list for extension extendee + 0, // [0:0] is the sub-list for field type_name +} + +func init() { file_api_stock_v1_callback_proto_init() } +func file_api_stock_v1_callback_proto_init() { + if File_api_stock_v1_callback_proto != nil { + return + } + if !protoimpl.UnsafeEnabled { + file_api_stock_v1_callback_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*QueryPreparedRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_api_stock_v1_callback_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*QueryPreparedReply); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_api_stock_v1_callback_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*DeleteCacheRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_api_stock_v1_callback_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*DeleteCacheReply); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + } + type x struct{} + out := protoimpl.TypeBuilder{ + File: protoimpl.DescBuilder{ + GoPackagePath: reflect.TypeOf(x{}).PkgPath(), + RawDescriptor: file_api_stock_v1_callback_proto_rawDesc, + NumEnums: 0, + NumMessages: 4, + NumExtensions: 0, + NumServices: 1, + }, + GoTypes: file_api_stock_v1_callback_proto_goTypes, + DependencyIndexes: file_api_stock_v1_callback_proto_depIdxs, + MessageInfos: file_api_stock_v1_callback_proto_msgTypes, + }.Build() + File_api_stock_v1_callback_proto = out.File + file_api_stock_v1_callback_proto_rawDesc = nil + file_api_stock_v1_callback_proto_goTypes = nil + file_api_stock_v1_callback_proto_depIdxs = nil +} diff --git a/_13_sponge-dtm-cache/http/api/stock/v1/callback.pb.validate.go b/_13_sponge-dtm-cache/http/api/stock/v1/callback.pb.validate.go new file mode 100644 index 0000000..aa2578e --- /dev/null +++ 
b/_13_sponge-dtm-cache/http/api/stock/v1/callback.pb.validate.go @@ -0,0 +1,453 @@ +// Code generated by protoc-gen-validate. DO NOT EDIT. +// source: api/stock/v1/callback.proto + +package v1 + +import ( + "bytes" + "errors" + "fmt" + "net" + "net/mail" + "net/url" + "regexp" + "sort" + "strings" + "time" + "unicode/utf8" + + "google.golang.org/protobuf/types/known/anypb" +) + +// ensure the imports are used +var ( + _ = bytes.MinRead + _ = errors.New("") + _ = fmt.Print + _ = utf8.UTFMax + _ = (*regexp.Regexp)(nil) + _ = (*strings.Reader)(nil) + _ = net.IPv4len + _ = time.Duration(0) + _ = (*url.URL)(nil) + _ = (*mail.Address)(nil) + _ = anypb.Any{} + _ = sort.Sort +) + +// Validate checks the field values on QueryPreparedRequest with the rules +// defined in the proto definition for this message. If any rules are +// violated, the first error encountered is returned, or nil if there are no violations. +func (m *QueryPreparedRequest) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on QueryPreparedRequest with the rules +// defined in the proto definition for this message. If any rules are +// violated, the result is a list of violation errors wrapped in +// QueryPreparedRequestMultiError, or nil if none found. +func (m *QueryPreparedRequest) ValidateAll() error { + return m.validate(true) +} + +func (m *QueryPreparedRequest) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + if len(errors) > 0 { + return QueryPreparedRequestMultiError(errors) + } + + return nil +} + +// QueryPreparedRequestMultiError is an error wrapping multiple validation +// errors returned by QueryPreparedRequest.ValidateAll() if the designated +// constraints aren't met. +type QueryPreparedRequestMultiError []error + +// Error returns a concatenation of all the error messages it wraps. +func (m QueryPreparedRequestMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m QueryPreparedRequestMultiError) AllErrors() []error { return m } + +// QueryPreparedRequestValidationError is the validation error returned by +// QueryPreparedRequest.Validate if the designated constraints aren't met. +type QueryPreparedRequestValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e QueryPreparedRequestValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e QueryPreparedRequestValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e QueryPreparedRequestValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e QueryPreparedRequestValidationError) Key() bool { return e.key } + +// ErrorName returns error name. 
+func (e QueryPreparedRequestValidationError) ErrorName() string { + return "QueryPreparedRequestValidationError" +} + +// Error satisfies the builtin error interface +func (e QueryPreparedRequestValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sQueryPreparedRequest.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = QueryPreparedRequestValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = QueryPreparedRequestValidationError{} + +// Validate checks the field values on QueryPreparedReply with the rules +// defined in the proto definition for this message. If any rules are +// violated, the first error encountered is returned, or nil if there are no violations. +func (m *QueryPreparedReply) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on QueryPreparedReply with the rules +// defined in the proto definition for this message. If any rules are +// violated, the result is a list of violation errors wrapped in +// QueryPreparedReplyMultiError, or nil if none found. +func (m *QueryPreparedReply) ValidateAll() error { + return m.validate(true) +} + +func (m *QueryPreparedReply) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + if len(errors) > 0 { + return QueryPreparedReplyMultiError(errors) + } + + return nil +} + +// QueryPreparedReplyMultiError is an error wrapping multiple validation errors +// returned by QueryPreparedReply.ValidateAll() if the designated constraints +// aren't met. +type QueryPreparedReplyMultiError []error + +// Error returns a concatenation of all the error messages it wraps. +func (m QueryPreparedReplyMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m QueryPreparedReplyMultiError) AllErrors() []error { return m } + +// QueryPreparedReplyValidationError is the validation error returned by +// QueryPreparedReply.Validate if the designated constraints aren't met. +type QueryPreparedReplyValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e QueryPreparedReplyValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e QueryPreparedReplyValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e QueryPreparedReplyValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e QueryPreparedReplyValidationError) Key() bool { return e.key } + +// ErrorName returns error name. 
+func (e QueryPreparedReplyValidationError) ErrorName() string { + return "QueryPreparedReplyValidationError" +} + +// Error satisfies the builtin error interface +func (e QueryPreparedReplyValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sQueryPreparedReply.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = QueryPreparedReplyValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = QueryPreparedReplyValidationError{} + +// Validate checks the field values on DeleteCacheRequest with the rules +// defined in the proto definition for this message. If any rules are +// violated, the first error encountered is returned, or nil if there are no violations. +func (m *DeleteCacheRequest) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on DeleteCacheRequest with the rules +// defined in the proto definition for this message. If any rules are +// violated, the result is a list of violation errors wrapped in +// DeleteCacheRequestMultiError, or nil if none found. +func (m *DeleteCacheRequest) ValidateAll() error { + return m.validate(true) +} + +func (m *DeleteCacheRequest) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + if utf8.RuneCountInString(m.GetKey()) < 1 { + err := DeleteCacheRequestValidationError{ + field: "Key", + reason: "value length must be at least 1 runes", + } + if !all { + return err + } + errors = append(errors, err) + } + + if len(errors) > 0 { + return DeleteCacheRequestMultiError(errors) + } + + return nil +} + +// DeleteCacheRequestMultiError is an error wrapping multiple validation errors +// returned by DeleteCacheRequest.ValidateAll() if the designated constraints +// aren't met. +type DeleteCacheRequestMultiError []error + +// Error returns a concatenation of all the error messages it wraps. +func (m DeleteCacheRequestMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m DeleteCacheRequestMultiError) AllErrors() []error { return m } + +// DeleteCacheRequestValidationError is the validation error returned by +// DeleteCacheRequest.Validate if the designated constraints aren't met. +type DeleteCacheRequestValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e DeleteCacheRequestValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e DeleteCacheRequestValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e DeleteCacheRequestValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e DeleteCacheRequestValidationError) Key() bool { return e.key } + +// ErrorName returns error name. 
+func (e DeleteCacheRequestValidationError) ErrorName() string { + return "DeleteCacheRequestValidationError" +} + +// Error satisfies the builtin error interface +func (e DeleteCacheRequestValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sDeleteCacheRequest.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = DeleteCacheRequestValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = DeleteCacheRequestValidationError{} + +// Validate checks the field values on DeleteCacheReply with the rules defined +// in the proto definition for this message. If any rules are violated, the +// first error encountered is returned, or nil if there are no violations. +func (m *DeleteCacheReply) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on DeleteCacheReply with the rules +// defined in the proto definition for this message. If any rules are +// violated, the result is a list of violation errors wrapped in +// DeleteCacheReplyMultiError, or nil if none found. +func (m *DeleteCacheReply) ValidateAll() error { + return m.validate(true) +} + +func (m *DeleteCacheReply) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + if len(errors) > 0 { + return DeleteCacheReplyMultiError(errors) + } + + return nil +} + +// DeleteCacheReplyMultiError is an error wrapping multiple validation errors +// returned by DeleteCacheReply.ValidateAll() if the designated constraints +// aren't met. +type DeleteCacheReplyMultiError []error + +// Error returns a concatenation of all the error messages it wraps. +func (m DeleteCacheReplyMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m DeleteCacheReplyMultiError) AllErrors() []error { return m } + +// DeleteCacheReplyValidationError is the validation error returned by +// DeleteCacheReply.Validate if the designated constraints aren't met. +type DeleteCacheReplyValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e DeleteCacheReplyValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e DeleteCacheReplyValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e DeleteCacheReplyValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e DeleteCacheReplyValidationError) Key() bool { return e.key } + +// ErrorName returns error name. 
+func (e DeleteCacheReplyValidationError) ErrorName() string { return "DeleteCacheReplyValidationError" }
+
+// Error satisfies the builtin error interface
+func (e DeleteCacheReplyValidationError) Error() string {
+	cause := ""
+	if e.cause != nil {
+		cause = fmt.Sprintf(" | caused by: %v", e.cause)
+	}
+
+	key := ""
+	if e.key {
+		key = "key for "
+	}
+
+	return fmt.Sprintf(
+		"invalid %sDeleteCacheReply.%s: %s%s",
+		key,
+		e.field,
+		e.reason,
+		cause)
+}
+
+var _ error = DeleteCacheReplyValidationError{}
+
+var _ interface {
+	Field() string
+	Reason() string
+	Key() bool
+	Cause() error
+	ErrorName() string
+} = DeleteCacheReplyValidationError{}
diff --git a/_13_sponge-dtm-cache/http/api/stock/v1/callback.proto b/_13_sponge-dtm-cache/http/api/stock/v1/callback.proto
new file mode 100644
index 0000000..3da37df
--- /dev/null
+++ b/_13_sponge-dtm-cache/http/api/stock/v1/callback.proto
@@ -0,0 +1,34 @@
+syntax = "proto3";
+
+package api.stock.v1;
+
+import "google/api/annotations.proto";
+import "validate/validate.proto";
+
+option go_package = "stock/api/stock/v1;v1";
+
+service callback {
+  // Back-query data
+  rpc QueryPrepared(QueryPreparedRequest) returns (QueryPreparedReply) {
+    option (google.api.http) = {
+      get: "/api/v1/stock/queryPrepared"
+      selector: "[ctx]"
+    };
+  }
+
+  // Delete cache
+  rpc DeleteCache(DeleteCacheRequest) returns (DeleteCacheReply) {
+    option (google.api.http) = {
+      post: "/api/v1/stock/deleteCache"
+      body: "*"
+    };
+  }
+}
+
+message QueryPreparedRequest {}
+message QueryPreparedReply {}
+
+message DeleteCacheRequest {
+  string key = 1 [(validate.rules).string.min_len = 1];
+}
+message DeleteCacheReply {}
diff --git a/_13_sponge-dtm-cache/http/api/stock/v1/callback_router.pb.go b/_13_sponge-dtm-cache/http/api/stock/v1/callback_router.pb.go
new file mode 100644
index 0000000..d98730b
--- /dev/null
+++ b/_13_sponge-dtm-cache/http/api/stock/v1/callback_router.pb.go
@@ -0,0 +1,204 @@
+// Code generated by https://github.com/zhufuyi/sponge, DO NOT EDIT.
+ +package v1 + +import ( + "context" + "errors" + "strings" + + "github.com/gin-gonic/gin" + "go.uber.org/zap" + + "github.com/zhufuyi/sponge/pkg/errcode" + "github.com/zhufuyi/sponge/pkg/gin/middleware" +) + +type CallbackLogicer interface { + QueryPrepared(ctx context.Context, req *QueryPreparedRequest) (*QueryPreparedReply, error) + DeleteCache(ctx context.Context, req *DeleteCacheRequest) (*DeleteCacheReply, error) +} + +type CallbackOption func(*callbackOptions) + +type callbackOptions struct { + isFromRPC bool + responser errcode.Responser + zapLog *zap.Logger + httpErrors []*errcode.Error + rpcStatus []*errcode.RPCStatus + wrapCtxFn func(c *gin.Context) context.Context +} + +func (o *callbackOptions) apply(opts ...CallbackOption) { + for _, opt := range opts { + opt(o) + } +} + +func WithCallbackHTTPResponse() CallbackOption { + return func(o *callbackOptions) { + o.isFromRPC = false + } +} + +func WithCallbackRPCResponse() CallbackOption { + return func(o *callbackOptions) { + o.isFromRPC = true + } +} + +func WithCallbackResponser(responser errcode.Responser) CallbackOption { + return func(o *callbackOptions) { + o.responser = responser + } +} + +func WithCallbackLogger(zapLog *zap.Logger) CallbackOption { + return func(o *callbackOptions) { + o.zapLog = zapLog + } +} + +func WithCallbackErrorToHTTPCode(e ...*errcode.Error) CallbackOption { + return func(o *callbackOptions) { + o.httpErrors = e + } +} + +func WithCallbackRPCStatusToHTTPCode(s ...*errcode.RPCStatus) CallbackOption { + return func(o *callbackOptions) { + o.rpcStatus = s + } +} + +func WithCallbackWrapCtx(wrapCtxFn func(c *gin.Context) context.Context) CallbackOption { + return func(o *callbackOptions) { + o.wrapCtxFn = wrapCtxFn + } +} + +func RegisterCallbackRouter( + iRouter gin.IRouter, + groupPathMiddlewares map[string][]gin.HandlerFunc, + singlePathMiddlewares map[string][]gin.HandlerFunc, + iLogic CallbackLogicer, + opts ...CallbackOption) { + + o := &callbackOptions{} + o.apply(opts...) + + if o.responser == nil { + o.responser = errcode.NewResponser(o.isFromRPC, o.httpErrors, o.rpcStatus) + } + if o.zapLog == nil { + o.zapLog, _ = zap.NewProduction() + } + + r := &callbackRouter{ + iRouter: iRouter, + groupPathMiddlewares: groupPathMiddlewares, + singlePathMiddlewares: singlePathMiddlewares, + iLogic: iLogic, + iResponse: o.responser, + zapLog: o.zapLog, + wrapCtxFn: o.wrapCtxFn, + } + r.register() +} + +type callbackRouter struct { + iRouter gin.IRouter + groupPathMiddlewares map[string][]gin.HandlerFunc + singlePathMiddlewares map[string][]gin.HandlerFunc + iLogic CallbackLogicer + iResponse errcode.Responser + zapLog *zap.Logger + wrapCtxFn func(c *gin.Context) context.Context +} + +func (r *callbackRouter) register() { + r.iRouter.Handle("GET", "/api/v1/stock/queryPrepared", r.withMiddleware("GET", "/api/v1/stock/queryPrepared", r.QueryPrepared_0)...) + r.iRouter.Handle("POST", "/api/v1/stock/deleteCache", r.withMiddleware("POST", "/api/v1/stock/deleteCache", r.DeleteCache_0)...) + +} + +func (r *callbackRouter) withMiddleware(method string, path string, fn gin.HandlerFunc) []gin.HandlerFunc { + handlerFns := []gin.HandlerFunc{} + + // determine if a route group is hit or miss, left prefix rule + for groupPath, fns := range r.groupPathMiddlewares { + if groupPath == "" || groupPath == "/" { + handlerFns = append(handlerFns, fns...) + continue + } + size := len(groupPath) + if len(path) < size { + continue + } + if groupPath == path[:size] { + handlerFns = append(handlerFns, fns...) 
+ } + } + + // determine if a single route has been hit + key := strings.ToUpper(method) + "->" + path + if fns, ok := r.singlePathMiddlewares[key]; ok { + handlerFns = append(handlerFns, fns...) + } + + return append(handlerFns, fn) +} + +func (r *callbackRouter) QueryPrepared_0(c *gin.Context) { + req := &QueryPreparedRequest{} + var err error + + if err = c.ShouldBindQuery(req); err != nil { + r.zapLog.Warn("ShouldBindQuery error", zap.Error(err), middleware.GCtxRequestIDField(c)) + r.iResponse.ParamError(c, err) + return + } + + var ctx context.Context = c + + out, err := r.iLogic.QueryPrepared(ctx, req) + if err != nil { + if errors.Is(err, errcode.SkipResponse) { + return + } + r.iResponse.Error(c, err) + return + } + + r.iResponse.Success(c, out) +} + +func (r *callbackRouter) DeleteCache_0(c *gin.Context) { + req := &DeleteCacheRequest{} + var err error + + if err = c.ShouldBindJSON(req); err != nil { + r.zapLog.Warn("ShouldBindJSON error", zap.Error(err), middleware.GCtxRequestIDField(c)) + r.iResponse.ParamError(c, err) + return + } + + var ctx context.Context + if r.wrapCtxFn != nil { + ctx = r.wrapCtxFn(c) + } else { + ctx = middleware.WrapCtx(c) + } + + out, err := r.iLogic.DeleteCache(ctx, req) + if err != nil { + if errors.Is(err, errcode.SkipResponse) { + return + } + r.iResponse.Error(c, err) + return + } + + r.iResponse.Success(c, out) +} diff --git a/_13_sponge-dtm-cache/http/api/stock/v1/downgrade.pb.go b/_13_sponge-dtm-cache/http/api/stock/v1/downgrade.pb.go new file mode 100644 index 0000000..43e8185 --- /dev/null +++ b/_13_sponge-dtm-cache/http/api/stock/v1/downgrade.pb.go @@ -0,0 +1,556 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// versions: +// protoc-gen-go v1.28.0 +// protoc v4.25.2 +// source: api/stock/v1/downgrade.proto + +package v1 + +import ( + protoreflect "google.golang.org/protobuf/reflect/protoreflect" + protoimpl "google.golang.org/protobuf/runtime/protoimpl" + reflect "reflect" + sync "sync" +) + +const ( + // Verify that this generated code is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) + // Verify that runtime/protoimpl is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) +) + +type UpdateDowngradeRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Id uint64 `protobuf:"varint,1,opt,name=id,proto3" json:"id" uri:"id"` + Stock uint32 `protobuf:"varint,2,opt,name=stock,proto3" json:"stock"` // 库存数量 +} + +func (x *UpdateDowngradeRequest) Reset() { + *x = UpdateDowngradeRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_api_stock_v1_downgrade_proto_msgTypes[0] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *UpdateDowngradeRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*UpdateDowngradeRequest) ProtoMessage() {} + +func (x *UpdateDowngradeRequest) ProtoReflect() protoreflect.Message { + mi := &file_api_stock_v1_downgrade_proto_msgTypes[0] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use UpdateDowngradeRequest.ProtoReflect.Descriptor instead. 
+func (*UpdateDowngradeRequest) Descriptor() ([]byte, []int) { + return file_api_stock_v1_downgrade_proto_rawDescGZIP(), []int{0} +} + +func (x *UpdateDowngradeRequest) GetId() uint64 { + if x != nil { + return x.Id + } + return 0 +} + +func (x *UpdateDowngradeRequest) GetStock() uint32 { + if x != nil { + return x.Stock + } + return 0 +} + +type UpdateDowngradeRequestReply struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields +} + +func (x *UpdateDowngradeRequestReply) Reset() { + *x = UpdateDowngradeRequestReply{} + if protoimpl.UnsafeEnabled { + mi := &file_api_stock_v1_downgrade_proto_msgTypes[1] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *UpdateDowngradeRequestReply) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*UpdateDowngradeRequestReply) ProtoMessage() {} + +func (x *UpdateDowngradeRequestReply) ProtoReflect() protoreflect.Message { + mi := &file_api_stock_v1_downgrade_proto_msgTypes[1] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use UpdateDowngradeRequestReply.ProtoReflect.Descriptor instead. +func (*UpdateDowngradeRequestReply) Descriptor() ([]byte, []int) { + return file_api_stock_v1_downgrade_proto_rawDescGZIP(), []int{1} +} + +type QueryDowngradeRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Id uint64 `protobuf:"varint,1,opt,name=id,proto3" json:"id" uri:"id"` +} + +func (x *QueryDowngradeRequest) Reset() { + *x = QueryDowngradeRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_api_stock_v1_downgrade_proto_msgTypes[2] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *QueryDowngradeRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*QueryDowngradeRequest) ProtoMessage() {} + +func (x *QueryDowngradeRequest) ProtoReflect() protoreflect.Message { + mi := &file_api_stock_v1_downgrade_proto_msgTypes[2] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use QueryDowngradeRequest.ProtoReflect.Descriptor instead. 
+func (*QueryDowngradeRequest) Descriptor() ([]byte, []int) { + return file_api_stock_v1_downgrade_proto_rawDescGZIP(), []int{2} +} + +func (x *QueryDowngradeRequest) GetId() uint64 { + if x != nil { + return x.Id + } + return 0 +} + +type QueryDowngradeReply struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Id uint64 `protobuf:"varint,1,opt,name=id,proto3" json:"id"` + Stock uint32 `protobuf:"varint,2,opt,name=stock,proto3" json:"stock"` // 库存数量 +} + +func (x *QueryDowngradeReply) Reset() { + *x = QueryDowngradeReply{} + if protoimpl.UnsafeEnabled { + mi := &file_api_stock_v1_downgrade_proto_msgTypes[3] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *QueryDowngradeReply) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*QueryDowngradeReply) ProtoMessage() {} + +func (x *QueryDowngradeReply) ProtoReflect() protoreflect.Message { + mi := &file_api_stock_v1_downgrade_proto_msgTypes[3] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use QueryDowngradeReply.ProtoReflect.Descriptor instead. +func (*QueryDowngradeReply) Descriptor() ([]byte, []int) { + return file_api_stock_v1_downgrade_proto_rawDescGZIP(), []int{3} +} + +func (x *QueryDowngradeReply) GetId() uint64 { + if x != nil { + return x.Id + } + return 0 +} + +func (x *QueryDowngradeReply) GetStock() uint32 { + if x != nil { + return x.Stock + } + return 0 +} + +type DowngradeBranchRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Gid string `protobuf:"bytes,1,opt,name=gid,proto3" json:"gid"` // dtm gid + Key string `protobuf:"bytes,2,opt,name=key,proto3" json:"key"` // 缓存key + Id uint64 `protobuf:"varint,3,opt,name=id,proto3" json:"id"` + Stock uint32 `protobuf:"varint,4,opt,name=stock,proto3" json:"stock"` // 库存数量 +} + +func (x *DowngradeBranchRequest) Reset() { + *x = DowngradeBranchRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_api_stock_v1_downgrade_proto_msgTypes[4] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *DowngradeBranchRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*DowngradeBranchRequest) ProtoMessage() {} + +func (x *DowngradeBranchRequest) ProtoReflect() protoreflect.Message { + mi := &file_api_stock_v1_downgrade_proto_msgTypes[4] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use DowngradeBranchRequest.ProtoReflect.Descriptor instead. 
+func (*DowngradeBranchRequest) Descriptor() ([]byte, []int) { + return file_api_stock_v1_downgrade_proto_rawDescGZIP(), []int{4} +} + +func (x *DowngradeBranchRequest) GetGid() string { + if x != nil { + return x.Gid + } + return "" +} + +func (x *DowngradeBranchRequest) GetKey() string { + if x != nil { + return x.Key + } + return "" +} + +func (x *DowngradeBranchRequest) GetId() uint64 { + if x != nil { + return x.Id + } + return 0 +} + +func (x *DowngradeBranchRequest) GetStock() uint32 { + if x != nil { + return x.Stock + } + return 0 +} + +type DowngradeBranchReply struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields +} + +func (x *DowngradeBranchReply) Reset() { + *x = DowngradeBranchReply{} + if protoimpl.UnsafeEnabled { + mi := &file_api_stock_v1_downgrade_proto_msgTypes[5] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *DowngradeBranchReply) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*DowngradeBranchReply) ProtoMessage() {} + +func (x *DowngradeBranchReply) ProtoReflect() protoreflect.Message { + mi := &file_api_stock_v1_downgrade_proto_msgTypes[5] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use DowngradeBranchReply.ProtoReflect.Descriptor instead. +func (*DowngradeBranchReply) Descriptor() ([]byte, []int) { + return file_api_stock_v1_downgrade_proto_rawDescGZIP(), []int{5} +} + +var File_api_stock_v1_downgrade_proto protoreflect.FileDescriptor + +var file_api_stock_v1_downgrade_proto_rawDesc = []byte{ + 0x0a, 0x1c, 0x61, 0x70, 0x69, 0x2f, 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x2f, 0x76, 0x31, 0x2f, 0x64, + 0x6f, 0x77, 0x6e, 0x67, 0x72, 0x61, 0x64, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x0c, + 0x61, 0x70, 0x69, 0x2e, 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x2e, 0x76, 0x31, 0x1a, 0x1c, 0x67, 0x6f, + 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x61, 0x6e, 0x6e, 0x6f, 0x74, 0x61, 0x74, + 0x69, 0x6f, 0x6e, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x2e, 0x70, 0x72, 0x6f, 0x74, + 0x6f, 0x63, 0x2d, 0x67, 0x65, 0x6e, 0x2d, 0x6f, 0x70, 0x65, 0x6e, 0x61, 0x70, 0x69, 0x76, 0x32, + 0x2f, 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2f, 0x61, 0x6e, 0x6e, 0x6f, 0x74, 0x61, 0x74, + 0x69, 0x6f, 0x6e, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x13, 0x74, 0x61, 0x67, 0x67, + 0x65, 0x72, 0x2f, 0x74, 0x61, 0x67, 0x67, 0x65, 0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, + 0x17, 0x76, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x65, 0x2f, 0x76, 0x61, 0x6c, 0x69, 0x64, 0x61, + 0x74, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, 0x5d, 0x0a, 0x16, 0x55, 0x70, 0x64, 0x61, + 0x74, 0x65, 0x44, 0x6f, 0x77, 0x6e, 0x67, 0x72, 0x61, 0x64, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, + 0x73, 0x74, 0x12, 0x24, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x42, 0x14, + 0xfa, 0x42, 0x04, 0x32, 0x02, 0x20, 0x00, 0x9a, 0x84, 0x9e, 0x03, 0x08, 0x75, 0x72, 0x69, 0x3a, + 0x22, 0x69, 0x64, 0x22, 0x52, 0x02, 0x69, 0x64, 0x12, 0x1d, 0x0a, 0x05, 0x73, 0x74, 0x6f, 0x63, + 0x6b, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0d, 0x42, 0x07, 0xfa, 0x42, 0x04, 0x2a, 0x02, 0x20, 0x00, + 0x52, 0x05, 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x22, 0x1d, 0x0a, 0x1b, 0x55, 0x70, 0x64, 0x61, 0x74, + 0x65, 0x44, 0x6f, 0x77, 0x6e, 0x67, 0x72, 0x61, 0x64, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, + 0x74, 0x52, 0x65, 0x70, 0x6c, 
0x79, 0x22, 0x3d, 0x0a, 0x15, 0x51, 0x75, 0x65, 0x72, 0x79, 0x44, + 0x6f, 0x77, 0x6e, 0x67, 0x72, 0x61, 0x64, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, + 0x24, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x42, 0x14, 0xfa, 0x42, 0x04, + 0x32, 0x02, 0x20, 0x00, 0x9a, 0x84, 0x9e, 0x03, 0x08, 0x75, 0x72, 0x69, 0x3a, 0x22, 0x69, 0x64, + 0x22, 0x52, 0x02, 0x69, 0x64, 0x22, 0x3b, 0x0a, 0x13, 0x51, 0x75, 0x65, 0x72, 0x79, 0x44, 0x6f, + 0x77, 0x6e, 0x67, 0x72, 0x61, 0x64, 0x65, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x12, 0x0e, 0x0a, 0x02, + 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x52, 0x02, 0x69, 0x64, 0x12, 0x14, 0x0a, 0x05, + 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x05, 0x73, 0x74, 0x6f, + 0x63, 0x6b, 0x22, 0x86, 0x01, 0x0a, 0x16, 0x44, 0x6f, 0x77, 0x6e, 0x67, 0x72, 0x61, 0x64, 0x65, + 0x42, 0x72, 0x61, 0x6e, 0x63, 0x68, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x19, 0x0a, + 0x03, 0x67, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x42, 0x07, 0xfa, 0x42, 0x04, 0x72, + 0x02, 0x10, 0x01, 0x52, 0x03, 0x67, 0x69, 0x64, 0x12, 0x19, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, + 0x02, 0x20, 0x01, 0x28, 0x09, 0x42, 0x07, 0xfa, 0x42, 0x04, 0x72, 0x02, 0x10, 0x01, 0x52, 0x03, + 0x6b, 0x65, 0x79, 0x12, 0x17, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x03, 0x20, 0x01, 0x28, 0x04, 0x42, + 0x07, 0xfa, 0x42, 0x04, 0x32, 0x02, 0x20, 0x00, 0x52, 0x02, 0x69, 0x64, 0x12, 0x1d, 0x0a, 0x05, + 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0d, 0x42, 0x07, 0xfa, 0x42, 0x04, + 0x2a, 0x02, 0x20, 0x00, 0x52, 0x05, 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x22, 0x16, 0x0a, 0x14, 0x44, + 0x6f, 0x77, 0x6e, 0x67, 0x72, 0x61, 0x64, 0x65, 0x42, 0x72, 0x61, 0x6e, 0x63, 0x68, 0x52, 0x65, + 0x70, 0x6c, 0x79, 0x32, 0xb8, 0x05, 0x0a, 0x09, 0x64, 0x6f, 0x77, 0x6e, 0x67, 0x72, 0x61, 0x64, + 0x65, 0x12, 0xf0, 0x01, 0x0a, 0x06, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x12, 0x24, 0x2e, 0x61, + 0x70, 0x69, 0x2e, 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x2e, 0x76, 0x31, 0x2e, 0x55, 0x70, 0x64, 0x61, + 0x74, 0x65, 0x44, 0x6f, 0x77, 0x6e, 0x67, 0x72, 0x61, 0x64, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, + 0x73, 0x74, 0x1a, 0x29, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x2e, 0x76, + 0x31, 0x2e, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x44, 0x6f, 0x77, 0x6e, 0x67, 0x72, 0x61, 0x64, + 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0x94, 0x01, + 0x92, 0x41, 0x6a, 0x0a, 0x23, 0x63, 0x61, 0x73, 0x65, 0x20, 0x34, 0x3a, 0x20, 0xe5, 0x8d, 0x87, + 0xe9, 0x99, 0x8d, 0xe7, 0xba, 0xa7, 0xe4, 0xb8, 0xad, 0xe7, 0x9a, 0x84, 0xe5, 0xbc, 0xba, 0xe4, + 0xb8, 0x80, 0xe8, 0x87, 0xb4, 0xe6, 0x80, 0xa7, 0x12, 0x0c, 0xe6, 0x9b, 0xb4, 0xe6, 0x96, 0xb0, + 0xe6, 0x95, 0xb0, 0xe6, 0x8d, 0xae, 0x1a, 0x35, 0xe6, 0x9b, 0xb4, 0xe6, 0x96, 0xb0, 0xe6, 0x95, + 0xb0, 0xe6, 0x8d, 0xae, 0xef, 0xbc, 0x8c, 0xe5, 0x8d, 0x87, 0xe9, 0x99, 0x8d, 0xe7, 0xba, 0xa7, + 0xe4, 0xb8, 0xad, 0xe7, 0x9a, 0x84, 0x44, 0x42, 0xe5, 0x92, 0x8c, 0xe7, 0xbc, 0x93, 0xe5, 0xad, + 0x98, 0xe5, 0xbc, 0xba, 0xe4, 0xb8, 0x80, 0xe8, 0x87, 0xb4, 0xe6, 0x80, 0xa7, 0x82, 0xd3, 0xe4, + 0x93, 0x02, 0x21, 0x1a, 0x1c, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x76, 0x31, 0x2f, 0x73, 0x74, 0x6f, + 0x63, 0x6b, 0x2f, 0x7b, 0x69, 0x64, 0x7d, 0x2f, 0x64, 0x6f, 0x77, 0x6e, 0x67, 0x72, 0x61, 0x64, + 0x65, 0x3a, 0x01, 0x2a, 0x12, 0xb9, 0x01, 0x0a, 0x05, 0x51, 0x75, 0x65, 0x72, 0x79, 0x12, 0x23, + 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x2e, 0x76, 0x31, 0x2e, 0x51, 0x75, + 0x65, 0x72, 0x79, 0x44, 0x6f, 0x77, 0x6e, 0x67, 0x72, 
0x61, 0x64, 0x65, 0x52, 0x65, 0x71, 0x75, + 0x65, 0x73, 0x74, 0x1a, 0x21, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x2e, + 0x76, 0x31, 0x2e, 0x51, 0x75, 0x65, 0x72, 0x79, 0x44, 0x6f, 0x77, 0x6e, 0x67, 0x72, 0x61, 0x64, + 0x65, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0x68, 0x92, 0x41, 0x41, 0x0a, 0x23, 0x63, 0x61, 0x73, + 0x65, 0x20, 0x34, 0x3a, 0x20, 0xe5, 0x8d, 0x87, 0xe9, 0x99, 0x8d, 0xe7, 0xba, 0xa7, 0xe4, 0xb8, + 0xad, 0xe7, 0x9a, 0x84, 0xe5, 0xbc, 0xba, 0xe4, 0xb8, 0x80, 0xe8, 0x87, 0xb4, 0xe6, 0x80, 0xa7, + 0x12, 0x0c, 0xe6, 0x9f, 0xa5, 0xe8, 0xaf, 0xa2, 0xe6, 0x95, 0xb0, 0xe6, 0x8d, 0xae, 0x1a, 0x0c, + 0xe6, 0x9f, 0xa5, 0xe8, 0xaf, 0xa2, 0xe6, 0x95, 0xb0, 0xe6, 0x8d, 0xae, 0x82, 0xd3, 0xe4, 0x93, + 0x02, 0x1e, 0x12, 0x1c, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x76, 0x31, 0x2f, 0x73, 0x74, 0x6f, 0x63, + 0x6b, 0x2f, 0x7b, 0x69, 0x64, 0x7d, 0x2f, 0x64, 0x6f, 0x77, 0x6e, 0x67, 0x72, 0x61, 0x64, 0x65, + 0x12, 0xfb, 0x01, 0x0a, 0x0f, 0x44, 0x6f, 0x77, 0x6e, 0x67, 0x72, 0x61, 0x64, 0x65, 0x42, 0x72, + 0x61, 0x6e, 0x63, 0x68, 0x12, 0x24, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x73, 0x74, 0x6f, 0x63, 0x6b, + 0x2e, 0x76, 0x31, 0x2e, 0x44, 0x6f, 0x77, 0x6e, 0x67, 0x72, 0x61, 0x64, 0x65, 0x42, 0x72, 0x61, + 0x6e, 0x63, 0x68, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x22, 0x2e, 0x61, 0x70, 0x69, + 0x2e, 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x2e, 0x76, 0x31, 0x2e, 0x44, 0x6f, 0x77, 0x6e, 0x67, 0x72, + 0x61, 0x64, 0x65, 0x42, 0x72, 0x61, 0x6e, 0x63, 0x68, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0x9d, + 0x01, 0x92, 0x41, 0x6b, 0x0a, 0x23, 0x63, 0x61, 0x73, 0x65, 0x20, 0x34, 0x3a, 0x20, 0xe5, 0x8d, + 0x87, 0xe9, 0x99, 0x8d, 0xe7, 0xba, 0xa7, 0xe4, 0xb8, 0xad, 0xe7, 0x9a, 0x84, 0xe5, 0xbc, 0xba, + 0xe4, 0xb8, 0x80, 0xe8, 0x87, 0xb4, 0xe6, 0x80, 0xa7, 0x12, 0x21, 0xe5, 0x8d, 0x87, 0xe9, 0x99, + 0x8d, 0xe7, 0xba, 0xa7, 0xe4, 0xb8, 0xad, 0xe7, 0x9a, 0x84, 0xe5, 0xbc, 0xba, 0xe4, 0xb8, 0x80, + 0xe8, 0x87, 0xb4, 0xe6, 0x80, 0xa7, 0xe5, 0x88, 0x86, 0xe6, 0x94, 0xaf, 0x1a, 0x21, 0xe5, 0x8d, + 0x87, 0xe9, 0x99, 0x8d, 0xe7, 0xba, 0xa7, 0xe4, 0xb8, 0xad, 0xe7, 0x9a, 0x84, 0xe5, 0xbc, 0xba, + 0xe4, 0xb8, 0x80, 0xe8, 0x87, 0xb4, 0xe6, 0x80, 0xa7, 0xe5, 0x88, 0x86, 0xe6, 0x94, 0xaf, 0x82, + 0xd3, 0xe4, 0x93, 0x02, 0x29, 0x0a, 0x05, 0x5b, 0x63, 0x74, 0x78, 0x5d, 0x22, 0x1d, 0x2f, 0x61, + 0x70, 0x69, 0x2f, 0x76, 0x31, 0x2f, 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x2f, 0x64, 0x6f, 0x77, 0x6e, + 0x67, 0x72, 0x61, 0x64, 0x65, 0x42, 0x72, 0x61, 0x6e, 0x63, 0x68, 0x3a, 0x01, 0x2a, 0x42, 0xb4, + 0x01, 0x5a, 0x15, 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x73, 0x74, 0x6f, + 0x63, 0x6b, 0x2f, 0x76, 0x31, 0x3b, 0x76, 0x31, 0x92, 0x41, 0x99, 0x01, 0x12, 0x15, 0x0a, 0x0e, + 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x20, 0x61, 0x70, 0x69, 0x20, 0x64, 0x6f, 0x63, 0x73, 0x32, 0x03, + 0x32, 0x2e, 0x30, 0x1a, 0x0e, 0x6c, 0x6f, 0x63, 0x61, 0x6c, 0x68, 0x6f, 0x73, 0x74, 0x3a, 0x38, + 0x30, 0x38, 0x30, 0x2a, 0x02, 0x01, 0x02, 0x32, 0x10, 0x61, 0x70, 0x70, 0x6c, 0x69, 0x63, 0x61, + 0x74, 0x69, 0x6f, 0x6e, 0x2f, 0x6a, 0x73, 0x6f, 0x6e, 0x3a, 0x10, 0x61, 0x70, 0x70, 0x6c, 0x69, + 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2f, 0x6a, 0x73, 0x6f, 0x6e, 0x5a, 0x48, 0x0a, 0x46, 0x0a, + 0x0a, 0x42, 0x65, 0x61, 0x72, 0x65, 0x72, 0x41, 0x75, 0x74, 0x68, 0x12, 0x38, 0x08, 0x02, 0x12, + 0x23, 0x54, 0x79, 0x70, 0x65, 0x20, 0x42, 0x65, 0x61, 0x72, 0x65, 0x72, 0x20, 0x79, 0x6f, 0x75, + 0x72, 0x2d, 0x6a, 0x77, 0x74, 0x2d, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x20, 0x74, 0x6f, 0x20, 0x56, + 0x61, 0x6c, 0x75, 0x65, 0x1a, 0x0d, 0x41, 0x75, 0x74, 0x68, 0x6f, 0x72, 0x69, 
0x7a, 0x61, 0x74, + 0x69, 0x6f, 0x6e, 0x20, 0x02, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, +} + +var ( + file_api_stock_v1_downgrade_proto_rawDescOnce sync.Once + file_api_stock_v1_downgrade_proto_rawDescData = file_api_stock_v1_downgrade_proto_rawDesc +) + +func file_api_stock_v1_downgrade_proto_rawDescGZIP() []byte { + file_api_stock_v1_downgrade_proto_rawDescOnce.Do(func() { + file_api_stock_v1_downgrade_proto_rawDescData = protoimpl.X.CompressGZIP(file_api_stock_v1_downgrade_proto_rawDescData) + }) + return file_api_stock_v1_downgrade_proto_rawDescData +} + +var file_api_stock_v1_downgrade_proto_msgTypes = make([]protoimpl.MessageInfo, 6) +var file_api_stock_v1_downgrade_proto_goTypes = []interface{}{ + (*UpdateDowngradeRequest)(nil), // 0: api.stock.v1.UpdateDowngradeRequest + (*UpdateDowngradeRequestReply)(nil), // 1: api.stock.v1.UpdateDowngradeRequestReply + (*QueryDowngradeRequest)(nil), // 2: api.stock.v1.QueryDowngradeRequest + (*QueryDowngradeReply)(nil), // 3: api.stock.v1.QueryDowngradeReply + (*DowngradeBranchRequest)(nil), // 4: api.stock.v1.DowngradeBranchRequest + (*DowngradeBranchReply)(nil), // 5: api.stock.v1.DowngradeBranchReply +} +var file_api_stock_v1_downgrade_proto_depIdxs = []int32{ + 0, // 0: api.stock.v1.downgrade.Update:input_type -> api.stock.v1.UpdateDowngradeRequest + 2, // 1: api.stock.v1.downgrade.Query:input_type -> api.stock.v1.QueryDowngradeRequest + 4, // 2: api.stock.v1.downgrade.DowngradeBranch:input_type -> api.stock.v1.DowngradeBranchRequest + 1, // 3: api.stock.v1.downgrade.Update:output_type -> api.stock.v1.UpdateDowngradeRequestReply + 3, // 4: api.stock.v1.downgrade.Query:output_type -> api.stock.v1.QueryDowngradeReply + 5, // 5: api.stock.v1.downgrade.DowngradeBranch:output_type -> api.stock.v1.DowngradeBranchReply + 3, // [3:6] is the sub-list for method output_type + 0, // [0:3] is the sub-list for method input_type + 0, // [0:0] is the sub-list for extension type_name + 0, // [0:0] is the sub-list for extension extendee + 0, // [0:0] is the sub-list for field type_name +} + +func init() { file_api_stock_v1_downgrade_proto_init() } +func file_api_stock_v1_downgrade_proto_init() { + if File_api_stock_v1_downgrade_proto != nil { + return + } + if !protoimpl.UnsafeEnabled { + file_api_stock_v1_downgrade_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*UpdateDowngradeRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_api_stock_v1_downgrade_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*UpdateDowngradeRequestReply); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_api_stock_v1_downgrade_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*QueryDowngradeRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_api_stock_v1_downgrade_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*QueryDowngradeReply); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_api_stock_v1_downgrade_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*DowngradeBranchRequest); i { + case 0: + return &v.state + case 1: 
+ return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_api_stock_v1_downgrade_proto_msgTypes[5].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*DowngradeBranchReply); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + } + type x struct{} + out := protoimpl.TypeBuilder{ + File: protoimpl.DescBuilder{ + GoPackagePath: reflect.TypeOf(x{}).PkgPath(), + RawDescriptor: file_api_stock_v1_downgrade_proto_rawDesc, + NumEnums: 0, + NumMessages: 6, + NumExtensions: 0, + NumServices: 1, + }, + GoTypes: file_api_stock_v1_downgrade_proto_goTypes, + DependencyIndexes: file_api_stock_v1_downgrade_proto_depIdxs, + MessageInfos: file_api_stock_v1_downgrade_proto_msgTypes, + }.Build() + File_api_stock_v1_downgrade_proto = out.File + file_api_stock_v1_downgrade_proto_rawDesc = nil + file_api_stock_v1_downgrade_proto_goTypes = nil + file_api_stock_v1_downgrade_proto_depIdxs = nil +} diff --git a/_13_sponge-dtm-cache/http/api/stock/v1/downgrade.pb.validate.go b/_13_sponge-dtm-cache/http/api/stock/v1/downgrade.pb.validate.go new file mode 100644 index 0000000..a08a3cb --- /dev/null +++ b/_13_sponge-dtm-cache/http/api/stock/v1/downgrade.pb.validate.go @@ -0,0 +1,730 @@ +// Code generated by protoc-gen-validate. DO NOT EDIT. +// source: api/stock/v1/downgrade.proto + +package v1 + +import ( + "bytes" + "errors" + "fmt" + "net" + "net/mail" + "net/url" + "regexp" + "sort" + "strings" + "time" + "unicode/utf8" + + "google.golang.org/protobuf/types/known/anypb" +) + +// ensure the imports are used +var ( + _ = bytes.MinRead + _ = errors.New("") + _ = fmt.Print + _ = utf8.UTFMax + _ = (*regexp.Regexp)(nil) + _ = (*strings.Reader)(nil) + _ = net.IPv4len + _ = time.Duration(0) + _ = (*url.URL)(nil) + _ = (*mail.Address)(nil) + _ = anypb.Any{} + _ = sort.Sort +) + +// Validate checks the field values on UpdateDowngradeRequest with the rules +// defined in the proto definition for this message. If any rules are +// violated, the first error encountered is returned, or nil if there are no violations. +func (m *UpdateDowngradeRequest) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on UpdateDowngradeRequest with the rules +// defined in the proto definition for this message. If any rules are +// violated, the result is a list of violation errors wrapped in +// UpdateDowngradeRequestMultiError, or nil if none found. +func (m *UpdateDowngradeRequest) ValidateAll() error { + return m.validate(true) +} + +func (m *UpdateDowngradeRequest) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + if m.GetId() <= 0 { + err := UpdateDowngradeRequestValidationError{ + field: "Id", + reason: "value must be greater than 0", + } + if !all { + return err + } + errors = append(errors, err) + } + + if m.GetStock() <= 0 { + err := UpdateDowngradeRequestValidationError{ + field: "Stock", + reason: "value must be greater than 0", + } + if !all { + return err + } + errors = append(errors, err) + } + + if len(errors) > 0 { + return UpdateDowngradeRequestMultiError(errors) + } + + return nil +} + +// UpdateDowngradeRequestMultiError is an error wrapping multiple validation +// errors returned by UpdateDowngradeRequest.ValidateAll() if the designated +// constraints aren't met. +type UpdateDowngradeRequestMultiError []error + +// Error returns a concatenation of all the error messages it wraps. 
+func (m UpdateDowngradeRequestMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m UpdateDowngradeRequestMultiError) AllErrors() []error { return m } + +// UpdateDowngradeRequestValidationError is the validation error returned by +// UpdateDowngradeRequest.Validate if the designated constraints aren't met. +type UpdateDowngradeRequestValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e UpdateDowngradeRequestValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e UpdateDowngradeRequestValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e UpdateDowngradeRequestValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e UpdateDowngradeRequestValidationError) Key() bool { return e.key } + +// ErrorName returns error name. +func (e UpdateDowngradeRequestValidationError) ErrorName() string { + return "UpdateDowngradeRequestValidationError" +} + +// Error satisfies the builtin error interface +func (e UpdateDowngradeRequestValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sUpdateDowngradeRequest.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = UpdateDowngradeRequestValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = UpdateDowngradeRequestValidationError{} + +// Validate checks the field values on UpdateDowngradeRequestReply with the +// rules defined in the proto definition for this message. If any rules are +// violated, the first error encountered is returned, or nil if there are no violations. +func (m *UpdateDowngradeRequestReply) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on UpdateDowngradeRequestReply with the +// rules defined in the proto definition for this message. If any rules are +// violated, the result is a list of violation errors wrapped in +// UpdateDowngradeRequestReplyMultiError, or nil if none found. +func (m *UpdateDowngradeRequestReply) ValidateAll() error { + return m.validate(true) +} + +func (m *UpdateDowngradeRequestReply) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + if len(errors) > 0 { + return UpdateDowngradeRequestReplyMultiError(errors) + } + + return nil +} + +// UpdateDowngradeRequestReplyMultiError is an error wrapping multiple +// validation errors returned by UpdateDowngradeRequestReply.ValidateAll() if +// the designated constraints aren't met. +type UpdateDowngradeRequestReplyMultiError []error + +// Error returns a concatenation of all the error messages it wraps. +func (m UpdateDowngradeRequestReplyMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. 
+func (m UpdateDowngradeRequestReplyMultiError) AllErrors() []error { return m } + +// UpdateDowngradeRequestReplyValidationError is the validation error returned +// by UpdateDowngradeRequestReply.Validate if the designated constraints +// aren't met. +type UpdateDowngradeRequestReplyValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e UpdateDowngradeRequestReplyValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e UpdateDowngradeRequestReplyValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e UpdateDowngradeRequestReplyValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e UpdateDowngradeRequestReplyValidationError) Key() bool { return e.key } + +// ErrorName returns error name. +func (e UpdateDowngradeRequestReplyValidationError) ErrorName() string { + return "UpdateDowngradeRequestReplyValidationError" +} + +// Error satisfies the builtin error interface +func (e UpdateDowngradeRequestReplyValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sUpdateDowngradeRequestReply.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = UpdateDowngradeRequestReplyValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = UpdateDowngradeRequestReplyValidationError{} + +// Validate checks the field values on QueryDowngradeRequest with the rules +// defined in the proto definition for this message. If any rules are +// violated, the first error encountered is returned, or nil if there are no violations. +func (m *QueryDowngradeRequest) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on QueryDowngradeRequest with the rules +// defined in the proto definition for this message. If any rules are +// violated, the result is a list of violation errors wrapped in +// QueryDowngradeRequestMultiError, or nil if none found. +func (m *QueryDowngradeRequest) ValidateAll() error { + return m.validate(true) +} + +func (m *QueryDowngradeRequest) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + if m.GetId() <= 0 { + err := QueryDowngradeRequestValidationError{ + field: "Id", + reason: "value must be greater than 0", + } + if !all { + return err + } + errors = append(errors, err) + } + + if len(errors) > 0 { + return QueryDowngradeRequestMultiError(errors) + } + + return nil +} + +// QueryDowngradeRequestMultiError is an error wrapping multiple validation +// errors returned by QueryDowngradeRequest.ValidateAll() if the designated +// constraints aren't met. +type QueryDowngradeRequestMultiError []error + +// Error returns a concatenation of all the error messages it wraps. +func (m QueryDowngradeRequestMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m QueryDowngradeRequestMultiError) AllErrors() []error { return m } + +// QueryDowngradeRequestValidationError is the validation error returned by +// QueryDowngradeRequest.Validate if the designated constraints aren't met. 
+type QueryDowngradeRequestValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e QueryDowngradeRequestValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e QueryDowngradeRequestValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e QueryDowngradeRequestValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e QueryDowngradeRequestValidationError) Key() bool { return e.key } + +// ErrorName returns error name. +func (e QueryDowngradeRequestValidationError) ErrorName() string { + return "QueryDowngradeRequestValidationError" +} + +// Error satisfies the builtin error interface +func (e QueryDowngradeRequestValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sQueryDowngradeRequest.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = QueryDowngradeRequestValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = QueryDowngradeRequestValidationError{} + +// Validate checks the field values on QueryDowngradeReply with the rules +// defined in the proto definition for this message. If any rules are +// violated, the first error encountered is returned, or nil if there are no violations. +func (m *QueryDowngradeReply) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on QueryDowngradeReply with the rules +// defined in the proto definition for this message. If any rules are +// violated, the result is a list of violation errors wrapped in +// QueryDowngradeReplyMultiError, or nil if none found. +func (m *QueryDowngradeReply) ValidateAll() error { + return m.validate(true) +} + +func (m *QueryDowngradeReply) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + // no validation rules for Id + + // no validation rules for Stock + + if len(errors) > 0 { + return QueryDowngradeReplyMultiError(errors) + } + + return nil +} + +// QueryDowngradeReplyMultiError is an error wrapping multiple validation +// errors returned by QueryDowngradeReply.ValidateAll() if the designated +// constraints aren't met. +type QueryDowngradeReplyMultiError []error + +// Error returns a concatenation of all the error messages it wraps. +func (m QueryDowngradeReplyMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m QueryDowngradeReplyMultiError) AllErrors() []error { return m } + +// QueryDowngradeReplyValidationError is the validation error returned by +// QueryDowngradeReply.Validate if the designated constraints aren't met. +type QueryDowngradeReplyValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e QueryDowngradeReplyValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e QueryDowngradeReplyValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e QueryDowngradeReplyValidationError) Cause() error { return e.cause } + +// Key function returns key value. 
+func (e QueryDowngradeReplyValidationError) Key() bool { return e.key } + +// ErrorName returns error name. +func (e QueryDowngradeReplyValidationError) ErrorName() string { + return "QueryDowngradeReplyValidationError" +} + +// Error satisfies the builtin error interface +func (e QueryDowngradeReplyValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sQueryDowngradeReply.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = QueryDowngradeReplyValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = QueryDowngradeReplyValidationError{} + +// Validate checks the field values on DowngradeBranchRequest with the rules +// defined in the proto definition for this message. If any rules are +// violated, the first error encountered is returned, or nil if there are no violations. +func (m *DowngradeBranchRequest) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on DowngradeBranchRequest with the rules +// defined in the proto definition for this message. If any rules are +// violated, the result is a list of violation errors wrapped in +// DowngradeBranchRequestMultiError, or nil if none found. +func (m *DowngradeBranchRequest) ValidateAll() error { + return m.validate(true) +} + +func (m *DowngradeBranchRequest) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + if utf8.RuneCountInString(m.GetGid()) < 1 { + err := DowngradeBranchRequestValidationError{ + field: "Gid", + reason: "value length must be at least 1 runes", + } + if !all { + return err + } + errors = append(errors, err) + } + + if utf8.RuneCountInString(m.GetKey()) < 1 { + err := DowngradeBranchRequestValidationError{ + field: "Key", + reason: "value length must be at least 1 runes", + } + if !all { + return err + } + errors = append(errors, err) + } + + if m.GetId() <= 0 { + err := DowngradeBranchRequestValidationError{ + field: "Id", + reason: "value must be greater than 0", + } + if !all { + return err + } + errors = append(errors, err) + } + + if m.GetStock() <= 0 { + err := DowngradeBranchRequestValidationError{ + field: "Stock", + reason: "value must be greater than 0", + } + if !all { + return err + } + errors = append(errors, err) + } + + if len(errors) > 0 { + return DowngradeBranchRequestMultiError(errors) + } + + return nil +} + +// DowngradeBranchRequestMultiError is an error wrapping multiple validation +// errors returned by DowngradeBranchRequest.ValidateAll() if the designated +// constraints aren't met. +type DowngradeBranchRequestMultiError []error + +// Error returns a concatenation of all the error messages it wraps. +func (m DowngradeBranchRequestMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m DowngradeBranchRequestMultiError) AllErrors() []error { return m } + +// DowngradeBranchRequestValidationError is the validation error returned by +// DowngradeBranchRequest.Validate if the designated constraints aren't met. +type DowngradeBranchRequestValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. 
+func (e DowngradeBranchRequestValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e DowngradeBranchRequestValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e DowngradeBranchRequestValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e DowngradeBranchRequestValidationError) Key() bool { return e.key } + +// ErrorName returns error name. +func (e DowngradeBranchRequestValidationError) ErrorName() string { + return "DowngradeBranchRequestValidationError" +} + +// Error satisfies the builtin error interface +func (e DowngradeBranchRequestValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sDowngradeBranchRequest.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = DowngradeBranchRequestValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = DowngradeBranchRequestValidationError{} + +// Validate checks the field values on DowngradeBranchReply with the rules +// defined in the proto definition for this message. If any rules are +// violated, the first error encountered is returned, or nil if there are no violations. +func (m *DowngradeBranchReply) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on DowngradeBranchReply with the rules +// defined in the proto definition for this message. If any rules are +// violated, the result is a list of violation errors wrapped in +// DowngradeBranchReplyMultiError, or nil if none found. +func (m *DowngradeBranchReply) ValidateAll() error { + return m.validate(true) +} + +func (m *DowngradeBranchReply) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + if len(errors) > 0 { + return DowngradeBranchReplyMultiError(errors) + } + + return nil +} + +// DowngradeBranchReplyMultiError is an error wrapping multiple validation +// errors returned by DowngradeBranchReply.ValidateAll() if the designated +// constraints aren't met. +type DowngradeBranchReplyMultiError []error + +// Error returns a concatenation of all the error messages it wraps. +func (m DowngradeBranchReplyMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m DowngradeBranchReplyMultiError) AllErrors() []error { return m } + +// DowngradeBranchReplyValidationError is the validation error returned by +// DowngradeBranchReply.Validate if the designated constraints aren't met. +type DowngradeBranchReplyValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e DowngradeBranchReplyValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e DowngradeBranchReplyValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e DowngradeBranchReplyValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e DowngradeBranchReplyValidationError) Key() bool { return e.key } + +// ErrorName returns error name. 
+func (e DowngradeBranchReplyValidationError) ErrorName() string { + return "DowngradeBranchReplyValidationError" +} + +// Error satisfies the builtin error interface +func (e DowngradeBranchReplyValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sDowngradeBranchReply.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = DowngradeBranchReplyValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = DowngradeBranchReplyValidationError{} diff --git a/_13_sponge-dtm-cache/http/api/stock/v1/downgrade.proto b/_13_sponge-dtm-cache/http/api/stock/v1/downgrade.proto new file mode 100644 index 0000000..f948e9c --- /dev/null +++ b/_13_sponge-dtm-cache/http/api/stock/v1/downgrade.proto @@ -0,0 +1,108 @@ +syntax = "proto3"; + +package api.stock.v1; + +import "google/api/annotations.proto"; +import "protoc-gen-openapiv2/options/annotations.proto"; +import "tagger/tagger.proto"; +import "validate/validate.proto"; + +option go_package = "stock/api/stock/v1;v1"; + +// Default settings for generating swagger documents +// NOTE: because json does not support 64 bits, the int64 and uint64 types under *.swagger.json are automatically converted to string types +// Reference https://github.com/grpc-ecosystem/grpc-gateway/blob/db7fbefff7c04877cdb32e16d4a248a024428207/examples/internal/proto/examplepb/a_bit_of_everything.proto +option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_swagger) = { + host: "localhost:8080" + base_path: "" + info: { + title: "stock api docs"; + version: "2.0"; + } + schemes: HTTP; + schemes: HTTPS; + consumes: "application/json"; + produces: "application/json"; + security_definitions: { + security: { + key: "BearerAuth"; + value: { + type: TYPE_API_KEY; + in: IN_HEADER; + name: "Authorization"; + description: "Type Bearer your-jwt-token to Value"; + } + } + } +}; + +service downgrade{ + // Update data, with strong consistency between the DB and cache during cache downgrade/upgrade + rpc Update(UpdateDowngradeRequest) returns (UpdateDowngradeRequestReply) { + option (google.api.http) = { + put: "/api/v1/stock/{id}/downgrade" + body: "*" + }; + option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { + summary: "更新数据", + description: "更新数据,升降级中的DB和缓存强一致性", + tags: "case 4: 升降级中的强一致性" + }; + } + + // Query data + rpc Query(QueryDowngradeRequest) returns (QueryDowngradeReply) { + option (google.api.http) = { + get: "/api/v1/stock/{id}/downgrade" + }; + option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { + summary: "查询数据", + description: "查询数据", + tags: "case 4: 升降级中的强一致性" + }; + } + + // Strong-consistency branch used during cache downgrade/upgrade + rpc DowngradeBranch(DowngradeBranchRequest) returns (DowngradeBranchReply) { + option (google.api.http) = { + post: "/api/v1/stock/downgradeBranch" + body: "*" + selector: "[ctx]" + }; + option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { + summary: "升降级中的强一致性分支", + description: "升降级中的强一致性分支", + tags: "case 4: 升降级中的强一致性" + }; + } +} + +message UpdateDowngradeRequest { + uint64 id = 1 [(validate.rules).uint64.gt = 0, (tagger.tags) = "uri:\"id\"" ]; + uint32 stock = 2 [(validate.rules).uint32.gt = 0]; // stock quantity +} + +message UpdateDowngradeRequestReply { + +} + +message QueryDowngradeRequest { + uint64 id =1 [(validate.rules).uint64.gt = 0, (tagger.tags) = "uri:\"id\"" ]; +} + +message QueryDowngradeReply { + uint64 id = 1; + uint32 stock = 2; // stock quantity +} + +message 
DowngradeBranchRequest { + string gid = 1 [(validate.rules).string.min_len = 1]; // dtm gid + string key = 2 [(validate.rules).string.min_len = 1]; // cache key + + uint64 id = 3 [(validate.rules).uint64.gt = 0]; + uint32 stock = 4 [(validate.rules).uint32.gt = 0]; // stock quantity +} + +message DowngradeBranchReply { + +} \ No newline at end of file diff --git a/_13_sponge-dtm-cache/http/api/stock/v1/downgrade_router.pb.go b/_13_sponge-dtm-cache/http/api/stock/v1/downgrade_router.pb.go new file mode 100644 index 0000000..db33621 --- /dev/null +++ b/_13_sponge-dtm-cache/http/api/stock/v1/downgrade_router.pb.go @@ -0,0 +1,247 @@ +// Code generated by https://github.com/zhufuyi/sponge, DO NOT EDIT. + +package v1 + +import ( + "context" + "errors" + "strings" + + "github.com/gin-gonic/gin" + "go.uber.org/zap" + + "github.com/zhufuyi/sponge/pkg/errcode" + "github.com/zhufuyi/sponge/pkg/gin/middleware" +) + +type DowngradeLogicer interface { + Update(ctx context.Context, req *UpdateDowngradeRequest) (*UpdateDowngradeRequestReply, error) + Query(ctx context.Context, req *QueryDowngradeRequest) (*QueryDowngradeReply, error) + DowngradeBranch(ctx context.Context, req *DowngradeBranchRequest) (*DowngradeBranchReply, error) +} + +type DowngradeOption func(*downgradeOptions) + +type downgradeOptions struct { + isFromRPC bool + responser errcode.Responser + zapLog *zap.Logger + httpErrors []*errcode.Error + rpcStatus []*errcode.RPCStatus + wrapCtxFn func(c *gin.Context) context.Context +} + +func (o *downgradeOptions) apply(opts ...DowngradeOption) { + for _, opt := range opts { + opt(o) + } +} + +func WithDowngradeHTTPResponse() DowngradeOption { + return func(o *downgradeOptions) { + o.isFromRPC = false + } +} + +func WithDowngradeRPCResponse() DowngradeOption { + return func(o *downgradeOptions) { + o.isFromRPC = true + } +} + +func WithDowngradeResponser(responser errcode.Responser) DowngradeOption { + return func(o *downgradeOptions) { + o.responser = responser + } +} + +func WithDowngradeLogger(zapLog *zap.Logger) DowngradeOption { + return func(o *downgradeOptions) { + o.zapLog = zapLog + } +} + +func WithDowngradeErrorToHTTPCode(e ...*errcode.Error) DowngradeOption { + return func(o *downgradeOptions) { + o.httpErrors = e + } +} + +func WithDowngradeRPCStatusToHTTPCode(s ...*errcode.RPCStatus) DowngradeOption { + return func(o *downgradeOptions) { + o.rpcStatus = s + } +} + +func WithDowngradeWrapCtx(wrapCtxFn func(c *gin.Context) context.Context) DowngradeOption { + return func(o *downgradeOptions) { + o.wrapCtxFn = wrapCtxFn + } +} + +func RegisterDowngradeRouter( + iRouter gin.IRouter, + groupPathMiddlewares map[string][]gin.HandlerFunc, + singlePathMiddlewares map[string][]gin.HandlerFunc, + iLogic DowngradeLogicer, + opts ...DowngradeOption) { + + o := &downgradeOptions{} + o.apply(opts...)
+ + if o.responser == nil { + o.responser = errcode.NewResponser(o.isFromRPC, o.httpErrors, o.rpcStatus) + } + if o.zapLog == nil { + o.zapLog, _ = zap.NewProduction() + } + + r := &downgradeRouter{ + iRouter: iRouter, + groupPathMiddlewares: groupPathMiddlewares, + singlePathMiddlewares: singlePathMiddlewares, + iLogic: iLogic, + iResponse: o.responser, + zapLog: o.zapLog, + wrapCtxFn: o.wrapCtxFn, + } + r.register() +} + +type downgradeRouter struct { + iRouter gin.IRouter + groupPathMiddlewares map[string][]gin.HandlerFunc + singlePathMiddlewares map[string][]gin.HandlerFunc + iLogic DowngradeLogicer + iResponse errcode.Responser + zapLog *zap.Logger + wrapCtxFn func(c *gin.Context) context.Context +} + +func (r *downgradeRouter) register() { + r.iRouter.Handle("PUT", "/api/v1/stock/:id/downgrade", r.withMiddleware("PUT", "/api/v1/stock/:id/downgrade", r.Update_2)...) + r.iRouter.Handle("GET", "/api/v1/stock/:id/downgrade", r.withMiddleware("GET", "/api/v1/stock/:id/downgrade", r.Query_2)...) + r.iRouter.Handle("POST", "/api/v1/stock/downgradeBranch", r.withMiddleware("POST", "/api/v1/stock/downgradeBranch", r.DowngradeBranch_0)...) + +} + +func (r *downgradeRouter) withMiddleware(method string, path string, fn gin.HandlerFunc) []gin.HandlerFunc { + handlerFns := []gin.HandlerFunc{} + + // determine if a route group is hit or miss, left prefix rule + for groupPath, fns := range r.groupPathMiddlewares { + if groupPath == "" || groupPath == "/" { + handlerFns = append(handlerFns, fns...) + continue + } + size := len(groupPath) + if len(path) < size { + continue + } + if groupPath == path[:size] { + handlerFns = append(handlerFns, fns...) + } + } + + // determine if a single route has been hit + key := strings.ToUpper(method) + "->" + path + if fns, ok := r.singlePathMiddlewares[key]; ok { + handlerFns = append(handlerFns, fns...) 
+ } + + return append(handlerFns, fn) +} + +func (r *downgradeRouter) Update_2(c *gin.Context) { + req := &UpdateDowngradeRequest{} + var err error + + if err = c.ShouldBindUri(req); err != nil { + r.zapLog.Warn("ShouldBindUri error", zap.Error(err), middleware.GCtxRequestIDField(c)) + r.iResponse.ParamError(c, err) + return + } + + if err = c.ShouldBindJSON(req); err != nil { + r.zapLog.Warn("ShouldBindJSON error", zap.Error(err), middleware.GCtxRequestIDField(c)) + r.iResponse.ParamError(c, err) + return + } + + var ctx context.Context + if r.wrapCtxFn != nil { + ctx = r.wrapCtxFn(c) + } else { + ctx = middleware.WrapCtx(c) + } + + out, err := r.iLogic.Update(ctx, req) + if err != nil { + if errors.Is(err, errcode.SkipResponse) { + return + } + r.iResponse.Error(c, err) + return + } + + r.iResponse.Success(c, out) +} + +func (r *downgradeRouter) Query_2(c *gin.Context) { + req := &QueryDowngradeRequest{} + var err error + + if err = c.ShouldBindUri(req); err != nil { + r.zapLog.Warn("ShouldBindUri error", zap.Error(err), middleware.GCtxRequestIDField(c)) + r.iResponse.ParamError(c, err) + return + } + + if err = c.ShouldBindQuery(req); err != nil { + r.zapLog.Warn("ShouldBindQuery error", zap.Error(err), middleware.GCtxRequestIDField(c)) + r.iResponse.ParamError(c, err) + return + } + + var ctx context.Context + if r.wrapCtxFn != nil { + ctx = r.wrapCtxFn(c) + } else { + ctx = middleware.WrapCtx(c) + } + + out, err := r.iLogic.Query(ctx, req) + if err != nil { + if errors.Is(err, errcode.SkipResponse) { + return + } + r.iResponse.Error(c, err) + return + } + + r.iResponse.Success(c, out) +} + +func (r *downgradeRouter) DowngradeBranch_0(c *gin.Context) { + req := &DowngradeBranchRequest{} + var err error + + if err = c.ShouldBindJSON(req); err != nil { + r.zapLog.Warn("ShouldBindJSON error", zap.Error(err), middleware.GCtxRequestIDField(c)) + r.iResponse.ParamError(c, err) + return + } + + var ctx context.Context = c + + out, err := r.iLogic.DowngradeBranch(ctx, req) + if err != nil { + if errors.Is(err, errcode.SkipResponse) { + return + } + r.iResponse.Error(c, err) + return + } + + r.iResponse.Success(c, out) +} diff --git a/_13_sponge-dtm-cache/http/api/stock/v1/final.pb.go b/_13_sponge-dtm-cache/http/api/stock/v1/final.pb.go new file mode 100644 index 0000000..1a6229a --- /dev/null +++ b/_13_sponge-dtm-cache/http/api/stock/v1/final.pb.go @@ -0,0 +1,379 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// versions: +// protoc-gen-go v1.28.0 +// protoc v4.25.2 +// source: api/stock/v1/final.proto + +package v1 + +import ( + protoreflect "google.golang.org/protobuf/reflect/protoreflect" + protoimpl "google.golang.org/protobuf/runtime/protoimpl" + reflect "reflect" + sync "sync" +) + +const ( + // Verify that this generated code is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) + // Verify that runtime/protoimpl is sufficiently up-to-date. 
+ _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) +) + +type UpdateFinalRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Id uint64 `protobuf:"varint,1,opt,name=id,proto3" json:"id" uri:"id"` + Stock uint32 `protobuf:"varint,2,opt,name=stock,proto3" json:"stock"` // 库存数量 +} + +func (x *UpdateFinalRequest) Reset() { + *x = UpdateFinalRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_api_stock_v1_final_proto_msgTypes[0] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *UpdateFinalRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*UpdateFinalRequest) ProtoMessage() {} + +func (x *UpdateFinalRequest) ProtoReflect() protoreflect.Message { + mi := &file_api_stock_v1_final_proto_msgTypes[0] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use UpdateFinalRequest.ProtoReflect.Descriptor instead. +func (*UpdateFinalRequest) Descriptor() ([]byte, []int) { + return file_api_stock_v1_final_proto_rawDescGZIP(), []int{0} +} + +func (x *UpdateFinalRequest) GetId() uint64 { + if x != nil { + return x.Id + } + return 0 +} + +func (x *UpdateFinalRequest) GetStock() uint32 { + if x != nil { + return x.Stock + } + return 0 +} + +type UpdateFinalRequestReply struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields +} + +func (x *UpdateFinalRequestReply) Reset() { + *x = UpdateFinalRequestReply{} + if protoimpl.UnsafeEnabled { + mi := &file_api_stock_v1_final_proto_msgTypes[1] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *UpdateFinalRequestReply) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*UpdateFinalRequestReply) ProtoMessage() {} + +func (x *UpdateFinalRequestReply) ProtoReflect() protoreflect.Message { + mi := &file_api_stock_v1_final_proto_msgTypes[1] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use UpdateFinalRequestReply.ProtoReflect.Descriptor instead. 
+func (*UpdateFinalRequestReply) Descriptor() ([]byte, []int) { + return file_api_stock_v1_final_proto_rawDescGZIP(), []int{1} +} + +type QueryFinalRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Id uint64 `protobuf:"varint,1,opt,name=id,proto3" json:"id" uri:"id"` +} + +func (x *QueryFinalRequest) Reset() { + *x = QueryFinalRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_api_stock_v1_final_proto_msgTypes[2] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *QueryFinalRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*QueryFinalRequest) ProtoMessage() {} + +func (x *QueryFinalRequest) ProtoReflect() protoreflect.Message { + mi := &file_api_stock_v1_final_proto_msgTypes[2] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use QueryFinalRequest.ProtoReflect.Descriptor instead. +func (*QueryFinalRequest) Descriptor() ([]byte, []int) { + return file_api_stock_v1_final_proto_rawDescGZIP(), []int{2} +} + +func (x *QueryFinalRequest) GetId() uint64 { + if x != nil { + return x.Id + } + return 0 +} + +type QueryFinalReply struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Stock uint32 `protobuf:"varint,1,opt,name=stock,proto3" json:"stock"` // 库存数量 +} + +func (x *QueryFinalReply) Reset() { + *x = QueryFinalReply{} + if protoimpl.UnsafeEnabled { + mi := &file_api_stock_v1_final_proto_msgTypes[3] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *QueryFinalReply) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*QueryFinalReply) ProtoMessage() {} + +func (x *QueryFinalReply) ProtoReflect() protoreflect.Message { + mi := &file_api_stock_v1_final_proto_msgTypes[3] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use QueryFinalReply.ProtoReflect.Descriptor instead. 
+func (*QueryFinalReply) Descriptor() ([]byte, []int) { + return file_api_stock_v1_final_proto_rawDescGZIP(), []int{3} +} + +func (x *QueryFinalReply) GetStock() uint32 { + if x != nil { + return x.Stock + } + return 0 +} + +var File_api_stock_v1_final_proto protoreflect.FileDescriptor + +var file_api_stock_v1_final_proto_rawDesc = []byte{ + 0x0a, 0x18, 0x61, 0x70, 0x69, 0x2f, 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x2f, 0x76, 0x31, 0x2f, 0x66, + 0x69, 0x6e, 0x61, 0x6c, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x0c, 0x61, 0x70, 0x69, 0x2e, + 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x2e, 0x76, 0x31, 0x1a, 0x1c, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, + 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x61, 0x6e, 0x6e, 0x6f, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, + 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x2d, 0x67, + 0x65, 0x6e, 0x2d, 0x6f, 0x70, 0x65, 0x6e, 0x61, 0x70, 0x69, 0x76, 0x32, 0x2f, 0x6f, 0x70, 0x74, + 0x69, 0x6f, 0x6e, 0x73, 0x2f, 0x61, 0x6e, 0x6e, 0x6f, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, + 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x13, 0x74, 0x61, 0x67, 0x67, 0x65, 0x72, 0x2f, 0x74, + 0x61, 0x67, 0x67, 0x65, 0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x17, 0x76, 0x61, 0x6c, + 0x69, 0x64, 0x61, 0x74, 0x65, 0x2f, 0x76, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x65, 0x2e, 0x70, + 0x72, 0x6f, 0x74, 0x6f, 0x22, 0x59, 0x0a, 0x12, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x46, 0x69, + 0x6e, 0x61, 0x6c, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x24, 0x0a, 0x02, 0x69, 0x64, + 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x42, 0x14, 0xfa, 0x42, 0x04, 0x32, 0x02, 0x20, 0x00, 0x9a, + 0x84, 0x9e, 0x03, 0x08, 0x75, 0x72, 0x69, 0x3a, 0x22, 0x69, 0x64, 0x22, 0x52, 0x02, 0x69, 0x64, + 0x12, 0x1d, 0x0a, 0x05, 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0d, 0x42, + 0x07, 0xfa, 0x42, 0x04, 0x2a, 0x02, 0x20, 0x00, 0x52, 0x05, 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x22, + 0x19, 0x0a, 0x17, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x46, 0x69, 0x6e, 0x61, 0x6c, 0x52, 0x65, + 0x71, 0x75, 0x65, 0x73, 0x74, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0x39, 0x0a, 0x11, 0x51, 0x75, + 0x65, 0x72, 0x79, 0x46, 0x69, 0x6e, 0x61, 0x6c, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, + 0x24, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x42, 0x14, 0xfa, 0x42, 0x04, + 0x32, 0x02, 0x20, 0x00, 0x9a, 0x84, 0x9e, 0x03, 0x08, 0x75, 0x72, 0x69, 0x3a, 0x22, 0x69, 0x64, + 0x22, 0x52, 0x02, 0x69, 0x64, 0x22, 0x27, 0x0a, 0x0f, 0x51, 0x75, 0x65, 0x72, 0x79, 0x46, 0x69, + 0x6e, 0x61, 0x6c, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x73, 0x74, 0x6f, 0x63, + 0x6b, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x05, 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x32, 0xf9, + 0x02, 0x0a, 0x05, 0x66, 0x69, 0x6e, 0x61, 0x6c, 0x12, 0xcb, 0x01, 0x0a, 0x06, 0x55, 0x70, 0x64, + 0x61, 0x74, 0x65, 0x12, 0x20, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x2e, + 0x76, 0x31, 0x2e, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x46, 0x69, 0x6e, 0x61, 0x6c, 0x52, 0x65, + 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x25, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x73, 0x74, 0x6f, 0x63, + 0x6b, 0x2e, 0x76, 0x31, 0x2e, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x46, 0x69, 0x6e, 0x61, 0x6c, + 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0x78, 0x92, 0x41, + 0x52, 0x0a, 0x17, 0x63, 0x61, 0x73, 0x65, 0x20, 0x31, 0x3a, 0x20, 0xe6, 0x9c, 0x80, 0xe7, 0xbb, + 0x88, 0xe4, 0xb8, 0x80, 0xe8, 0x87, 0xb4, 0xe6, 0x80, 0xa7, 0x12, 0x0c, 0xe6, 0x9b, 0xb4, 0xe6, + 0x96, 0xb0, 0xe6, 0x95, 0xb0, 0xe6, 0x8d, 0xae, 0x1a, 0x29, 0xe6, 0x9b, 
0xb4, 0xe6, 0x96, 0xb0, + 0xe6, 0x95, 0xb0, 0xe6, 0x8d, 0xae, 0xef, 0xbc, 0x8c, 0x44, 0x42, 0xe5, 0x92, 0x8c, 0xe7, 0xbc, + 0x93, 0xe5, 0xad, 0x98, 0xe6, 0x9c, 0x80, 0xe7, 0xbb, 0x88, 0xe4, 0xb8, 0x80, 0xe8, 0x87, 0xb4, + 0xe6, 0x80, 0xa7, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x1d, 0x1a, 0x18, 0x2f, 0x61, 0x70, 0x69, 0x2f, + 0x76, 0x31, 0x2f, 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x2f, 0x7b, 0x69, 0x64, 0x7d, 0x2f, 0x66, 0x69, + 0x6e, 0x61, 0x6c, 0x3a, 0x01, 0x2a, 0x12, 0xa1, 0x01, 0x0a, 0x05, 0x51, 0x75, 0x65, 0x72, 0x79, + 0x12, 0x1f, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x2e, 0x76, 0x31, 0x2e, + 0x51, 0x75, 0x65, 0x72, 0x79, 0x46, 0x69, 0x6e, 0x61, 0x6c, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, + 0x74, 0x1a, 0x1d, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x2e, 0x76, 0x31, + 0x2e, 0x51, 0x75, 0x65, 0x72, 0x79, 0x46, 0x69, 0x6e, 0x61, 0x6c, 0x52, 0x65, 0x70, 0x6c, 0x79, + 0x22, 0x58, 0x92, 0x41, 0x35, 0x0a, 0x17, 0x63, 0x61, 0x73, 0x65, 0x20, 0x31, 0x3a, 0x20, 0xe6, + 0x9c, 0x80, 0xe7, 0xbb, 0x88, 0xe4, 0xb8, 0x80, 0xe8, 0x87, 0xb4, 0xe6, 0x80, 0xa7, 0x12, 0x0c, + 0xe6, 0x9f, 0xa5, 0xe8, 0xaf, 0xa2, 0xe6, 0x95, 0xb0, 0xe6, 0x8d, 0xae, 0x1a, 0x0c, 0xe6, 0x9f, + 0xa5, 0xe8, 0xaf, 0xa2, 0xe6, 0x95, 0xb0, 0xe6, 0x8d, 0xae, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x1a, + 0x12, 0x18, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x76, 0x31, 0x2f, 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x2f, + 0x7b, 0x69, 0x64, 0x7d, 0x2f, 0x66, 0x69, 0x6e, 0x61, 0x6c, 0x42, 0xb4, 0x01, 0x5a, 0x15, 0x73, + 0x74, 0x6f, 0x63, 0x6b, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x2f, 0x76, + 0x31, 0x3b, 0x76, 0x31, 0x92, 0x41, 0x99, 0x01, 0x12, 0x15, 0x0a, 0x0e, 0x73, 0x74, 0x6f, 0x63, + 0x6b, 0x20, 0x61, 0x70, 0x69, 0x20, 0x64, 0x6f, 0x63, 0x73, 0x32, 0x03, 0x32, 0x2e, 0x30, 0x1a, + 0x0e, 0x6c, 0x6f, 0x63, 0x61, 0x6c, 0x68, 0x6f, 0x73, 0x74, 0x3a, 0x38, 0x30, 0x38, 0x30, 0x2a, + 0x02, 0x01, 0x02, 0x32, 0x10, 0x61, 0x70, 0x70, 0x6c, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, + 0x2f, 0x6a, 0x73, 0x6f, 0x6e, 0x3a, 0x10, 0x61, 0x70, 0x70, 0x6c, 0x69, 0x63, 0x61, 0x74, 0x69, + 0x6f, 0x6e, 0x2f, 0x6a, 0x73, 0x6f, 0x6e, 0x5a, 0x48, 0x0a, 0x46, 0x0a, 0x0a, 0x42, 0x65, 0x61, + 0x72, 0x65, 0x72, 0x41, 0x75, 0x74, 0x68, 0x12, 0x38, 0x08, 0x02, 0x12, 0x23, 0x54, 0x79, 0x70, + 0x65, 0x20, 0x42, 0x65, 0x61, 0x72, 0x65, 0x72, 0x20, 0x79, 0x6f, 0x75, 0x72, 0x2d, 0x6a, 0x77, + 0x74, 0x2d, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x20, 0x74, 0x6f, 0x20, 0x56, 0x61, 0x6c, 0x75, 0x65, + 0x1a, 0x0d, 0x41, 0x75, 0x74, 0x68, 0x6f, 0x72, 0x69, 0x7a, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x20, + 0x02, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, +} + +var ( + file_api_stock_v1_final_proto_rawDescOnce sync.Once + file_api_stock_v1_final_proto_rawDescData = file_api_stock_v1_final_proto_rawDesc +) + +func file_api_stock_v1_final_proto_rawDescGZIP() []byte { + file_api_stock_v1_final_proto_rawDescOnce.Do(func() { + file_api_stock_v1_final_proto_rawDescData = protoimpl.X.CompressGZIP(file_api_stock_v1_final_proto_rawDescData) + }) + return file_api_stock_v1_final_proto_rawDescData +} + +var file_api_stock_v1_final_proto_msgTypes = make([]protoimpl.MessageInfo, 4) +var file_api_stock_v1_final_proto_goTypes = []interface{}{ + (*UpdateFinalRequest)(nil), // 0: api.stock.v1.UpdateFinalRequest + (*UpdateFinalRequestReply)(nil), // 1: api.stock.v1.UpdateFinalRequestReply + (*QueryFinalRequest)(nil), // 2: api.stock.v1.QueryFinalRequest + (*QueryFinalReply)(nil), // 3: api.stock.v1.QueryFinalReply +} +var file_api_stock_v1_final_proto_depIdxs = []int32{ + 0, // 0: 
api.stock.v1.final.Update:input_type -> api.stock.v1.UpdateFinalRequest + 2, // 1: api.stock.v1.final.Query:input_type -> api.stock.v1.QueryFinalRequest + 1, // 2: api.stock.v1.final.Update:output_type -> api.stock.v1.UpdateFinalRequestReply + 3, // 3: api.stock.v1.final.Query:output_type -> api.stock.v1.QueryFinalReply + 2, // [2:4] is the sub-list for method output_type + 0, // [0:2] is the sub-list for method input_type + 0, // [0:0] is the sub-list for extension type_name + 0, // [0:0] is the sub-list for extension extendee + 0, // [0:0] is the sub-list for field type_name +} + +func init() { file_api_stock_v1_final_proto_init() } +func file_api_stock_v1_final_proto_init() { + if File_api_stock_v1_final_proto != nil { + return + } + if !protoimpl.UnsafeEnabled { + file_api_stock_v1_final_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*UpdateFinalRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_api_stock_v1_final_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*UpdateFinalRequestReply); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_api_stock_v1_final_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*QueryFinalRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_api_stock_v1_final_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*QueryFinalReply); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + } + type x struct{} + out := protoimpl.TypeBuilder{ + File: protoimpl.DescBuilder{ + GoPackagePath: reflect.TypeOf(x{}).PkgPath(), + RawDescriptor: file_api_stock_v1_final_proto_rawDesc, + NumEnums: 0, + NumMessages: 4, + NumExtensions: 0, + NumServices: 1, + }, + GoTypes: file_api_stock_v1_final_proto_goTypes, + DependencyIndexes: file_api_stock_v1_final_proto_depIdxs, + MessageInfos: file_api_stock_v1_final_proto_msgTypes, + }.Build() + File_api_stock_v1_final_proto = out.File + file_api_stock_v1_final_proto_rawDesc = nil + file_api_stock_v1_final_proto_goTypes = nil + file_api_stock_v1_final_proto_depIdxs = nil +} diff --git a/_13_sponge-dtm-cache/http/api/stock/v1/final.pb.validate.go b/_13_sponge-dtm-cache/http/api/stock/v1/final.pb.validate.go new file mode 100644 index 0000000..de240cd --- /dev/null +++ b/_13_sponge-dtm-cache/http/api/stock/v1/final.pb.validate.go @@ -0,0 +1,477 @@ +// Code generated by protoc-gen-validate. DO NOT EDIT. +// source: api/stock/v1/final.proto + +package v1 + +import ( + "bytes" + "errors" + "fmt" + "net" + "net/mail" + "net/url" + "regexp" + "sort" + "strings" + "time" + "unicode/utf8" + + "google.golang.org/protobuf/types/known/anypb" +) + +// ensure the imports are used +var ( + _ = bytes.MinRead + _ = errors.New("") + _ = fmt.Print + _ = utf8.UTFMax + _ = (*regexp.Regexp)(nil) + _ = (*strings.Reader)(nil) + _ = net.IPv4len + _ = time.Duration(0) + _ = (*url.URL)(nil) + _ = (*mail.Address)(nil) + _ = anypb.Any{} + _ = sort.Sort +) + +// Validate checks the field values on UpdateFinalRequest with the rules +// defined in the proto definition for this message. 
If any rules are +// violated, the first error encountered is returned, or nil if there are no violations. +func (m *UpdateFinalRequest) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on UpdateFinalRequest with the rules +// defined in the proto definition for this message. If any rules are +// violated, the result is a list of violation errors wrapped in +// UpdateFinalRequestMultiError, or nil if none found. +func (m *UpdateFinalRequest) ValidateAll() error { + return m.validate(true) +} + +func (m *UpdateFinalRequest) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + if m.GetId() <= 0 { + err := UpdateFinalRequestValidationError{ + field: "Id", + reason: "value must be greater than 0", + } + if !all { + return err + } + errors = append(errors, err) + } + + if m.GetStock() <= 0 { + err := UpdateFinalRequestValidationError{ + field: "Stock", + reason: "value must be greater than 0", + } + if !all { + return err + } + errors = append(errors, err) + } + + if len(errors) > 0 { + return UpdateFinalRequestMultiError(errors) + } + + return nil +} + +// UpdateFinalRequestMultiError is an error wrapping multiple validation errors +// returned by UpdateFinalRequest.ValidateAll() if the designated constraints +// aren't met. +type UpdateFinalRequestMultiError []error + +// Error returns a concatenation of all the error messages it wraps. +func (m UpdateFinalRequestMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m UpdateFinalRequestMultiError) AllErrors() []error { return m } + +// UpdateFinalRequestValidationError is the validation error returned by +// UpdateFinalRequest.Validate if the designated constraints aren't met. +type UpdateFinalRequestValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e UpdateFinalRequestValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e UpdateFinalRequestValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e UpdateFinalRequestValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e UpdateFinalRequestValidationError) Key() bool { return e.key } + +// ErrorName returns error name. +func (e UpdateFinalRequestValidationError) ErrorName() string { + return "UpdateFinalRequestValidationError" +} + +// Error satisfies the builtin error interface +func (e UpdateFinalRequestValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sUpdateFinalRequest.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = UpdateFinalRequestValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = UpdateFinalRequestValidationError{} + +// Validate checks the field values on UpdateFinalRequestReply with the rules +// defined in the proto definition for this message. If any rules are +// violated, the first error encountered is returned, or nil if there are no violations. 
+func (m *UpdateFinalRequestReply) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on UpdateFinalRequestReply with the +// rules defined in the proto definition for this message. If any rules are +// violated, the result is a list of violation errors wrapped in +// UpdateFinalRequestReplyMultiError, or nil if none found. +func (m *UpdateFinalRequestReply) ValidateAll() error { + return m.validate(true) +} + +func (m *UpdateFinalRequestReply) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + if len(errors) > 0 { + return UpdateFinalRequestReplyMultiError(errors) + } + + return nil +} + +// UpdateFinalRequestReplyMultiError is an error wrapping multiple validation +// errors returned by UpdateFinalRequestReply.ValidateAll() if the designated +// constraints aren't met. +type UpdateFinalRequestReplyMultiError []error + +// Error returns a concatenation of all the error messages it wraps. +func (m UpdateFinalRequestReplyMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m UpdateFinalRequestReplyMultiError) AllErrors() []error { return m } + +// UpdateFinalRequestReplyValidationError is the validation error returned by +// UpdateFinalRequestReply.Validate if the designated constraints aren't met. +type UpdateFinalRequestReplyValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e UpdateFinalRequestReplyValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e UpdateFinalRequestReplyValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e UpdateFinalRequestReplyValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e UpdateFinalRequestReplyValidationError) Key() bool { return e.key } + +// ErrorName returns error name. +func (e UpdateFinalRequestReplyValidationError) ErrorName() string { + return "UpdateFinalRequestReplyValidationError" +} + +// Error satisfies the builtin error interface +func (e UpdateFinalRequestReplyValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sUpdateFinalRequestReply.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = UpdateFinalRequestReplyValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = UpdateFinalRequestReplyValidationError{} + +// Validate checks the field values on QueryFinalRequest with the rules defined +// in the proto definition for this message. If any rules are violated, the +// first error encountered is returned, or nil if there are no violations. +func (m *QueryFinalRequest) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on QueryFinalRequest with the rules +// defined in the proto definition for this message. If any rules are +// violated, the result is a list of violation errors wrapped in +// QueryFinalRequestMultiError, or nil if none found. 
+func (m *QueryFinalRequest) ValidateAll() error { + return m.validate(true) +} + +func (m *QueryFinalRequest) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + if m.GetId() <= 0 { + err := QueryFinalRequestValidationError{ + field: "Id", + reason: "value must be greater than 0", + } + if !all { + return err + } + errors = append(errors, err) + } + + if len(errors) > 0 { + return QueryFinalRequestMultiError(errors) + } + + return nil +} + +// QueryFinalRequestMultiError is an error wrapping multiple validation errors +// returned by QueryFinalRequest.ValidateAll() if the designated constraints +// aren't met. +type QueryFinalRequestMultiError []error + +// Error returns a concatenation of all the error messages it wraps. +func (m QueryFinalRequestMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m QueryFinalRequestMultiError) AllErrors() []error { return m } + +// QueryFinalRequestValidationError is the validation error returned by +// QueryFinalRequest.Validate if the designated constraints aren't met. +type QueryFinalRequestValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e QueryFinalRequestValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e QueryFinalRequestValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e QueryFinalRequestValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e QueryFinalRequestValidationError) Key() bool { return e.key } + +// ErrorName returns error name. +func (e QueryFinalRequestValidationError) ErrorName() string { + return "QueryFinalRequestValidationError" +} + +// Error satisfies the builtin error interface +func (e QueryFinalRequestValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sQueryFinalRequest.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = QueryFinalRequestValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = QueryFinalRequestValidationError{} + +// Validate checks the field values on QueryFinalReply with the rules defined +// in the proto definition for this message. If any rules are violated, the +// first error encountered is returned, or nil if there are no violations. +func (m *QueryFinalReply) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on QueryFinalReply with the rules +// defined in the proto definition for this message. If any rules are +// violated, the result is a list of violation errors wrapped in +// QueryFinalReplyMultiError, or nil if none found. 
+func (m *QueryFinalReply) ValidateAll() error { + return m.validate(true) +} + +func (m *QueryFinalReply) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + // no validation rules for Stock + + if len(errors) > 0 { + return QueryFinalReplyMultiError(errors) + } + + return nil +} + +// QueryFinalReplyMultiError is an error wrapping multiple validation errors +// returned by QueryFinalReply.ValidateAll() if the designated constraints +// aren't met. +type QueryFinalReplyMultiError []error + +// Error returns a concatenation of all the error messages it wraps. +func (m QueryFinalReplyMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m QueryFinalReplyMultiError) AllErrors() []error { return m } + +// QueryFinalReplyValidationError is the validation error returned by +// QueryFinalReply.Validate if the designated constraints aren't met. +type QueryFinalReplyValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e QueryFinalReplyValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e QueryFinalReplyValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e QueryFinalReplyValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e QueryFinalReplyValidationError) Key() bool { return e.key } + +// ErrorName returns error name. +func (e QueryFinalReplyValidationError) ErrorName() string { return "QueryFinalReplyValidationError" } + +// Error satisfies the builtin error interface +func (e QueryFinalReplyValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sQueryFinalReply.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = QueryFinalReplyValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = QueryFinalReplyValidationError{} diff --git a/_13_sponge-dtm-cache/http/api/stock/v1/final.proto b/_13_sponge-dtm-cache/http/api/stock/v1/final.proto new file mode 100644 index 0000000..90ef8a5 --- /dev/null +++ b/_13_sponge-dtm-cache/http/api/stock/v1/final.proto @@ -0,0 +1,81 @@ +syntax = "proto3"; + +package api.stock.v1; + +import "google/api/annotations.proto"; +import "protoc-gen-openapiv2/options/annotations.proto"; +import "tagger/tagger.proto"; +import "validate/validate.proto"; + +option go_package = "stock/api/stock/v1;v1"; + +// Default settings for generating swagger documents +// NOTE: because json does not support 64 bits, the int64 and uint64 types under *.swagger.json are automatically converted to string types +// Reference https://github.com/grpc-ecosystem/grpc-gateway/blob/db7fbefff7c04877cdb32e16d4a248a024428207/examples/internal/proto/examplepb/a_bit_of_everything.proto +option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_swagger) = { + host: "localhost:8080" + base_path: "" + info: { + title: "stock api docs"; + version: "2.0"; + } + schemes: HTTP; + schemes: HTTPS; + consumes: "application/json"; + produces: "application/json"; + security_definitions: { + security: { + key: "BearerAuth"; + value: { + type: TYPE_API_KEY; + in: 
IN_HEADER; + name: "Authorization"; + description: "Type Bearer your-jwt-token to Value"; + } + } + } +}; + +service final{ + // Update data, with eventual consistency between the DB and cache + rpc Update(UpdateFinalRequest) returns (UpdateFinalRequestReply) { + option (google.api.http) = { + put: "/api/v1/stock/{id}/final" + body: "*" + }; + option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { + summary: "更新数据", + description: "更新数据,DB和缓存最终一致性", + tags: "case 1: 最终一致性" + }; + } + + // Query data + rpc Query(QueryFinalRequest) returns (QueryFinalReply) { + option (google.api.http) = { + get: "/api/v1/stock/{id}/final" + }; + option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { + summary: "查询数据", + description: "查询数据", + tags: "case 1: 最终一致性" + }; + } +} + +message UpdateFinalRequest { + uint64 id = 1 [(validate.rules).uint64.gt = 0, (tagger.tags) = "uri:\"id\"" ]; + uint32 stock = 2 [(validate.rules).uint32.gt = 0]; // stock quantity +} + +message UpdateFinalRequestReply { + +} + +message QueryFinalRequest { + uint64 id =1 [(validate.rules).uint64.gt = 0, (tagger.tags) = "uri:\"id\"" ]; +} + +message QueryFinalReply { + uint32 stock = 1; // stock quantity +} diff --git a/_13_sponge-dtm-cache/http/api/stock/v1/final_router.pb.go b/_13_sponge-dtm-cache/http/api/stock/v1/final_router.pb.go new file mode 100644 index 0000000..dbaa9cb --- /dev/null +++ b/_13_sponge-dtm-cache/http/api/stock/v1/final_router.pb.go @@ -0,0 +1,221 @@ +// Code generated by https://github.com/zhufuyi/sponge, DO NOT EDIT. + +package v1 + +import ( + "context" + "errors" + "strings" + + "github.com/gin-gonic/gin" + "go.uber.org/zap" + + "github.com/zhufuyi/sponge/pkg/errcode" + "github.com/zhufuyi/sponge/pkg/gin/middleware" +) + +type FinalLogicer interface { + Update(ctx context.Context, req *UpdateFinalRequest) (*UpdateFinalRequestReply, error) + Query(ctx context.Context, req *QueryFinalRequest) (*QueryFinalReply, error) +} + +type FinalOption func(*finalOptions) + +type finalOptions struct { + isFromRPC bool + responser errcode.Responser + zapLog *zap.Logger + httpErrors []*errcode.Error + rpcStatus []*errcode.RPCStatus + wrapCtxFn func(c *gin.Context) context.Context +} + +func (o *finalOptions) apply(opts ...FinalOption) { + for _, opt := range opts { + opt(o) + } +} + +func WithFinalHTTPResponse() FinalOption { + return func(o *finalOptions) { + o.isFromRPC = false + } +} + +func WithFinalRPCResponse() FinalOption { + return func(o *finalOptions) { + o.isFromRPC = true + } +} + +func WithFinalResponser(responser errcode.Responser) FinalOption { + return func(o *finalOptions) { + o.responser = responser + } +} + +func WithFinalLogger(zapLog *zap.Logger) FinalOption { + return func(o *finalOptions) { + o.zapLog = zapLog + } +} + +func WithFinalErrorToHTTPCode(e ...*errcode.Error) FinalOption { + return func(o *finalOptions) { + o.httpErrors = e + } +} + +func WithFinalRPCStatusToHTTPCode(s ...*errcode.RPCStatus) FinalOption { + return func(o *finalOptions) { + o.rpcStatus = s + } +} + +func WithFinalWrapCtx(wrapCtxFn func(c *gin.Context) context.Context) FinalOption { + return func(o *finalOptions) { + o.wrapCtxFn = wrapCtxFn + } +} + +func RegisterFinalRouter( + iRouter gin.IRouter, + groupPathMiddlewares map[string][]gin.HandlerFunc, + singlePathMiddlewares map[string][]gin.HandlerFunc, + iLogic FinalLogicer, + opts ...FinalOption) { + + o := &finalOptions{} + o.apply(opts...)
+ + if o.responser == nil { + o.responser = errcode.NewResponser(o.isFromRPC, o.httpErrors, o.rpcStatus) + } + if o.zapLog == nil { + o.zapLog, _ = zap.NewProduction() + } + + r := &finalRouter{ + iRouter: iRouter, + groupPathMiddlewares: groupPathMiddlewares, + singlePathMiddlewares: singlePathMiddlewares, + iLogic: iLogic, + iResponse: o.responser, + zapLog: o.zapLog, + wrapCtxFn: o.wrapCtxFn, + } + r.register() +} + +type finalRouter struct { + iRouter gin.IRouter + groupPathMiddlewares map[string][]gin.HandlerFunc + singlePathMiddlewares map[string][]gin.HandlerFunc + iLogic FinalLogicer + iResponse errcode.Responser + zapLog *zap.Logger + wrapCtxFn func(c *gin.Context) context.Context +} + +func (r *finalRouter) register() { + r.iRouter.Handle("PUT", "/api/v1/stock/:id/final", r.withMiddleware("PUT", "/api/v1/stock/:id/final", r.Update_4)...) + r.iRouter.Handle("GET", "/api/v1/stock/:id/final", r.withMiddleware("GET", "/api/v1/stock/:id/final", r.Query_4)...) + +} + +func (r *finalRouter) withMiddleware(method string, path string, fn gin.HandlerFunc) []gin.HandlerFunc { + handlerFns := []gin.HandlerFunc{} + + // determine if a route group is hit or miss, left prefix rule + for groupPath, fns := range r.groupPathMiddlewares { + if groupPath == "" || groupPath == "/" { + handlerFns = append(handlerFns, fns...) + continue + } + size := len(groupPath) + if len(path) < size { + continue + } + if groupPath == path[:size] { + handlerFns = append(handlerFns, fns...) + } + } + + // determine if a single route has been hit + key := strings.ToUpper(method) + "->" + path + if fns, ok := r.singlePathMiddlewares[key]; ok { + handlerFns = append(handlerFns, fns...) + } + + return append(handlerFns, fn) +} + +func (r *finalRouter) Update_4(c *gin.Context) { + req := &UpdateFinalRequest{} + var err error + + if err = c.ShouldBindUri(req); err != nil { + r.zapLog.Warn("ShouldBindUri error", zap.Error(err), middleware.GCtxRequestIDField(c)) + r.iResponse.ParamError(c, err) + return + } + + if err = c.ShouldBindJSON(req); err != nil { + r.zapLog.Warn("ShouldBindJSON error", zap.Error(err), middleware.GCtxRequestIDField(c)) + r.iResponse.ParamError(c, err) + return + } + + var ctx context.Context + if r.wrapCtxFn != nil { + ctx = r.wrapCtxFn(c) + } else { + ctx = middleware.WrapCtx(c) + } + + out, err := r.iLogic.Update(ctx, req) + if err != nil { + if errors.Is(err, errcode.SkipResponse) { + return + } + r.iResponse.Error(c, err) + return + } + + r.iResponse.Success(c, out) +} + +func (r *finalRouter) Query_4(c *gin.Context) { + req := &QueryFinalRequest{} + var err error + + if err = c.ShouldBindUri(req); err != nil { + r.zapLog.Warn("ShouldBindUri error", zap.Error(err), middleware.GCtxRequestIDField(c)) + r.iResponse.ParamError(c, err) + return + } + + if err = c.ShouldBindQuery(req); err != nil { + r.zapLog.Warn("ShouldBindQuery error", zap.Error(err), middleware.GCtxRequestIDField(c)) + r.iResponse.ParamError(c, err) + return + } + + var ctx context.Context + if r.wrapCtxFn != nil { + ctx = r.wrapCtxFn(c) + } else { + ctx = middleware.WrapCtx(c) + } + + out, err := r.iLogic.Query(ctx, req) + if err != nil { + if errors.Is(err, errcode.SkipResponse) { + return + } + r.iResponse.Error(c, err) + return + } + + r.iResponse.Success(c, out) +} diff --git a/_13_sponge-dtm-cache/http/api/stock/v1/stock.pb.go b/_13_sponge-dtm-cache/http/api/stock/v1/stock.pb.go new file mode 100644 index 0000000..696d469 --- /dev/null +++ b/_13_sponge-dtm-cache/http/api/stock/v1/stock.pb.go @@ -0,0 +1,918 @@ +// Code 
generated by protoc-gen-go. DO NOT EDIT. +// versions: +// protoc-gen-go v1.28.0 +// protoc v4.25.2 +// source: api/stock/v1/stock.proto + +package v1 + +import ( + protoreflect "google.golang.org/protobuf/reflect/protoreflect" + protoimpl "google.golang.org/protobuf/runtime/protoimpl" + reflect "reflect" + types "stock/api/types" + sync "sync" +) + +const ( + // Verify that this generated code is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) + // Verify that runtime/protoimpl is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) +) + +type CreateStockRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + ProductID uint64 `protobuf:"varint,1,opt,name=productID,proto3" json:"productID"` // 商品id + Stock uint32 `protobuf:"varint,2,opt,name=stock,proto3" json:"stock"` // 库存 +} + +func (x *CreateStockRequest) Reset() { + *x = CreateStockRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_api_stock_v1_stock_proto_msgTypes[0] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *CreateStockRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*CreateStockRequest) ProtoMessage() {} + +func (x *CreateStockRequest) ProtoReflect() protoreflect.Message { + mi := &file_api_stock_v1_stock_proto_msgTypes[0] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use CreateStockRequest.ProtoReflect.Descriptor instead. +func (*CreateStockRequest) Descriptor() ([]byte, []int) { + return file_api_stock_v1_stock_proto_rawDescGZIP(), []int{0} +} + +func (x *CreateStockRequest) GetProductID() uint64 { + if x != nil { + return x.ProductID + } + return 0 +} + +func (x *CreateStockRequest) GetStock() uint32 { + if x != nil { + return x.Stock + } + return 0 +} + +type CreateStockReply struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Id uint64 `protobuf:"varint,1,opt,name=id,proto3" json:"id"` +} + +func (x *CreateStockReply) Reset() { + *x = CreateStockReply{} + if protoimpl.UnsafeEnabled { + mi := &file_api_stock_v1_stock_proto_msgTypes[1] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *CreateStockReply) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*CreateStockReply) ProtoMessage() {} + +func (x *CreateStockReply) ProtoReflect() protoreflect.Message { + mi := &file_api_stock_v1_stock_proto_msgTypes[1] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use CreateStockReply.ProtoReflect.Descriptor instead. 
+func (*CreateStockReply) Descriptor() ([]byte, []int) { + return file_api_stock_v1_stock_proto_rawDescGZIP(), []int{1} +} + +func (x *CreateStockReply) GetId() uint64 { + if x != nil { + return x.Id + } + return 0 +} + +type DeleteStockByIDRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Id uint64 `protobuf:"varint,1,opt,name=id,proto3" json:"id" uri:"id"` +} + +func (x *DeleteStockByIDRequest) Reset() { + *x = DeleteStockByIDRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_api_stock_v1_stock_proto_msgTypes[2] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *DeleteStockByIDRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*DeleteStockByIDRequest) ProtoMessage() {} + +func (x *DeleteStockByIDRequest) ProtoReflect() protoreflect.Message { + mi := &file_api_stock_v1_stock_proto_msgTypes[2] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use DeleteStockByIDRequest.ProtoReflect.Descriptor instead. +func (*DeleteStockByIDRequest) Descriptor() ([]byte, []int) { + return file_api_stock_v1_stock_proto_rawDescGZIP(), []int{2} +} + +func (x *DeleteStockByIDRequest) GetId() uint64 { + if x != nil { + return x.Id + } + return 0 +} + +type DeleteStockByIDReply struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields +} + +func (x *DeleteStockByIDReply) Reset() { + *x = DeleteStockByIDReply{} + if protoimpl.UnsafeEnabled { + mi := &file_api_stock_v1_stock_proto_msgTypes[3] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *DeleteStockByIDReply) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*DeleteStockByIDReply) ProtoMessage() {} + +func (x *DeleteStockByIDReply) ProtoReflect() protoreflect.Message { + mi := &file_api_stock_v1_stock_proto_msgTypes[3] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use DeleteStockByIDReply.ProtoReflect.Descriptor instead. 
+func (*DeleteStockByIDReply) Descriptor() ([]byte, []int) { + return file_api_stock_v1_stock_proto_rawDescGZIP(), []int{3} +} + +type UpdateStockByIDRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Id uint64 `protobuf:"varint,1,opt,name=id,proto3" json:"id" uri:"id"` + ProductID uint64 `protobuf:"varint,2,opt,name=productID,proto3" json:"productID"` // 商品id + Stock uint32 `protobuf:"varint,3,opt,name=stock,proto3" json:"stock"` // 库存 +} + +func (x *UpdateStockByIDRequest) Reset() { + *x = UpdateStockByIDRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_api_stock_v1_stock_proto_msgTypes[4] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *UpdateStockByIDRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*UpdateStockByIDRequest) ProtoMessage() {} + +func (x *UpdateStockByIDRequest) ProtoReflect() protoreflect.Message { + mi := &file_api_stock_v1_stock_proto_msgTypes[4] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use UpdateStockByIDRequest.ProtoReflect.Descriptor instead. +func (*UpdateStockByIDRequest) Descriptor() ([]byte, []int) { + return file_api_stock_v1_stock_proto_rawDescGZIP(), []int{4} +} + +func (x *UpdateStockByIDRequest) GetId() uint64 { + if x != nil { + return x.Id + } + return 0 +} + +func (x *UpdateStockByIDRequest) GetProductID() uint64 { + if x != nil { + return x.ProductID + } + return 0 +} + +func (x *UpdateStockByIDRequest) GetStock() uint32 { + if x != nil { + return x.Stock + } + return 0 +} + +type UpdateStockByIDReply struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields +} + +func (x *UpdateStockByIDReply) Reset() { + *x = UpdateStockByIDReply{} + if protoimpl.UnsafeEnabled { + mi := &file_api_stock_v1_stock_proto_msgTypes[5] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *UpdateStockByIDReply) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*UpdateStockByIDReply) ProtoMessage() {} + +func (x *UpdateStockByIDReply) ProtoReflect() protoreflect.Message { + mi := &file_api_stock_v1_stock_proto_msgTypes[5] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use UpdateStockByIDReply.ProtoReflect.Descriptor instead. 
+func (*UpdateStockByIDReply) Descriptor() ([]byte, []int) { + return file_api_stock_v1_stock_proto_rawDescGZIP(), []int{5} +} + +type Stock struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Id uint64 `protobuf:"varint,1,opt,name=id,proto3" json:"id"` + ProductID uint64 `protobuf:"varint,2,opt,name=productID,proto3" json:"productID"` // 商品id + Stock uint32 `protobuf:"varint,3,opt,name=stock,proto3" json:"stock"` // 库存 + CreatedAt string `protobuf:"bytes,4,opt,name=createdAt,proto3" json:"createdAt"` + UpdatedAt string `protobuf:"bytes,5,opt,name=updatedAt,proto3" json:"updatedAt"` +} + +func (x *Stock) Reset() { + *x = Stock{} + if protoimpl.UnsafeEnabled { + mi := &file_api_stock_v1_stock_proto_msgTypes[6] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Stock) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Stock) ProtoMessage() {} + +func (x *Stock) ProtoReflect() protoreflect.Message { + mi := &file_api_stock_v1_stock_proto_msgTypes[6] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Stock.ProtoReflect.Descriptor instead. +func (*Stock) Descriptor() ([]byte, []int) { + return file_api_stock_v1_stock_proto_rawDescGZIP(), []int{6} +} + +func (x *Stock) GetId() uint64 { + if x != nil { + return x.Id + } + return 0 +} + +func (x *Stock) GetProductID() uint64 { + if x != nil { + return x.ProductID + } + return 0 +} + +func (x *Stock) GetStock() uint32 { + if x != nil { + return x.Stock + } + return 0 +} + +func (x *Stock) GetCreatedAt() string { + if x != nil { + return x.CreatedAt + } + return "" +} + +func (x *Stock) GetUpdatedAt() string { + if x != nil { + return x.UpdatedAt + } + return "" +} + +type GetStockByIDRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Id uint64 `protobuf:"varint,1,opt,name=id,proto3" json:"id" uri:"id"` +} + +func (x *GetStockByIDRequest) Reset() { + *x = GetStockByIDRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_api_stock_v1_stock_proto_msgTypes[7] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *GetStockByIDRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*GetStockByIDRequest) ProtoMessage() {} + +func (x *GetStockByIDRequest) ProtoReflect() protoreflect.Message { + mi := &file_api_stock_v1_stock_proto_msgTypes[7] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use GetStockByIDRequest.ProtoReflect.Descriptor instead. 
+func (*GetStockByIDRequest) Descriptor() ([]byte, []int) { + return file_api_stock_v1_stock_proto_rawDescGZIP(), []int{7} +} + +func (x *GetStockByIDRequest) GetId() uint64 { + if x != nil { + return x.Id + } + return 0 +} + +type GetStockByIDReply struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Stock *Stock `protobuf:"bytes,1,opt,name=stock,proto3" json:"stock"` +} + +func (x *GetStockByIDReply) Reset() { + *x = GetStockByIDReply{} + if protoimpl.UnsafeEnabled { + mi := &file_api_stock_v1_stock_proto_msgTypes[8] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *GetStockByIDReply) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*GetStockByIDReply) ProtoMessage() {} + +func (x *GetStockByIDReply) ProtoReflect() protoreflect.Message { + mi := &file_api_stock_v1_stock_proto_msgTypes[8] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use GetStockByIDReply.ProtoReflect.Descriptor instead. +func (*GetStockByIDReply) Descriptor() ([]byte, []int) { + return file_api_stock_v1_stock_proto_rawDescGZIP(), []int{8} +} + +func (x *GetStockByIDReply) GetStock() *Stock { + if x != nil { + return x.Stock + } + return nil +} + +type ListStockRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Params *types.Params `protobuf:"bytes,1,opt,name=params,proto3" json:"params"` +} + +func (x *ListStockRequest) Reset() { + *x = ListStockRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_api_stock_v1_stock_proto_msgTypes[9] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ListStockRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ListStockRequest) ProtoMessage() {} + +func (x *ListStockRequest) ProtoReflect() protoreflect.Message { + mi := &file_api_stock_v1_stock_proto_msgTypes[9] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ListStockRequest.ProtoReflect.Descriptor instead. 
+func (*ListStockRequest) Descriptor() ([]byte, []int) { + return file_api_stock_v1_stock_proto_rawDescGZIP(), []int{9} +} + +func (x *ListStockRequest) GetParams() *types.Params { + if x != nil { + return x.Params + } + return nil +} + +type ListStockReply struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Total int64 `protobuf:"varint,1,opt,name=total,proto3" json:"total"` + Stocks []*Stock `protobuf:"bytes,2,rep,name=stocks,proto3" json:"stocks"` +} + +func (x *ListStockReply) Reset() { + *x = ListStockReply{} + if protoimpl.UnsafeEnabled { + mi := &file_api_stock_v1_stock_proto_msgTypes[10] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ListStockReply) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ListStockReply) ProtoMessage() {} + +func (x *ListStockReply) ProtoReflect() protoreflect.Message { + mi := &file_api_stock_v1_stock_proto_msgTypes[10] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ListStockReply.ProtoReflect.Descriptor instead. +func (*ListStockReply) Descriptor() ([]byte, []int) { + return file_api_stock_v1_stock_proto_rawDescGZIP(), []int{10} +} + +func (x *ListStockReply) GetTotal() int64 { + if x != nil { + return x.Total + } + return 0 +} + +func (x *ListStockReply) GetStocks() []*Stock { + if x != nil { + return x.Stocks + } + return nil +} + +var File_api_stock_v1_stock_proto protoreflect.FileDescriptor + +var file_api_stock_v1_stock_proto_rawDesc = []byte{ + 0x0a, 0x18, 0x61, 0x70, 0x69, 0x2f, 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x2f, 0x76, 0x31, 0x2f, 0x73, + 0x74, 0x6f, 0x63, 0x6b, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x0c, 0x61, 0x70, 0x69, 0x2e, + 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x2e, 0x76, 0x31, 0x1a, 0x15, 0x61, 0x70, 0x69, 0x2f, 0x74, 0x79, + 0x70, 0x65, 0x73, 0x2f, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, + 0x1c, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x61, 0x6e, 0x6e, 0x6f, + 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x2e, 0x70, + 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x2d, 0x67, 0x65, 0x6e, 0x2d, 0x6f, 0x70, 0x65, 0x6e, 0x61, 0x70, + 0x69, 0x76, 0x32, 0x2f, 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2f, 0x61, 0x6e, 0x6e, 0x6f, + 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x13, 0x74, + 0x61, 0x67, 0x67, 0x65, 0x72, 0x2f, 0x74, 0x61, 0x67, 0x67, 0x65, 0x72, 0x2e, 0x70, 0x72, 0x6f, + 0x74, 0x6f, 0x1a, 0x17, 0x76, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x65, 0x2f, 0x76, 0x61, 0x6c, + 0x69, 0x64, 0x61, 0x74, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, 0x48, 0x0a, 0x12, 0x43, + 0x72, 0x65, 0x61, 0x74, 0x65, 0x53, 0x74, 0x6f, 0x63, 0x6b, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, + 0x74, 0x12, 0x1c, 0x0a, 0x09, 0x70, 0x72, 0x6f, 0x64, 0x75, 0x63, 0x74, 0x49, 0x44, 0x18, 0x01, + 0x20, 0x01, 0x28, 0x04, 0x52, 0x09, 0x70, 0x72, 0x6f, 0x64, 0x75, 0x63, 0x74, 0x49, 0x44, 0x12, + 0x14, 0x0a, 0x05, 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x05, + 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x22, 0x22, 0x0a, 0x10, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x53, + 0x74, 0x6f, 0x63, 0x6b, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, + 0x01, 0x20, 0x01, 0x28, 0x04, 0x52, 0x02, 0x69, 0x64, 
0x22, 0x3e, 0x0a, 0x16, 0x44, 0x65, 0x6c, + 0x65, 0x74, 0x65, 0x53, 0x74, 0x6f, 0x63, 0x6b, 0x42, 0x79, 0x49, 0x44, 0x52, 0x65, 0x71, 0x75, + 0x65, 0x73, 0x74, 0x12, 0x24, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x42, + 0x14, 0xfa, 0x42, 0x04, 0x32, 0x02, 0x20, 0x00, 0x9a, 0x84, 0x9e, 0x03, 0x08, 0x75, 0x72, 0x69, + 0x3a, 0x22, 0x69, 0x64, 0x22, 0x52, 0x02, 0x69, 0x64, 0x22, 0x16, 0x0a, 0x14, 0x44, 0x65, 0x6c, + 0x65, 0x74, 0x65, 0x53, 0x74, 0x6f, 0x63, 0x6b, 0x42, 0x79, 0x49, 0x44, 0x52, 0x65, 0x70, 0x6c, + 0x79, 0x22, 0x6b, 0x0a, 0x16, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x53, 0x74, 0x6f, 0x63, 0x6b, + 0x42, 0x79, 0x49, 0x44, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1d, 0x0a, 0x02, 0x69, + 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x42, 0x0d, 0x9a, 0x84, 0x9e, 0x03, 0x08, 0x75, 0x72, + 0x69, 0x3a, 0x22, 0x69, 0x64, 0x22, 0x52, 0x02, 0x69, 0x64, 0x12, 0x1c, 0x0a, 0x09, 0x70, 0x72, + 0x6f, 0x64, 0x75, 0x63, 0x74, 0x49, 0x44, 0x18, 0x02, 0x20, 0x01, 0x28, 0x04, 0x52, 0x09, 0x70, + 0x72, 0x6f, 0x64, 0x75, 0x63, 0x74, 0x49, 0x44, 0x12, 0x14, 0x0a, 0x05, 0x73, 0x74, 0x6f, 0x63, + 0x6b, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x05, 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x22, 0x16, + 0x0a, 0x14, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x53, 0x74, 0x6f, 0x63, 0x6b, 0x42, 0x79, 0x49, + 0x44, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0x87, 0x01, 0x0a, 0x05, 0x53, 0x74, 0x6f, 0x63, 0x6b, + 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x52, 0x02, 0x69, 0x64, + 0x12, 0x1c, 0x0a, 0x09, 0x70, 0x72, 0x6f, 0x64, 0x75, 0x63, 0x74, 0x49, 0x44, 0x18, 0x02, 0x20, + 0x01, 0x28, 0x04, 0x52, 0x09, 0x70, 0x72, 0x6f, 0x64, 0x75, 0x63, 0x74, 0x49, 0x44, 0x12, 0x14, + 0x0a, 0x05, 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x05, 0x73, + 0x74, 0x6f, 0x63, 0x6b, 0x12, 0x1c, 0x0a, 0x09, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x64, 0x41, + 0x74, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x64, + 0x41, 0x74, 0x12, 0x1c, 0x0a, 0x09, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x64, 0x41, 0x74, 0x18, + 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x64, 0x41, 0x74, + 0x22, 0x3b, 0x0a, 0x13, 0x47, 0x65, 0x74, 0x53, 0x74, 0x6f, 0x63, 0x6b, 0x42, 0x79, 0x49, 0x44, + 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x24, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, + 0x01, 0x28, 0x04, 0x42, 0x14, 0xfa, 0x42, 0x04, 0x32, 0x02, 0x20, 0x00, 0x9a, 0x84, 0x9e, 0x03, + 0x08, 0x75, 0x72, 0x69, 0x3a, 0x22, 0x69, 0x64, 0x22, 0x52, 0x02, 0x69, 0x64, 0x22, 0x3e, 0x0a, + 0x11, 0x47, 0x65, 0x74, 0x53, 0x74, 0x6f, 0x63, 0x6b, 0x42, 0x79, 0x49, 0x44, 0x52, 0x65, 0x70, + 0x6c, 0x79, 0x12, 0x29, 0x0a, 0x05, 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x18, 0x01, 0x20, 0x01, 0x28, + 0x0b, 0x32, 0x13, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x2e, 0x76, 0x31, + 0x2e, 0x53, 0x74, 0x6f, 0x63, 0x6b, 0x52, 0x05, 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x22, 0x3d, 0x0a, + 0x10, 0x4c, 0x69, 0x73, 0x74, 0x53, 0x74, 0x6f, 0x63, 0x6b, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, + 0x74, 0x12, 0x29, 0x0a, 0x06, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x18, 0x01, 0x20, 0x01, 0x28, + 0x0b, 0x32, 0x11, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2e, 0x50, 0x61, + 0x72, 0x61, 0x6d, 0x73, 0x52, 0x06, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x22, 0x53, 0x0a, 0x0e, + 0x4c, 0x69, 0x73, 0x74, 0x53, 0x74, 0x6f, 0x63, 0x6b, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x12, 0x14, + 0x0a, 0x05, 0x74, 0x6f, 0x74, 0x61, 0x6c, 0x18, 0x01, 0x20, 0x01, 0x28, 0x03, 
0x52, 0x05, 0x74, + 0x6f, 0x74, 0x61, 0x6c, 0x12, 0x2b, 0x0a, 0x06, 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x73, 0x18, 0x02, + 0x20, 0x03, 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x73, 0x74, 0x6f, 0x63, 0x6b, + 0x2e, 0x76, 0x31, 0x2e, 0x53, 0x74, 0x6f, 0x63, 0x6b, 0x52, 0x06, 0x73, 0x74, 0x6f, 0x63, 0x6b, + 0x73, 0x32, 0xa3, 0x06, 0x0a, 0x05, 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x12, 0x99, 0x01, 0x0a, 0x06, + 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x12, 0x20, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x73, 0x74, 0x6f, + 0x63, 0x6b, 0x2e, 0x76, 0x31, 0x2e, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x53, 0x74, 0x6f, 0x63, + 0x6b, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1e, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x73, + 0x74, 0x6f, 0x63, 0x6b, 0x2e, 0x76, 0x31, 0x2e, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x53, 0x74, + 0x6f, 0x63, 0x6b, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0x4d, 0x92, 0x41, 0x32, 0x12, 0x0c, 0x63, + 0x72, 0x65, 0x61, 0x74, 0x65, 0x20, 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x1a, 0x22, 0x73, 0x75, 0x62, + 0x6d, 0x69, 0x74, 0x20, 0x69, 0x6e, 0x66, 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x20, + 0x74, 0x6f, 0x20, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x20, 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x82, + 0xd3, 0xe4, 0x93, 0x02, 0x12, 0x22, 0x0d, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x76, 0x31, 0x2f, 0x73, + 0x74, 0x6f, 0x63, 0x6b, 0x3a, 0x01, 0x2a, 0x12, 0x97, 0x01, 0x0a, 0x0a, 0x44, 0x65, 0x6c, 0x65, + 0x74, 0x65, 0x42, 0x79, 0x49, 0x44, 0x12, 0x24, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x73, 0x74, 0x6f, + 0x63, 0x6b, 0x2e, 0x76, 0x31, 0x2e, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x53, 0x74, 0x6f, 0x63, + 0x6b, 0x42, 0x79, 0x49, 0x44, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x22, 0x2e, 0x61, + 0x70, 0x69, 0x2e, 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x2e, 0x76, 0x31, 0x2e, 0x44, 0x65, 0x6c, 0x65, + 0x74, 0x65, 0x53, 0x74, 0x6f, 0x63, 0x6b, 0x42, 0x79, 0x49, 0x44, 0x52, 0x65, 0x70, 0x6c, 0x79, + 0x22, 0x3f, 0x92, 0x41, 0x22, 0x12, 0x0c, 0x64, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x20, 0x73, 0x74, + 0x6f, 0x63, 0x6b, 0x1a, 0x12, 0x64, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x20, 0x73, 0x74, 0x6f, 0x63, + 0x6b, 0x20, 0x62, 0x79, 0x20, 0x69, 0x64, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x14, 0x2a, 0x12, 0x2f, + 0x61, 0x70, 0x69, 0x2f, 0x76, 0x31, 0x2f, 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x2f, 0x7b, 0x69, 0x64, + 0x7d, 0x12, 0x9a, 0x01, 0x0a, 0x0a, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x42, 0x79, 0x49, 0x44, + 0x12, 0x24, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x2e, 0x76, 0x31, 0x2e, + 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x53, 0x74, 0x6f, 0x63, 0x6b, 0x42, 0x79, 0x49, 0x44, 0x52, + 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x22, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x73, 0x74, 0x6f, + 0x63, 0x6b, 0x2e, 0x76, 0x31, 0x2e, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x53, 0x74, 0x6f, 0x63, + 0x6b, 0x42, 0x79, 0x49, 0x44, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0x42, 0x92, 0x41, 0x22, 0x12, + 0x0c, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x20, 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x1a, 0x12, 0x75, + 0x70, 0x64, 0x61, 0x74, 0x65, 0x20, 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x20, 0x62, 0x79, 0x20, 0x69, + 0x64, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x17, 0x1a, 0x12, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x76, 0x31, + 0x2f, 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x2f, 0x7b, 0x69, 0x64, 0x7d, 0x3a, 0x01, 0x2a, 0x12, 0x96, + 0x01, 0x0a, 0x07, 0x47, 0x65, 0x74, 0x42, 0x79, 0x49, 0x44, 0x12, 0x21, 0x2e, 0x61, 0x70, 0x69, + 0x2e, 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x2e, 0x76, 0x31, 0x2e, 0x47, 0x65, 0x74, 0x53, 0x74, 0x6f, + 0x63, 0x6b, 0x42, 0x79, 0x49, 0x44, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1f, 0x2e, + 0x61, 
0x70, 0x69, 0x2e, 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x2e, 0x76, 0x31, 0x2e, 0x47, 0x65, 0x74, + 0x53, 0x74, 0x6f, 0x63, 0x6b, 0x42, 0x79, 0x49, 0x44, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0x47, + 0x92, 0x41, 0x2a, 0x12, 0x10, 0x67, 0x65, 0x74, 0x20, 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x20, 0x64, + 0x65, 0x74, 0x61, 0x69, 0x6c, 0x1a, 0x16, 0x67, 0x65, 0x74, 0x20, 0x73, 0x74, 0x6f, 0x63, 0x6b, + 0x20, 0x64, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x20, 0x62, 0x79, 0x20, 0x69, 0x64, 0x82, 0xd3, 0xe4, + 0x93, 0x02, 0x14, 0x12, 0x12, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x76, 0x31, 0x2f, 0x73, 0x74, 0x6f, + 0x63, 0x6b, 0x2f, 0x7b, 0x69, 0x64, 0x7d, 0x12, 0xad, 0x01, 0x0a, 0x04, 0x4c, 0x69, 0x73, 0x74, + 0x12, 0x1e, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x2e, 0x76, 0x31, 0x2e, + 0x4c, 0x69, 0x73, 0x74, 0x53, 0x74, 0x6f, 0x63, 0x6b, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, + 0x1a, 0x1c, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x2e, 0x76, 0x31, 0x2e, + 0x4c, 0x69, 0x73, 0x74, 0x53, 0x74, 0x6f, 0x63, 0x6b, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0x67, + 0x92, 0x41, 0x47, 0x12, 0x1c, 0x6c, 0x69, 0x73, 0x74, 0x20, 0x6f, 0x66, 0x20, 0x73, 0x74, 0x6f, + 0x63, 0x6b, 0x73, 0x20, 0x62, 0x79, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, + 0x73, 0x1a, 0x27, 0x6c, 0x69, 0x73, 0x74, 0x20, 0x6f, 0x66, 0x20, 0x73, 0x74, 0x6f, 0x63, 0x6b, + 0x73, 0x20, 0x62, 0x79, 0x20, 0x70, 0x61, 0x67, 0x69, 0x6e, 0x67, 0x20, 0x61, 0x6e, 0x64, 0x20, + 0x63, 0x6f, 0x6e, 0x64, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x17, + 0x22, 0x12, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x76, 0x31, 0x2f, 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x2f, + 0x6c, 0x69, 0x73, 0x74, 0x3a, 0x01, 0x2a, 0x42, 0xb4, 0x01, 0x5a, 0x15, 0x73, 0x74, 0x6f, 0x63, + 0x6b, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x2f, 0x76, 0x31, 0x3b, 0x76, + 0x31, 0x92, 0x41, 0x99, 0x01, 0x12, 0x15, 0x0a, 0x0e, 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x20, 0x61, + 0x70, 0x69, 0x20, 0x64, 0x6f, 0x63, 0x73, 0x32, 0x03, 0x32, 0x2e, 0x30, 0x1a, 0x0e, 0x6c, 0x6f, + 0x63, 0x61, 0x6c, 0x68, 0x6f, 0x73, 0x74, 0x3a, 0x38, 0x30, 0x38, 0x30, 0x2a, 0x02, 0x01, 0x02, + 0x32, 0x10, 0x61, 0x70, 0x70, 0x6c, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2f, 0x6a, 0x73, + 0x6f, 0x6e, 0x3a, 0x10, 0x61, 0x70, 0x70, 0x6c, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2f, + 0x6a, 0x73, 0x6f, 0x6e, 0x5a, 0x48, 0x0a, 0x46, 0x0a, 0x0a, 0x42, 0x65, 0x61, 0x72, 0x65, 0x72, + 0x41, 0x75, 0x74, 0x68, 0x12, 0x38, 0x08, 0x02, 0x12, 0x23, 0x54, 0x79, 0x70, 0x65, 0x20, 0x42, + 0x65, 0x61, 0x72, 0x65, 0x72, 0x20, 0x79, 0x6f, 0x75, 0x72, 0x2d, 0x6a, 0x77, 0x74, 0x2d, 0x74, + 0x6f, 0x6b, 0x65, 0x6e, 0x20, 0x74, 0x6f, 0x20, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x1a, 0x0d, 0x41, + 0x75, 0x74, 0x68, 0x6f, 0x72, 0x69, 0x7a, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x02, 0x62, 0x06, + 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, +} + +var ( + file_api_stock_v1_stock_proto_rawDescOnce sync.Once + file_api_stock_v1_stock_proto_rawDescData = file_api_stock_v1_stock_proto_rawDesc +) + +func file_api_stock_v1_stock_proto_rawDescGZIP() []byte { + file_api_stock_v1_stock_proto_rawDescOnce.Do(func() { + file_api_stock_v1_stock_proto_rawDescData = protoimpl.X.CompressGZIP(file_api_stock_v1_stock_proto_rawDescData) + }) + return file_api_stock_v1_stock_proto_rawDescData +} + +var file_api_stock_v1_stock_proto_msgTypes = make([]protoimpl.MessageInfo, 11) +var file_api_stock_v1_stock_proto_goTypes = []interface{}{ + (*CreateStockRequest)(nil), // 0: api.stock.v1.CreateStockRequest + (*CreateStockReply)(nil), 
// 1: api.stock.v1.CreateStockReply + (*DeleteStockByIDRequest)(nil), // 2: api.stock.v1.DeleteStockByIDRequest + (*DeleteStockByIDReply)(nil), // 3: api.stock.v1.DeleteStockByIDReply + (*UpdateStockByIDRequest)(nil), // 4: api.stock.v1.UpdateStockByIDRequest + (*UpdateStockByIDReply)(nil), // 5: api.stock.v1.UpdateStockByIDReply + (*Stock)(nil), // 6: api.stock.v1.Stock + (*GetStockByIDRequest)(nil), // 7: api.stock.v1.GetStockByIDRequest + (*GetStockByIDReply)(nil), // 8: api.stock.v1.GetStockByIDReply + (*ListStockRequest)(nil), // 9: api.stock.v1.ListStockRequest + (*ListStockReply)(nil), // 10: api.stock.v1.ListStockReply + (*types.Params)(nil), // 11: api.types.Params +} +var file_api_stock_v1_stock_proto_depIdxs = []int32{ + 6, // 0: api.stock.v1.GetStockByIDReply.stock:type_name -> api.stock.v1.Stock + 11, // 1: api.stock.v1.ListStockRequest.params:type_name -> api.types.Params + 6, // 2: api.stock.v1.ListStockReply.stocks:type_name -> api.stock.v1.Stock + 0, // 3: api.stock.v1.stock.Create:input_type -> api.stock.v1.CreateStockRequest + 2, // 4: api.stock.v1.stock.DeleteByID:input_type -> api.stock.v1.DeleteStockByIDRequest + 4, // 5: api.stock.v1.stock.UpdateByID:input_type -> api.stock.v1.UpdateStockByIDRequest + 7, // 6: api.stock.v1.stock.GetByID:input_type -> api.stock.v1.GetStockByIDRequest + 9, // 7: api.stock.v1.stock.List:input_type -> api.stock.v1.ListStockRequest + 1, // 8: api.stock.v1.stock.Create:output_type -> api.stock.v1.CreateStockReply + 3, // 9: api.stock.v1.stock.DeleteByID:output_type -> api.stock.v1.DeleteStockByIDReply + 5, // 10: api.stock.v1.stock.UpdateByID:output_type -> api.stock.v1.UpdateStockByIDReply + 8, // 11: api.stock.v1.stock.GetByID:output_type -> api.stock.v1.GetStockByIDReply + 10, // 12: api.stock.v1.stock.List:output_type -> api.stock.v1.ListStockReply + 8, // [8:13] is the sub-list for method output_type + 3, // [3:8] is the sub-list for method input_type + 3, // [3:3] is the sub-list for extension type_name + 3, // [3:3] is the sub-list for extension extendee + 0, // [0:3] is the sub-list for field type_name +} + +func init() { file_api_stock_v1_stock_proto_init() } +func file_api_stock_v1_stock_proto_init() { + if File_api_stock_v1_stock_proto != nil { + return + } + if !protoimpl.UnsafeEnabled { + file_api_stock_v1_stock_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*CreateStockRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_api_stock_v1_stock_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*CreateStockReply); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_api_stock_v1_stock_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*DeleteStockByIDRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_api_stock_v1_stock_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*DeleteStockByIDReply); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_api_stock_v1_stock_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*UpdateStockByIDRequest); i { + case 0: + return &v.state + case 1: + return 
&v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_api_stock_v1_stock_proto_msgTypes[5].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*UpdateStockByIDReply); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_api_stock_v1_stock_proto_msgTypes[6].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*Stock); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_api_stock_v1_stock_proto_msgTypes[7].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*GetStockByIDRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_api_stock_v1_stock_proto_msgTypes[8].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*GetStockByIDReply); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_api_stock_v1_stock_proto_msgTypes[9].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*ListStockRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_api_stock_v1_stock_proto_msgTypes[10].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*ListStockReply); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + } + type x struct{} + out := protoimpl.TypeBuilder{ + File: protoimpl.DescBuilder{ + GoPackagePath: reflect.TypeOf(x{}).PkgPath(), + RawDescriptor: file_api_stock_v1_stock_proto_rawDesc, + NumEnums: 0, + NumMessages: 11, + NumExtensions: 0, + NumServices: 1, + }, + GoTypes: file_api_stock_v1_stock_proto_goTypes, + DependencyIndexes: file_api_stock_v1_stock_proto_depIdxs, + MessageInfos: file_api_stock_v1_stock_proto_msgTypes, + }.Build() + File_api_stock_v1_stock_proto = out.File + file_api_stock_v1_stock_proto_rawDesc = nil + file_api_stock_v1_stock_proto_goTypes = nil + file_api_stock_v1_stock_proto_depIdxs = nil +} diff --git a/_13_sponge-dtm-cache/http/api/stock/v1/stock.pb.validate.go b/_13_sponge-dtm-cache/http/api/stock/v1/stock.pb.validate.go new file mode 100644 index 0000000..7e08c68 --- /dev/null +++ b/_13_sponge-dtm-cache/http/api/stock/v1/stock.pb.validate.go @@ -0,0 +1,1286 @@ +// Code generated by protoc-gen-validate. DO NOT EDIT. +// source: api/stock/v1/stock.proto + +package v1 + +import ( + "bytes" + "errors" + "fmt" + "net" + "net/mail" + "net/url" + "regexp" + "sort" + "strings" + "time" + "unicode/utf8" + + "google.golang.org/protobuf/types/known/anypb" +) + +// ensure the imports are used +var ( + _ = bytes.MinRead + _ = errors.New("") + _ = fmt.Print + _ = utf8.UTFMax + _ = (*regexp.Regexp)(nil) + _ = (*strings.Reader)(nil) + _ = net.IPv4len + _ = time.Duration(0) + _ = (*url.URL)(nil) + _ = (*mail.Address)(nil) + _ = anypb.Any{} + _ = sort.Sort +) + +// Validate checks the field values on CreateStockRequest with the rules +// defined in the proto definition for this message. If any rules are +// violated, the first error encountered is returned, or nil if there are no violations. 
+func (m *CreateStockRequest) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on CreateStockRequest with the rules +// defined in the proto definition for this message. If any rules are +// violated, the result is a list of violation errors wrapped in +// CreateStockRequestMultiError, or nil if none found. +func (m *CreateStockRequest) ValidateAll() error { + return m.validate(true) +} + +func (m *CreateStockRequest) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + // no validation rules for ProductID + + // no validation rules for Stock + + if len(errors) > 0 { + return CreateStockRequestMultiError(errors) + } + + return nil +} + +// CreateStockRequestMultiError is an error wrapping multiple validation errors +// returned by CreateStockRequest.ValidateAll() if the designated constraints +// aren't met. +type CreateStockRequestMultiError []error + +// Error returns a concatenation of all the error messages it wraps. +func (m CreateStockRequestMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m CreateStockRequestMultiError) AllErrors() []error { return m } + +// CreateStockRequestValidationError is the validation error returned by +// CreateStockRequest.Validate if the designated constraints aren't met. +type CreateStockRequestValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e CreateStockRequestValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e CreateStockRequestValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e CreateStockRequestValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e CreateStockRequestValidationError) Key() bool { return e.key } + +// ErrorName returns error name. +func (e CreateStockRequestValidationError) ErrorName() string { + return "CreateStockRequestValidationError" +} + +// Error satisfies the builtin error interface +func (e CreateStockRequestValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sCreateStockRequest.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = CreateStockRequestValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = CreateStockRequestValidationError{} + +// Validate checks the field values on CreateStockReply with the rules defined +// in the proto definition for this message. If any rules are violated, the +// first error encountered is returned, or nil if there are no violations. +func (m *CreateStockReply) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on CreateStockReply with the rules +// defined in the proto definition for this message. If any rules are +// violated, the result is a list of violation errors wrapped in +// CreateStockReplyMultiError, or nil if none found. 
+func (m *CreateStockReply) ValidateAll() error { + return m.validate(true) +} + +func (m *CreateStockReply) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + // no validation rules for Id + + if len(errors) > 0 { + return CreateStockReplyMultiError(errors) + } + + return nil +} + +// CreateStockReplyMultiError is an error wrapping multiple validation errors +// returned by CreateStockReply.ValidateAll() if the designated constraints +// aren't met. +type CreateStockReplyMultiError []error + +// Error returns a concatenation of all the error messages it wraps. +func (m CreateStockReplyMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m CreateStockReplyMultiError) AllErrors() []error { return m } + +// CreateStockReplyValidationError is the validation error returned by +// CreateStockReply.Validate if the designated constraints aren't met. +type CreateStockReplyValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e CreateStockReplyValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e CreateStockReplyValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e CreateStockReplyValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e CreateStockReplyValidationError) Key() bool { return e.key } + +// ErrorName returns error name. +func (e CreateStockReplyValidationError) ErrorName() string { return "CreateStockReplyValidationError" } + +// Error satisfies the builtin error interface +func (e CreateStockReplyValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sCreateStockReply.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = CreateStockReplyValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = CreateStockReplyValidationError{} + +// Validate checks the field values on DeleteStockByIDRequest with the rules +// defined in the proto definition for this message. If any rules are +// violated, the first error encountered is returned, or nil if there are no violations. +func (m *DeleteStockByIDRequest) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on DeleteStockByIDRequest with the rules +// defined in the proto definition for this message. If any rules are +// violated, the result is a list of violation errors wrapped in +// DeleteStockByIDRequestMultiError, or nil if none found. 
+func (m *DeleteStockByIDRequest) ValidateAll() error { + return m.validate(true) +} + +func (m *DeleteStockByIDRequest) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + if m.GetId() <= 0 { + err := DeleteStockByIDRequestValidationError{ + field: "Id", + reason: "value must be greater than 0", + } + if !all { + return err + } + errors = append(errors, err) + } + + if len(errors) > 0 { + return DeleteStockByIDRequestMultiError(errors) + } + + return nil +} + +// DeleteStockByIDRequestMultiError is an error wrapping multiple validation +// errors returned by DeleteStockByIDRequest.ValidateAll() if the designated +// constraints aren't met. +type DeleteStockByIDRequestMultiError []error + +// Error returns a concatenation of all the error messages it wraps. +func (m DeleteStockByIDRequestMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m DeleteStockByIDRequestMultiError) AllErrors() []error { return m } + +// DeleteStockByIDRequestValidationError is the validation error returned by +// DeleteStockByIDRequest.Validate if the designated constraints aren't met. +type DeleteStockByIDRequestValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e DeleteStockByIDRequestValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e DeleteStockByIDRequestValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e DeleteStockByIDRequestValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e DeleteStockByIDRequestValidationError) Key() bool { return e.key } + +// ErrorName returns error name. +func (e DeleteStockByIDRequestValidationError) ErrorName() string { + return "DeleteStockByIDRequestValidationError" +} + +// Error satisfies the builtin error interface +func (e DeleteStockByIDRequestValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sDeleteStockByIDRequest.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = DeleteStockByIDRequestValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = DeleteStockByIDRequestValidationError{} + +// Validate checks the field values on DeleteStockByIDReply with the rules +// defined in the proto definition for this message. If any rules are +// violated, the first error encountered is returned, or nil if there are no violations. +func (m *DeleteStockByIDReply) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on DeleteStockByIDReply with the rules +// defined in the proto definition for this message. If any rules are +// violated, the result is a list of violation errors wrapped in +// DeleteStockByIDReplyMultiError, or nil if none found. 
+func (m *DeleteStockByIDReply) ValidateAll() error { + return m.validate(true) +} + +func (m *DeleteStockByIDReply) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + if len(errors) > 0 { + return DeleteStockByIDReplyMultiError(errors) + } + + return nil +} + +// DeleteStockByIDReplyMultiError is an error wrapping multiple validation +// errors returned by DeleteStockByIDReply.ValidateAll() if the designated +// constraints aren't met. +type DeleteStockByIDReplyMultiError []error + +// Error returns a concatenation of all the error messages it wraps. +func (m DeleteStockByIDReplyMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m DeleteStockByIDReplyMultiError) AllErrors() []error { return m } + +// DeleteStockByIDReplyValidationError is the validation error returned by +// DeleteStockByIDReply.Validate if the designated constraints aren't met. +type DeleteStockByIDReplyValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e DeleteStockByIDReplyValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e DeleteStockByIDReplyValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e DeleteStockByIDReplyValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e DeleteStockByIDReplyValidationError) Key() bool { return e.key } + +// ErrorName returns error name. +func (e DeleteStockByIDReplyValidationError) ErrorName() string { + return "DeleteStockByIDReplyValidationError" +} + +// Error satisfies the builtin error interface +func (e DeleteStockByIDReplyValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sDeleteStockByIDReply.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = DeleteStockByIDReplyValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = DeleteStockByIDReplyValidationError{} + +// Validate checks the field values on UpdateStockByIDRequest with the rules +// defined in the proto definition for this message. If any rules are +// violated, the first error encountered is returned, or nil if there are no violations. +func (m *UpdateStockByIDRequest) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on UpdateStockByIDRequest with the rules +// defined in the proto definition for this message. If any rules are +// violated, the result is a list of violation errors wrapped in +// UpdateStockByIDRequestMultiError, or nil if none found. 
+func (m *UpdateStockByIDRequest) ValidateAll() error { + return m.validate(true) +} + +func (m *UpdateStockByIDRequest) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + // no validation rules for Id + + // no validation rules for ProductID + + // no validation rules for Stock + + if len(errors) > 0 { + return UpdateStockByIDRequestMultiError(errors) + } + + return nil +} + +// UpdateStockByIDRequestMultiError is an error wrapping multiple validation +// errors returned by UpdateStockByIDRequest.ValidateAll() if the designated +// constraints aren't met. +type UpdateStockByIDRequestMultiError []error + +// Error returns a concatenation of all the error messages it wraps. +func (m UpdateStockByIDRequestMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m UpdateStockByIDRequestMultiError) AllErrors() []error { return m } + +// UpdateStockByIDRequestValidationError is the validation error returned by +// UpdateStockByIDRequest.Validate if the designated constraints aren't met. +type UpdateStockByIDRequestValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e UpdateStockByIDRequestValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e UpdateStockByIDRequestValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e UpdateStockByIDRequestValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e UpdateStockByIDRequestValidationError) Key() bool { return e.key } + +// ErrorName returns error name. +func (e UpdateStockByIDRequestValidationError) ErrorName() string { + return "UpdateStockByIDRequestValidationError" +} + +// Error satisfies the builtin error interface +func (e UpdateStockByIDRequestValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sUpdateStockByIDRequest.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = UpdateStockByIDRequestValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = UpdateStockByIDRequestValidationError{} + +// Validate checks the field values on UpdateStockByIDReply with the rules +// defined in the proto definition for this message. If any rules are +// violated, the first error encountered is returned, or nil if there are no violations. +func (m *UpdateStockByIDReply) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on UpdateStockByIDReply with the rules +// defined in the proto definition for this message. If any rules are +// violated, the result is a list of violation errors wrapped in +// UpdateStockByIDReplyMultiError, or nil if none found. 
+func (m *UpdateStockByIDReply) ValidateAll() error { + return m.validate(true) +} + +func (m *UpdateStockByIDReply) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + if len(errors) > 0 { + return UpdateStockByIDReplyMultiError(errors) + } + + return nil +} + +// UpdateStockByIDReplyMultiError is an error wrapping multiple validation +// errors returned by UpdateStockByIDReply.ValidateAll() if the designated +// constraints aren't met. +type UpdateStockByIDReplyMultiError []error + +// Error returns a concatenation of all the error messages it wraps. +func (m UpdateStockByIDReplyMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m UpdateStockByIDReplyMultiError) AllErrors() []error { return m } + +// UpdateStockByIDReplyValidationError is the validation error returned by +// UpdateStockByIDReply.Validate if the designated constraints aren't met. +type UpdateStockByIDReplyValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e UpdateStockByIDReplyValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e UpdateStockByIDReplyValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e UpdateStockByIDReplyValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e UpdateStockByIDReplyValidationError) Key() bool { return e.key } + +// ErrorName returns error name. +func (e UpdateStockByIDReplyValidationError) ErrorName() string { + return "UpdateStockByIDReplyValidationError" +} + +// Error satisfies the builtin error interface +func (e UpdateStockByIDReplyValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sUpdateStockByIDReply.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = UpdateStockByIDReplyValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = UpdateStockByIDReplyValidationError{} + +// Validate checks the field values on Stock with the rules defined in the +// proto definition for this message. If any rules are violated, the first +// error encountered is returned, or nil if there are no violations. +func (m *Stock) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on Stock with the rules defined in the +// proto definition for this message. If any rules are violated, the result is +// a list of violation errors wrapped in StockMultiError, or nil if none found. +func (m *Stock) ValidateAll() error { + return m.validate(true) +} + +func (m *Stock) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + // no validation rules for Id + + // no validation rules for ProductID + + // no validation rules for Stock + + // no validation rules for CreatedAt + + // no validation rules for UpdatedAt + + if len(errors) > 0 { + return StockMultiError(errors) + } + + return nil +} + +// StockMultiError is an error wrapping multiple validation errors returned by +// Stock.ValidateAll() if the designated constraints aren't met. 
+type StockMultiError []error + +// Error returns a concatenation of all the error messages it wraps. +func (m StockMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m StockMultiError) AllErrors() []error { return m } + +// StockValidationError is the validation error returned by Stock.Validate if +// the designated constraints aren't met. +type StockValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e StockValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e StockValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e StockValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e StockValidationError) Key() bool { return e.key } + +// ErrorName returns error name. +func (e StockValidationError) ErrorName() string { return "StockValidationError" } + +// Error satisfies the builtin error interface +func (e StockValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sStock.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = StockValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = StockValidationError{} + +// Validate checks the field values on GetStockByIDRequest with the rules +// defined in the proto definition for this message. If any rules are +// violated, the first error encountered is returned, or nil if there are no violations. +func (m *GetStockByIDRequest) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on GetStockByIDRequest with the rules +// defined in the proto definition for this message. If any rules are +// violated, the result is a list of violation errors wrapped in +// GetStockByIDRequestMultiError, or nil if none found. +func (m *GetStockByIDRequest) ValidateAll() error { + return m.validate(true) +} + +func (m *GetStockByIDRequest) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + if m.GetId() <= 0 { + err := GetStockByIDRequestValidationError{ + field: "Id", + reason: "value must be greater than 0", + } + if !all { + return err + } + errors = append(errors, err) + } + + if len(errors) > 0 { + return GetStockByIDRequestMultiError(errors) + } + + return nil +} + +// GetStockByIDRequestMultiError is an error wrapping multiple validation +// errors returned by GetStockByIDRequest.ValidateAll() if the designated +// constraints aren't met. +type GetStockByIDRequestMultiError []error + +// Error returns a concatenation of all the error messages it wraps. +func (m GetStockByIDRequestMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m GetStockByIDRequestMultiError) AllErrors() []error { return m } + +// GetStockByIDRequestValidationError is the validation error returned by +// GetStockByIDRequest.Validate if the designated constraints aren't met. 
+type GetStockByIDRequestValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e GetStockByIDRequestValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e GetStockByIDRequestValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e GetStockByIDRequestValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e GetStockByIDRequestValidationError) Key() bool { return e.key } + +// ErrorName returns error name. +func (e GetStockByIDRequestValidationError) ErrorName() string { + return "GetStockByIDRequestValidationError" +} + +// Error satisfies the builtin error interface +func (e GetStockByIDRequestValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sGetStockByIDRequest.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = GetStockByIDRequestValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = GetStockByIDRequestValidationError{} + +// Validate checks the field values on GetStockByIDReply with the rules defined +// in the proto definition for this message. If any rules are violated, the +// first error encountered is returned, or nil if there are no violations. +func (m *GetStockByIDReply) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on GetStockByIDReply with the rules +// defined in the proto definition for this message. If any rules are +// violated, the result is a list of violation errors wrapped in +// GetStockByIDReplyMultiError, or nil if none found. +func (m *GetStockByIDReply) ValidateAll() error { + return m.validate(true) +} + +func (m *GetStockByIDReply) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + if all { + switch v := interface{}(m.GetStock()).(type) { + case interface{ ValidateAll() error }: + if err := v.ValidateAll(); err != nil { + errors = append(errors, GetStockByIDReplyValidationError{ + field: "Stock", + reason: "embedded message failed validation", + cause: err, + }) + } + case interface{ Validate() error }: + if err := v.Validate(); err != nil { + errors = append(errors, GetStockByIDReplyValidationError{ + field: "Stock", + reason: "embedded message failed validation", + cause: err, + }) + } + } + } else if v, ok := interface{}(m.GetStock()).(interface{ Validate() error }); ok { + if err := v.Validate(); err != nil { + return GetStockByIDReplyValidationError{ + field: "Stock", + reason: "embedded message failed validation", + cause: err, + } + } + } + + if len(errors) > 0 { + return GetStockByIDReplyMultiError(errors) + } + + return nil +} + +// GetStockByIDReplyMultiError is an error wrapping multiple validation errors +// returned by GetStockByIDReply.ValidateAll() if the designated constraints +// aren't met. +type GetStockByIDReplyMultiError []error + +// Error returns a concatenation of all the error messages it wraps. +func (m GetStockByIDReplyMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. 
+func (m GetStockByIDReplyMultiError) AllErrors() []error { return m } + +// GetStockByIDReplyValidationError is the validation error returned by +// GetStockByIDReply.Validate if the designated constraints aren't met. +type GetStockByIDReplyValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e GetStockByIDReplyValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e GetStockByIDReplyValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e GetStockByIDReplyValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e GetStockByIDReplyValidationError) Key() bool { return e.key } + +// ErrorName returns error name. +func (e GetStockByIDReplyValidationError) ErrorName() string { + return "GetStockByIDReplyValidationError" +} + +// Error satisfies the builtin error interface +func (e GetStockByIDReplyValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sGetStockByIDReply.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = GetStockByIDReplyValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = GetStockByIDReplyValidationError{} + +// Validate checks the field values on ListStockRequest with the rules defined +// in the proto definition for this message. If any rules are violated, the +// first error encountered is returned, or nil if there are no violations. +func (m *ListStockRequest) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on ListStockRequest with the rules +// defined in the proto definition for this message. If any rules are +// violated, the result is a list of violation errors wrapped in +// ListStockRequestMultiError, or nil if none found. +func (m *ListStockRequest) ValidateAll() error { + return m.validate(true) +} + +func (m *ListStockRequest) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + if all { + switch v := interface{}(m.GetParams()).(type) { + case interface{ ValidateAll() error }: + if err := v.ValidateAll(); err != nil { + errors = append(errors, ListStockRequestValidationError{ + field: "Params", + reason: "embedded message failed validation", + cause: err, + }) + } + case interface{ Validate() error }: + if err := v.Validate(); err != nil { + errors = append(errors, ListStockRequestValidationError{ + field: "Params", + reason: "embedded message failed validation", + cause: err, + }) + } + } + } else if v, ok := interface{}(m.GetParams()).(interface{ Validate() error }); ok { + if err := v.Validate(); err != nil { + return ListStockRequestValidationError{ + field: "Params", + reason: "embedded message failed validation", + cause: err, + } + } + } + + if len(errors) > 0 { + return ListStockRequestMultiError(errors) + } + + return nil +} + +// ListStockRequestMultiError is an error wrapping multiple validation errors +// returned by ListStockRequest.ValidateAll() if the designated constraints +// aren't met. +type ListStockRequestMultiError []error + +// Error returns a concatenation of all the error messages it wraps. 
+func (m ListStockRequestMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m ListStockRequestMultiError) AllErrors() []error { return m } + +// ListStockRequestValidationError is the validation error returned by +// ListStockRequest.Validate if the designated constraints aren't met. +type ListStockRequestValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e ListStockRequestValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e ListStockRequestValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e ListStockRequestValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e ListStockRequestValidationError) Key() bool { return e.key } + +// ErrorName returns error name. +func (e ListStockRequestValidationError) ErrorName() string { return "ListStockRequestValidationError" } + +// Error satisfies the builtin error interface +func (e ListStockRequestValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sListStockRequest.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = ListStockRequestValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = ListStockRequestValidationError{} + +// Validate checks the field values on ListStockReply with the rules defined in +// the proto definition for this message. If any rules are violated, the first +// error encountered is returned, or nil if there are no violations. +func (m *ListStockReply) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on ListStockReply with the rules defined +// in the proto definition for this message. If any rules are violated, the +// result is a list of violation errors wrapped in ListStockReplyMultiError, +// or nil if none found. 
+func (m *ListStockReply) ValidateAll() error { + return m.validate(true) +} + +func (m *ListStockReply) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + // no validation rules for Total + + for idx, item := range m.GetStocks() { + _, _ = idx, item + + if all { + switch v := interface{}(item).(type) { + case interface{ ValidateAll() error }: + if err := v.ValidateAll(); err != nil { + errors = append(errors, ListStockReplyValidationError{ + field: fmt.Sprintf("Stocks[%v]", idx), + reason: "embedded message failed validation", + cause: err, + }) + } + case interface{ Validate() error }: + if err := v.Validate(); err != nil { + errors = append(errors, ListStockReplyValidationError{ + field: fmt.Sprintf("Stocks[%v]", idx), + reason: "embedded message failed validation", + cause: err, + }) + } + } + } else if v, ok := interface{}(item).(interface{ Validate() error }); ok { + if err := v.Validate(); err != nil { + return ListStockReplyValidationError{ + field: fmt.Sprintf("Stocks[%v]", idx), + reason: "embedded message failed validation", + cause: err, + } + } + } + + } + + if len(errors) > 0 { + return ListStockReplyMultiError(errors) + } + + return nil +} + +// ListStockReplyMultiError is an error wrapping multiple validation errors +// returned by ListStockReply.ValidateAll() if the designated constraints +// aren't met. +type ListStockReplyMultiError []error + +// Error returns a concatenation of all the error messages it wraps. +func (m ListStockReplyMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m ListStockReplyMultiError) AllErrors() []error { return m } + +// ListStockReplyValidationError is the validation error returned by +// ListStockReply.Validate if the designated constraints aren't met. +type ListStockReplyValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e ListStockReplyValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e ListStockReplyValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e ListStockReplyValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e ListStockReplyValidationError) Key() bool { return e.key } + +// ErrorName returns error name. 
+func (e ListStockReplyValidationError) ErrorName() string { return "ListStockReplyValidationError" } + +// Error satisfies the builtin error interface +func (e ListStockReplyValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sListStockReply.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = ListStockReplyValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = ListStockReplyValidationError{} diff --git a/_13_sponge-dtm-cache/http/api/stock/v1/stock.proto b/_13_sponge-dtm-cache/http/api/stock/v1/stock.proto new file mode 100644 index 0000000..bbdaf79 --- /dev/null +++ b/_13_sponge-dtm-cache/http/api/stock/v1/stock.proto @@ -0,0 +1,198 @@ +syntax = "proto3"; + +package api.stock.v1; + +import "api/types/types.proto"; +import "google/api/annotations.proto"; +import "protoc-gen-openapiv2/options/annotations.proto"; +import "tagger/tagger.proto"; +import "validate/validate.proto"; + +option go_package = "stock/api/stock/v1;v1"; + +// Default settings for generating swagger documents +// NOTE: because json does not support 64 bits, the int64 and uint64 types under *.swagger.json are automatically converted to string types +// Reference https://github.com/grpc-ecosystem/grpc-gateway/blob/db7fbefff7c04877cdb32e16d4a248a024428207/examples/internal/proto/examplepb/a_bit_of_everything.proto +option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_swagger) = { + host: "localhost:8080" + base_path: "" + info: { + title: "stock api docs"; + version: "2.0"; + } + schemes: HTTP; + schemes: HTTPS; + consumes: "application/json"; + produces: "application/json"; + security_definitions: { + security: { + key: "BearerAuth"; + value: { + type: TYPE_API_KEY; + in: IN_HEADER; + name: "Authorization"; + description: "Type Bearer your-jwt-token to Value"; + } + } + } +}; + +service stock { + // create stock + rpc Create(CreateStockRequest) returns (CreateStockReply) { + option (google.api.http) = { + post: "/api/v1/stock" + body: "*" + }; + option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { + summary: "create stock", + description: "submit information to create stock", + //security: { + // security_requirement: { + // key: "BearerAuth"; + // value: {} + // } + //} + }; + } + + // delete stock by id + rpc DeleteByID(DeleteStockByIDRequest) returns (DeleteStockByIDReply) { + option (google.api.http) = { + delete: "/api/v1/stock/{id}" + }; + option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { + summary: "delete stock", + description: "delete stock by id", + //security: { + // security_requirement: { + // key: "BearerAuth"; + // value: {} + // } + //} + }; + } + + // update stock by id + rpc UpdateByID(UpdateStockByIDRequest) returns (UpdateStockByIDReply) { + option (google.api.http) = { + put: "/api/v1/stock/{id}" + body: "*" + }; + option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { + summary: "update stock", + description: "update stock by id", + //security: { + // security_requirement: { + // key: "BearerAuth"; + // value: {} + // } + //} + }; + } + + // get stock by id + rpc GetByID(GetStockByIDRequest) returns (GetStockByIDReply) { + option (google.api.http) = { + get: "/api/v1/stock/{id}" + }; + option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { + summary: "get stock 
detail", + description: "get stock detail by id", + //security: { + // security_requirement: { + // key: "BearerAuth"; + // value: {} + // } + //} + }; + } + + // list of stock by query parameters + rpc List(ListStockRequest) returns (ListStockReply) { + option (google.api.http) = { + post: "/api/v1/stock/list" + body: "*" + }; + option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { + summary: "list of stocks by parameters", + description: "list of stocks by paging and conditions", + //security: { + // security_requirement: { + // key: "BearerAuth"; + // value: {} + // } + //} + }; + } +} + + +// Some notes on defining fields under message: +// (1) Fill in the validate rules https://github.com/envoyproxy/protoc-gen-validate#constraint-rules +// (2) Suggest using camel hump naming for message field names, and for names ending in 'id', +// use xxxID naming format, such as userID, orderID, etc. +// (3) When using the protoc-gen-openapiv2 plugin, if the defined fields are snake case, +// you must add annotations for snake case names, such as string fieldName = 1 [json_name = "field_name"], +// to ensure that the front end and back end JSON naming is consistent. +// (4) If the route contains the path parameter, such as /api/v1/stock/{id}, the defined +// message must contain the name of the path parameter and the name should be +// added with a new tag, such as int64 id = 1 [(tagger.tags) = "uri:\"id\""]; +// (5) If the request url is followed by a query parameter, such as /api/v1/getStock?name=Tom, +// a form tag must be added when defining the query parameter in the message, +// such as string name = 1 [(tagger.tags) = "form:\"name\""]. + + +message CreateStockRequest { + uint64 productID = 1; // 商品id + uint32 stock = 2; // 库存 +} + +message CreateStockReply { + uint64 id = 1; +} + +message DeleteStockByIDRequest { + uint64 id =1 [(validate.rules).uint64.gt = 0, (tagger.tags) = "uri:\"id\""]; +} + +message DeleteStockByIDReply { + +} + +message UpdateStockByIDRequest { + uint64 id = 1 [(tagger.tags) = "uri:\"id\"" ]; + uint64 productID = 2; // 商品id + uint32 stock = 3; // 库存 +} + +message UpdateStockByIDReply { + +} + +message Stock { + uint64 id = 1; + uint64 productID = 2; // 商品id + uint32 stock = 3; // 库存 + string createdAt = 4; + string updatedAt = 5; +} + +message GetStockByIDRequest { + uint64 id =1 [(validate.rules).uint64.gt = 0, (tagger.tags) = "uri:\"id\"" ]; +} + +message GetStockByIDReply { + Stock stock = 1; +} + +message ListStockRequest { + api.types.Params params = 1; +} + +message ListStockReply { + int64 total = 1; + repeated Stock stocks = 2; +} + + diff --git a/_13_sponge-dtm-cache/http/api/stock/v1/stock_router.pb.go b/_13_sponge-dtm-cache/http/api/stock/v1/stock_router.pb.go new file mode 100644 index 0000000..ded0f0f --- /dev/null +++ b/_13_sponge-dtm-cache/http/api/stock/v1/stock_router.pb.go @@ -0,0 +1,320 @@ +// Code generated by https://github.com/zhufuyi/sponge, DO NOT EDIT. 
+ +package v1 + +import ( + "context" + "errors" + "strings" + + "github.com/gin-gonic/gin" + "go.uber.org/zap" + + "github.com/zhufuyi/sponge/pkg/errcode" + "github.com/zhufuyi/sponge/pkg/gin/middleware" +) + +type StockLogicer interface { + Create(ctx context.Context, req *CreateStockRequest) (*CreateStockReply, error) + DeleteByID(ctx context.Context, req *DeleteStockByIDRequest) (*DeleteStockByIDReply, error) + UpdateByID(ctx context.Context, req *UpdateStockByIDRequest) (*UpdateStockByIDReply, error) + GetByID(ctx context.Context, req *GetStockByIDRequest) (*GetStockByIDReply, error) + List(ctx context.Context, req *ListStockRequest) (*ListStockReply, error) +} + +type StockOption func(*stockOptions) + +type stockOptions struct { + isFromRPC bool + responser errcode.Responser + zapLog *zap.Logger + httpErrors []*errcode.Error + rpcStatus []*errcode.RPCStatus + wrapCtxFn func(c *gin.Context) context.Context +} + +func (o *stockOptions) apply(opts ...StockOption) { + for _, opt := range opts { + opt(o) + } +} + +func WithStockHTTPResponse() StockOption { + return func(o *stockOptions) { + o.isFromRPC = false + } +} + +func WithStockRPCResponse() StockOption { + return func(o *stockOptions) { + o.isFromRPC = true + } +} + +func WithStockResponser(responser errcode.Responser) StockOption { + return func(o *stockOptions) { + o.responser = responser + } +} + +func WithStockLogger(zapLog *zap.Logger) StockOption { + return func(o *stockOptions) { + o.zapLog = zapLog + } +} + +func WithStockErrorToHTTPCode(e ...*errcode.Error) StockOption { + return func(o *stockOptions) { + o.httpErrors = e + } +} + +func WithStockRPCStatusToHTTPCode(s ...*errcode.RPCStatus) StockOption { + return func(o *stockOptions) { + o.rpcStatus = s + } +} + +func WithStockWrapCtx(wrapCtxFn func(c *gin.Context) context.Context) StockOption { + return func(o *stockOptions) { + o.wrapCtxFn = wrapCtxFn + } +} + +func RegisterStockRouter( + iRouter gin.IRouter, + groupPathMiddlewares map[string][]gin.HandlerFunc, + singlePathMiddlewares map[string][]gin.HandlerFunc, + iLogic StockLogicer, + opts ...StockOption) { + + o := &stockOptions{} + o.apply(opts...) + + if o.responser == nil { + o.responser = errcode.NewResponser(o.isFromRPC, o.httpErrors, o.rpcStatus) + } + if o.zapLog == nil { + o.zapLog, _ = zap.NewProduction() + } + + r := &stockRouter{ + iRouter: iRouter, + groupPathMiddlewares: groupPathMiddlewares, + singlePathMiddlewares: singlePathMiddlewares, + iLogic: iLogic, + iResponse: o.responser, + zapLog: o.zapLog, + wrapCtxFn: o.wrapCtxFn, + } + r.register() +} + +type stockRouter struct { + iRouter gin.IRouter + groupPathMiddlewares map[string][]gin.HandlerFunc + singlePathMiddlewares map[string][]gin.HandlerFunc + iLogic StockLogicer + iResponse errcode.Responser + zapLog *zap.Logger + wrapCtxFn func(c *gin.Context) context.Context +} + +func (r *stockRouter) register() { + r.iRouter.Handle("POST", "/api/v1/stock", r.withMiddleware("POST", "/api/v1/stock", r.Create_0)...) + r.iRouter.Handle("DELETE", "/api/v1/stock/:id", r.withMiddleware("DELETE", "/api/v1/stock/:id", r.DeleteByID_0)...) + r.iRouter.Handle("PUT", "/api/v1/stock/:id", r.withMiddleware("PUT", "/api/v1/stock/:id", r.UpdateByID_0)...) + r.iRouter.Handle("GET", "/api/v1/stock/:id", r.withMiddleware("GET", "/api/v1/stock/:id", r.GetByID_0)...) + r.iRouter.Handle("POST", "/api/v1/stock/list", r.withMiddleware("POST", "/api/v1/stock/list", r.List_0)...) 
+ +} + +func (r *stockRouter) withMiddleware(method string, path string, fn gin.HandlerFunc) []gin.HandlerFunc { + handlerFns := []gin.HandlerFunc{} + + // determine if a route group is hit or miss, left prefix rule + for groupPath, fns := range r.groupPathMiddlewares { + if groupPath == "" || groupPath == "/" { + handlerFns = append(handlerFns, fns...) + continue + } + size := len(groupPath) + if len(path) < size { + continue + } + if groupPath == path[:size] { + handlerFns = append(handlerFns, fns...) + } + } + + // determine if a single route has been hit + key := strings.ToUpper(method) + "->" + path + if fns, ok := r.singlePathMiddlewares[key]; ok { + handlerFns = append(handlerFns, fns...) + } + + return append(handlerFns, fn) +} + +func (r *stockRouter) Create_0(c *gin.Context) { + req := &CreateStockRequest{} + var err error + + if err = c.ShouldBindJSON(req); err != nil { + r.zapLog.Warn("ShouldBindJSON error", zap.Error(err), middleware.GCtxRequestIDField(c)) + r.iResponse.ParamError(c, err) + return + } + + var ctx context.Context + if r.wrapCtxFn != nil { + ctx = r.wrapCtxFn(c) + } else { + ctx = middleware.WrapCtx(c) + } + + out, err := r.iLogic.Create(ctx, req) + if err != nil { + if errors.Is(err, errcode.SkipResponse) { + return + } + r.iResponse.Error(c, err) + return + } + + r.iResponse.Success(c, out) +} + +func (r *stockRouter) DeleteByID_0(c *gin.Context) { + req := &DeleteStockByIDRequest{} + var err error + + if err = c.ShouldBindUri(req); err != nil { + r.zapLog.Warn("ShouldBindUri error", zap.Error(err), middleware.GCtxRequestIDField(c)) + r.iResponse.ParamError(c, err) + return + } + + if err = c.ShouldBindQuery(req); err != nil { + r.zapLog.Warn("ShouldBindQuery error", zap.Error(err), middleware.GCtxRequestIDField(c)) + r.iResponse.ParamError(c, err) + return + } + + var ctx context.Context + if r.wrapCtxFn != nil { + ctx = r.wrapCtxFn(c) + } else { + ctx = middleware.WrapCtx(c) + } + + out, err := r.iLogic.DeleteByID(ctx, req) + if err != nil { + if errors.Is(err, errcode.SkipResponse) { + return + } + r.iResponse.Error(c, err) + return + } + + r.iResponse.Success(c, out) +} + +func (r *stockRouter) UpdateByID_0(c *gin.Context) { + req := &UpdateStockByIDRequest{} + var err error + + if err = c.ShouldBindUri(req); err != nil { + r.zapLog.Warn("ShouldBindUri error", zap.Error(err), middleware.GCtxRequestIDField(c)) + r.iResponse.ParamError(c, err) + return + } + + if err = c.ShouldBindJSON(req); err != nil { + r.zapLog.Warn("ShouldBindJSON error", zap.Error(err), middleware.GCtxRequestIDField(c)) + r.iResponse.ParamError(c, err) + return + } + + var ctx context.Context + if r.wrapCtxFn != nil { + ctx = r.wrapCtxFn(c) + } else { + ctx = middleware.WrapCtx(c) + } + + out, err := r.iLogic.UpdateByID(ctx, req) + if err != nil { + if errors.Is(err, errcode.SkipResponse) { + return + } + r.iResponse.Error(c, err) + return + } + + r.iResponse.Success(c, out) +} + +func (r *stockRouter) GetByID_0(c *gin.Context) { + req := &GetStockByIDRequest{} + var err error + + if err = c.ShouldBindUri(req); err != nil { + r.zapLog.Warn("ShouldBindUri error", zap.Error(err), middleware.GCtxRequestIDField(c)) + r.iResponse.ParamError(c, err) + return + } + + if err = c.ShouldBindQuery(req); err != nil { + r.zapLog.Warn("ShouldBindQuery error", zap.Error(err), middleware.GCtxRequestIDField(c)) + r.iResponse.ParamError(c, err) + return + } + + var ctx context.Context + if r.wrapCtxFn != nil { + ctx = r.wrapCtxFn(c) + } else { + ctx = middleware.WrapCtx(c) + } + + out, err := 
r.iLogic.GetByID(ctx, req) + if err != nil { + if errors.Is(err, errcode.SkipResponse) { + return + } + r.iResponse.Error(c, err) + return + } + + r.iResponse.Success(c, out) +} + +func (r *stockRouter) List_0(c *gin.Context) { + req := &ListStockRequest{} + var err error + + if err = c.ShouldBindJSON(req); err != nil { + r.zapLog.Warn("ShouldBindJSON error", zap.Error(err), middleware.GCtxRequestIDField(c)) + r.iResponse.ParamError(c, err) + return + } + + var ctx context.Context + if r.wrapCtxFn != nil { + ctx = r.wrapCtxFn(c) + } else { + ctx = middleware.WrapCtx(c) + } + + out, err := r.iLogic.List(ctx, req) + if err != nil { + if errors.Is(err, errcode.SkipResponse) { + return + } + r.iResponse.Error(c, err) + return + } + + r.iResponse.Success(c, out) +} diff --git a/_13_sponge-dtm-cache/http/api/stock/v1/strong.pb.go b/_13_sponge-dtm-cache/http/api/stock/v1/strong.pb.go new file mode 100644 index 0000000..98462bd --- /dev/null +++ b/_13_sponge-dtm-cache/http/api/stock/v1/strong.pb.go @@ -0,0 +1,379 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// versions: +// protoc-gen-go v1.28.0 +// protoc v4.25.2 +// source: api/stock/v1/strong.proto + +package v1 + +import ( + protoreflect "google.golang.org/protobuf/reflect/protoreflect" + protoimpl "google.golang.org/protobuf/runtime/protoimpl" + reflect "reflect" + sync "sync" +) + +const ( + // Verify that this generated code is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) + // Verify that runtime/protoimpl is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) +) + +type UpdateStrongRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Id uint64 `protobuf:"varint,1,opt,name=id,proto3" json:"id" uri:"id"` + Stock uint32 `protobuf:"varint,2,opt,name=stock,proto3" json:"stock"` // 库存数量 +} + +func (x *UpdateStrongRequest) Reset() { + *x = UpdateStrongRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_api_stock_v1_strong_proto_msgTypes[0] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *UpdateStrongRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*UpdateStrongRequest) ProtoMessage() {} + +func (x *UpdateStrongRequest) ProtoReflect() protoreflect.Message { + mi := &file_api_stock_v1_strong_proto_msgTypes[0] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use UpdateStrongRequest.ProtoReflect.Descriptor instead. 
+func (*UpdateStrongRequest) Descriptor() ([]byte, []int) { + return file_api_stock_v1_strong_proto_rawDescGZIP(), []int{0} +} + +func (x *UpdateStrongRequest) GetId() uint64 { + if x != nil { + return x.Id + } + return 0 +} + +func (x *UpdateStrongRequest) GetStock() uint32 { + if x != nil { + return x.Stock + } + return 0 +} + +type UpdateStrongRequestReply struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields +} + +func (x *UpdateStrongRequestReply) Reset() { + *x = UpdateStrongRequestReply{} + if protoimpl.UnsafeEnabled { + mi := &file_api_stock_v1_strong_proto_msgTypes[1] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *UpdateStrongRequestReply) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*UpdateStrongRequestReply) ProtoMessage() {} + +func (x *UpdateStrongRequestReply) ProtoReflect() protoreflect.Message { + mi := &file_api_stock_v1_strong_proto_msgTypes[1] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use UpdateStrongRequestReply.ProtoReflect.Descriptor instead. +func (*UpdateStrongRequestReply) Descriptor() ([]byte, []int) { + return file_api_stock_v1_strong_proto_rawDescGZIP(), []int{1} +} + +type QueryStrongRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Id uint64 `protobuf:"varint,1,opt,name=id,proto3" json:"id" uri:"id"` +} + +func (x *QueryStrongRequest) Reset() { + *x = QueryStrongRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_api_stock_v1_strong_proto_msgTypes[2] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *QueryStrongRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*QueryStrongRequest) ProtoMessage() {} + +func (x *QueryStrongRequest) ProtoReflect() protoreflect.Message { + mi := &file_api_stock_v1_strong_proto_msgTypes[2] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use QueryStrongRequest.ProtoReflect.Descriptor instead. 
+func (*QueryStrongRequest) Descriptor() ([]byte, []int) { + return file_api_stock_v1_strong_proto_rawDescGZIP(), []int{2} +} + +func (x *QueryStrongRequest) GetId() uint64 { + if x != nil { + return x.Id + } + return 0 +} + +type QueryStrongReply struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Stock uint32 `protobuf:"varint,1,opt,name=stock,proto3" json:"stock"` // 库存数量 +} + +func (x *QueryStrongReply) Reset() { + *x = QueryStrongReply{} + if protoimpl.UnsafeEnabled { + mi := &file_api_stock_v1_strong_proto_msgTypes[3] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *QueryStrongReply) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*QueryStrongReply) ProtoMessage() {} + +func (x *QueryStrongReply) ProtoReflect() protoreflect.Message { + mi := &file_api_stock_v1_strong_proto_msgTypes[3] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use QueryStrongReply.ProtoReflect.Descriptor instead. +func (*QueryStrongReply) Descriptor() ([]byte, []int) { + return file_api_stock_v1_strong_proto_rawDescGZIP(), []int{3} +} + +func (x *QueryStrongReply) GetStock() uint32 { + if x != nil { + return x.Stock + } + return 0 +} + +var File_api_stock_v1_strong_proto protoreflect.FileDescriptor + +var file_api_stock_v1_strong_proto_rawDesc = []byte{ + 0x0a, 0x19, 0x61, 0x70, 0x69, 0x2f, 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x2f, 0x76, 0x31, 0x2f, 0x73, + 0x74, 0x72, 0x6f, 0x6e, 0x67, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x0c, 0x61, 0x70, 0x69, + 0x2e, 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x2e, 0x76, 0x31, 0x1a, 0x1c, 0x67, 0x6f, 0x6f, 0x67, 0x6c, + 0x65, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x61, 0x6e, 0x6e, 0x6f, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, + 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x2d, + 0x67, 0x65, 0x6e, 0x2d, 0x6f, 0x70, 0x65, 0x6e, 0x61, 0x70, 0x69, 0x76, 0x32, 0x2f, 0x6f, 0x70, + 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2f, 0x61, 0x6e, 0x6e, 0x6f, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, + 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x13, 0x74, 0x61, 0x67, 0x67, 0x65, 0x72, 0x2f, + 0x74, 0x61, 0x67, 0x67, 0x65, 0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x17, 0x76, 0x61, + 0x6c, 0x69, 0x64, 0x61, 0x74, 0x65, 0x2f, 0x76, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x65, 0x2e, + 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, 0x5a, 0x0a, 0x13, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x53, + 0x74, 0x72, 0x6f, 0x6e, 0x67, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x24, 0x0a, 0x02, + 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x42, 0x14, 0xfa, 0x42, 0x04, 0x32, 0x02, 0x20, + 0x00, 0x9a, 0x84, 0x9e, 0x03, 0x08, 0x75, 0x72, 0x69, 0x3a, 0x22, 0x69, 0x64, 0x22, 0x52, 0x02, + 0x69, 0x64, 0x12, 0x1d, 0x0a, 0x05, 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x18, 0x02, 0x20, 0x01, 0x28, + 0x0d, 0x42, 0x07, 0xfa, 0x42, 0x04, 0x2a, 0x02, 0x20, 0x00, 0x52, 0x05, 0x73, 0x74, 0x6f, 0x63, + 0x6b, 0x22, 0x1a, 0x0a, 0x18, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x53, 0x74, 0x72, 0x6f, 0x6e, + 0x67, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0x3a, 0x0a, + 0x12, 0x51, 0x75, 0x65, 0x72, 0x79, 0x53, 0x74, 0x72, 0x6f, 0x6e, 0x67, 0x52, 0x65, 0x71, 0x75, + 0x65, 0x73, 0x74, 0x12, 0x24, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x42, + 0x14, 0xfa, 0x42, 0x04, 
0x32, 0x02, 0x20, 0x00, 0x9a, 0x84, 0x9e, 0x03, 0x08, 0x75, 0x72, 0x69, + 0x3a, 0x22, 0x69, 0x64, 0x22, 0x52, 0x02, 0x69, 0x64, 0x22, 0x28, 0x0a, 0x10, 0x51, 0x75, 0x65, + 0x72, 0x79, 0x53, 0x74, 0x72, 0x6f, 0x6e, 0x67, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x12, 0x14, 0x0a, + 0x05, 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x05, 0x73, 0x74, + 0x6f, 0x63, 0x6b, 0x32, 0xf7, 0x02, 0x0a, 0x06, 0x73, 0x74, 0x72, 0x6f, 0x6e, 0x67, 0x12, 0xc8, + 0x01, 0x0a, 0x06, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x12, 0x21, 0x2e, 0x61, 0x70, 0x69, 0x2e, + 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x2e, 0x76, 0x31, 0x2e, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x53, + 0x74, 0x72, 0x6f, 0x6e, 0x67, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x26, 0x2e, 0x61, + 0x70, 0x69, 0x2e, 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x2e, 0x76, 0x31, 0x2e, 0x55, 0x70, 0x64, 0x61, + 0x74, 0x65, 0x53, 0x74, 0x72, 0x6f, 0x6e, 0x67, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x52, + 0x65, 0x70, 0x6c, 0x79, 0x22, 0x73, 0x92, 0x41, 0x4c, 0x0a, 0x14, 0x63, 0x61, 0x73, 0x65, 0x20, + 0x33, 0x3a, 0x20, 0xe5, 0xbc, 0xba, 0xe4, 0xb8, 0x80, 0xe8, 0x87, 0xb4, 0xe6, 0x80, 0xa7, 0x12, + 0x0c, 0xe6, 0x9b, 0xb4, 0xe6, 0x96, 0xb0, 0xe6, 0x95, 0xb0, 0xe6, 0x8d, 0xae, 0x1a, 0x26, 0xe6, + 0x9b, 0xb4, 0xe6, 0x96, 0xb0, 0xe6, 0x95, 0xb0, 0xe6, 0x8d, 0xae, 0xef, 0xbc, 0x8c, 0x44, 0x42, + 0xe5, 0x92, 0x8c, 0xe7, 0xbc, 0x93, 0xe5, 0xad, 0x98, 0xe5, 0xbc, 0xba, 0xe4, 0xb8, 0x80, 0xe8, + 0x87, 0xb4, 0xe6, 0x80, 0xa7, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x1e, 0x1a, 0x19, 0x2f, 0x61, 0x70, + 0x69, 0x2f, 0x76, 0x31, 0x2f, 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x2f, 0x7b, 0x69, 0x64, 0x7d, 0x2f, + 0x73, 0x74, 0x72, 0x6f, 0x6e, 0x67, 0x3a, 0x01, 0x2a, 0x12, 0xa1, 0x01, 0x0a, 0x05, 0x51, 0x75, + 0x65, 0x72, 0x79, 0x12, 0x20, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x2e, + 0x76, 0x31, 0x2e, 0x51, 0x75, 0x65, 0x72, 0x79, 0x53, 0x74, 0x72, 0x6f, 0x6e, 0x67, 0x52, 0x65, + 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1e, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x73, 0x74, 0x6f, 0x63, + 0x6b, 0x2e, 0x76, 0x31, 0x2e, 0x51, 0x75, 0x65, 0x72, 0x79, 0x53, 0x74, 0x72, 0x6f, 0x6e, 0x67, + 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0x56, 0x92, 0x41, 0x32, 0x0a, 0x14, 0x63, 0x61, 0x73, 0x65, + 0x20, 0x33, 0x3a, 0x20, 0xe5, 0xbc, 0xba, 0xe4, 0xb8, 0x80, 0xe8, 0x87, 0xb4, 0xe6, 0x80, 0xa7, + 0x12, 0x0c, 0xe6, 0x9f, 0xa5, 0xe8, 0xaf, 0xa2, 0xe6, 0x95, 0xb0, 0xe6, 0x8d, 0xae, 0x1a, 0x0c, + 0xe6, 0x9f, 0xa5, 0xe8, 0xaf, 0xa2, 0xe6, 0x95, 0xb0, 0xe6, 0x8d, 0xae, 0x82, 0xd3, 0xe4, 0x93, + 0x02, 0x1b, 0x12, 0x19, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x76, 0x31, 0x2f, 0x73, 0x74, 0x6f, 0x63, + 0x6b, 0x2f, 0x7b, 0x69, 0x64, 0x7d, 0x2f, 0x73, 0x74, 0x72, 0x6f, 0x6e, 0x67, 0x42, 0xb4, 0x01, + 0x5a, 0x15, 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x73, 0x74, 0x6f, 0x63, + 0x6b, 0x2f, 0x76, 0x31, 0x3b, 0x76, 0x31, 0x92, 0x41, 0x99, 0x01, 0x12, 0x15, 0x0a, 0x0e, 0x73, + 0x74, 0x6f, 0x63, 0x6b, 0x20, 0x61, 0x70, 0x69, 0x20, 0x64, 0x6f, 0x63, 0x73, 0x32, 0x03, 0x32, + 0x2e, 0x30, 0x1a, 0x0e, 0x6c, 0x6f, 0x63, 0x61, 0x6c, 0x68, 0x6f, 0x73, 0x74, 0x3a, 0x38, 0x30, + 0x38, 0x30, 0x2a, 0x02, 0x01, 0x02, 0x32, 0x10, 0x61, 0x70, 0x70, 0x6c, 0x69, 0x63, 0x61, 0x74, + 0x69, 0x6f, 0x6e, 0x2f, 0x6a, 0x73, 0x6f, 0x6e, 0x3a, 0x10, 0x61, 0x70, 0x70, 0x6c, 0x69, 0x63, + 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2f, 0x6a, 0x73, 0x6f, 0x6e, 0x5a, 0x48, 0x0a, 0x46, 0x0a, 0x0a, + 0x42, 0x65, 0x61, 0x72, 0x65, 0x72, 0x41, 0x75, 0x74, 0x68, 0x12, 0x38, 0x08, 0x02, 0x12, 0x23, + 0x54, 0x79, 0x70, 0x65, 0x20, 0x42, 0x65, 0x61, 
0x72, 0x65, 0x72, 0x20, 0x79, 0x6f, 0x75, 0x72, + 0x2d, 0x6a, 0x77, 0x74, 0x2d, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x20, 0x74, 0x6f, 0x20, 0x56, 0x61, + 0x6c, 0x75, 0x65, 0x1a, 0x0d, 0x41, 0x75, 0x74, 0x68, 0x6f, 0x72, 0x69, 0x7a, 0x61, 0x74, 0x69, + 0x6f, 0x6e, 0x20, 0x02, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, +} + +var ( + file_api_stock_v1_strong_proto_rawDescOnce sync.Once + file_api_stock_v1_strong_proto_rawDescData = file_api_stock_v1_strong_proto_rawDesc +) + +func file_api_stock_v1_strong_proto_rawDescGZIP() []byte { + file_api_stock_v1_strong_proto_rawDescOnce.Do(func() { + file_api_stock_v1_strong_proto_rawDescData = protoimpl.X.CompressGZIP(file_api_stock_v1_strong_proto_rawDescData) + }) + return file_api_stock_v1_strong_proto_rawDescData +} + +var file_api_stock_v1_strong_proto_msgTypes = make([]protoimpl.MessageInfo, 4) +var file_api_stock_v1_strong_proto_goTypes = []interface{}{ + (*UpdateStrongRequest)(nil), // 0: api.stock.v1.UpdateStrongRequest + (*UpdateStrongRequestReply)(nil), // 1: api.stock.v1.UpdateStrongRequestReply + (*QueryStrongRequest)(nil), // 2: api.stock.v1.QueryStrongRequest + (*QueryStrongReply)(nil), // 3: api.stock.v1.QueryStrongReply +} +var file_api_stock_v1_strong_proto_depIdxs = []int32{ + 0, // 0: api.stock.v1.strong.Update:input_type -> api.stock.v1.UpdateStrongRequest + 2, // 1: api.stock.v1.strong.Query:input_type -> api.stock.v1.QueryStrongRequest + 1, // 2: api.stock.v1.strong.Update:output_type -> api.stock.v1.UpdateStrongRequestReply + 3, // 3: api.stock.v1.strong.Query:output_type -> api.stock.v1.QueryStrongReply + 2, // [2:4] is the sub-list for method output_type + 0, // [0:2] is the sub-list for method input_type + 0, // [0:0] is the sub-list for extension type_name + 0, // [0:0] is the sub-list for extension extendee + 0, // [0:0] is the sub-list for field type_name +} + +func init() { file_api_stock_v1_strong_proto_init() } +func file_api_stock_v1_strong_proto_init() { + if File_api_stock_v1_strong_proto != nil { + return + } + if !protoimpl.UnsafeEnabled { + file_api_stock_v1_strong_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*UpdateStrongRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_api_stock_v1_strong_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*UpdateStrongRequestReply); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_api_stock_v1_strong_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*QueryStrongRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_api_stock_v1_strong_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*QueryStrongReply); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + } + type x struct{} + out := protoimpl.TypeBuilder{ + File: protoimpl.DescBuilder{ + GoPackagePath: reflect.TypeOf(x{}).PkgPath(), + RawDescriptor: file_api_stock_v1_strong_proto_rawDesc, + NumEnums: 0, + NumMessages: 4, + NumExtensions: 0, + NumServices: 1, + }, + GoTypes: file_api_stock_v1_strong_proto_goTypes, + DependencyIndexes: file_api_stock_v1_strong_proto_depIdxs, + MessageInfos: 
file_api_stock_v1_strong_proto_msgTypes, + }.Build() + File_api_stock_v1_strong_proto = out.File + file_api_stock_v1_strong_proto_rawDesc = nil + file_api_stock_v1_strong_proto_goTypes = nil + file_api_stock_v1_strong_proto_depIdxs = nil +} diff --git a/_13_sponge-dtm-cache/http/api/stock/v1/strong.pb.validate.go b/_13_sponge-dtm-cache/http/api/stock/v1/strong.pb.validate.go new file mode 100644 index 0000000..5b5ca0a --- /dev/null +++ b/_13_sponge-dtm-cache/http/api/stock/v1/strong.pb.validate.go @@ -0,0 +1,477 @@ +// Code generated by protoc-gen-validate. DO NOT EDIT. +// source: api/stock/v1/strong.proto + +package v1 + +import ( + "bytes" + "errors" + "fmt" + "net" + "net/mail" + "net/url" + "regexp" + "sort" + "strings" + "time" + "unicode/utf8" + + "google.golang.org/protobuf/types/known/anypb" +) + +// ensure the imports are used +var ( + _ = bytes.MinRead + _ = errors.New("") + _ = fmt.Print + _ = utf8.UTFMax + _ = (*regexp.Regexp)(nil) + _ = (*strings.Reader)(nil) + _ = net.IPv4len + _ = time.Duration(0) + _ = (*url.URL)(nil) + _ = (*mail.Address)(nil) + _ = anypb.Any{} + _ = sort.Sort +) + +// Validate checks the field values on UpdateStrongRequest with the rules +// defined in the proto definition for this message. If any rules are +// violated, the first error encountered is returned, or nil if there are no violations. +func (m *UpdateStrongRequest) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on UpdateStrongRequest with the rules +// defined in the proto definition for this message. If any rules are +// violated, the result is a list of violation errors wrapped in +// UpdateStrongRequestMultiError, or nil if none found. +func (m *UpdateStrongRequest) ValidateAll() error { + return m.validate(true) +} + +func (m *UpdateStrongRequest) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + if m.GetId() <= 0 { + err := UpdateStrongRequestValidationError{ + field: "Id", + reason: "value must be greater than 0", + } + if !all { + return err + } + errors = append(errors, err) + } + + if m.GetStock() <= 0 { + err := UpdateStrongRequestValidationError{ + field: "Stock", + reason: "value must be greater than 0", + } + if !all { + return err + } + errors = append(errors, err) + } + + if len(errors) > 0 { + return UpdateStrongRequestMultiError(errors) + } + + return nil +} + +// UpdateStrongRequestMultiError is an error wrapping multiple validation +// errors returned by UpdateStrongRequest.ValidateAll() if the designated +// constraints aren't met. +type UpdateStrongRequestMultiError []error + +// Error returns a concatenation of all the error messages it wraps. +func (m UpdateStrongRequestMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m UpdateStrongRequestMultiError) AllErrors() []error { return m } + +// UpdateStrongRequestValidationError is the validation error returned by +// UpdateStrongRequest.Validate if the designated constraints aren't met. +type UpdateStrongRequestValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e UpdateStrongRequestValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e UpdateStrongRequestValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. 
+func (e UpdateStrongRequestValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e UpdateStrongRequestValidationError) Key() bool { return e.key } + +// ErrorName returns error name. +func (e UpdateStrongRequestValidationError) ErrorName() string { + return "UpdateStrongRequestValidationError" +} + +// Error satisfies the builtin error interface +func (e UpdateStrongRequestValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sUpdateStrongRequest.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = UpdateStrongRequestValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = UpdateStrongRequestValidationError{} + +// Validate checks the field values on UpdateStrongRequestReply with the rules +// defined in the proto definition for this message. If any rules are +// violated, the first error encountered is returned, or nil if there are no violations. +func (m *UpdateStrongRequestReply) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on UpdateStrongRequestReply with the +// rules defined in the proto definition for this message. If any rules are +// violated, the result is a list of violation errors wrapped in +// UpdateStrongRequestReplyMultiError, or nil if none found. +func (m *UpdateStrongRequestReply) ValidateAll() error { + return m.validate(true) +} + +func (m *UpdateStrongRequestReply) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + if len(errors) > 0 { + return UpdateStrongRequestReplyMultiError(errors) + } + + return nil +} + +// UpdateStrongRequestReplyMultiError is an error wrapping multiple validation +// errors returned by UpdateStrongRequestReply.ValidateAll() if the designated +// constraints aren't met. +type UpdateStrongRequestReplyMultiError []error + +// Error returns a concatenation of all the error messages it wraps. +func (m UpdateStrongRequestReplyMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m UpdateStrongRequestReplyMultiError) AllErrors() []error { return m } + +// UpdateStrongRequestReplyValidationError is the validation error returned by +// UpdateStrongRequestReply.Validate if the designated constraints aren't met. +type UpdateStrongRequestReplyValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e UpdateStrongRequestReplyValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e UpdateStrongRequestReplyValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e UpdateStrongRequestReplyValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e UpdateStrongRequestReplyValidationError) Key() bool { return e.key } + +// ErrorName returns error name. 
+func (e UpdateStrongRequestReplyValidationError) ErrorName() string { + return "UpdateStrongRequestReplyValidationError" +} + +// Error satisfies the builtin error interface +func (e UpdateStrongRequestReplyValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sUpdateStrongRequestReply.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = UpdateStrongRequestReplyValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = UpdateStrongRequestReplyValidationError{} + +// Validate checks the field values on QueryStrongRequest with the rules +// defined in the proto definition for this message. If any rules are +// violated, the first error encountered is returned, or nil if there are no violations. +func (m *QueryStrongRequest) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on QueryStrongRequest with the rules +// defined in the proto definition for this message. If any rules are +// violated, the result is a list of violation errors wrapped in +// QueryStrongRequestMultiError, or nil if none found. +func (m *QueryStrongRequest) ValidateAll() error { + return m.validate(true) +} + +func (m *QueryStrongRequest) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + if m.GetId() <= 0 { + err := QueryStrongRequestValidationError{ + field: "Id", + reason: "value must be greater than 0", + } + if !all { + return err + } + errors = append(errors, err) + } + + if len(errors) > 0 { + return QueryStrongRequestMultiError(errors) + } + + return nil +} + +// QueryStrongRequestMultiError is an error wrapping multiple validation errors +// returned by QueryStrongRequest.ValidateAll() if the designated constraints +// aren't met. +type QueryStrongRequestMultiError []error + +// Error returns a concatenation of all the error messages it wraps. +func (m QueryStrongRequestMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m QueryStrongRequestMultiError) AllErrors() []error { return m } + +// QueryStrongRequestValidationError is the validation error returned by +// QueryStrongRequest.Validate if the designated constraints aren't met. +type QueryStrongRequestValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e QueryStrongRequestValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e QueryStrongRequestValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e QueryStrongRequestValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e QueryStrongRequestValidationError) Key() bool { return e.key } + +// ErrorName returns error name. 
+func (e QueryStrongRequestValidationError) ErrorName() string { + return "QueryStrongRequestValidationError" +} + +// Error satisfies the builtin error interface +func (e QueryStrongRequestValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sQueryStrongRequest.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = QueryStrongRequestValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = QueryStrongRequestValidationError{} + +// Validate checks the field values on QueryStrongReply with the rules defined +// in the proto definition for this message. If any rules are violated, the +// first error encountered is returned, or nil if there are no violations. +func (m *QueryStrongReply) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on QueryStrongReply with the rules +// defined in the proto definition for this message. If any rules are +// violated, the result is a list of violation errors wrapped in +// QueryStrongReplyMultiError, or nil if none found. +func (m *QueryStrongReply) ValidateAll() error { + return m.validate(true) +} + +func (m *QueryStrongReply) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + // no validation rules for Stock + + if len(errors) > 0 { + return QueryStrongReplyMultiError(errors) + } + + return nil +} + +// QueryStrongReplyMultiError is an error wrapping multiple validation errors +// returned by QueryStrongReply.ValidateAll() if the designated constraints +// aren't met. +type QueryStrongReplyMultiError []error + +// Error returns a concatenation of all the error messages it wraps. +func (m QueryStrongReplyMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m QueryStrongReplyMultiError) AllErrors() []error { return m } + +// QueryStrongReplyValidationError is the validation error returned by +// QueryStrongReply.Validate if the designated constraints aren't met. +type QueryStrongReplyValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e QueryStrongReplyValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e QueryStrongReplyValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e QueryStrongReplyValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e QueryStrongReplyValidationError) Key() bool { return e.key } + +// ErrorName returns error name. 
+func (e QueryStrongReplyValidationError) ErrorName() string { return "QueryStrongReplyValidationError" } + +// Error satisfies the builtin error interface +func (e QueryStrongReplyValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sQueryStrongReply.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = QueryStrongReplyValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = QueryStrongReplyValidationError{} diff --git a/_13_sponge-dtm-cache/http/api/stock/v1/strong.proto b/_13_sponge-dtm-cache/http/api/stock/v1/strong.proto new file mode 100644 index 0000000..643f4df --- /dev/null +++ b/_13_sponge-dtm-cache/http/api/stock/v1/strong.proto @@ -0,0 +1,81 @@ +syntax = "proto3"; + +package api.stock.v1; + +import "google/api/annotations.proto"; +import "protoc-gen-openapiv2/options/annotations.proto"; +import "tagger/tagger.proto"; +import "validate/validate.proto"; + +option go_package = "stock/api/stock/v1;v1"; + +// Default settings for generating swagger documents +// NOTE: because json does not support 64 bits, the int64 and uint64 types under *.swagger.json are automatically converted to string types +// Reference https://github.com/grpc-ecosystem/grpc-gateway/blob/db7fbefff7c04877cdb32e16d4a248a024428207/examples/internal/proto/examplepb/a_bit_of_everything.proto +option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_swagger) = { + host: "localhost:8080" + base_path: "" + info: { + title: "stock api docs"; + version: "2.0"; + } + schemes: HTTP; + schemes: HTTPS; + consumes: "application/json"; + produces: "application/json"; + security_definitions: { + security: { + key: "BearerAuth"; + value: { + type: TYPE_API_KEY; + in: IN_HEADER; + name: "Authorization"; + description: "Type Bearer your-jwt-token to Value"; + } + } + } +}; + +service strong{ + // 更新数据,DB和缓存强一致性 + rpc Update(UpdateStrongRequest) returns (UpdateStrongRequestReply) { + option (google.api.http) = { + put: "/api/v1/stock/{id}/strong" + body: "*" + }; + option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { + summary: "更新数据", + description: "更新数据,DB和缓存强一致性", + tags: "case 3: 强一致性" + }; + } + + // 查询 + rpc Query(QueryStrongRequest) returns (QueryStrongReply) { + option (google.api.http) = { + get: "/api/v1/stock/{id}/strong" + }; + option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { + summary: "查询数据", + description: "查询数据", + tags: "case 3: 强一致性" + }; + } +} + +message UpdateStrongRequest { + uint64 id = 1 [(validate.rules).uint64.gt = 0, (tagger.tags) = "uri:\"id\"" ]; + uint32 stock = 2 [(validate.rules).uint32.gt = 0]; // 库存数量 +} + +message UpdateStrongRequestReply { + +} + +message QueryStrongRequest { + uint64 id =1 [(validate.rules).uint64.gt = 0, (tagger.tags) = "uri:\"id\"" ]; +} + +message QueryStrongReply { + uint32 stock = 1; // 库存数量 +} diff --git a/_13_sponge-dtm-cache/http/api/stock/v1/strong_router.pb.go b/_13_sponge-dtm-cache/http/api/stock/v1/strong_router.pb.go new file mode 100644 index 0000000..210deb2 --- /dev/null +++ b/_13_sponge-dtm-cache/http/api/stock/v1/strong_router.pb.go @@ -0,0 +1,221 @@ +// Code generated by https://github.com/zhufuyi/sponge, DO NOT EDIT. 
+ +package v1 + +import ( + "context" + "errors" + "strings" + + "github.com/gin-gonic/gin" + "go.uber.org/zap" + + "github.com/zhufuyi/sponge/pkg/errcode" + "github.com/zhufuyi/sponge/pkg/gin/middleware" +) + +type StrongLogicer interface { + Update(ctx context.Context, req *UpdateStrongRequest) (*UpdateStrongRequestReply, error) + Query(ctx context.Context, req *QueryStrongRequest) (*QueryStrongReply, error) +} + +type StrongOption func(*strongOptions) + +type strongOptions struct { + isFromRPC bool + responser errcode.Responser + zapLog *zap.Logger + httpErrors []*errcode.Error + rpcStatus []*errcode.RPCStatus + wrapCtxFn func(c *gin.Context) context.Context +} + +func (o *strongOptions) apply(opts ...StrongOption) { + for _, opt := range opts { + opt(o) + } +} + +func WithStrongHTTPResponse() StrongOption { + return func(o *strongOptions) { + o.isFromRPC = false + } +} + +func WithStrongRPCResponse() StrongOption { + return func(o *strongOptions) { + o.isFromRPC = true + } +} + +func WithStrongResponser(responser errcode.Responser) StrongOption { + return func(o *strongOptions) { + o.responser = responser + } +} + +func WithStrongLogger(zapLog *zap.Logger) StrongOption { + return func(o *strongOptions) { + o.zapLog = zapLog + } +} + +func WithStrongErrorToHTTPCode(e ...*errcode.Error) StrongOption { + return func(o *strongOptions) { + o.httpErrors = e + } +} + +func WithStrongRPCStatusToHTTPCode(s ...*errcode.RPCStatus) StrongOption { + return func(o *strongOptions) { + o.rpcStatus = s + } +} + +func WithStrongWrapCtx(wrapCtxFn func(c *gin.Context) context.Context) StrongOption { + return func(o *strongOptions) { + o.wrapCtxFn = wrapCtxFn + } +} + +func RegisterStrongRouter( + iRouter gin.IRouter, + groupPathMiddlewares map[string][]gin.HandlerFunc, + singlePathMiddlewares map[string][]gin.HandlerFunc, + iLogic StrongLogicer, + opts ...StrongOption) { + + o := &strongOptions{} + o.apply(opts...) + + if o.responser == nil { + o.responser = errcode.NewResponser(o.isFromRPC, o.httpErrors, o.rpcStatus) + } + if o.zapLog == nil { + o.zapLog, _ = zap.NewProduction() + } + + r := &strongRouter{ + iRouter: iRouter, + groupPathMiddlewares: groupPathMiddlewares, + singlePathMiddlewares: singlePathMiddlewares, + iLogic: iLogic, + iResponse: o.responser, + zapLog: o.zapLog, + wrapCtxFn: o.wrapCtxFn, + } + r.register() +} + +type strongRouter struct { + iRouter gin.IRouter + groupPathMiddlewares map[string][]gin.HandlerFunc + singlePathMiddlewares map[string][]gin.HandlerFunc + iLogic StrongLogicer + iResponse errcode.Responser + zapLog *zap.Logger + wrapCtxFn func(c *gin.Context) context.Context +} + +func (r *strongRouter) register() { + r.iRouter.Handle("PUT", "/api/v1/stock/:id/strong", r.withMiddleware("PUT", "/api/v1/stock/:id/strong", r.Update_6)...) + r.iRouter.Handle("GET", "/api/v1/stock/:id/strong", r.withMiddleware("GET", "/api/v1/stock/:id/strong", r.Query_6)...) + +} + +func (r *strongRouter) withMiddleware(method string, path string, fn gin.HandlerFunc) []gin.HandlerFunc { + handlerFns := []gin.HandlerFunc{} + + // determine if a route group is hit or miss, left prefix rule + for groupPath, fns := range r.groupPathMiddlewares { + if groupPath == "" || groupPath == "/" { + handlerFns = append(handlerFns, fns...) + continue + } + size := len(groupPath) + if len(path) < size { + continue + } + if groupPath == path[:size] { + handlerFns = append(handlerFns, fns...) 
+ } + } + + // determine if a single route has been hit + key := strings.ToUpper(method) + "->" + path + if fns, ok := r.singlePathMiddlewares[key]; ok { + handlerFns = append(handlerFns, fns...) + } + + return append(handlerFns, fn) +} + +func (r *strongRouter) Update_6(c *gin.Context) { + req := &UpdateStrongRequest{} + var err error + + if err = c.ShouldBindUri(req); err != nil { + r.zapLog.Warn("ShouldBindUri error", zap.Error(err), middleware.GCtxRequestIDField(c)) + r.iResponse.ParamError(c, err) + return + } + + if err = c.ShouldBindJSON(req); err != nil { + r.zapLog.Warn("ShouldBindJSON error", zap.Error(err), middleware.GCtxRequestIDField(c)) + r.iResponse.ParamError(c, err) + return + } + + var ctx context.Context + if r.wrapCtxFn != nil { + ctx = r.wrapCtxFn(c) + } else { + ctx = middleware.WrapCtx(c) + } + + out, err := r.iLogic.Update(ctx, req) + if err != nil { + if errors.Is(err, errcode.SkipResponse) { + return + } + r.iResponse.Error(c, err) + return + } + + r.iResponse.Success(c, out) +} + +func (r *strongRouter) Query_6(c *gin.Context) { + req := &QueryStrongRequest{} + var err error + + if err = c.ShouldBindUri(req); err != nil { + r.zapLog.Warn("ShouldBindUri error", zap.Error(err), middleware.GCtxRequestIDField(c)) + r.iResponse.ParamError(c, err) + return + } + + if err = c.ShouldBindQuery(req); err != nil { + r.zapLog.Warn("ShouldBindQuery error", zap.Error(err), middleware.GCtxRequestIDField(c)) + r.iResponse.ParamError(c, err) + return + } + + var ctx context.Context + if r.wrapCtxFn != nil { + ctx = r.wrapCtxFn(c) + } else { + ctx = middleware.WrapCtx(c) + } + + out, err := r.iLogic.Query(ctx, req) + if err != nil { + if errors.Is(err, errcode.SkipResponse) { + return + } + r.iResponse.Error(c, err) + return + } + + r.iResponse.Success(c, out) +} diff --git a/_13_sponge-dtm-cache/http/api/types/types.pb.go b/_13_sponge-dtm-cache/http/api/types/types.pb.go new file mode 100644 index 0000000..5e41b19 --- /dev/null +++ b/_13_sponge-dtm-cache/http/api/types/types.pb.go @@ -0,0 +1,327 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// versions: +// protoc-gen-go v1.28.0 +// protoc v4.25.2 +// source: api/types/types.proto + +package types + +import ( + protoreflect "google.golang.org/protobuf/reflect/protoreflect" + protoimpl "google.golang.org/protobuf/runtime/protoimpl" + reflect "reflect" + sync "sync" +) + +const ( + // Verify that this generated code is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) + // Verify that runtime/protoimpl is sufficiently up-to-date. 
+ _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) +) + +type Params struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Page int32 `protobuf:"varint,1,opt,name=page,proto3" json:"page"` // page number, starting from 0 + Limit int32 `protobuf:"varint,2,opt,name=limit,proto3" json:"limit"` // number per page + Sort string `protobuf:"bytes,3,opt,name=sort,proto3" json:"sort"` // sorted fields, multi-column sorting separated by commas + Columns []*Column `protobuf:"bytes,4,rep,name=columns,proto3" json:"columns"` // query conditions +} + +func (x *Params) Reset() { + *x = Params{} + if protoimpl.UnsafeEnabled { + mi := &file_api_types_types_proto_msgTypes[0] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Params) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Params) ProtoMessage() {} + +func (x *Params) ProtoReflect() protoreflect.Message { + mi := &file_api_types_types_proto_msgTypes[0] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Params.ProtoReflect.Descriptor instead. +func (*Params) Descriptor() ([]byte, []int) { + return file_api_types_types_proto_rawDescGZIP(), []int{0} +} + +func (x *Params) GetPage() int32 { + if x != nil { + return x.Page + } + return 0 +} + +func (x *Params) GetLimit() int32 { + if x != nil { + return x.Limit + } + return 0 +} + +func (x *Params) GetSort() string { + if x != nil { + return x.Sort + } + return "" +} + +func (x *Params) GetColumns() []*Column { + if x != nil { + return x.Columns + } + return nil +} + +type Column struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name"` // column name + Exp string `protobuf:"bytes,2,opt,name=exp,proto3" json:"exp"` // expressions, which default to = when the value is null, have =, !=, >, >=, <, <=, like, in + Value string `protobuf:"bytes,3,opt,name=value,proto3" json:"value"` // column value + Logic string `protobuf:"bytes,4,opt,name=logic,proto3" json:"logic"` // logical type, defaults to and when value is null, only &(and), ||(or) +} + +func (x *Column) Reset() { + *x = Column{} + if protoimpl.UnsafeEnabled { + mi := &file_api_types_types_proto_msgTypes[1] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Column) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Column) ProtoMessage() {} + +func (x *Column) ProtoReflect() protoreflect.Message { + mi := &file_api_types_types_proto_msgTypes[1] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Column.ProtoReflect.Descriptor instead. 
+func (*Column) Descriptor() ([]byte, []int) { + return file_api_types_types_proto_rawDescGZIP(), []int{1} +} + +func (x *Column) GetName() string { + if x != nil { + return x.Name + } + return "" +} + +func (x *Column) GetExp() string { + if x != nil { + return x.Exp + } + return "" +} + +func (x *Column) GetValue() string { + if x != nil { + return x.Value + } + return "" +} + +func (x *Column) GetLogic() string { + if x != nil { + return x.Logic + } + return "" +} + +type Conditions struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Columns []*Column `protobuf:"bytes,1,rep,name=columns,proto3" json:"columns"` // query conditions +} + +func (x *Conditions) Reset() { + *x = Conditions{} + if protoimpl.UnsafeEnabled { + mi := &file_api_types_types_proto_msgTypes[2] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Conditions) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Conditions) ProtoMessage() {} + +func (x *Conditions) ProtoReflect() protoreflect.Message { + mi := &file_api_types_types_proto_msgTypes[2] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Conditions.ProtoReflect.Descriptor instead. +func (*Conditions) Descriptor() ([]byte, []int) { + return file_api_types_types_proto_rawDescGZIP(), []int{2} +} + +func (x *Conditions) GetColumns() []*Column { + if x != nil { + return x.Columns + } + return nil +} + +var File_api_types_types_proto protoreflect.FileDescriptor + +var file_api_types_types_proto_rawDesc = []byte{ + 0x0a, 0x15, 0x61, 0x70, 0x69, 0x2f, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x74, 0x79, 0x70, 0x65, + 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x09, 0x61, 0x70, 0x69, 0x2e, 0x74, 0x79, 0x70, + 0x65, 0x73, 0x22, 0x73, 0x0a, 0x06, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x12, 0x12, 0x0a, 0x04, + 0x70, 0x61, 0x67, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x05, 0x52, 0x04, 0x70, 0x61, 0x67, 0x65, + 0x12, 0x14, 0x0a, 0x05, 0x6c, 0x69, 0x6d, 0x69, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x05, 0x52, + 0x05, 0x6c, 0x69, 0x6d, 0x69, 0x74, 0x12, 0x12, 0x0a, 0x04, 0x73, 0x6f, 0x72, 0x74, 0x18, 0x03, + 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x73, 0x6f, 0x72, 0x74, 0x12, 0x2b, 0x0a, 0x07, 0x63, 0x6f, + 0x6c, 0x75, 0x6d, 0x6e, 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x11, 0x2e, 0x61, 0x70, + 0x69, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2e, 0x43, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x52, 0x07, + 0x63, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x73, 0x22, 0x5a, 0x0a, 0x06, 0x43, 0x6f, 0x6c, 0x75, 0x6d, + 0x6e, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, + 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x10, 0x0a, 0x03, 0x65, 0x78, 0x70, 0x18, 0x02, 0x20, 0x01, + 0x28, 0x09, 0x52, 0x03, 0x65, 0x78, 0x70, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, + 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x12, 0x14, 0x0a, + 0x05, 0x6c, 0x6f, 0x67, 0x69, 0x63, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x6c, 0x6f, + 0x67, 0x69, 0x63, 0x22, 0x39, 0x0a, 0x0a, 0x43, 0x6f, 0x6e, 0x64, 0x69, 0x74, 0x69, 0x6f, 0x6e, + 0x73, 0x12, 0x2b, 0x0a, 0x07, 0x63, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x73, 0x18, 0x01, 0x20, 0x03, + 0x28, 0x0b, 0x32, 0x11, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2e, 0x43, + 0x6f, 0x6c, 
0x75, 0x6d, 0x6e, 0x52, 0x07, 0x63, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x73, 0x42, 0x17, + 0x5a, 0x15, 0x73, 0x74, 0x6f, 0x63, 0x6b, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x74, 0x79, 0x70, 0x65, + 0x73, 0x3b, 0x74, 0x79, 0x70, 0x65, 0x73, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, +} + +var ( + file_api_types_types_proto_rawDescOnce sync.Once + file_api_types_types_proto_rawDescData = file_api_types_types_proto_rawDesc +) + +func file_api_types_types_proto_rawDescGZIP() []byte { + file_api_types_types_proto_rawDescOnce.Do(func() { + file_api_types_types_proto_rawDescData = protoimpl.X.CompressGZIP(file_api_types_types_proto_rawDescData) + }) + return file_api_types_types_proto_rawDescData +} + +var file_api_types_types_proto_msgTypes = make([]protoimpl.MessageInfo, 3) +var file_api_types_types_proto_goTypes = []interface{}{ + (*Params)(nil), // 0: api.types.Params + (*Column)(nil), // 1: api.types.Column + (*Conditions)(nil), // 2: api.types.Conditions +} +var file_api_types_types_proto_depIdxs = []int32{ + 1, // 0: api.types.Params.columns:type_name -> api.types.Column + 1, // 1: api.types.Conditions.columns:type_name -> api.types.Column + 2, // [2:2] is the sub-list for method output_type + 2, // [2:2] is the sub-list for method input_type + 2, // [2:2] is the sub-list for extension type_name + 2, // [2:2] is the sub-list for extension extendee + 0, // [0:2] is the sub-list for field type_name +} + +func init() { file_api_types_types_proto_init() } +func file_api_types_types_proto_init() { + if File_api_types_types_proto != nil { + return + } + if !protoimpl.UnsafeEnabled { + file_api_types_types_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*Params); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_api_types_types_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*Column); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_api_types_types_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*Conditions); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + } + type x struct{} + out := protoimpl.TypeBuilder{ + File: protoimpl.DescBuilder{ + GoPackagePath: reflect.TypeOf(x{}).PkgPath(), + RawDescriptor: file_api_types_types_proto_rawDesc, + NumEnums: 0, + NumMessages: 3, + NumExtensions: 0, + NumServices: 0, + }, + GoTypes: file_api_types_types_proto_goTypes, + DependencyIndexes: file_api_types_types_proto_depIdxs, + MessageInfos: file_api_types_types_proto_msgTypes, + }.Build() + File_api_types_types_proto = out.File + file_api_types_types_proto_rawDesc = nil + file_api_types_types_proto_goTypes = nil + file_api_types_types_proto_depIdxs = nil +} diff --git a/_13_sponge-dtm-cache/http/api/types/types.pb.validate.go b/_13_sponge-dtm-cache/http/api/types/types.pb.validate.go new file mode 100644 index 0000000..3ca7a60 --- /dev/null +++ b/_13_sponge-dtm-cache/http/api/types/types.pb.validate.go @@ -0,0 +1,413 @@ +// Code generated by protoc-gen-validate. DO NOT EDIT. 
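Editor's note: the Params and Column messages defined in types.proto/types.pb.go above carry the paging and filter conditions used by the list endpoints (see v1ListStockRequest in apis.swagger.json later in this patch). A small illustrative sketch of building such a request body in Go follows; the column name, values, and the assumption that the module is named "stock" are examples, not taken from the patch.

```go
package main

import (
	"encoding/json"
	"fmt"

	"stock/api/types" // import path follows the go_package option in types.proto
)

func main() {
	// filter: stock > 10, first page, 10 rows per page, sorted by id
	params := &types.Params{
		Page:  0,
		Limit: 10,
		Sort:  "id",
		Columns: []*types.Column{
			{Name: "stock", Exp: ">", Value: "10"},
			// Logic is left empty, which defaults to "and" per the field comment above
		},
	}

	body, _ := json.Marshal(map[string]interface{}{"params": params})
	fmt.Println(string(body)) // JSON body for POST /api/v1/stock/list
}
```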
+// source: api/types/types.proto + +package types + +import ( + "bytes" + "errors" + "fmt" + "net" + "net/mail" + "net/url" + "regexp" + "sort" + "strings" + "time" + "unicode/utf8" + + "google.golang.org/protobuf/types/known/anypb" +) + +// ensure the imports are used +var ( + _ = bytes.MinRead + _ = errors.New("") + _ = fmt.Print + _ = utf8.UTFMax + _ = (*regexp.Regexp)(nil) + _ = (*strings.Reader)(nil) + _ = net.IPv4len + _ = time.Duration(0) + _ = (*url.URL)(nil) + _ = (*mail.Address)(nil) + _ = anypb.Any{} + _ = sort.Sort +) + +// Validate checks the field values on Params with the rules defined in the +// proto definition for this message. If any rules are violated, the first +// error encountered is returned, or nil if there are no violations. +func (m *Params) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on Params with the rules defined in the +// proto definition for this message. If any rules are violated, the result is +// a list of violation errors wrapped in ParamsMultiError, or nil if none found. +func (m *Params) ValidateAll() error { + return m.validate(true) +} + +func (m *Params) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + // no validation rules for Page + + // no validation rules for Limit + + // no validation rules for Sort + + for idx, item := range m.GetColumns() { + _, _ = idx, item + + if all { + switch v := interface{}(item).(type) { + case interface{ ValidateAll() error }: + if err := v.ValidateAll(); err != nil { + errors = append(errors, ParamsValidationError{ + field: fmt.Sprintf("Columns[%v]", idx), + reason: "embedded message failed validation", + cause: err, + }) + } + case interface{ Validate() error }: + if err := v.Validate(); err != nil { + errors = append(errors, ParamsValidationError{ + field: fmt.Sprintf("Columns[%v]", idx), + reason: "embedded message failed validation", + cause: err, + }) + } + } + } else if v, ok := interface{}(item).(interface{ Validate() error }); ok { + if err := v.Validate(); err != nil { + return ParamsValidationError{ + field: fmt.Sprintf("Columns[%v]", idx), + reason: "embedded message failed validation", + cause: err, + } + } + } + + } + + if len(errors) > 0 { + return ParamsMultiError(errors) + } + + return nil +} + +// ParamsMultiError is an error wrapping multiple validation errors returned by +// Params.ValidateAll() if the designated constraints aren't met. +type ParamsMultiError []error + +// Error returns a concatenation of all the error messages it wraps. +func (m ParamsMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m ParamsMultiError) AllErrors() []error { return m } + +// ParamsValidationError is the validation error returned by Params.Validate if +// the designated constraints aren't met. +type ParamsValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e ParamsValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e ParamsValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e ParamsValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e ParamsValidationError) Key() bool { return e.key } + +// ErrorName returns error name. 
+func (e ParamsValidationError) ErrorName() string { return "ParamsValidationError" } + +// Error satisfies the builtin error interface +func (e ParamsValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sParams.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = ParamsValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = ParamsValidationError{} + +// Validate checks the field values on Column with the rules defined in the +// proto definition for this message. If any rules are violated, the first +// error encountered is returned, or nil if there are no violations. +func (m *Column) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on Column with the rules defined in the +// proto definition for this message. If any rules are violated, the result is +// a list of violation errors wrapped in ColumnMultiError, or nil if none found. +func (m *Column) ValidateAll() error { + return m.validate(true) +} + +func (m *Column) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + // no validation rules for Name + + // no validation rules for Exp + + // no validation rules for Value + + // no validation rules for Logic + + if len(errors) > 0 { + return ColumnMultiError(errors) + } + + return nil +} + +// ColumnMultiError is an error wrapping multiple validation errors returned by +// Column.ValidateAll() if the designated constraints aren't met. +type ColumnMultiError []error + +// Error returns a concatenation of all the error messages it wraps. +func (m ColumnMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m ColumnMultiError) AllErrors() []error { return m } + +// ColumnValidationError is the validation error returned by Column.Validate if +// the designated constraints aren't met. +type ColumnValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e ColumnValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e ColumnValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e ColumnValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e ColumnValidationError) Key() bool { return e.key } + +// ErrorName returns error name. +func (e ColumnValidationError) ErrorName() string { return "ColumnValidationError" } + +// Error satisfies the builtin error interface +func (e ColumnValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sColumn.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = ColumnValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = ColumnValidationError{} + +// Validate checks the field values on Conditions with the rules defined in the +// proto definition for this message. 
If any rules are violated, the first +// error encountered is returned, or nil if there are no violations. +func (m *Conditions) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on Conditions with the rules defined in +// the proto definition for this message. If any rules are violated, the +// result is a list of violation errors wrapped in ConditionsMultiError, or +// nil if none found. +func (m *Conditions) ValidateAll() error { + return m.validate(true) +} + +func (m *Conditions) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + for idx, item := range m.GetColumns() { + _, _ = idx, item + + if all { + switch v := interface{}(item).(type) { + case interface{ ValidateAll() error }: + if err := v.ValidateAll(); err != nil { + errors = append(errors, ConditionsValidationError{ + field: fmt.Sprintf("Columns[%v]", idx), + reason: "embedded message failed validation", + cause: err, + }) + } + case interface{ Validate() error }: + if err := v.Validate(); err != nil { + errors = append(errors, ConditionsValidationError{ + field: fmt.Sprintf("Columns[%v]", idx), + reason: "embedded message failed validation", + cause: err, + }) + } + } + } else if v, ok := interface{}(item).(interface{ Validate() error }); ok { + if err := v.Validate(); err != nil { + return ConditionsValidationError{ + field: fmt.Sprintf("Columns[%v]", idx), + reason: "embedded message failed validation", + cause: err, + } + } + } + + } + + if len(errors) > 0 { + return ConditionsMultiError(errors) + } + + return nil +} + +// ConditionsMultiError is an error wrapping multiple validation errors +// returned by Conditions.ValidateAll() if the designated constraints aren't met. +type ConditionsMultiError []error + +// Error returns a concatenation of all the error messages it wraps. +func (m ConditionsMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m ConditionsMultiError) AllErrors() []error { return m } + +// ConditionsValidationError is the validation error returned by +// Conditions.Validate if the designated constraints aren't met. +type ConditionsValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e ConditionsValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e ConditionsValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e ConditionsValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e ConditionsValidationError) Key() bool { return e.key } + +// ErrorName returns error name. 
+func (e ConditionsValidationError) ErrorName() string { return "ConditionsValidationError" } + +// Error satisfies the builtin error interface +func (e ConditionsValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sConditions.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = ConditionsValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = ConditionsValidationError{} diff --git a/_13_sponge-dtm-cache/http/api/types/types.proto b/_13_sponge-dtm-cache/http/api/types/types.proto new file mode 100644 index 0000000..1306a7f --- /dev/null +++ b/_13_sponge-dtm-cache/http/api/types/types.proto @@ -0,0 +1,23 @@ +syntax = "proto3"; + +package api.types; + +option go_package = "stock/api/types;types"; + +message Params { + int32 page = 1; // page number, starting from 0 + int32 limit = 2; // number per page + string sort = 3; // sorted fields, multi-column sorting separated by commas + repeated Column columns = 4; // query conditions +} + +message Column { + string name = 1; // column name + string exp = 2; // expressions, which default to = when the value is null, have =, !=, >, >=, <, <=, like, in + string value = 3; // column value + string logic = 4; // logical type, defaults to and when value is null, only &(and), ||(or) +} + +message Conditions { + repeated Column columns = 1; // query conditions +} diff --git a/a_micro-grpc-http-protobuf/cmd/user/initial/close.go b/_13_sponge-dtm-cache/http/cmd/stock/initial/close.go similarity index 94% rename from a_micro-grpc-http-protobuf/cmd/user/initial/close.go rename to _13_sponge-dtm-cache/http/cmd/stock/initial/close.go index 414003b..75f5133 100644 --- a/a_micro-grpc-http-protobuf/cmd/user/initial/close.go +++ b/_13_sponge-dtm-cache/http/cmd/stock/initial/close.go @@ -7,8 +7,8 @@ import ( "github.com/zhufuyi/sponge/pkg/app" "github.com/zhufuyi/sponge/pkg/tracer" - "user/internal/config" - //"user/internal/model" + "stock/internal/config" + //"stock/internal/model" ) // Close releasing resources after service exit diff --git a/_13_sponge-dtm-cache/http/cmd/stock/initial/createService.go b/_13_sponge-dtm-cache/http/cmd/stock/initial/createService.go new file mode 100644 index 0000000..4ea32f4 --- /dev/null +++ b/_13_sponge-dtm-cache/http/cmd/stock/initial/createService.go @@ -0,0 +1,98 @@ +package initial + +import ( + "fmt" + "strconv" + + "github.com/zhufuyi/sponge/pkg/app" + "github.com/zhufuyi/sponge/pkg/logger" + "github.com/zhufuyi/sponge/pkg/servicerd/registry" + "github.com/zhufuyi/sponge/pkg/servicerd/registry/consul" + "github.com/zhufuyi/sponge/pkg/servicerd/registry/etcd" + "github.com/zhufuyi/sponge/pkg/servicerd/registry/nacos" + + "stock/internal/config" + "stock/internal/server" +) + +// CreateServices create grpc or http service +func CreateServices() []app.IServer { + var cfg = config.Get() + var servers []app.IServer + + // creating http service + httpAddr := ":" + strconv.Itoa(cfg.HTTP.Port) + httpRegistry, httpInstance := registerService("http", cfg.App.Host, cfg.HTTP.Port) + httpServer := server.NewHTTPServer(httpAddr, + server.WithHTTPRegistry(httpRegistry, httpInstance), + server.WithHTTPIsProd(cfg.App.Env == "prod"), + ) + servers = append(servers, httpServer) + + return servers +} + +func registerService(scheme string, host string, port int) (registry.Registry, 
*registry.ServiceInstance) { + var ( + instanceEndpoint = fmt.Sprintf("%s://%s:%d", scheme, host, port) + cfg = config.Get() + + iRegistry registry.Registry + instance *registry.ServiceInstance + err error + + id = cfg.App.Name + "_" + scheme + "_" + host + logField logger.Field + ) + + switch cfg.App.RegistryDiscoveryType { + // registering service with consul + case "consul": + iRegistry, instance, err = consul.NewRegistry( + cfg.Consul.Addr, + id, + cfg.App.Name, + []string{instanceEndpoint}, + ) + if err != nil { + panic(err) + } + logField = logger.Any("consulAddress", cfg.Consul.Addr) + + // registering service with etcd + case "etcd": + iRegistry, instance, err = etcd.NewRegistry( + cfg.Etcd.Addrs, + id, + cfg.App.Name, + []string{instanceEndpoint}, + ) + if err != nil { + panic(err) + } + logField = logger.Any("etcdAddress", cfg.Etcd.Addrs) + + // registering service with nacos + case "nacos": + iRegistry, instance, err = nacos.NewRegistry( + cfg.NacosRd.IPAddr, + cfg.NacosRd.Port, + cfg.NacosRd.NamespaceID, + id, + cfg.App.Name, + []string{instanceEndpoint}, + ) + if err != nil { + panic(err) + } + logField = logger.String("nacosAddress", fmt.Sprintf("%v:%d", cfg.NacosRd.IPAddr, cfg.NacosRd.Port)) + } + + if instance != nil { + msg := fmt.Sprintf("register service address to %s", cfg.App.RegistryDiscoveryType) + logger.Info(msg, logField, logger.String("id", id), logger.String("name", cfg.App.Name), logger.String("endpoint", instanceEndpoint)) + return iRegistry, instance + } + + return nil, nil +} diff --git a/a_micro-grpc-http-protobuf/cmd/user/initial/initApp.go b/_13_sponge-dtm-cache/http/cmd/stock/initial/initApp.go similarity index 87% rename from a_micro-grpc-http-protobuf/cmd/user/initial/initApp.go rename to _13_sponge-dtm-cache/http/cmd/stock/initial/initApp.go index b89fb37..64aff2a 100644 --- a/a_micro-grpc-http-protobuf/cmd/user/initial/initApp.go +++ b/_13_sponge-dtm-cache/http/cmd/stock/initial/initApp.go @@ -16,9 +16,9 @@ import ( "github.com/zhufuyi/sponge/pkg/stat" "github.com/zhufuyi/sponge/pkg/tracer" - "user/configs" - "user/internal/config" - //"user/internal/model" + "stock/configs" + "stock/internal/config" + //"stock/internal/model" ) var ( @@ -49,12 +49,7 @@ func InitApp() { panic(err) } logger.Debug(config.Show()) - logger.Info("init logger succeeded") - - // initializing database - //model.InitDB() - //logger.Infof("init %s succeeded", cfg.Database.Driver) - //model.InitCache(cfg.App.CacheType) + logger.Info("[logger] was initialized") // initializing tracing if cfg.App.EnableTrace { @@ -66,7 +61,7 @@ func InitApp() { strconv.Itoa(cfg.Jaeger.AgentPort), cfg.App.TracingSamplingRate, ) - logger.Info("init tracer succeeded") + logger.Info("[tracer] was initialized") } // initializing the print system and process resources @@ -75,8 +70,16 @@ func InitApp() { stat.WithLog(logger.Get()), stat.WithAlarm(), // invalid if it is windows, the default threshold for cpu and memory is 0.8, you can modify them ) - logger.Info("init statistics succeeded") + logger.Info("[resource statistics] was initialized") } + + // initializing database + //model.InitDB() + //logger.Infof("[%s] was initialized", cfg.Database.Driver) + //model.InitCache(cfg.App.CacheType) + //if cfg.App.CacheType != "" { + // logger.Infof("[%s] was initialized", cfg.App.CacheType) + //} } func initConfig() { @@ -90,7 +93,7 @@ func initConfig() { // get the configuration from the configuration center (first get the nacos configuration, // then read the service configuration according to the nacos 
configuration center) if configFile == "" { - configFile = configs.Path("user_cc.yml") + configFile = configs.Path("stock_cc.yml") } nacosConfig, err := config.NewCenter(configFile) if err != nil { @@ -114,7 +117,7 @@ func initConfig() { } else { // get configuration from local configuration file if configFile == "" { - configFile = configs.Path("user.yml") + configFile = configs.Path("stock.yml") } err := config.Init(configFile) if err != nil { diff --git a/_13_sponge-dtm-cache/http/cmd/stock/main.go b/_13_sponge-dtm-cache/http/cmd/stock/main.go new file mode 100644 index 0000000..75aa4af --- /dev/null +++ b/_13_sponge-dtm-cache/http/cmd/stock/main.go @@ -0,0 +1,17 @@ +// Package main is the http server of the application. +package main + +import ( + "github.com/zhufuyi/sponge/pkg/app" + + "stock/cmd/stock/initial" +) + +func main() { + initial.InitApp() + services := initial.CreateServices() + closes := initial.Close(services) + + a := app.New(services, closes) + a.Run() +} diff --git a/_13_sponge-dtm-cache/http/configs/location.go b/_13_sponge-dtm-cache/http/configs/location.go new file mode 100644 index 0000000..6b610a6 --- /dev/null +++ b/_13_sponge-dtm-cache/http/configs/location.go @@ -0,0 +1,23 @@ +// Package configs used to locate config file. +package configs + +import ( + "path/filepath" + "runtime" +) + +var basePath string + +func init() { + _, currentFile, _, _ := runtime.Caller(0) //nolint + basePath = filepath.Dir(currentFile) +} + +// Path return absolute path +func Path(rel string) string { + if filepath.IsAbs(rel) { + return rel + } + + return filepath.Join(basePath, rel) +} diff --git a/_13_sponge-dtm-cache/http/configs/stock.yml b/_13_sponge-dtm-cache/http/configs/stock.yml new file mode 100644 index 0000000..f277fe9 --- /dev/null +++ b/_13_sponge-dtm-cache/http/configs/stock.yml @@ -0,0 +1,92 @@ +# Generate the go struct command: sponge config --server-dir=./serverDir + +# app settings +app: + name: "stock" # server name + env: "dev" # runtime environment, dev: development environment, prod: production environment, test: test environment + version: "v0.0.0" + host: "127.0.0.1" # domain or ip, for service registration + enableStat: false # whether to turn on printing statistics, true:enable, false:disable + enableMetrics: true # whether to turn on indicator collection, true:enable, false:disable + enableHTTPProfile: false # whether to turn on performance analysis, true:enable, false:disable + enableLimit: false # whether to turn on rate limiting (adaptive), true:on, false:off + enableCircuitBreaker: false # whether to turn on circuit breaker(adaptive), true:on, false:off + enableTrace: false # whether to turn on trace, true:enable, false:disable, if true jaeger configuration must be set + tracingSamplingRate: 1.0 # tracing sampling rate, between 0 and 1, 0 means no sampling, 1 means sampling all links + registryDiscoveryType: "" # registry and discovery types: consul, etcd, nacos, if empty, registration and discovery are not used + cacheType: "" # cache type, if empty, the cache is not used, support for "memory" and "redis", if set to redis, must set redis configuration + + +# http server settings +http: + port: 8080 # listen port + timeout: 0 # request timeout, unit(second), if 0 means not set, if greater than 0 means set timeout, if enableHTTPProfile is true, it needs to set 0 or greater than 60s + + + +# logger settings +logger: + level: "info" # output log levels debug, info, warn, error, default is debug + format: "console" # output format, console or json, 
default is console + isSave: false # false:output to terminal, true:output to file, default is false + #logFileConfig: # Effective when isSave=true + #filename: "out.log" # File name (default is out.log) + #maxSize: 20 # Maximum file size (MB, default is 10MB) + #maxBackups: 50 # Maximum number of old files to retain (default is 100) + #maxAge: 15 # Maximum number of days to retain old files (default is 30 days) + #isCompression: true # Whether to compress/archive old files (default is false) + + +# set database configuration. reference-db-config-url +database: + driver: "mysql" # database driver + # mysql settings + mysql: + # dsn format, :@(:)/?[k=v& ......] + dsn: "root:123456@(192.168.3.37:3306)/eshop_stock?parseTime=true&loc=Local&charset=utf8,utf8mb4" + enableLog: true # whether to turn on printing of all logs + maxIdleConns: 10 # set the maximum number of connections in the idle connection pool + maxOpenConns: 100 # set the maximum number of open database connections + connMaxLifetime: 30 # sets the maximum time for which the connection can be reused, in minutes + + +# redis settings +redis: + # dsn format, [user]:@127.0.0.1:6379/[db], the default user is default, redis version 6.0 and above only supports user. + dsn: "default:123456@192.168.3.37:6379/0" + dialTimeout: 10 # connection timeout, unit(second) + readTimeout: 2 # read timeout, unit(second) + writeTimeout: 2 # write timeout, unit(second) + + +# dtm settings +dtm: + server: "http://192.168.3.37:35789/api/dtmsvr" # dtm server address (redis) + callbackAddr: # dtm callback server address + stock: + scheme: "http" + host: "192.168.3.90" # stock服务所在的宿主机地址 + port: 0 # 如果设置为0,则默认端口跟随http.port配置项 + + +# jaeger settings +jaeger: + agentHost: "192.168.3.37" + agentPort: 6831 + + +# consul settings +consul: + addr: "192.168.3.37:8500" + + +# etcd settings +etcd: + addrs: ["192.168.3.37:2379"] + + +# nacos settings, used in service registration discovery +nacosRd: + ipAddr: "192.168.3.37" + port: 8848 + namespaceID: "3454d2b5-2455-4d0e-bf6d-e033b086bb4c" # namespace id diff --git a/_13_sponge-dtm-cache/http/configs/stock_cc.yml b/_13_sponge-dtm-cache/http/configs/stock_cc.yml new file mode 100644 index 0000000..6a97506 --- /dev/null +++ b/_13_sponge-dtm-cache/http/configs/stock_cc.yml @@ -0,0 +1,13 @@ +# Generate the go struct command: sponge config --server-dir=./serverDir +# App config from nacos + +# nacos settings +nacos: + ipAddr: "192.168.3.37" # server address + port: 8848 # listening port + scheme: "http" # http or grpc + contextPath: "/nacos" # path + namespaceID: "3454d2b5-2455-4d0e-bf6d-e033b086bb4c" # namespace id + group: "dev" # group name: dev, prod, test + dataID: "stock.yml" # config file id + format: "yaml" # configuration file type: json,yaml,toml diff --git a/_13_sponge-dtm-cache/http/deployments/binary/README.md b/_13_sponge-dtm-cache/http/deployments/binary/README.md new file mode 100644 index 0000000..335cec0 --- /dev/null +++ b/_13_sponge-dtm-cache/http/deployments/binary/README.md @@ -0,0 +1,26 @@ + +copy the configuration file to the configs directory and binary file before starting the service. + +``` +├── configs +│ └── stock.yml +├── stock +├── deploy.sh +└── run.sh +``` + +### Running and stopping service manually + +Running service: + +> ./run.sh + +Stopping the service: + +> ./run.sh stop + +
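Editor's note on the dtm.callbackAddr block in configs/stock.yml above: the Chinese comments there say that host is the host machine address where the stock service runs, and that when port is set to 0 the callback port falls back to the http.port setting. A tiny sketch of that fallback behaviour; the struct shape is an assumption for illustration, not the project's actual config type.

```go
package main

import "fmt"

// callbackAddr mirrors the dtm.callbackAddr.stock block in stock.yml (assumed shape).
type callbackAddr struct {
	Scheme string
	Host   string
	Port   int
}

// baseURL falls back to the HTTP server port when Port is 0,
// as described by the comment in the config file.
func (c callbackAddr) baseURL(httpPort int) string {
	port := c.Port
	if port == 0 {
		port = httpPort
	}
	return fmt.Sprintf("%s://%s:%d", c.Scheme, c.Host, port)
}

func main() {
	addr := callbackAddr{Scheme: "http", Host: "192.168.3.90", Port: 0}
	fmt.Println(addr.baseURL(8080)) // http://192.168.3.90:8080
}
```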
+ +### Automated deployment service + +> ./deploy.sh diff --git a/_13_sponge-dtm-cache/http/deployments/binary/deploy.sh b/_13_sponge-dtm-cache/http/deployments/binary/deploy.sh new file mode 100644 index 0000000..f2d897e --- /dev/null +++ b/_13_sponge-dtm-cache/http/deployments/binary/deploy.sh @@ -0,0 +1,33 @@ +#!/bin/bash + +serviceName="stock" + +function checkResult() { + result=$1 + if [ ${result} -ne 0 ]; then + exit ${result} + fi +} + +# determine if the startup service script run.sh exists +runFile="~/app/${serviceName}/run.sh" +if [ ! -f "$runFile" ]; then + # if it does not exist, copy the entire directory + mkdir -p ~/app + cp -rf /tmp/${serviceName}-binary ~/app/ + checkResult $? + rm -rf /tmp/${serviceName}-binary* +else + # replace only the binary file if it exists + cp -f ${serviceName}-binary/${serviceName} ~/app/${serviceName}-binary/${serviceName} + checkResult $? + rm -rf /tmp/${serviceName}-binary* +fi + +# running service +cd ~/app/${serviceName}-binary +chmod +x run.sh +./run.sh +checkResult $? + +echo "server directory is ~/app/${serviceName}-binary" diff --git a/_13_sponge-dtm-cache/http/deployments/binary/run.sh b/_13_sponge-dtm-cache/http/deployments/binary/run.sh new file mode 100644 index 0000000..f2f3ba9 --- /dev/null +++ b/_13_sponge-dtm-cache/http/deployments/binary/run.sh @@ -0,0 +1,46 @@ +#!/bin/bash + +serviceName="stock" +cmdStr="./${serviceName} -c configs/${serviceName}.yml" + +chmod +x ./${serviceName} + +stopService(){ + NAME=$1 + + ID=`ps -ef | grep "$NAME" | grep -v "$0" | grep -v "grep" | awk '{print $2}'` + if [ -n "$ID" ]; then + for id in $ID + do + kill -9 $id + echo "Stopped ${NAME} service successfully, process ID=${ID}" + done + fi +} + +startService() { + NAME=$1 + + nohup ${cmdStr} > ${serviceName}.log 2>&1 & + sleep 1 + + ID=`ps -ef | grep "$NAME" | grep -v "$0" | grep -v "grep" | awk '{print $2}'` + if [ -n "$ID" ]; then + echo "Start the ${NAME} service ...... process ID=${ID}" + else + echo "Failed to start ${NAME} service" + return 1 + fi + return 0 +} + + +stopService ${serviceName} +if [ "$1"x != "stop"x ] ;then + sleep 1 + startService ${serviceName} + exit $? + echo "" +else + echo "Service ${serviceName} has stopped" +fi diff --git a/_13_sponge-dtm-cache/http/deployments/docker-compose/README.md b/_13_sponge-dtm-cache/http/deployments/docker-compose/README.md new file mode 100644 index 0000000..f76e87d --- /dev/null +++ b/_13_sponge-dtm-cache/http/deployments/docker-compose/README.md @@ -0,0 +1,12 @@ + +copy the configuration file to the configs directory before starting the service. 
+ +``` +├── configs +│ └── stock.yml +└── docker-compose.yml +``` + +running service: + +> docker-compose up -d diff --git a/_13_sponge-dtm-cache/http/deployments/docker-compose/docker-compose.yml b/_13_sponge-dtm-cache/http/deployments/docker-compose/docker-compose.yml new file mode 100644 index 0000000..e0eb071 --- /dev/null +++ b/_13_sponge-dtm-cache/http/deployments/docker-compose/docker-compose.yml @@ -0,0 +1,19 @@ +version: "3.7" + +services: + stock: + image: eshop/stock:latest + container_name: stock + restart: always + command: ["./stock", "-c", "/app/configs/stock.yml"] + volumes: + - $PWD/configs:/app/configs + ports: + - "8080:8080" # http port + healthcheck: + test: ["CMD", "curl", "-f", "http://localhost:8080/health"] # http health check, note: mirror must contain curl command + + interval: 10s # interval time + timeout: 5s # timeout time + retries: 3 # number of retries + start_period: 10s # how long after start-up does the check begin diff --git a/_13_sponge-dtm-cache/http/deployments/kubernetes/README.md b/_13_sponge-dtm-cache/http/deployments/kubernetes/README.md new file mode 100644 index 0000000..b4ddaa8 --- /dev/null +++ b/_13_sponge-dtm-cache/http/deployments/kubernetes/README.md @@ -0,0 +1,32 @@ +Before deploying the service to k8s, create a Secret that pulls image permissions for k8s in a docker host that is already logged into the image repository, with the following command. + +```bash +kubectl create secret generic docker-auth-secret \ + --from-file=.dockerconfigjson=/root/.docker/config.json \ + --type=kubernetes.io/dockerconfigjson +``` + +
+
+Run the server:
+
+```bash
+cd deployments
+
+kubectl apply -f ./*namespace.yml
+
+kubectl apply -f ./
+```
+
+View the start-up status:
+
+> kubectl get all -n eshop
+
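Editor's note: once the port-forward from the next step is running, a quick way to double-check the pod from the local machine is to hit the /health path, which matches the readiness/liveness probes in stock-deployment.yml; the local port 8080 is an assumption based on that port-forward example.

```go
package main

import (
	"fmt"
	"net/http"
)

func main() {
	// assumes the kubectl port-forward from the README step below is already serving on :8080
	resp, err := http.Get("http://localhost:8080/health")
	if err != nil {
		fmt.Println("health check failed:", err)
		return
	}
	defer resp.Body.Close()
	fmt.Println("health status:", resp.Status)
}
```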
+ +simple test of http port + +```bash +# mapping to the http port of the service on the local port +kubectl port-forward --address=0.0.0.0 service/ 8080:8080 -n +``` diff --git a/_13_sponge-dtm-cache/http/deployments/kubernetes/eshop-namespace.yml b/_13_sponge-dtm-cache/http/deployments/kubernetes/eshop-namespace.yml new file mode 100644 index 0000000..eba474f --- /dev/null +++ b/_13_sponge-dtm-cache/http/deployments/kubernetes/eshop-namespace.yml @@ -0,0 +1,4 @@ +apiVersion: v1 +kind: Namespace +metadata: + name: eshop diff --git a/_13_sponge-dtm-cache/http/deployments/kubernetes/stock-configmap.yml b/_13_sponge-dtm-cache/http/deployments/kubernetes/stock-configmap.yml new file mode 100644 index 0000000..fe64af3 --- /dev/null +++ b/_13_sponge-dtm-cache/http/deployments/kubernetes/stock-configmap.yml @@ -0,0 +1,90 @@ +kind: ConfigMap +apiVersion: v1 +metadata: + name: stock-config + namespace: eshop +data: + stock.yml: |- + # Generate the go struct command: sponge config --server-dir=./serverDir + + # app settings + app: + name: "stock" # server name + env: "dev" # runtime environment, dev: development environment, prod: production environment, test: test environment + version: "v0.0.0" + host: "127.0.0.1" # domain or ip, for service registration + enableStat: true # whether to turn on printing statistics, true:enable, false:disable + enableMetrics: true # whether to turn on indicator collection, true:enable, false:disable + enableHTTPProfile: false # whether to turn on performance analysis, true:enable, false:disable + enableLimit: false # whether to turn on rate limiting (adaptive), true:on, false:off + enableCircuitBreaker: false # whether to turn on circuit breaker(adaptive), true:on, false:off + enableTrace: false # whether to turn on trace, true:enable, false:disable, if true jaeger configuration must be set + tracingSamplingRate: 1.0 # tracing sampling rate, between 0 and 1, 0 means no sampling, 1 means sampling all links + registryDiscoveryType: "" # registry and discovery types: consul, etcd, nacos, if empty, registration and discovery are not used + cacheType: "" # cache type, if empty, the cache is not used, support for "memory" and "redis", if set to redis, must set redis configuration + + + # http server settings + http: + port: 8080 # listen port + timeout: 0 # request timeout, unit(second), if 0 means not set, if greater than 0 means set timeout, if enableHTTPProfile is true, it needs to set 0 or greater than 60s + + + + # logger settings + logger: + level: "info" # output log levels debug, info, warn, error, default is debug + format: "console" # output format, console or json, default is console + isSave: false # false:output to terminal, true:output to file, default is false + #logFileConfig: # Effective when isSave=true + #filename: "out.log" # File name (default is out.log) + #maxSize: 20 # Maximum file size (MB, default is 10MB) + #maxBackups: 50 # Maximum number of old files to retain (default is 100) + #maxAge: 15 # Maximum number of days to retain old files (default is 30 days) + #isCompression: true # Whether to compress/archive old files (default is false) + + + # set database configuration. reference-db-config-url + database: + driver: "mysql" # database driver + # mysql settings + mysql: + # dsn format, :@(:)/?[k=v& ......] 
+ dsn: "root:123456@(192.168.3.37:3306)/account?parseTime=true&loc=Local&charset=utf8,utf8mb4" + enableLog: true # whether to turn on printing of all logs + maxIdleConns: 10 # set the maximum number of connections in the idle connection pool + maxOpenConns: 100 # set the maximum number of open database connections + connMaxLifetime: 30 # sets the maximum time for which the connection can be reused, in minutes + + + + # redis settings + redis: + # dsn format, [user]:@127.0.0.1:6379/[db], the default user is default, redis version 6.0 and above only supports user. + dsn: "default:123456@192.168.3.37:6379/0" + dialTimeout: 10 # connection timeout, unit(second) + readTimeout: 2 # read timeout, unit(second) + writeTimeout: 2 # write timeout, unit(second) + + + # jaeger settings + jaeger: + agentHost: "192.168.3.37" + agentPort: 6831 + + + # consul settings + consul: + addr: "192.168.3.37:8500" + + + # etcd settings + etcd: + addrs: ["192.168.3.37:2379"] + + + # nacos settings, used in service registration discovery + nacosRd: + ipAddr: "192.168.3.37" + port: 8848 + namespaceID: "3454d2b5-2455-4d0e-bf6d-e033b086bb4c" # namespace id diff --git a/_13_sponge-dtm-cache/http/deployments/kubernetes/stock-deployment.yml b/_13_sponge-dtm-cache/http/deployments/kubernetes/stock-deployment.yml new file mode 100644 index 0000000..8c44210 --- /dev/null +++ b/_13_sponge-dtm-cache/http/deployments/kubernetes/stock-deployment.yml @@ -0,0 +1,63 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: stock-dm + namespace: eshop +spec: + replicas: 1 + selector: + matchLabels: + app: stock + template: + metadata: + name: stock-pod + labels: + app: stock + spec: + containers: + - name: stock + image: /eshop/stock:latest + # If using a local image, use Never, default is Always + #imagePullPolicy: Never + command: ["./stock", "-c", "/app/configs/stock.yml"] + resources: + requests: + cpu: 10m + memory: 10Mi + limits: + cpu: 1000m + memory: 1000Mi + volumeMounts: + - name: stock-vl + mountPath: /app/configs/ + readOnly: true + + ports: + - name: http-port + containerPort: 8080 + readinessProbe: + httpGet: + port: http-port + path: /health + initialDelaySeconds: 10 + timeoutSeconds: 2 + periodSeconds: 10 + successThreshold: 1 + failureThreshold: 3 + livenessProbe: + httpGet: + port: http-port + path: /health + + initialDelaySeconds: 10 + timeoutSeconds: 2 + periodSeconds: 10 + successThreshold: 1 + failureThreshold: 3 + # todo for private repositories, you need to create a secret (here docker-auth-secret) to store the account and password to log into docker + imagePullSecrets: + - name: docker-auth-secret + volumes: + - name: stock-vl + configMap: + name: stock-config diff --git a/_13_sponge-dtm-cache/http/deployments/kubernetes/stock-svc.yml b/_13_sponge-dtm-cache/http/deployments/kubernetes/stock-svc.yml new file mode 100644 index 0000000..8d9dbed --- /dev/null +++ b/_13_sponge-dtm-cache/http/deployments/kubernetes/stock-svc.yml @@ -0,0 +1,14 @@ +apiVersion: v1 +kind: Service +metadata: + name: stock-svc + namespace: eshop +spec: + selector: + app: stock + type: ClusterIP + ports: + - name: stock-svc-http-port + port: 8080 + targetPort: 8080 + diff --git a/_13_sponge-dtm-cache/http/docs/apis.go b/_13_sponge-dtm-cache/http/docs/apis.go new file mode 100644 index 0000000..3c749f0 --- /dev/null +++ b/_13_sponge-dtm-cache/http/docs/apis.go @@ -0,0 +1,21 @@ +package docs + +import ( + "embed" + "fmt" +) + +//go:embed apis.swagger.json +var jsonFile embed.FS + +// ApiDocs swagger json file content +var ApiDocs = 
[]byte(``) + +func init() { + data, err := jsonFile.ReadFile("apis.swagger.json") + if err != nil { + fmt.Printf("\nReadFile error: %v\n\n", err) + return + } + ApiDocs = data +} diff --git a/_13_sponge-dtm-cache/http/docs/apis.swagger.json b/_13_sponge-dtm-cache/http/docs/apis.swagger.json new file mode 100644 index 0000000..a66fda3 --- /dev/null +++ b/_13_sponge-dtm-cache/http/docs/apis.swagger.json @@ -0,0 +1,849 @@ +{ + "swagger": "2.0", + "info": { + "title": "stock api docs", + "version": "2.0" + }, + "host": "localhost:8080", + "schemes": [ + "http", + "https" + ], + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "paths": { + "/api/v1/stock": { + "post": { + "summary": "create stock", + "description": "submit information to create stock", + "operationId": "stock_Create", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "$ref": "#/definitions/v1CreateStockReply" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/rpcStatus" + } + } + }, + "parameters": [ + { + "name": "body", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/v1CreateStockRequest" + } + } + ], + "tags": [ + "stock" + ] + } + }, + "/api/v1/stock/deleteCache": { + "post": { + "summary": "删除缓存", + "operationId": "callback_DeleteCache", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "$ref": "#/definitions/v1DeleteCacheReply" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/rpcStatus" + } + } + }, + "parameters": [ + { + "name": "body", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/v1DeleteCacheRequest" + } + } + ], + "tags": [ + "callback" + ] + } + }, + "/api/v1/stock/downgradeBranch": { + "post": { + "summary": "升降级中的强一致性分支", + "description": "升降级中的强一致性分支", + "operationId": "downgrade_DowngradeBranch", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "$ref": "#/definitions/v1DowngradeBranchReply" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/rpcStatus" + } + } + }, + "parameters": [ + { + "name": "body", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/v1DowngradeBranchRequest" + } + } + ], + "tags": [ + "case 4: 升降级中的强一致性" + ] + } + }, + "/api/v1/stock/list": { + "post": { + "summary": "list of stocks by parameters", + "description": "list of stocks by paging and conditions", + "operationId": "stock_List", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "$ref": "#/definitions/v1ListStockReply" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/rpcStatus" + } + } + }, + "parameters": [ + { + "name": "body", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/v1ListStockRequest" + } + } + ], + "tags": [ + "stock" + ] + } + }, + "/api/v1/stock/queryPrepared": { + "get": { + "summary": "反查数据", + "operationId": "callback_QueryPrepared", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "$ref": "#/definitions/v1QueryPreparedReply" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/rpcStatus" + } + } + }, + "tags": [ + "callback" + ] + } + }, + "/api/v1/stock/{id}": { + "get": { + 
"summary": "get stock detail", + "description": "get stock detail by id", + "operationId": "stock_GetByID", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "$ref": "#/definitions/v1GetStockByIDReply" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/rpcStatus" + } + } + }, + "parameters": [ + { + "name": "id", + "in": "path", + "required": true, + "type": "integer", + "format": "uint64" + } + ], + "tags": [ + "stock" + ] + }, + "delete": { + "summary": "delete stock", + "description": "delete stock by id", + "operationId": "stock_DeleteByID", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "$ref": "#/definitions/v1DeleteStockByIDReply" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/rpcStatus" + } + } + }, + "parameters": [ + { + "name": "id", + "in": "path", + "required": true, + "type": "integer", + "format": "uint64" + } + ], + "tags": [ + "stock" + ] + }, + "put": { + "summary": "update stock", + "description": "update stock by id", + "operationId": "stock_UpdateByID", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "$ref": "#/definitions/v1UpdateStockByIDReply" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/rpcStatus" + } + } + }, + "parameters": [ + { + "name": "id", + "in": "path", + "required": true, + "type": "integer", + "format": "uint64" + }, + { + "name": "body", + "in": "body", + "required": true, + "schema": { + "type": "object", + "properties": { + "productID": { + "type": "integer", + "format": "uint64" + }, + "stock": { + "type": "integer", + "format": "int64" + } + } + } + } + ], + "tags": [ + "stock" + ] + } + }, + "/api/v1/stock/{id}/atomic": { + "get": { + "summary": "查询数据", + "description": "查询数据", + "operationId": "atomic_Query", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "$ref": "#/definitions/v1QueryAtomicReply" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/rpcStatus" + } + } + }, + "parameters": [ + { + "name": "id", + "in": "path", + "required": true, + "type": "integer", + "format": "uint64" + } + ], + "tags": [ + "case 2: 原子性" + ] + }, + "put": { + "summary": "更新数据", + "description": "更新数据,DB和缓存原子性", + "operationId": "atomic_Update", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "$ref": "#/definitions/v1UpdateAtomicRequestReply" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/rpcStatus" + } + } + }, + "parameters": [ + { + "name": "id", + "in": "path", + "required": true, + "type": "integer", + "format": "uint64" + }, + { + "name": "body", + "in": "body", + "required": true, + "schema": { + "type": "object", + "properties": { + "stock": { + "type": "integer", + "format": "int64" + } + } + } + } + ], + "tags": [ + "case 2: 原子性" + ] + } + }, + "/api/v1/stock/{id}/downgrade": { + "get": { + "summary": "查询数据", + "description": "查询数据", + "operationId": "downgrade_Query", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "$ref": "#/definitions/v1QueryDowngradeReply" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/rpcStatus" + } + } + }, + 
"parameters": [ + { + "name": "id", + "in": "path", + "required": true, + "type": "integer", + "format": "uint64" + } + ], + "tags": [ + "case 4: 升降级中的强一致性" + ] + }, + "put": { + "summary": "更新数据", + "description": "更新数据,升降级中的DB和缓存强一致性", + "operationId": "downgrade_Update", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "$ref": "#/definitions/v1UpdateDowngradeRequestReply" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/rpcStatus" + } + } + }, + "parameters": [ + { + "name": "id", + "in": "path", + "required": true, + "type": "integer", + "format": "uint64" + }, + { + "name": "body", + "in": "body", + "required": true, + "schema": { + "type": "object", + "properties": { + "stock": { + "type": "integer", + "format": "int64" + } + } + } + } + ], + "tags": [ + "case 4: 升降级中的强一致性" + ] + } + }, + "/api/v1/stock/{id}/final": { + "get": { + "summary": "查询数据", + "description": "查询数据", + "operationId": "final_Query", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "$ref": "#/definitions/v1QueryFinalReply" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/rpcStatus" + } + } + }, + "parameters": [ + { + "name": "id", + "in": "path", + "required": true, + "type": "integer", + "format": "uint64" + } + ], + "tags": [ + "case 1: 最终一致性" + ] + }, + "put": { + "summary": "更新数据", + "description": "更新数据,DB和缓存最终一致性", + "operationId": "final_Update", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "$ref": "#/definitions/v1UpdateFinalRequestReply" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/rpcStatus" + } + } + }, + "parameters": [ + { + "name": "id", + "in": "path", + "required": true, + "type": "integer", + "format": "uint64" + }, + { + "name": "body", + "in": "body", + "required": true, + "schema": { + "type": "object", + "properties": { + "stock": { + "type": "integer", + "format": "int64" + } + } + } + } + ], + "tags": [ + "case 1: 最终一致性" + ] + } + }, + "/api/v1/stock/{id}/strong": { + "get": { + "summary": "查询数据", + "description": "查询数据", + "operationId": "strong_Query", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "$ref": "#/definitions/v1QueryStrongReply" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/rpcStatus" + } + } + }, + "parameters": [ + { + "name": "id", + "in": "path", + "required": true, + "type": "integer", + "format": "uint64" + } + ], + "tags": [ + "case 3: 强一致性" + ] + }, + "put": { + "summary": "更新数据", + "description": "更新数据,DB和缓存强一致性", + "operationId": "strong_Update", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "$ref": "#/definitions/v1UpdateStrongRequestReply" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/rpcStatus" + } + } + }, + "parameters": [ + { + "name": "id", + "in": "path", + "required": true, + "type": "integer", + "format": "uint64" + }, + { + "name": "body", + "in": "body", + "required": true, + "schema": { + "type": "object", + "properties": { + "stock": { + "type": "integer", + "format": "int64" + } + } + } + } + ], + "tags": [ + "case 3: 强一致性" + ] + } + } + }, + "definitions": { + "protobufAny": { + "type": "object", + "properties": { + "@type": { + 
"type": "string" + } + }, + "additionalProperties": {} + }, + "rpcStatus": { + "type": "object", + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "details": { + "type": "array", + "items": { + "$ref": "#/definitions/protobufAny" + } + } + } + }, + "typesColumn": { + "type": "object", + "properties": { + "name": { + "type": "string" + }, + "exp": { + "type": "string" + }, + "value": { + "type": "string" + }, + "logic": { + "type": "string" + } + } + }, + "typesParams": { + "type": "object", + "properties": { + "page": { + "type": "integer", + "format": "int32" + }, + "limit": { + "type": "integer", + "format": "int32" + }, + "sort": { + "type": "string" + }, + "columns": { + "type": "array", + "items": { + "$ref": "#/definitions/typesColumn" + } + } + } + }, + "v1CreateStockReply": { + "type": "object", + "properties": { + "id": { + "type": "integer", + "format": "uint64" + } + } + }, + "v1CreateStockRequest": { + "type": "object", + "properties": { + "productID": { + "type": "integer", + "format": "uint64" + }, + "stock": { + "type": "integer", + "format": "int64" + } + } + }, + "v1DeleteCacheReply": { + "type": "object" + }, + "v1DeleteCacheRequest": { + "type": "object", + "properties": { + "key": { + "type": "string" + } + } + }, + "v1DeleteStockByIDReply": { + "type": "object" + }, + "v1DowngradeBranchReply": { + "type": "object" + }, + "v1DowngradeBranchRequest": { + "type": "object", + "properties": { + "gid": { + "type": "string" + }, + "key": { + "type": "string" + }, + "id": { + "type": "integer", + "format": "uint64" + }, + "stock": { + "type": "integer", + "format": "int64" + } + } + }, + "v1GetStockByIDReply": { + "type": "object", + "properties": { + "stock": { + "$ref": "#/definitions/v1Stock" + } + } + }, + "v1ListStockReply": { + "type": "object", + "properties": { + "total": { + "type": "integer", + "format": "int64" + }, + "stocks": { + "type": "array", + "items": { + "$ref": "#/definitions/v1Stock" + } + } + } + }, + "v1ListStockRequest": { + "type": "object", + "properties": { + "params": { + "$ref": "#/definitions/typesParams" + } + } + }, + "v1QueryAtomicReply": { + "type": "object", + "properties": { + "stock": { + "type": "integer", + "format": "int64" + } + } + }, + "v1QueryDowngradeReply": { + "type": "object", + "properties": { + "id": { + "type": "integer", + "format": "uint64" + }, + "stock": { + "type": "integer", + "format": "int64" + } + } + }, + "v1QueryFinalReply": { + "type": "object", + "properties": { + "stock": { + "type": "integer", + "format": "int64" + } + } + }, + "v1QueryPreparedReply": { + "type": "object" + }, + "v1QueryStrongReply": { + "type": "object", + "properties": { + "stock": { + "type": "integer", + "format": "int64" + } + } + }, + "v1Stock": { + "type": "object", + "properties": { + "id": { + "type": "integer", + "format": "uint64" + }, + "productID": { + "type": "integer", + "format": "uint64" + }, + "stock": { + "type": "integer", + "format": "int64" + }, + "createdAt": { + "type": "string" + }, + "updatedAt": { + "type": "string" + } + } + }, + "v1UpdateAtomicRequestReply": { + "type": "object" + }, + "v1UpdateDowngradeRequestReply": { + "type": "object" + }, + "v1UpdateFinalRequestReply": { + "type": "object" + }, + "v1UpdateStockByIDReply": { + "type": "object" + }, + "v1UpdateStrongRequestReply": { + "type": "object" + } + }, + "securityDefinitions": { + "BearerAuth": { + "type": "apiKey", + "description": "Type Bearer your-jwt-token to Value", + "name": 
"Authorization", + "in": "header" + } + } +} diff --git a/_13_sponge-dtm-cache/http/docs/gen.info b/_13_sponge-dtm-cache/http/docs/gen.info new file mode 100644 index 0000000..d34edd3 --- /dev/null +++ b/_13_sponge-dtm-cache/http/docs/gen.info @@ -0,0 +1 @@ +stock,stock,false \ No newline at end of file diff --git a/_13_sponge-dtm-cache/http/go.mod b/_13_sponge-dtm-cache/http/go.mod new file mode 100644 index 0000000..5e82f19 --- /dev/null +++ b/_13_sponge-dtm-cache/http/go.mod @@ -0,0 +1,178 @@ +module stock + +go 1.21 + +require ( + github.com/DATA-DOG/go-sqlmock v1.5.0 + github.com/dtm-labs/dtmcli v1.15.0 + github.com/dtm-labs/rockscache v0.1.1 + github.com/gin-gonic/gin v1.9.1 + github.com/jinzhu/copier v0.3.5 + github.com/redis/go-redis/v9 v9.6.1 + github.com/stretchr/testify v1.9.0 + github.com/zhufuyi/sponge v1.10.1 + go.uber.org/zap v1.24.0 + golang.org/x/sync v0.8.0 + google.golang.org/protobuf v1.34.2 + gorm.io/gorm v1.25.5 +) + +require ( + github.com/KyleBanks/depth v1.2.1 // indirect + github.com/PuerkitoBio/purell v1.1.1 // indirect + github.com/PuerkitoBio/urlesc v0.0.0-20170810143723-de5bf2ad4578 // indirect + github.com/alibabacloud-go/debug v0.0.0-20190504072949-9472017b5c68 // indirect + github.com/alibabacloud-go/tea v1.1.17 // indirect + github.com/alibabacloud-go/tea-utils v1.4.4 // indirect + github.com/alicebob/gopher-json v0.0.0-20200520072559-a9ecdc9d1d3a // indirect + github.com/alicebob/miniredis/v2 v2.23.0 // indirect + github.com/aliyun/alibaba-cloud-sdk-go v1.61.1800 // indirect + github.com/aliyun/alibabacloud-dkms-gcs-go-sdk v0.2.2 // indirect + github.com/aliyun/alibabacloud-dkms-transfer-go-sdk v0.1.7 // indirect + github.com/armon/go-metrics v0.3.10 // indirect + github.com/beorn7/perks v1.0.1 // indirect + github.com/buger/jsonparser v1.1.1 // indirect + github.com/bytedance/sonic v1.9.1 // indirect + github.com/cespare/xxhash/v2 v2.2.0 // indirect + github.com/chenzhuoyu/base64x v0.0.0-20221115062448-fe3a3abad311 // indirect + github.com/coreos/go-semver v0.3.0 // indirect + github.com/coreos/go-systemd/v22 v22.3.2 // indirect + github.com/davecgh/go-spew v1.1.1 // indirect + github.com/dgraph-io/ristretto v0.1.1 // indirect + github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f // indirect + github.com/dtm-labs/dtmdriver v0.0.3 // indirect + github.com/dustin/go-humanize v1.0.1 // indirect + github.com/fatih/color v1.13.0 // indirect + github.com/felixge/fgprof v0.9.3 // indirect + github.com/fsnotify/fsnotify v1.5.4 // indirect + github.com/gabriel-vasile/mimetype v1.4.2 // indirect + github.com/gin-contrib/cors v1.3.1 // indirect + github.com/gin-contrib/sse v0.1.0 // indirect + github.com/go-logr/logr v1.4.1 // indirect + github.com/go-logr/stdr v1.2.2 // indirect + github.com/go-ole/go-ole v1.2.6 // indirect + github.com/go-openapi/jsonpointer v0.19.5 // indirect + github.com/go-openapi/jsonreference v0.19.6 // indirect + github.com/go-openapi/spec v0.20.4 // indirect + github.com/go-openapi/swag v0.19.15 // indirect + github.com/go-playground/locales v0.14.1 // indirect + github.com/go-playground/universal-translator v0.18.1 // indirect + github.com/go-playground/validator/v10 v10.14.0 // indirect + github.com/go-redis/redis/v8 v8.11.4 // indirect + github.com/go-resty/resty/v2 v2.6.0 // indirect + github.com/go-sql-driver/mysql v1.7.0 // indirect + github.com/goccy/go-json v0.10.2 // indirect + github.com/gogo/protobuf v1.3.2 // indirect + github.com/golang-jwt/jwt/v5 v5.0.0 // indirect + github.com/golang/glog v1.1.2 // indirect 
+ github.com/golang/mock v1.6.0 // indirect + github.com/golang/protobuf v1.5.4 // indirect + github.com/golang/snappy v0.0.4 // indirect + github.com/google/pprof v0.0.0-20211214055906-6f57359322fd // indirect + github.com/google/uuid v1.4.0 // indirect + github.com/hashicorp/consul/api v1.12.0 // indirect + github.com/hashicorp/go-cleanhttp v0.5.2 // indirect + github.com/hashicorp/go-hclog v1.2.0 // indirect + github.com/hashicorp/go-immutable-radix v1.3.1 // indirect + github.com/hashicorp/go-rootcerts v1.0.2 // indirect + github.com/hashicorp/golang-lru v0.5.4 // indirect + github.com/hashicorp/hcl v1.0.0 // indirect + github.com/hashicorp/serf v0.9.7 // indirect + github.com/huandu/xstrings v1.4.0 // indirect + github.com/jackc/pgpassfile v1.0.0 // indirect + github.com/jackc/pgservicefile v0.0.0-20221227161230-091c0ba34f0a // indirect + github.com/jackc/pgx/v5 v5.6.0 // indirect + github.com/jackc/puddle/v2 v2.2.1 // indirect + github.com/jinzhu/inflection v1.0.0 // indirect + github.com/jinzhu/now v1.1.5 // indirect + github.com/jmespath/go-jmespath v0.0.0-20180206201540-c2b33e8439af // indirect + github.com/josharian/intern v1.0.0 // indirect + github.com/json-iterator/go v1.1.12 // indirect + github.com/klauspost/compress v1.17.8 // indirect + github.com/klauspost/cpuid/v2 v2.2.4 // indirect + github.com/leodido/go-urn v1.2.4 // indirect + github.com/lithammer/shortuuid v3.0.0+incompatible // indirect + github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0 // indirect + github.com/magiconair/properties v1.8.6 // indirect + github.com/mailru/easyjson v0.7.6 // indirect + github.com/mattn/go-colorable v0.1.12 // indirect + github.com/mattn/go-isatty v0.0.19 // indirect + github.com/mattn/go-sqlite3 v1.14.17 // indirect + github.com/matttproud/golang_protobuf_extensions v1.0.1 // indirect + github.com/mitchellh/go-homedir v1.1.0 // indirect + github.com/mitchellh/mapstructure v1.5.0 // indirect + github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect + github.com/modern-go/reflect2 v1.0.2 // indirect + github.com/montanaflynn/stats v0.0.0-20171201202039-1bf9dbcd8cbe // indirect + github.com/nacos-group/nacos-sdk-go/v2 v2.2.7 // indirect + github.com/natefinch/lumberjack v2.0.0+incompatible // indirect + github.com/pelletier/go-toml v1.9.5 // indirect + github.com/pelletier/go-toml/v2 v2.0.8 // indirect + github.com/pkg/errors v0.9.1 // indirect + github.com/pmezard/go-difflib v1.0.0 // indirect + github.com/power-devops/perfstat v0.0.0-20210106213030-5aafc221ea8c // indirect + github.com/prometheus/client_golang v1.13.0 // indirect + github.com/prometheus/client_model v0.4.0 // indirect + github.com/prometheus/common v0.37.0 // indirect + github.com/prometheus/procfs v0.8.0 // indirect + github.com/redis/go-redis/extra/rediscmd/v9 v9.5.3 // indirect + github.com/redis/go-redis/extra/redisotel/v9 v9.5.3 // indirect + github.com/shirou/gopsutil/v3 v3.23.8 // indirect + github.com/shoenig/go-m1cpu v0.1.6 // indirect + github.com/spf13/afero v1.9.2 // indirect + github.com/spf13/cast v1.5.0 // indirect + github.com/spf13/jwalterweatherman v1.1.0 // indirect + github.com/spf13/pflag v1.0.5 // indirect + github.com/spf13/viper v1.12.0 // indirect + github.com/subosito/gotenv v1.3.0 // indirect + github.com/swaggo/files v0.0.0-20220728132757-551d4a08d97a // indirect + github.com/swaggo/gin-swagger v1.5.2 // indirect + github.com/swaggo/swag v1.8.12 // indirect + github.com/tklauser/go-sysconf v0.3.12 // indirect + github.com/tklauser/numcpus v0.6.1 // 
indirect + github.com/twitchyliquid64/golang-asm v0.15.1 // indirect + github.com/ugorji/go/codec v1.2.11 // indirect + github.com/uptrace/opentelemetry-go-extra/otelgorm v0.2.3 // indirect + github.com/uptrace/opentelemetry-go-extra/otelsql v0.2.3 // indirect + github.com/vmihailenco/msgpack v4.0.4+incompatible // indirect + github.com/xdg-go/pbkdf2 v1.0.0 // indirect + github.com/xdg-go/scram v1.1.2 // indirect + github.com/xdg-go/stringprep v1.0.4 // indirect + github.com/youmark/pkcs8 v0.0.0-20181117223130-1be2e3e5546d // indirect + github.com/yuin/gopher-lua v0.0.0-20210529063254-f4c35e4016d9 // indirect + github.com/yusufpapurcu/wmi v1.2.3 // indirect + go.etcd.io/etcd/api/v3 v3.5.4 // indirect + go.etcd.io/etcd/client/pkg/v3 v3.5.4 // indirect + go.etcd.io/etcd/client/v3 v3.5.4 // indirect + go.mongodb.org/mongo-driver v1.14.0 // indirect + go.opentelemetry.io/contrib v1.24.0 // indirect + go.opentelemetry.io/otel v1.24.0 // indirect + go.opentelemetry.io/otel/exporters/jaeger v1.17.0 // indirect + go.opentelemetry.io/otel/exporters/stdout/stdouttrace v1.24.0 // indirect + go.opentelemetry.io/otel/metric v1.24.0 // indirect + go.opentelemetry.io/otel/sdk v1.24.0 // indirect + go.opentelemetry.io/otel/trace v1.24.0 // indirect + go.uber.org/atomic v1.7.0 // indirect + go.uber.org/goleak v1.2.1 // indirect + go.uber.org/multierr v1.9.0 // indirect + golang.org/x/arch v0.3.0 // indirect + golang.org/x/crypto v0.26.0 // indirect + golang.org/x/net v0.25.0 // indirect + golang.org/x/sys v0.23.0 // indirect + golang.org/x/text v0.17.0 // indirect + golang.org/x/time v0.1.0 // indirect + golang.org/x/tools v0.21.1-0.20240508182429-e35e4ccd0d2d // indirect + google.golang.org/appengine v1.6.8 // indirect + google.golang.org/genproto v0.0.0-20231106174013-bbf56f31fb17 // indirect + google.golang.org/genproto/googleapis/api v0.0.0-20231106174013-bbf56f31fb17 // indirect + google.golang.org/genproto/googleapis/rpc v0.0.0-20231106174013-bbf56f31fb17 // indirect + google.golang.org/grpc v1.61.0 // indirect + gopkg.in/ini.v1 v1.66.4 // indirect + gopkg.in/natefinch/lumberjack.v2 v2.0.0 // indirect + gopkg.in/yaml.v2 v2.4.0 // indirect + gopkg.in/yaml.v3 v3.0.1 // indirect + gorm.io/driver/mysql v1.5.2 // indirect + gorm.io/driver/postgres v1.5.4 // indirect + gorm.io/driver/sqlite v1.5.4 // indirect + gorm.io/plugin/dbresolver v1.5.1 // indirect +) diff --git a/_13_sponge-dtm-cache/http/go.sum b/_13_sponge-dtm-cache/http/go.sum new file mode 100644 index 0000000..7d59d31 --- /dev/null +++ b/_13_sponge-dtm-cache/http/go.sum @@ -0,0 +1,1156 @@ +cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= +cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= +cloud.google.com/go v0.38.0/go.mod h1:990N+gfupTy94rShfmMCWGDn0LpTmnzTp2qbd1dvSRU= +cloud.google.com/go v0.44.1/go.mod h1:iSa0KzasP4Uvy3f1mN/7PiObzGgflwredwwASm/v6AU= +cloud.google.com/go v0.44.2/go.mod h1:60680Gw3Yr4ikxnPRS/oxxkBccT6SA1yMk63TGekxKY= +cloud.google.com/go v0.44.3/go.mod h1:60680Gw3Yr4ikxnPRS/oxxkBccT6SA1yMk63TGekxKY= +cloud.google.com/go v0.45.1/go.mod h1:RpBamKRgapWJb87xiFSdk4g1CME7QZg3uwTez+TSTjc= +cloud.google.com/go v0.46.3/go.mod h1:a6bKKbmY7er1mI7TEI4lsAkts/mkhTSZK8w33B4RAg0= +cloud.google.com/go v0.50.0/go.mod h1:r9sluTvynVuxRIOHXQEHMFffphuXHOMZMycpNR5e6To= +cloud.google.com/go v0.52.0/go.mod h1:pXajvRH/6o3+F9jDHZWQ5PbGhn+o8w9qiu/CffaVdO4= +cloud.google.com/go v0.53.0/go.mod h1:fp/UouUEsRkN6ryDKNW/Upv/JBKnv6WDthjR6+vze6M= +cloud.google.com/go v0.54.0/go.mod 
h1:1rq2OEkV3YMf6n/9ZvGWI3GWw0VoqH/1x2nd8Is/bPc= +cloud.google.com/go v0.56.0/go.mod h1:jr7tqZxxKOVYizybht9+26Z/gUq7tiRzu+ACVAMbKVk= +cloud.google.com/go v0.57.0/go.mod h1:oXiQ6Rzq3RAkkY7N6t3TcE6jE+CIBBbA36lwQ1JyzZs= +cloud.google.com/go v0.62.0/go.mod h1:jmCYTdRCQuc1PHIIJ/maLInMho30T/Y0M4hTdTShOYc= +cloud.google.com/go v0.65.0/go.mod h1:O5N8zS7uWy9vkA9vayVHs65eM1ubvY4h553ofrNHObY= +cloud.google.com/go v0.72.0/go.mod h1:M+5Vjvlc2wnp6tjzE102Dw08nGShTscUx2nZMufOKPI= +cloud.google.com/go v0.74.0/go.mod h1:VV1xSbzvo+9QJOxLDaJfTjx5e+MePCpCWwvftOeQmWk= +cloud.google.com/go v0.75.0/go.mod h1:VGuuCn7PG0dwsd5XPVm2Mm3wlh3EL55/79EKB6hlPTY= +cloud.google.com/go/bigquery v1.0.1/go.mod h1:i/xbL2UlR5RvWAURpBYZTtm/cXjCha9lbfbpx4poX+o= +cloud.google.com/go/bigquery v1.3.0/go.mod h1:PjpwJnslEMmckchkHFfq+HTD2DmtT67aNFKH1/VBDHE= +cloud.google.com/go/bigquery v1.4.0/go.mod h1:S8dzgnTigyfTmLBfrtrhyYhwRxG72rYxvftPBK2Dvzc= +cloud.google.com/go/bigquery v1.5.0/go.mod h1:snEHRnqQbz117VIFhE8bmtwIDY80NLUZUMb4Nv6dBIg= +cloud.google.com/go/bigquery v1.7.0/go.mod h1://okPTzCYNXSlb24MZs83e2Do+h+VXtc4gLoIoXIAPc= +cloud.google.com/go/bigquery v1.8.0/go.mod h1:J5hqkt3O0uAFnINi6JXValWIb1v0goeZM77hZzJN/fQ= +cloud.google.com/go/datastore v1.0.0/go.mod h1:LXYbyblFSglQ5pkeyhO+Qmw7ukd3C+pD7TKLgZqpHYE= +cloud.google.com/go/datastore v1.1.0/go.mod h1:umbIZjpQpHh4hmRpGhH4tLFup+FVzqBi1b3c64qFpCk= +cloud.google.com/go/pubsub v1.0.1/go.mod h1:R0Gpsv3s54REJCy4fxDixWD93lHJMoZTyQ2kNxGRt3I= +cloud.google.com/go/pubsub v1.1.0/go.mod h1:EwwdRX2sKPjnvnqCa270oGRyludottCI76h+R3AArQw= +cloud.google.com/go/pubsub v1.2.0/go.mod h1:jhfEVHT8odbXTkndysNHCcx0awwzvfOlguIAii9o8iA= +cloud.google.com/go/pubsub v1.3.1/go.mod h1:i+ucay31+CNRpDW4Lu78I4xXG+O1r/MAHgjpRVR+TSU= +cloud.google.com/go/storage v1.0.0/go.mod h1:IhtSnM/ZTZV8YYJWCY8RULGVqBDmpoyjwiyrjsg+URw= +cloud.google.com/go/storage v1.5.0/go.mod h1:tpKbwo567HUNpVclU5sGELwQWBDZ8gh0ZeosJ0Rtdos= +cloud.google.com/go/storage v1.6.0/go.mod h1:N7U0C8pVQ/+NIKOBQyamJIeKQKkZ+mxpohlUTyfDhBk= +cloud.google.com/go/storage v1.8.0/go.mod h1:Wv1Oy7z6Yz3DshWRJFhqM/UCfaWIRTdp0RXyy7KQOVs= +cloud.google.com/go/storage v1.10.0/go.mod h1:FLPqc6j+Ki4BU591ie1oL6qBQGu2Bl/tZ9ullr3+Kg0= +cloud.google.com/go/storage v1.14.0/go.mod h1:GrKmX003DSIwi9o29oFT7YDnHYwZoctc3fOKtUw0Xmo= +dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU= +github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= +github.com/BurntSushi/toml v1.0.0/go.mod h1:CxXYINrC8qIiEnFrOxCa7Jy5BFHlXnUU2pbicEuybxQ= +github.com/BurntSushi/toml v1.1.0 h1:ksErzDEI1khOiGPgpwuI7x2ebx/uXQNw7xJpn9Eq1+I= +github.com/BurntSushi/toml v1.1.0/go.mod h1:CxXYINrC8qIiEnFrOxCa7Jy5BFHlXnUU2pbicEuybxQ= +github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo= +github.com/DATA-DOG/go-sqlmock v1.5.0 h1:Shsta01QNfFxHCfpW6YH2STWB0MudeXXEWMr20OEh60= +github.com/DATA-DOG/go-sqlmock v1.5.0/go.mod h1:f/Ixk793poVmq4qj/V1dPUg2JEAKC73Q5eFN3EC/SaM= +github.com/DataDog/datadog-go v3.2.0+incompatible/go.mod h1:LButxg5PwREeZtORoXG3tL4fMGNddJ+vMq1mwgfaqoQ= +github.com/KyleBanks/depth v1.2.1 h1:5h8fQADFrWtarTdtDudMmGsC7GPbOAu6RVB3ffsVFHc= +github.com/KyleBanks/depth v1.2.1/go.mod h1:jzSb9d0L43HxTQfT+oSA1EEp2q+ne2uh6XgeJcm8brE= +github.com/PuerkitoBio/purell v1.1.1 h1:WEQqlqaGbrPkxLJWfBwQmfEAE1Z7ONdDLqrN38tNFfI= +github.com/PuerkitoBio/purell v1.1.1/go.mod h1:c11w/QuzBsJSee3cPx9rAFu61PvFxuPbtSwDGJws/X0= +github.com/PuerkitoBio/urlesc 
v0.0.0-20170810143723-de5bf2ad4578 h1:d+Bc7a5rLufV/sSk/8dngufqelfh6jnri85riMAaF/M= +github.com/PuerkitoBio/urlesc v0.0.0-20170810143723-de5bf2ad4578/go.mod h1:uGdkoq3SwY9Y+13GIhn11/XLaGBb4BfwItxLd5jeuXE= +github.com/agiledragon/gomonkey/v2 v2.3.1/go.mod h1:ap1AmDzcVOAz1YpeJ3TCzIgstoaWLA6jbbgxfB4w2iY= +github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= +github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= +github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= +github.com/alecthomas/units v0.0.0-20190717042225-c3de453c63f4/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= +github.com/alecthomas/units v0.0.0-20190924025748-f65c72e2690d/go.mod h1:rBZYJk541a8SKzHPHnH3zbiI+7dagKZ0cgpgrD7Fyho= +github.com/alibabacloud-go/debug v0.0.0-20190504072949-9472017b5c68 h1:NqugFkGxx1TXSh/pBcU00Y6bljgDPaFdh5MUSeJ7e50= +github.com/alibabacloud-go/debug v0.0.0-20190504072949-9472017b5c68/go.mod h1:6pb/Qy8c+lqua8cFpEy7g39NRRqOWc3rOwAy8m5Y2BY= +github.com/alibabacloud-go/tea v1.1.0/go.mod h1:IkGyUSX4Ba1V+k4pCtJUc6jDpZLFph9QMy2VUPTwukg= +github.com/alibabacloud-go/tea v1.1.17 h1:05R5DnaJXe9sCNIe8KUgWHC/z6w/VZIwczgUwzRnul8= +github.com/alibabacloud-go/tea v1.1.17/go.mod h1:nXxjm6CIFkBhwW4FQkNrolwbfon8Svy6cujmKFUq98A= +github.com/alibabacloud-go/tea-utils v1.4.4 h1:lxCDvNCdTo9FaXKKq45+4vGETQUKNOW/qKTcX9Sk53o= +github.com/alibabacloud-go/tea-utils v1.4.4/go.mod h1:KNcT0oXlZZxOXINnZBs6YvgOd5aYp9U67G+E3R8fcQw= +github.com/alicebob/gopher-json v0.0.0-20200520072559-a9ecdc9d1d3a h1:HbKu58rmZpUGpz5+4FfNmIU+FmZg2P3Xaj2v2bfNWmk= +github.com/alicebob/gopher-json v0.0.0-20200520072559-a9ecdc9d1d3a/go.mod h1:SGnFV6hVsYE877CKEZ6tDNTjaSXYUk6QqoIK6PrAtcc= +github.com/alicebob/miniredis/v2 v2.23.0 h1:+lwAJYjvvdIVg6doFHuotFjueJ/7KY10xo/vm3X3Scw= +github.com/alicebob/miniredis/v2 v2.23.0/go.mod h1:XNqvJdQJv5mSuVMc0ynneafpnL/zv52acZ6kqeS0t88= +github.com/aliyun/alibaba-cloud-sdk-go v1.61.1800 h1:ie/8RxBOfKZWcrbYSJi2Z8uX8TcOlSMwPlEJh83OeOw= +github.com/aliyun/alibaba-cloud-sdk-go v1.61.1800/go.mod h1:RcDobYh8k5VP6TNybz9m++gL3ijVI5wueVr0EM10VsU= +github.com/aliyun/alibabacloud-dkms-gcs-go-sdk v0.2.2 h1:rWkH6D2XlXb/Y+tNAQROxBzp3a0p92ni+pXcaHBe/WI= +github.com/aliyun/alibabacloud-dkms-gcs-go-sdk v0.2.2/go.mod h1:GDtq+Kw+v0fO+j5BrrWiUHbBq7L+hfpzpPfXKOZMFE0= +github.com/aliyun/alibabacloud-dkms-transfer-go-sdk v0.1.7 h1:olLiPI2iM8Hqq6vKnSxpM3awCrm9/BeOgHpzQkOYnI4= +github.com/aliyun/alibabacloud-dkms-transfer-go-sdk v0.1.7/go.mod h1:oDg1j4kFxnhgftaiLJABkGeSvuEvSF5Lo6UmRAMruX4= +github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY= +github.com/armon/circbuf v0.0.0-20150827004946-bbbad097214e/go.mod h1:3U/XgcO3hCbHZ8TKRvWD2dDTCfh9M9ya+I9JpbB7O8o= +github.com/armon/go-metrics v0.0.0-20180917152333-f0300d1749da/go.mod h1:Q73ZrmVTwzkszR9V5SSuryQ31EELlFMUz1kKyl939pY= +github.com/armon/go-metrics v0.3.10 h1:FR+drcQStOe+32sYyJYyZ7FIdgoGGBnwLl+flodp8Uo= +github.com/armon/go-metrics v0.3.10/go.mod h1:4O98XIr/9W0sxpJ8UaYkvjk10Iff7SnFrb4QAOwNTFc= +github.com/armon/go-radix v0.0.0-20180808171621-7fddfc383310/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8= +github.com/armon/go-radix v1.0.0/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8= +github.com/benbjohnson/clock v1.1.0 h1:Q92kusRqC1XV2MjkWETPvjJVqKetz1OzxZB7mHJLju8= +github.com/benbjohnson/clock v1.1.0/go.mod 
h1:J11/hYXuz8f4ySSvYwY0FKfm+ezbsZBKZxNJlLklBHA= +github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q= +github.com/beorn7/perks v1.0.0/go.mod h1:KWe93zE9D1o94FZ5RNwFwVgaQK1VOXiVxmqh+CedLV8= +github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM= +github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw= +github.com/bgentry/speakeasy v0.1.0/go.mod h1:+zsyZBPWlz7T6j88CTgSN5bM796AkVf0kBD4zp0CCIs= +github.com/bsm/ginkgo/v2 v2.12.0 h1:Ny8MWAHyOepLGlLKYmXG4IEkioBysk6GpaRTLC8zwWs= +github.com/bsm/ginkgo/v2 v2.12.0/go.mod h1:SwYbGRRDovPVboqFv0tPTcG1sN61LM1Z4ARdbAV9g4c= +github.com/bsm/gomega v1.27.10 h1:yeMWxP2pV2fG3FgAODIY8EiRE3dy0aeFYt4l7wh6yKA= +github.com/bsm/gomega v1.27.10/go.mod h1:JyEr/xRbxbtgWNi8tIEVPUYZ5Dzef52k01W3YH0H+O0= +github.com/buger/jsonparser v1.1.1 h1:2PnMjfWD7wBILjqQbt530v576A/cAbQvEW9gGIpYMUs= +github.com/buger/jsonparser v1.1.1/go.mod h1:6RYKKt7H4d4+iWqouImQ9R2FZql3VbhNgx27UK13J/0= +github.com/bytedance/sonic v1.5.0/go.mod h1:ED5hyg4y6t3/9Ku1R6dU/4KyJ48DZ4jPhfY1O2AihPM= +github.com/bytedance/sonic v1.9.1 h1:6iJ6NqdoxCDr6mbY8h18oSO+cShGSMRGCEo7F2h0x8s= +github.com/bytedance/sonic v1.9.1/go.mod h1:i736AoUSYt75HyZLoJW9ERYxcy6eaN6h4BZXU064P/U= +github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= +github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= +github.com/cespare/xxhash/v2 v2.1.2/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= +github.com/cespare/xxhash/v2 v2.2.0 h1:DC2CZ1Ep5Y4k3ZQ899DldepgrayRUGE6BBZ/cd9Cj44= +github.com/cespare/xxhash/v2 v2.2.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= +github.com/chenzhuoyu/base64x v0.0.0-20211019084208-fb5309c8db06/go.mod h1:DH46F32mSOjUmXrMHnKwZdA8wcEefY7UVqBKYGjpdQY= +github.com/chenzhuoyu/base64x v0.0.0-20221115062448-fe3a3abad311 h1:qSGYFH7+jGhDF8vLC+iwCD4WpbV1EBDSzWkJODFLams= +github.com/chenzhuoyu/base64x v0.0.0-20221115062448-fe3a3abad311/go.mod h1:b583jCggY9gE99b6G5LEC39OIiVsWj+R97kbl5odCEk= +github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI= +github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI= +github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU= +github.com/circonus-labs/circonus-gometrics v2.3.1+incompatible/go.mod h1:nmEj6Dob7S7YxXgwXpfOuvO54S+tGdZdw9fuRZt25Ag= +github.com/circonus-labs/circonusllhist v0.1.3/go.mod h1:kMXHVDlOchFAehlya5ePtbp5jckzBHf4XRpQvBOLI+I= +github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw= +github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc= +github.com/cncf/udpa/go v0.0.0-20200629203442-efcf912fb354/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= +github.com/cncf/udpa/go v0.0.0-20201120205902-5459f2c99403/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= +github.com/coreos/go-semver v0.3.0 h1:wkHLiw0WNATZnSG7epLsujiMCgPAc9xhjJ4tgnAxmfM= +github.com/coreos/go-semver v0.3.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk= +github.com/coreos/go-systemd/v22 v22.3.2 h1:D9/bQk5vlXQFZ6Kwuu6zaiXJ9oTPe68++AzAJc1DzSI= +github.com/coreos/go-systemd/v22 v22.3.2/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSVTIJ3seZv2GcEnc= +github.com/cpuguy83/go-md2man/v2 v2.0.0-20190314233015-f79a8a8ca69d/go.mod 
h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU= +github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= +github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= +github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/dgraph-io/ristretto v0.1.1 h1:6CWw5tJNgpegArSHpNHJKldNeq03FQCwYvfMVWajOK8= +github.com/dgraph-io/ristretto v0.1.1/go.mod h1:S1GPSBCYCIhmVNfcth17y2zZtQT6wzkzgwUve0VDWWA= +github.com/dgryski/go-farm v0.0.0-20190423205320-6a90982ecee2 h1:tdlZCpZ/P9DhczCTSixgIKmwPv6+wP5DGjqLYw5SUiA= +github.com/dgryski/go-farm v0.0.0-20190423205320-6a90982ecee2/go.mod h1:SqUrOPUnsFjfmXRMNPybcSiG0BgUW2AuFH8PAnS2iTw= +github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f h1:lO4WD4F/rVNCu3HqELle0jiPLLBs70cWOduZpkS1E78= +github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f/go.mod h1:cuUVRXasLTGF7a8hSLbxyZXjz+1KgoB3wDUb6vlszIc= +github.com/dtm-labs/dtmcli v1.15.0 h1:kqiV47I9Am/0QuzX11cGTPp/Z2AcOaRDXU7E7SZWYNk= +github.com/dtm-labs/dtmcli v1.15.0/go.mod h1:otEdxUwM5FIKlCLmg5hL5+Z539DmCK2YJ0G4WNsP6Tw= +github.com/dtm-labs/dtmdriver v0.0.3 h1:9iAtvXKR3lJXQ7dvS87e4xdtmqkzN+ofek+CF9AvUSY= +github.com/dtm-labs/dtmdriver v0.0.3/go.mod h1:fLiEeD2BPwM9Yq96TfcP9KpbTwFsn5nTxa/PP0jmFuk= +github.com/dtm-labs/rockscache v0.1.1 h1:6S1vgaHvGqrLd8Ka4hRTKeKPV7v+tT0MSkTIX81LRyA= +github.com/dtm-labs/rockscache v0.1.1/go.mod h1:c76WX0kyIibmQ2ACxUXvDvaLykoPakivMqIxt+UzE7A= +github.com/dustin/go-humanize v1.0.0/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk= +github.com/dustin/go-humanize v1.0.1 h1:GzkhY7T5VNhEkwH0PVJgjz+fX1rhBrR7pRT3mDkpeCY= +github.com/dustin/go-humanize v1.0.1/go.mod h1:Mu1zIs6XwVuF/gI1OepvI0qD18qycQx+mFykh5fBlto= +github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= +github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= +github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98= +github.com/envoyproxy/go-control-plane v0.9.7/go.mod h1:cwu0lG7PUMfa9snN8LXBig5ynNVH9qI8YYLbd1fK2po= +github.com/envoyproxy/go-control-plane v0.9.9-0.20201210154907-fd9021fe5dad/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk= +github.com/envoyproxy/go-control-plane v0.9.9-0.20210217033140-668b12f5399d/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk= +github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= +github.com/fatih/color v1.7.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5KwzbycvMj4= +github.com/fatih/color v1.9.0/go.mod h1:eQcE1qtQxscV5RaZvpXrrb8Drkc3/DdQ+uUYCNjL+zU= +github.com/fatih/color v1.13.0 h1:8LOYc1KYPPmyKMuN8QV2DNRWNbLo6LZ0iLs8+mlH53w= +github.com/fatih/color v1.13.0/go.mod h1:kLAiJbzzSOZDVNGyDpeOxJ47H46qBXwg5ILebYFFOfk= +github.com/felixge/fgprof v0.9.3 h1:VvyZxILNuCiUCSXtPtYmmtGvb65nqXh2QFWc0Wpf2/g= +github.com/felixge/fgprof v0.9.3/go.mod h1:RdbpDgzqYVh/T9fPELJyV7EYJuHB55UTEULNun8eiPw= +github.com/frankban/quicktest v1.14.3 h1:FJKSZTDHjyhriyC81FLQ0LY93eSai0ZyR/ZIkd3ZUKE= +github.com/frankban/quicktest v1.14.3/go.mod h1:mgiwOwqx65TmIk1wJ6Q7wvnVMocbUorkibMOrVTHZps= +github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo= +github.com/fsnotify/fsnotify v1.4.9/go.mod h1:znqG4EE+3YCdAaPaxE2ZRY/06pZUdp0tY4IgpuI1SZQ= 
+github.com/fsnotify/fsnotify v1.5.4 h1:jRbGcIw6P2Meqdwuo0H1p6JVLbL5DHKAKlYndzMwVZI= +github.com/fsnotify/fsnotify v1.5.4/go.mod h1:OVB6XrOHzAwXMpEM7uPOzcehqUV2UqJxmVXmkdnm1bU= +github.com/gabriel-vasile/mimetype v1.4.2 h1:w5qFW6JKBz9Y393Y4q372O9A7cUSequkh1Q7OhCmWKU= +github.com/gabriel-vasile/mimetype v1.4.2/go.mod h1:zApsH/mKG4w07erKIaJPFiX0Tsq9BFQgN3qGY5GnNgA= +github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04= +github.com/gin-contrib/cors v1.3.1 h1:doAsuITavI4IOcd0Y19U4B+O0dNWihRyX//nn4sEmgA= +github.com/gin-contrib/cors v1.3.1/go.mod h1:jjEJ4268OPZUcU7k9Pm653S7lXUGcqMADzFA61xsmDk= +github.com/gin-contrib/gzip v0.0.6 h1:NjcunTcGAj5CO1gn4N8jHOSIeRFHIbn51z6K+xaN4d4= +github.com/gin-contrib/gzip v0.0.6/go.mod h1:QOJlmV2xmayAjkNS2Y8NQsMneuRShOU/kjovCXNuzzk= +github.com/gin-contrib/sse v0.1.0 h1:Y/yl/+YNO8GZSjAhjMsSuLt29uWRFHdHYUb5lYOV9qE= +github.com/gin-contrib/sse v0.1.0/go.mod h1:RHrZQHXnP2xjPF+u1gW/2HnVO7nvIa9PG3Gm+fLHvGI= +github.com/gin-gonic/gin v1.5.0/go.mod h1:Nd6IXA8m5kNZdNEHMBd93KT+mdY3+bewLgRvmCsR2Do= +github.com/gin-gonic/gin v1.8.1/go.mod h1:ji8BvRH1azfM+SYow9zQ6SZMvR8qOMZHmsCuWR9tTTk= +github.com/gin-gonic/gin v1.9.1 h1:4idEAncQnU5cB7BeOkPtxjfCSye0AAm1R0RVIqJ+Jmg= +github.com/gin-gonic/gin v1.9.1/go.mod h1:hPrL7YrpYKXt5YId3A/Tnip5kqbEAP+KLuI3SUcPTeU= +github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU= +github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= +github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= +github.com/go-kit/kit v0.8.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as= +github.com/go-kit/kit v0.9.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as= +github.com/go-kit/log v0.1.0/go.mod h1:zbhenjAZHb184qTLMA9ZjW7ThYL0H2mk7Q6pNt4vbaY= +github.com/go-kit/log v0.2.0/go.mod h1:NwTd00d/i8cPZ3xOwwiv2PO5MOcx78fFErGNcVmBjv0= +github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9GBnD5lWE= +github.com/go-logfmt/logfmt v0.4.0/go.mod h1:3RMwSq7FuexP4Kalkev3ejPJsZTpXXBr9+V4qmtdjCk= +github.com/go-logfmt/logfmt v0.5.0/go.mod h1:wCYkCAKZfumFQihp8CzCvQ3paCTfi41vtzG1KdI/P7A= +github.com/go-logfmt/logfmt v0.5.1/go.mod h1:WYhtIu8zTZfxdn5+rREduYbwxfcBr/Vr6KEVveWlfTs= +github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= +github.com/go-logr/logr v1.4.1 h1:pKouT5E8xu9zeFC39JXRDukb6JFQPXM5p5I91188VAQ= +github.com/go-logr/logr v1.4.1/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY= +github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag= +github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE= +github.com/go-ole/go-ole v1.2.6 h1:/Fpf6oFPoeFik9ty7siob0G6Ke8QvQEuVcuChpwXzpY= +github.com/go-ole/go-ole v1.2.6/go.mod h1:pprOEPIfldk/42T2oK7lQ4v4JSDwmV0As9GaiUsvbm0= +github.com/go-openapi/jsonpointer v0.19.3/go.mod h1:Pl9vOtqEWErmShwVjC8pYs9cog34VGT37dQOVbmoatg= +github.com/go-openapi/jsonpointer v0.19.5 h1:gZr+CIYByUqjcgeLXnQu2gHYQC9o73G2XUeOFYEICuY= +github.com/go-openapi/jsonpointer v0.19.5/go.mod h1:Pl9vOtqEWErmShwVjC8pYs9cog34VGT37dQOVbmoatg= +github.com/go-openapi/jsonreference v0.19.6 h1:UBIxjkht+AWIgYzCDSv2GN+E/togfwXUJFRTWhl2Jjs= +github.com/go-openapi/jsonreference v0.19.6/go.mod h1:diGHMEHg2IqXZGKxqyvWdfWU/aim5Dprw5bqpKkTvns= +github.com/go-openapi/spec v0.20.4 h1:O8hJrt0UMnhHcluhIdUgCLRWyM2x7QkBXRvOs7m+O1M= 
+github.com/go-openapi/spec v0.20.4/go.mod h1:faYFR1CvsJZ0mNsmsphTMSoRrNV3TEDoAM7FOEWeq8I= +github.com/go-openapi/swag v0.19.5/go.mod h1:POnQmlKehdgb5mhVOsnJFsivZCEZ/vjK9gh66Z9tfKk= +github.com/go-openapi/swag v0.19.15 h1:D2NRCBzS9/pEY3gP9Nl8aDqGUcPFrwG2p+CNFrLyrCM= +github.com/go-openapi/swag v0.19.15/go.mod h1:QYRuS/SOXUCsnplDa677K7+DxSOj6IPNl/eQntq43wQ= +github.com/go-playground/assert/v2 v2.0.1/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4= +github.com/go-playground/assert/v2 v2.2.0 h1:JvknZsQTYeFEAhQwI4qEt9cyV5ONwRHC+lYKSsYSR8s= +github.com/go-playground/assert/v2 v2.2.0/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4= +github.com/go-playground/locales v0.12.1/go.mod h1:IUMDtCfWo/w/mtMfIE/IG2K+Ey3ygWanZIBtBW0W2TM= +github.com/go-playground/locales v0.14.0/go.mod h1:sawfccIbzZTqEDETgFXqTho0QybSa7l++s0DH+LDiLs= +github.com/go-playground/locales v0.14.1 h1:EWaQ/wswjilfKLTECiXz7Rh+3BjFhfDFKv/oXslEjJA= +github.com/go-playground/locales v0.14.1/go.mod h1:hxrqLVvrK65+Rwrd5Fc6F2O76J/NuW9t0sjnWqG1slY= +github.com/go-playground/universal-translator v0.16.0/go.mod h1:1AnU7NaIRDWWzGEKwgtJRd2xk99HeFyHw3yid4rvQIY= +github.com/go-playground/universal-translator v0.18.0/go.mod h1:UvRDBj+xPUEGrFYl+lu/H90nyDXpg0fqeB/AQUGNTVA= +github.com/go-playground/universal-translator v0.18.1 h1:Bcnm0ZwsGyWbCzImXv+pAJnYK9S473LQFuzCbDbfSFY= +github.com/go-playground/universal-translator v0.18.1/go.mod h1:xekY+UJKNuX9WP91TpwSH2VMlDf28Uj24BCp08ZFTUY= +github.com/go-playground/validator/v10 v10.10.0/go.mod h1:74x4gJWsvQexRdW8Pn3dXSGrTK4nAUsbPlLADvpJkos= +github.com/go-playground/validator/v10 v10.14.0 h1:vgvQWe3XCz3gIeFDm/HnTIbj6UGmg/+t63MyGU2n5js= +github.com/go-playground/validator/v10 v10.14.0/go.mod h1:9iXMNT7sEkjXb0I+enO7QXmzG6QCsPWY4zveKFVRSyU= +github.com/go-redis/redis/v8 v8.11.4 h1:kHoYkfZP6+pe04aFTnhDH6GDROa5yJdHJVNxV3F46Tg= +github.com/go-redis/redis/v8 v8.11.4/go.mod h1:2Z2wHZXdQpCDXEGzqMockDpNyYvi2l4Pxt6RJr792+w= +github.com/go-resty/resty/v2 v2.6.0 h1:joIR5PNLM2EFqqESUjCMGXrWmXNHEU9CEiK813oKYS4= +github.com/go-resty/resty/v2 v2.6.0/go.mod h1:PwvJS6hvaPkjtjNg9ph+VrSD92bi5Zq73w/BIH7cC3Q= +github.com/go-sql-driver/mysql v1.6.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg= +github.com/go-sql-driver/mysql v1.7.0 h1:ueSltNNllEqE3qcWBTD0iQd3IpL/6U+mJxLkazJ7YPc= +github.com/go-sql-driver/mysql v1.7.0/go.mod h1:OXbVy3sEdcQ2Doequ6Z5BW6fXNQTmx+9S1MCJN5yJMI= +github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY= +github.com/go-task/slim-sprig v0.0.0-20210107165309-348f09dbbbc0/go.mod h1:fyg7847qk6SyHyPtNmDHnmrv/HOrqktSC+C9fM+CJOE= +github.com/goccy/go-json v0.9.7/go.mod h1:6MelG93GURQebXPDq3khkgXZkazVtN9CRI+MGFi0w8I= +github.com/goccy/go-json v0.10.2 h1:CrxCmQqYDkv1z7lO7Wbh2HN93uovUHgrECaO5ZrCXAU= +github.com/goccy/go-json v0.10.2/go.mod h1:6MelG93GURQebXPDq3khkgXZkazVtN9CRI+MGFi0w8I= +github.com/godbus/dbus/v5 v5.0.4/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA= +github.com/gogo/protobuf v1.1.1/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ= +github.com/gogo/protobuf v1.3.2 h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q= +github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q= +github.com/goji/httpauth v0.0.0-20160601135302-2da839ab0f4d/go.mod h1:nnjvkQ9ptGaCkuDUx6wNykzzlUixGxvkme+H/lnzb+A= +github.com/golang-jwt/jwt/v5 v5.0.0 h1:1n1XNM9hk7O9mnQoNBGolZvzebBQ7p93ULHRc28XJUE= +github.com/golang-jwt/jwt/v5 v5.0.0/go.mod h1:pqrtFR0X4osieyHYxtmOUWsAWrfe1Q5UVIyoH402zdk= +github.com/golang/glog 
v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= +github.com/golang/glog v1.1.2 h1:DVjP2PbBOzHyzA+dn3WhHIq4NdVu3Q+pvivFICf/7fo= +github.com/golang/glog v1.1.2/go.mod h1:zR+okUeTbrL6EL3xHUDxZuEtGv04p5shwip1+mL/rLQ= +github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/groupcache v0.0.0-20191227052852-215e87163ea7/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= +github.com/golang/mock v1.2.0/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= +github.com/golang/mock v1.3.1/go.mod h1:sBzyDLLjw3U8JLTeZvSv8jJB+tU5PVekmnlKIyFUx0Y= +github.com/golang/mock v1.4.0/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= +github.com/golang/mock v1.4.1/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= +github.com/golang/mock v1.4.3/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= +github.com/golang/mock v1.4.4/go.mod h1:l3mdAwkq5BuhzHwde/uurv3sEJeZMXNpwsxVWU71h+4= +github.com/golang/mock v1.6.0 h1:ErTB+efbowRARo13NNdxyJji2egdxLGQhRaY+DUumQc= +github.com/golang/mock v1.6.0/go.mod h1:p6yTPP+5HYm5mzsMV8JkE6ZKdX+/wYM6Hr+LicevLPs= +github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.3.3/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw= +github.com/golang/protobuf v1.3.4/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw= +github.com/golang/protobuf v1.3.5/go.mod h1:6O5/vntMXwX2lRkT1hjjk0nAC1IDOTvTlVgjlRvqsdk= +github.com/golang/protobuf v1.4.0-rc.1/go.mod h1:ceaxUfeHdC40wWswd/P6IGgMaK3YpKi5j83Wpe3EHw8= +github.com/golang/protobuf v1.4.0-rc.1.0.20200221234624-67d41d38c208/go.mod h1:xKAWHe0F5eneWXFV3EuXVDTCmh+JuBKY0li0aMyXATA= +github.com/golang/protobuf v1.4.0-rc.2/go.mod h1:LlEzMj4AhA7rCAGe4KMBDvJI+AwstrUpVNzEA03Pprs= +github.com/golang/protobuf v1.4.0-rc.4.0.20200313231945-b860323f09d0/go.mod h1:WU3c8KckQ9AFe+yFwt9sWVRKCVIyN9cPHBJSNnbL67w= +github.com/golang/protobuf v1.4.0/go.mod h1:jodUvKwWbYaEsadDk5Fwe5c77LiNKVO9IDvqG2KuDX0= +github.com/golang/protobuf v1.4.1/go.mod h1:U8fpvMrcmy5pZrNK1lt4xCsGvpyWQ/VVv6QDs8UjoX8= +github.com/golang/protobuf v1.4.2/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= +github.com/golang/protobuf v1.4.3/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= +github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk= +github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= +github.com/golang/protobuf v1.5.4 h1:i7eJL8qZTpSEXOPTxNKhASYpMn+8e5Q6AdndVa1dWek= +github.com/golang/protobuf v1.5.4/go.mod h1:lnTiLA8Wa4RWRcIUkrtSVa5nRhsEGBg48fD6rSs7xps= +github.com/golang/snappy v0.0.1/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= +github.com/golang/snappy v0.0.4 h1:yAGX7huGHXlcLOEtBnF4w7FQwA26wojNCwOYAEhLjQM= +github.com/golang/snappy v0.0.4/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= +github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= +github.com/google/btree v1.0.0 h1:0udJVsspx3VBr5FwtLhQQtuAsVc79tTq0ocGIPAU6qo= +github.com/google/btree 
v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= +github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= +github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= +github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= +github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.4.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.4/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= +github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI= +github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= +github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= +github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs= +github.com/google/martian/v3 v3.0.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0= +github.com/google/martian/v3 v3.1.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0= +github.com/google/pprof v0.0.0-20181206194817-3ea8567a2e57/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= +github.com/google/pprof v0.0.0-20190515194954-54271f7e092f/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= +github.com/google/pprof v0.0.0-20191218002539-d4f498aebedc/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20200212024743-f11f1df84d12/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20200229191704-1ebb73c60ed3/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20200430221834-fc25d7d30c6d/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20200708004538-1a94d8640e99/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20201023163331-3e6fc7fc9c4c/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= +github.com/google/pprof v0.0.0-20201203190320-1bf35d6f28c2/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= +github.com/google/pprof v0.0.0-20201218002935-b9804c9f04c2/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= +github.com/google/pprof v0.0.0-20211214055906-6f57359322fd h1:1FjCyPC+syAzJ5/2S8fqdZK1R22vvA0J7JZKcuOIQ7Y= +github.com/google/pprof v0.0.0-20211214055906-6f57359322fd/go.mod h1:KgnwoLYCZ8IQu3XUZ8Nc/bM9CCZFOyjUNOSygVozoDg= +github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI= +github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/google/uuid v1.4.0 h1:MtMxsa51/r9yyhkyLsVeVt0B+BGQZzpQiTQ4eHZ8bc4= +github.com/google/uuid v1.4.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg= +github.com/googleapis/gax-go/v2 v2.0.5/go.mod 
h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk= +github.com/googleapis/google-cloud-go-testing v0.0.0-20200911160855-bcd43fbb19e8/go.mod h1:dvDLG8qkwmyD9a/MJJN3XJcT3xFxOKAvTZGvuZmac9g= +github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY= +github.com/grpc-ecosystem/go-grpc-prometheus v1.2.0/go.mod h1:8NvIoxWQoOIhqOTXgfV/d3M/q6VIi02HzZEHgUlZvzk= +github.com/grpc-ecosystem/grpc-gateway v1.16.0/go.mod h1:BDjrQk3hbvj6Nolgz8mAMFbcEtjT1g+wF4CSlocrBnw= +github.com/hashicorp/consul/api v1.12.0 h1:k3y1FYv6nuKyNTqj6w9gXOx5r5CfLj/k/euUeBXj1OY= +github.com/hashicorp/consul/api v1.12.0/go.mod h1:6pVBMo0ebnYdt2S3H87XhekM/HHrUoTD2XXb/VrZVy0= +github.com/hashicorp/consul/sdk v0.8.0 h1:OJtKBtEjboEZvG6AOUdh4Z1Zbyu0WcxQ0qatRrZHTVU= +github.com/hashicorp/consul/sdk v0.8.0/go.mod h1:GBvyrGALthsZObzUGsfgHZQDXjg4lOjagTIwIR1vPms= +github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= +github.com/hashicorp/errwrap v1.1.0 h1:OxrOeh75EUXMY8TBjag2fzXGZ40LB6IKw45YeGUDY2I= +github.com/hashicorp/errwrap v1.1.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= +github.com/hashicorp/go-cleanhttp v0.5.0/go.mod h1:JpRdi6/HCYpAwUzNwuwqhbovhLtngrth3wmdIIUrZ80= +github.com/hashicorp/go-cleanhttp v0.5.1/go.mod h1:JpRdi6/HCYpAwUzNwuwqhbovhLtngrth3wmdIIUrZ80= +github.com/hashicorp/go-cleanhttp v0.5.2 h1:035FKYIWjmULyFRBKPs8TBQoi0x6d9G4xc9neXJWAZQ= +github.com/hashicorp/go-cleanhttp v0.5.2/go.mod h1:kO/YDlP8L1346E6Sodw+PrpBSV4/SoxCXGY6BqNFT48= +github.com/hashicorp/go-hclog v0.12.0/go.mod h1:whpDNt7SSdeAju8AWKIWsul05p54N/39EeqMAyrmvFQ= +github.com/hashicorp/go-hclog v1.2.0 h1:La19f8d7WIlm4ogzNHB0JGqs5AUDAZ2UfCY4sJXcJdM= +github.com/hashicorp/go-hclog v1.2.0/go.mod h1:whpDNt7SSdeAju8AWKIWsul05p54N/39EeqMAyrmvFQ= +github.com/hashicorp/go-immutable-radix v1.0.0/go.mod h1:0y9vanUI8NX6FsYoO3zeMjhV/C5i9g4Q3DwcSNZ4P60= +github.com/hashicorp/go-immutable-radix v1.3.1 h1:DKHmCUm2hRBK510BaiZlwvpD40f8bJFeZnpfm2KLowc= +github.com/hashicorp/go-immutable-radix v1.3.1/go.mod h1:0y9vanUI8NX6FsYoO3zeMjhV/C5i9g4Q3DwcSNZ4P60= +github.com/hashicorp/go-msgpack v0.5.3 h1:zKjpN5BK/P5lMYrLmBHdBULWbJ0XpYR+7NGzqkZzoD4= +github.com/hashicorp/go-msgpack v0.5.3/go.mod h1:ahLV/dePpqEmjfWmKiqvPkv/twdG7iPBM1vqhUKIvfM= +github.com/hashicorp/go-multierror v1.0.0/go.mod h1:dHtQlpGsu+cZNNAkkCN/P3hoUDHhCYQXV3UM06sGGrk= +github.com/hashicorp/go-multierror v1.1.0/go.mod h1:spPvp8C1qA32ftKqdAHm4hHTbPw+vmowP0z+KUhOZdA= +github.com/hashicorp/go-multierror v1.1.1 h1:H5DkEtf6CXdFp0N0Em5UCwQpXMWke8IA0+lD48awMYo= +github.com/hashicorp/go-multierror v1.1.1/go.mod h1:iw975J/qwKPdAO1clOe2L8331t/9/fmwbPZ6JB6eMoM= +github.com/hashicorp/go-retryablehttp v0.5.3/go.mod h1:9B5zBasrRhHXnJnui7y6sL7es7NDiJgTc6Er0maI1Xs= +github.com/hashicorp/go-rootcerts v1.0.2 h1:jzhAVGtqPKbwpyCPELlgNWhE1znq+qwJtW5Oi2viEzc= +github.com/hashicorp/go-rootcerts v1.0.2/go.mod h1:pqUvnprVnM5bf7AOirdbb01K4ccR319Vf4pU3K5EGc8= +github.com/hashicorp/go-sockaddr v1.0.0 h1:GeH6tui99pF4NJgfnhp+L6+FfobzVW3Ah46sLo0ICXs= +github.com/hashicorp/go-sockaddr v1.0.0/go.mod h1:7Xibr9yA9JjQq1JpNB2Vw7kxv8xerXegt+ozgdvDeDU= +github.com/hashicorp/go-syslog v1.0.0/go.mod h1:qPfqrKkXGihmCqbJM2mZgkZGvKG1dFdvsLplgctolz4= +github.com/hashicorp/go-uuid v1.0.0/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= +github.com/hashicorp/go-uuid v1.0.1/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= +github.com/hashicorp/go-uuid v1.0.3 h1:2gKiV6YVmrJ1i2CKKa9obLvRieoRGviZFL26PcT/Co8= +github.com/hashicorp/go-uuid 
v1.0.3/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= +github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= +github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= +github.com/hashicorp/golang-lru v0.5.4 h1:YDjusn29QI/Das2iO9M0BHnIbxPeyuCHsjMW+lJfyTc= +github.com/hashicorp/golang-lru v0.5.4/go.mod h1:iADmTwqILo4mZ8BN3D2Q6+9jd8WM5uGBxy+E8yxSoD4= +github.com/hashicorp/hcl v1.0.0 h1:0Anlzjpi4vEasTeNFn2mLJgTSwt0+6sfsiTG8qcWGx4= +github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ= +github.com/hashicorp/logutils v1.0.0/go.mod h1:QIAnNjmIWmVIIkWDTG1z5v++HQmx9WQRO+LraFDTW64= +github.com/hashicorp/mdns v1.0.4/go.mod h1:mtBihi+LeNXGtG8L9dX59gAEa12BDtBQSp4v/YAJqrc= +github.com/hashicorp/memberlist v0.3.0 h1:8+567mCcFDnS5ADl7lrpxPMWiFCElyUEeW0gtj34fMA= +github.com/hashicorp/memberlist v0.3.0/go.mod h1:MS2lj3INKhZjWNqd3N0m3J+Jxf3DAOnAH9VT3Sh9MUE= +github.com/hashicorp/serf v0.9.6/go.mod h1:TXZNMjZQijwlDvp+r0b63xZ45H7JmCmgg4gpTwn9UV4= +github.com/hashicorp/serf v0.9.7 h1:hkdgbqizGQHuU5IPqYM1JdSMV8nKfpuOnZYXssk9muY= +github.com/hashicorp/serf v0.9.7/go.mod h1:TXZNMjZQijwlDvp+r0b63xZ45H7JmCmgg4gpTwn9UV4= +github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU= +github.com/huandu/xstrings v1.4.0 h1:D17IlohoQq4UcpqD7fDk80P7l+lwAmlFaBHgOipl2FU= +github.com/huandu/xstrings v1.4.0/go.mod h1:y5/lhBue+AyNmUVz9RLU9xbLR0o4KIIExikq4ovT0aE= +github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= +github.com/ianlancetaylor/demangle v0.0.0-20200824232613-28f6c0f3b639/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= +github.com/ianlancetaylor/demangle v0.0.0-20210905161508-09a460cdf81d/go.mod h1:aYm2/VgdVmcIU8iMfdMvDMsRAQjcfZSKFby6HOFvi/w= +github.com/jackc/pgpassfile v1.0.0 h1:/6Hmqy13Ss2zCq62VdNG8tM1wchn8zjSGOBJ6icpsIM= +github.com/jackc/pgpassfile v1.0.0/go.mod h1:CEx0iS5ambNFdcRtxPj5JhEz+xB6uRky5eyVu/W2HEg= +github.com/jackc/pgservicefile v0.0.0-20221227161230-091c0ba34f0a h1:bbPeKD0xmW/Y25WS6cokEszi5g+S0QxI/d45PkRi7Nk= +github.com/jackc/pgservicefile v0.0.0-20221227161230-091c0ba34f0a/go.mod h1:5TJZWKEWniPve33vlWYSoGYefn3gLQRzjfDlhSJ9ZKM= +github.com/jackc/pgx/v5 v5.6.0 h1:SWJzexBzPL5jb0GEsrPMLIsi/3jOo7RHlzTjcAeDrPY= +github.com/jackc/pgx/v5 v5.6.0/go.mod h1:DNZ/vlrUnhWCoFGxHAG8U2ljioxukquj7utPDgtQdTw= +github.com/jackc/puddle/v2 v2.2.1 h1:RhxXJtFG022u4ibrCSMSiu5aOq1i77R3OHKNJj77OAk= +github.com/jackc/puddle/v2 v2.2.1/go.mod h1:vriiEXHvEE654aYKXXjOvZM39qJ0q+azkZFrfEOc3H4= +github.com/jinzhu/copier v0.3.5 h1:GlvfUwHk62RokgqVNvYsku0TATCF7bAHVwEXoBh3iJg= +github.com/jinzhu/copier v0.3.5/go.mod h1:DfbEm0FYsaqBcKcFuvmOZb218JkPGtvSHsKg8S8hyyg= +github.com/jinzhu/inflection v1.0.0 h1:K317FqzuhWc8YvSVlFMCCUb36O/S9MCKRDI7QkRKD/E= +github.com/jinzhu/inflection v1.0.0/go.mod h1:h+uFLlag+Qp1Va5pdKtLDYj+kHp5pxUVkryuEj+Srlc= +github.com/jinzhu/now v1.1.4/go.mod h1:d3SSVoowX0Lcu0IBviAWJpolVfI5UJVZZ7cO71lE/z8= +github.com/jinzhu/now v1.1.5 h1:/o9tlHleP7gOFmsnYNz3RGnqzefHA47wQpKrrdTIwXQ= +github.com/jinzhu/now v1.1.5/go.mod h1:d3SSVoowX0Lcu0IBviAWJpolVfI5UJVZZ7cO71lE/z8= +github.com/jmespath/go-jmespath v0.0.0-20180206201540-c2b33e8439af h1:pmfjZENx5imkbgOkpRUYLnmbU7UEFbjtDA2hxJ1ichM= +github.com/jmespath/go-jmespath v0.0.0-20180206201540-c2b33e8439af/go.mod h1:Nht3zPeWKUH0NzdCt2Blrr5ys8VGpn0CEB0cQHVjt7k= +github.com/josharian/intern v1.0.0 h1:vlS4z54oSdjm0bgjRigI+G1HpF+tI+9rE5LLzOg8HmY= 
+github.com/josharian/intern v1.0.0/go.mod h1:5DoeVV0s6jJacbCEi61lwdGj/aVlrQvzHFFd8Hwg//Y= +github.com/jpillora/backoff v1.0.0/go.mod h1:J/6gKK9jxlEcS3zixgDgUAsiuZ7yrSoa/FX5e0EB2j4= +github.com/json-iterator/go v1.1.5/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU= +github.com/json-iterator/go v1.1.6/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU= +github.com/json-iterator/go v1.1.7/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= +github.com/json-iterator/go v1.1.9/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= +github.com/json-iterator/go v1.1.10/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= +github.com/json-iterator/go v1.1.11/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= +github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM= +github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo= +github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU= +github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk= +github.com/jtolds/gls v4.20.0+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVYBRgL+9YlvaHOwJU= +github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w= +github.com/julienschmidt/httprouter v1.3.0/go.mod h1:JR6WtHb+2LUe8TCKY3cZOxFyyO8IZAc4RVcycCCAKdM= +github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8= +github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= +github.com/klauspost/compress v1.13.6/go.mod h1:/3/Vjq9QcHkK5uEr5lBEmyoZ1iFhe47etQ6QUkpK6sk= +github.com/klauspost/compress v1.17.8 h1:YcnTYrq7MikUT7k0Yb5eceMmALQPYBW/Xltxn0NAMnU= +github.com/klauspost/compress v1.17.8/go.mod h1:Di0epgTjJY877eYKx5yC51cX2A2Vl2ibi7bDH9ttBbw= +github.com/klauspost/cpuid/v2 v2.0.9/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg= +github.com/klauspost/cpuid/v2 v2.2.4 h1:acbojRNwl3o09bUq+yDCtZFc1aiwaAAxtcn8YkZXnvk= +github.com/klauspost/cpuid/v2 v2.2.4/go.mod h1:RVVoqg1df56z8g3pUjL/3lE5UfnlrJX8tyFgg4nqhuY= +github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= +github.com/konsorten/go-windows-terminal-sequences v1.0.3/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= +github.com/kr/fs v0.1.0/go.mod h1:FFnZGqtBN9Gxj7eW1uZ42v5BccTP0vu6NEaFoC2HwRg= +github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc= +github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= +github.com/kr/pretty v0.2.0/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI= +github.com/kr/pretty v0.2.1/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI= +github.com/kr/pretty v0.3.0 h1:WgNl7dwNpEZ6jJ9k1snq4pZsg7DOEN8hP9Xw0Tsjwk0= +github.com/kr/pretty v0.3.0/go.mod h1:640gp4NfQd8pI5XOwp5fnNeVWj67G7CFk/SaSQn7NBk= +github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= +github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= +github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= +github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= +github.com/leodido/go-urn v1.1.0/go.mod h1:+cyI34gQWZcE1eQU7NVgKkkzdXDQHr1dBMtdAPozLkw= +github.com/leodido/go-urn v1.2.1/go.mod h1:zt4jvISO2HfUBqxjfIshjdMTYS56ZS/qv49ictyFfxY= +github.com/leodido/go-urn v1.2.4 
h1:XlAE/cm/ms7TE/VMVoduSpNBoyc2dOxHs5MZSwAN63Q= +github.com/leodido/go-urn v1.2.4/go.mod h1:7ZrI8mTSeBSHl/UaRyKQW1qZeMgak41ANeCNaVckg+4= +github.com/lithammer/shortuuid v3.0.0+incompatible h1:NcD0xWW/MZYXEHa6ITy6kaXN5nwm/V115vj2YXfhS0w= +github.com/lithammer/shortuuid v3.0.0+incompatible/go.mod h1:FR74pbAuElzOUuenUHTK2Tciko1/vKuIKS9dSkDrA4w= +github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0 h1:6E+4a0GO5zZEnZ81pIr0yLvtUWk2if982qA3F3QD6H4= +github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0/go.mod h1:zJYVVT2jmtg6P3p1VtQj7WsuWi/y4VnjVBn7F8KPB3I= +github.com/magiconair/properties v1.8.6 h1:5ibWZ6iY0NctNGWo87LalDlEZ6R41TqbbDamhfG/Qzo= +github.com/magiconair/properties v1.8.6/go.mod h1:y3VJvCyxH9uVvJTWEGAELF3aiYNyPKd5NZ3oSwXrF60= +github.com/mailru/easyjson v0.0.0-20190614124828-94de47d64c63/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc= +github.com/mailru/easyjson v0.0.0-20190626092158-b2ccc519800e/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc= +github.com/mailru/easyjson v0.7.6 h1:8yTIVnZgCoiM1TgqoeTl+LfU5Jg6/xL3QhGQnimLYnA= +github.com/mailru/easyjson v0.7.6/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc= +github.com/mattn/go-colorable v0.0.9/go.mod h1:9vuHe8Xs5qXnSaW/c/ABM9alt+Vo+STaOChaDxuIBZU= +github.com/mattn/go-colorable v0.1.4/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE= +github.com/mattn/go-colorable v0.1.6/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= +github.com/mattn/go-colorable v0.1.9/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= +github.com/mattn/go-colorable v0.1.12 h1:jF+Du6AlPIjs2BiUiQlKOX0rt3SujHxPnksPKZbaA40= +github.com/mattn/go-colorable v0.1.12/go.mod h1:u5H1YNBxpqRaxsYJYSkiCWKzEfiAb1Gb520KVy5xxl4= +github.com/mattn/go-isatty v0.0.3/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4= +github.com/mattn/go-isatty v0.0.8/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s= +github.com/mattn/go-isatty v0.0.9/go.mod h1:YNRxwqDuOph6SZLI9vUUz6OYw3QyUt7WiY2yME+cCiQ= +github.com/mattn/go-isatty v0.0.10/go.mod h1:qgIWMr58cqv1PHHyhnkY9lrL7etaEgOFcMEpPG5Rm84= +github.com/mattn/go-isatty v0.0.11/go.mod h1:PhnuNfih5lzO57/f3n+odYbM4JtupLOxQOAqxQCu2WE= +github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU= +github.com/mattn/go-isatty v0.0.14/go.mod h1:7GGIvUiUoEMVVmxf/4nioHXj79iQHKdU27kJ6hsGG94= +github.com/mattn/go-isatty v0.0.19 h1:JITubQf0MOLdlGRuRq+jtsDlekdYPia9ZFsB8h/APPA= +github.com/mattn/go-isatty v0.0.19/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= +github.com/mattn/go-sqlite3 v1.14.17 h1:mCRHCLDUBXgpKAqIKsaAaAsrAlbkeomtRFKXh2L6YIM= +github.com/mattn/go-sqlite3 v1.14.17/go.mod h1:2eHXhiwb8IkHr+BDWZGa96P6+rkvnG63S2DGjv9HUNg= +github.com/matttproud/golang_protobuf_extensions v1.0.1 h1:4hp9jkHxhMHkqkrB3Ix0jegS5sx/RkqARlsWZ6pIwiU= +github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0= +github.com/miekg/dns v1.1.26/go.mod h1:bPDLeHnStXmXAq1m/Ch/hvfNHr14JKNPMBo3VZKjuso= +github.com/miekg/dns v1.1.41 h1:WMszZWJG0XmzbK9FEmzH2TVcqYzFesusSIB41b8KHxY= +github.com/miekg/dns v1.1.41/go.mod h1:p6aan82bvRIyn+zDIv9xYNUpwa73JcSh9BKwknJysuI= +github.com/mitchellh/cli v1.1.0/go.mod h1:xcISNoH86gajksDmfB23e/pu+B+GeFRMYmoHXxx3xhI= +github.com/mitchellh/go-homedir v1.1.0 h1:lukF9ziXFxDFPkA1vsr5zpc1XuPDn/wFntq5mG+4E0Y= +github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0= +github.com/mitchellh/go-testing-interface v1.0.0 h1:fzU/JVNcaqHQEcVFAKeR41fkiLdIPrefOvVG1VZ96U0= 
+github.com/mitchellh/go-testing-interface v1.0.0/go.mod h1:kRemZodwjscx+RGhAo8eIhFbs2+BFgRtFPeD/KE+zxI= +github.com/mitchellh/mapstructure v0.0.0-20160808181253-ca63d7c062ee/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y= +github.com/mitchellh/mapstructure v1.1.2/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y= +github.com/mitchellh/mapstructure v1.5.0 h1:jeMsZIYE/09sWLaz43PL7Gy6RuMjD2eJVyuac5Z2hdY= +github.com/mitchellh/mapstructure v1.5.0/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= +github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= +github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg= +github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= +github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= +github.com/modern-go/reflect2 v1.0.1/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= +github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M= +github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk= +github.com/montanaflynn/stats v0.0.0-20171201202039-1bf9dbcd8cbe h1:iruDEfMl2E6fbMZ9s0scYfZQ84/6SPL6zC8ACM2oIL0= +github.com/montanaflynn/stats v0.0.0-20171201202039-1bf9dbcd8cbe/go.mod h1:wL8QJuTMNUDYhXwkmfOly8iTdp5TEcJFWZD2D7SIkUc= +github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U= +github.com/mwitkow/go-conntrack v0.0.0-20190716064945-2f068394615f/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U= +github.com/nacos-group/nacos-sdk-go/v2 v2.2.7 h1:wCC1f3/VzIR1WD30YKeJGZAOchYCK/35mLC8qWt6Q6o= +github.com/nacos-group/nacos-sdk-go/v2 v2.2.7/go.mod h1:VYlyDPlQchPC31PmfBustu81vsOkdpCuO5k0dRdQcFc= +github.com/natefinch/lumberjack v2.0.0+incompatible h1:4QJd3OLAMgj7ph+yZTuX13Ld4UpgHp07nNdFX7mqFfM= +github.com/natefinch/lumberjack v2.0.0+incompatible/go.mod h1:Wi9p2TTF5DG5oU+6YfsmYQpsTIOm0B1VNzQg9Mw6nPk= +github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno= +github.com/nxadm/tail v1.4.4/go.mod h1:kenIhsEOeOJmVchQTgglprH7qJGnHDVpk1VPCcaMI8A= +github.com/nxadm/tail v1.4.8 h1:nPr65rt6Y5JFSKQO7qToXr7pePgD6Gwiw05lkbyAQTE= +github.com/nxadm/tail v1.4.8/go.mod h1:+ncqLTQzXmGhMZNUePPaPqPvBxHAIsmXswZKocGu+AU= +github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= +github.com/onsi/ginkgo v1.12.1/go.mod h1:zj2OWP4+oCPe1qIXoGWkgMRwljMUYCdkwsT2108oapk= +github.com/onsi/ginkgo v1.16.4 h1:29JGrr5oVBm5ulCWet69zQkzWipVXIol6ygQUe/EzNc= +github.com/onsi/ginkgo v1.16.4/go.mod h1:dX+/inL/fNMqNlz0e9LfyB9TswhZpCVdJM/Z6Vvnwo0= +github.com/onsi/gomega v1.7.1/go.mod h1:XdKZgCCFLUoM/7CFJVPcG8C1xQ1AJ0vpAezJrB7JYyY= +github.com/onsi/gomega v1.10.1/go.mod h1:iN09h71vgCQne3DLsj+A5owkum+a2tYe+TOCB1ybHNo= +github.com/onsi/gomega v1.16.0 h1:6gjqkI8iiRHMvdccRJM8rVKjCWk6ZIm6FTm3ddIe4/c= +github.com/onsi/gomega v1.16.0/go.mod h1:HnhC7FXeEQY45zxNK3PPoIUhzk/80Xly9PcubAlGdZY= +github.com/otiai10/copy v1.7.0/go.mod h1:rmRl6QPdJj6EiUqXQ/4Nn2lLXoNQjFCQbbNrxgc/t3U= +github.com/otiai10/curr v0.0.0-20150429015615-9b4961190c95/go.mod h1:9qAhocn7zKJG+0mI8eUu6xqkFDYS2kb2saOteoSB3cE= +github.com/otiai10/curr v1.0.0/go.mod h1:LskTG5wDwr8Rs+nNQ+1LlxRjAtTZZjtJW4rMXl6j4vs= +github.com/otiai10/mint v1.3.0/go.mod 
h1:F5AjcsTsWUqX+Na9fpHb52P8pcRX2CI6A3ctIT91xUo= +github.com/otiai10/mint v1.3.3/go.mod h1:/yxELlJQ0ufhjUwhshSj+wFjZ78CnZ48/1wtmBH1OTc= +github.com/pascaldekloe/goe v0.0.0-20180627143212-57f6aae5913c/go.mod h1:lzWF7FIEvWOWxwDKqyGYQf6ZUaNfKdP144TG7ZOy1lc= +github.com/pascaldekloe/goe v0.1.0 h1:cBOtyMzM9HTpWjXfbbunk26uA6nG3a8n06Wieeh0MwY= +github.com/pascaldekloe/goe v0.1.0/go.mod h1:lzWF7FIEvWOWxwDKqyGYQf6ZUaNfKdP144TG7ZOy1lc= +github.com/pelletier/go-toml v1.9.5 h1:4yBQzkHv+7BHq2PQUZF3Mx0IYxG7LsP222s7Agd3ve8= +github.com/pelletier/go-toml v1.9.5/go.mod h1:u1nR/EPcESfeI/szUZKdtJ0xRNbUoANCkoOuaOx1Y+c= +github.com/pelletier/go-toml/v2 v2.0.1/go.mod h1:r9LEWfGN8R5k0VXJ+0BkIe7MYkRdwZOjgMj2KwnJFUo= +github.com/pelletier/go-toml/v2 v2.0.8 h1:0ctb6s9mE31h0/lhu+J6OPmVeDxJn+kYnJc2jZR9tGQ= +github.com/pelletier/go-toml/v2 v2.0.8/go.mod h1:vuYfssBdrU2XDZ9bYydBu6t+6a6PYNcZljzZR9VXg+4= +github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA= +github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= +github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pkg/sftp v1.13.1/go.mod h1:3HaPG6Dq1ILlpPZRO0HVMrsydcdLt6HRDccSgb87qRg= +github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= +github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/posener/complete v1.1.1/go.mod h1:em0nMJCgc9GFtwrmVmEMR/ZL6WyhyjMBndrE9hABlRI= +github.com/posener/complete v1.2.3/go.mod h1:WZIdtGGp+qx0sLrYKtIRAruyNpv6hFCicSgv7Sy7s/s= +github.com/power-devops/perfstat v0.0.0-20210106213030-5aafc221ea8c h1:ncq/mPwQF4JjgDlrVEn3C11VoGHZN7m8qihwgMEtzYw= +github.com/power-devops/perfstat v0.0.0-20210106213030-5aafc221ea8c/go.mod h1:OmDBASR4679mdNQnz2pUhc2G8CO2JrUAVFDRBDP/hJE= +github.com/prometheus/client_golang v0.9.1/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw= +github.com/prometheus/client_golang v1.0.0/go.mod h1:db9x61etRT2tGnBNRi70OPL5FsnadC4Ky3P0J6CfImo= +github.com/prometheus/client_golang v1.4.0/go.mod h1:e9GMxYsXl05ICDXkRhurwBS4Q3OK1iX/F2sw+iXX5zU= +github.com/prometheus/client_golang v1.7.1/go.mod h1:PY5Wy2awLA44sXw4AOSfFBetzPP4j5+D6mVACh+pe2M= +github.com/prometheus/client_golang v1.11.0/go.mod h1:Z6t4BnS23TR94PD6BsDNk8yVqroYurpAkEiz0P2BEV0= +github.com/prometheus/client_golang v1.11.1/go.mod h1:Z6t4BnS23TR94PD6BsDNk8yVqroYurpAkEiz0P2BEV0= +github.com/prometheus/client_golang v1.12.1/go.mod h1:3Z9XVyYiZYEO+YQWt3RD2R3jrbd179Rt297l4aS6nDY= +github.com/prometheus/client_golang v1.13.0 h1:b71QUfeo5M8gq2+evJdTPfZhYMAU0uKPkyPJ7TPsloU= +github.com/prometheus/client_golang v1.13.0/go.mod h1:vTeo+zgvILHsnnj/39Ou/1fPN5nJFOEMgftOUOmlvYQ= +github.com/prometheus/client_model v0.0.0-20180712105110-5c3871d89910/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo= +github.com/prometheus/client_model v0.0.0-20190129233127-fd36f4220a90/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= +github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= +github.com/prometheus/client_model v0.2.0/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= +github.com/prometheus/client_model v0.4.0 h1:5lQXD3cAg1OXBf4Wq03gTrXHeaV0TQvGfUooCfx1yqY= +github.com/prometheus/client_model v0.4.0/go.mod 
h1:oMQmHW1/JoDwqLtg57MGgP/Fb1CJEYF2imWWhWtMkYU= +github.com/prometheus/common v0.4.1/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y86RQel1bk4= +github.com/prometheus/common v0.9.1/go.mod h1:yhUN8i9wzaXS3w1O07YhxHEBxD+W35wd8bs7vj7HSQ4= +github.com/prometheus/common v0.10.0/go.mod h1:Tlit/dnDKsSWFlCLTWaA1cyBgKHSMdTB80sz/V91rCo= +github.com/prometheus/common v0.26.0/go.mod h1:M7rCNAaPfAosfx8veZJCuw84e35h3Cfd9VFqTh1DIvc= +github.com/prometheus/common v0.32.1/go.mod h1:vu+V0TpY+O6vW9J44gczi3Ap/oXXR10b+M/gUGO4Hls= +github.com/prometheus/common v0.37.0 h1:ccBbHCgIiT9uSoFY0vX8H3zsNR5eLt17/RQLUvn8pXE= +github.com/prometheus/common v0.37.0/go.mod h1:phzohg0JFMnBEFGxTDbfu3QyL5GI8gTQJFhYO5B3mfA= +github.com/prometheus/procfs v0.0.0-20181005140218-185b4288413d/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk= +github.com/prometheus/procfs v0.0.2/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA= +github.com/prometheus/procfs v0.0.8/go.mod h1:7Qr8sr6344vo1JqZ6HhLceV9o3AJ1Ff+GxbHq6oeK9A= +github.com/prometheus/procfs v0.1.3/go.mod h1:lV6e/gmhEcM9IjHGsFOCxxuZ+z1YqCvr4OA4YeYWdaU= +github.com/prometheus/procfs v0.6.0/go.mod h1:cz+aTbrPOrUb4q7XlbU9ygM+/jj0fzG6c1xBZuNvfVA= +github.com/prometheus/procfs v0.7.3/go.mod h1:cz+aTbrPOrUb4q7XlbU9ygM+/jj0fzG6c1xBZuNvfVA= +github.com/prometheus/procfs v0.8.0 h1:ODq8ZFEaYeCaZOJlZZdJA2AbQR98dSHSM1KW/You5mo= +github.com/prometheus/procfs v0.8.0/go.mod h1:z7EfXMXOkbkqb9IINtpCn86r/to3BnA0uaxHdg830/4= +github.com/redis/go-redis/extra/rediscmd/v9 v9.5.3 h1:1/BDligzCa40GTllkDnY3Y5DTHuKCONbB2JcRyIfl20= +github.com/redis/go-redis/extra/rediscmd/v9 v9.5.3/go.mod h1:3dZmcLn3Qw6FLlWASn1g4y+YO9ycEFUOM+bhBmzLVKQ= +github.com/redis/go-redis/extra/redisotel/v9 v9.5.3 h1:kuvuJL/+MZIEdvtb/kTBRiRgYaOmx1l+lYJyVdrRUOs= +github.com/redis/go-redis/extra/redisotel/v9 v9.5.3/go.mod h1:7f/FMrf5RRRVHXgfk7CzSVzXHiWeuOQUu2bsVqWoa+g= +github.com/redis/go-redis/v9 v9.6.1 h1:HHDteefn6ZkTtY5fGUE8tj8uy85AHk6zP7CpzIAM0y4= +github.com/redis/go-redis/v9 v9.6.1/go.mod h1:0C0c6ycQsdpVNQpxb1njEQIqkx5UcsM8FJCQLgE9+RA= +github.com/rogpeppe/fastuuid v1.2.0/go.mod h1:jVj6XXZzXRy/MSR5jhDC/2q6DgLz+nrA6LYCDYWNEvQ= +github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= +github.com/rogpeppe/go-internal v1.6.1/go.mod h1:xXDCJY+GAPziupqXw64V24skbSoqbTEfhy4qGm1nDQc= +github.com/rogpeppe/go-internal v1.8.0 h1:FCbCCtXNOY3UtUuHUYaghJg4y7Fd14rXifAYUAtL9R8= +github.com/rogpeppe/go-internal v1.8.0/go.mod h1:WmiCO8CzOY8rg0OYDC4/i/2WRWAB6poM+XZ2dLUbcbE= +github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= +github.com/ryanuber/columnize v0.0.0-20160712163229-9b3edd62028f/go.mod h1:sm1tb6uqfes/u+d4ooFouqFdy9/2g9QGwK3SQygK0Ts= +github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529 h1:nn5Wsu0esKSJiIVhscUtVbo7ada43DJhG55ua/hjS5I= +github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529/go.mod h1:DxrIzT+xaE7yg65j358z/aeFdxmN0P9QXhEzd20vsDc= +github.com/shirou/gopsutil/v3 v3.23.8 h1:xnATPiybo6GgdRoC4YoGnxXZFRc3dqQTGi73oLvvBrE= +github.com/shirou/gopsutil/v3 v3.23.8/go.mod h1:7hmCaBn+2ZwaZOr6jmPBZDfawwMGuo1id3C6aM8EDqQ= +github.com/shoenig/go-m1cpu v0.1.6 h1:nxdKQNcEB6vzgA2E2bvzKIYRuNj7XNJ4S/aRSwKzFtM= +github.com/shoenig/go-m1cpu v0.1.6/go.mod h1:1JJMcUBvfNwpq05QDQVAnx3gUHr9IYF7GNg9SUEw2VQ= +github.com/shoenig/test v0.6.4 h1:kVTaSd7WLz5WZ2IaoM0RSzRsUD+m8wRR+5qvntpn4LU= +github.com/shoenig/test v0.6.4/go.mod h1:byHiCGXqrVaflBLAMq/srcZIHynQPQgeyvkvXnjqq0k= +github.com/shurcooL/sanitized_anchor_name v1.0.0/go.mod 
h1:1NzhyTcUVG4SuEtjjoZeVRXNmyL/1OwPU0+IJeTBvfc= +github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo= +github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE= +github.com/sirupsen/logrus v1.6.0/go.mod h1:7uNnSEd1DgxDLC74fIahvMZmmYsHGZGEOFrfsX/uA88= +github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc= +github.com/smartystreets/goconvey v1.6.4/go.mod h1:syvi0/a8iFYH4r/RixwvyeAJjdLS9QV7WQ/tjFTllLA= +github.com/spf13/afero v1.9.2 h1:j49Hj62F0n+DaZ1dDCvhABaPNSGNkt32oRFxI33IEMw= +github.com/spf13/afero v1.9.2/go.mod h1:iUV7ddyEEZPO5gA3zD4fJt6iStLlL+Lg4m2cihcDf8Y= +github.com/spf13/cast v1.5.0 h1:rj3WzYc11XZaIZMPKmwP96zkFEnnAmV8s6XbB2aY32w= +github.com/spf13/cast v1.5.0/go.mod h1:SpXXQ5YoyJw6s3/6cMTQuxvgRl3PCJiyaX9p6b155UU= +github.com/spf13/jwalterweatherman v1.1.0 h1:ue6voC5bR5F8YxI5S67j9i582FU4Qvo2bmqnqMYADFk= +github.com/spf13/jwalterweatherman v1.1.0/go.mod h1:aNWZUN0dPAAO/Ljvb5BEdw96iTZ0EXowPYD95IqWIGo= +github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA= +github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= +github.com/spf13/viper v1.12.0 h1:CZ7eSOd3kZoaYDLbXnmzgQI5RlciuXBMA+18HwHRfZQ= +github.com/spf13/viper v1.12.0/go.mod h1:b6COn30jlNxbm/V2IqWiNWkJ+vZNiMNksliPCiuKtSI= +github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= +github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo= +github.com/stretchr/objx v0.5.2 h1:xuMeJ0Sdp5ZMRXx/aWO6RZxdr3beISkG5/G/aIRr3pY= +github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/8L+MA= +github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= +github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= +github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= +github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA= +github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= +github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= +github.com/stretchr/testify v1.8.2/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= +github.com/stretchr/testify v1.8.3/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo= +github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo= +github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg= +github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= +github.com/subosito/gotenv v1.3.0 h1:mjC+YW8QpAdXibNi+vNWgzmgBH4+5l5dCXv8cNysBLI= +github.com/subosito/gotenv v1.3.0/go.mod h1:YzJjq/33h7nrwdY+iHMhEOEEbW0ovIz0tB6t6PwAXzs= +github.com/swaggo/files v0.0.0-20220610200504-28940afbdbfe/go.mod h1:lKJPbtWzJ9JhsTN1k1gZgleJWY/cqq0psdoMmaThG3w= +github.com/swaggo/files v0.0.0-20220728132757-551d4a08d97a 
h1:kAe4YSu0O0UFn1DowNo2MY5p6xzqtJ/wQ7LZynSvGaY= +github.com/swaggo/files v0.0.0-20220728132757-551d4a08d97a/go.mod h1:lKJPbtWzJ9JhsTN1k1gZgleJWY/cqq0psdoMmaThG3w= +github.com/swaggo/gin-swagger v1.5.2 h1:dj2es17EaOHoy0Owu4xn3An1mI8/xjdFyIH6KAbOdYo= +github.com/swaggo/gin-swagger v1.5.2/go.mod h1:Cbj/MlHApPOjZdf4joWFXLLgmZVPyh54GPvPPyVjVZM= +github.com/swaggo/swag v1.8.1/go.mod h1:ugemnJsPZm/kRwFUnzBlbHRd0JY9zE1M4F+uy2pAaPQ= +github.com/swaggo/swag v1.8.12 h1:pctzkNPu0AlQP2royqX3apjKCQonAnf7KGoxeO4y64w= +github.com/swaggo/swag v1.8.12/go.mod h1:lNfm6Gg+oAq3zRJQNEMBE66LIJKM44mxFqhEEgy2its= +github.com/tidwall/pretty v1.0.0/go.mod h1:XNkn88O1ChpSDQmQeStsy+sBenx6DDtFZJxhVysOjyk= +github.com/tklauser/go-sysconf v0.3.12 h1:0QaGUFOdQaIVdPgfITYzaTegZvdCjmYO52cSFAEVmqU= +github.com/tklauser/go-sysconf v0.3.12/go.mod h1:Ho14jnntGE1fpdOqQEEaiKRpvIavV0hSfmBq8nJbHYI= +github.com/tklauser/numcpus v0.6.1 h1:ng9scYS7az0Bk4OZLvrNXNSAO2Pxr1XXRAPyjhIx+Fk= +github.com/tklauser/numcpus v0.6.1/go.mod h1:1XfjsgE2zo8GVw7POkMbHENHzVg3GzmoZ9fESEdAacY= +github.com/tv42/httpunix v0.0.0-20150427012821-b75d8614f926/go.mod h1:9ESjWnEqriFuLhtthL60Sar/7RFoluCcXsuvEwTV5KM= +github.com/twitchyliquid64/golang-asm v0.15.1 h1:SU5vSMR7hnwNxj24w34ZyCi/FmDZTkS4MhqMhdFk5YI= +github.com/twitchyliquid64/golang-asm v0.15.1/go.mod h1:a1lVb/DtPvCB8fslRZhAngC2+aY1QWCk3Cedj/Gdt08= +github.com/ugorji/go v1.1.7/go.mod h1:kZn38zHttfInRq0xu/PH0az30d+z6vm202qpg1oXVMw= +github.com/ugorji/go v1.2.7/go.mod h1:nF9osbDWLy6bDVv/Rtoh6QgnvNDpmCalQV5urGCCS6M= +github.com/ugorji/go/codec v1.1.7/go.mod h1:Ax+UKWsSmolVDwsd+7N3ZtXu+yMGCf907BLYF3GoBXY= +github.com/ugorji/go/codec v1.2.7/go.mod h1:WGN1fab3R1fzQlVQTkfxVtIBhWDRqOviHU95kRgeqEY= +github.com/ugorji/go/codec v1.2.11 h1:BMaWp1Bb6fHwEtbplGBGJ498wD+LKlNSl25MjdZY4dU= +github.com/ugorji/go/codec v1.2.11/go.mod h1:UNopzCgEMSXjBc6AOMqYvWC1ktqTAfzJZUZgYf6w6lg= +github.com/uptrace/opentelemetry-go-extra/otelgorm v0.2.3 h1:girTS67d1m8+XUJLbNBDjCSH8BtujWFoI93W1OUjFIc= +github.com/uptrace/opentelemetry-go-extra/otelgorm v0.2.3/go.mod h1:kjsn/ilDe5TABXwTy7Dg/Lfr2pRAjrCD+yPV+pbhOMY= +github.com/uptrace/opentelemetry-go-extra/otelsql v0.2.3 h1:LNi0Qa7869/loPjz2kmMvp/jwZZnMZ9scMJKhDJ1DIo= +github.com/uptrace/opentelemetry-go-extra/otelsql v0.2.3/go.mod h1:jyigonKik3C5V895QNiAGpKYKEvFuqjw9qAEZks1mUg= +github.com/urfave/cli/v2 v2.3.0/go.mod h1:LJmUH05zAU44vOAcrfzZQKsZbVcdbOG8rtL3/XcUArI= +github.com/vmihailenco/msgpack v4.0.4+incompatible h1:dSLoQfGFAo3F6OoNhwUmLwVgaUXK79GlxNBwueZn0xI= +github.com/vmihailenco/msgpack v4.0.4+incompatible/go.mod h1:fy3FlTQTDXWkZ7Bh6AcGMlsjHatGryHQYUTf1ShIgkk= +github.com/xdg-go/pbkdf2 v1.0.0 h1:Su7DPu48wXMwC3bs7MCNG+z4FhcyEuz5dlvchbq0B0c= +github.com/xdg-go/pbkdf2 v1.0.0/go.mod h1:jrpuAogTd400dnrH08LKmI/xc1MbPOebTwRqcT5RDeI= +github.com/xdg-go/scram v1.0.2/go.mod h1:1WAq6h33pAW+iRreB34OORO2Nf7qel3VV3fjBj+hCSs= +github.com/xdg-go/scram v1.1.2 h1:FHX5I5B4i4hKRVRBCFRxq1iQRej7WO3hhBuJf+UUySY= +github.com/xdg-go/scram v1.1.2/go.mod h1:RT/sEzTbU5y00aCK8UOx6R7YryM0iF1N2MOmC3kKLN4= +github.com/xdg-go/stringprep v1.0.2/go.mod h1:8F9zXuvzgwmyT5DUm4GUfZGDdT3W+LCvS6+da4O5kxM= +github.com/xdg-go/stringprep v1.0.4 h1:XLI/Ng3O1Atzq0oBs3TWm+5ZVgkq2aqdlvP9JtoZ6c8= +github.com/xdg-go/stringprep v1.0.4/go.mod h1:mPGuuIYwz7CmR2bT9j4GbQqutWS1zV24gijq1dTyGkM= +github.com/youmark/pkcs8 v0.0.0-20181117223130-1be2e3e5546d h1:splanxYIlg+5LfHAM6xpdFEAYOk8iySO56hMFq6uLyA= +github.com/youmark/pkcs8 v0.0.0-20181117223130-1be2e3e5546d/go.mod h1:rHwXgn7JulP+udvsHwJoVG1YGAP6VLg4y9I5dyZdqmA= 
+github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= +github.com/yuin/goldmark v1.4.0/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= +github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= +github.com/yuin/gopher-lua v0.0.0-20210529063254-f4c35e4016d9 h1:k/gmLsJDWwWqbLCur2yWnJzwQEKRcAHXo6seXGuSwWw= +github.com/yuin/gopher-lua v0.0.0-20210529063254-f4c35e4016d9/go.mod h1:E1AXubJBdNmFERAOucpDIxNzeGfLzg0mYh+UfMWdChA= +github.com/yusufpapurcu/wmi v1.2.3 h1:E1ctvB7uKFMOJw3fdOW32DwGE9I7t++CRUEMKvFoFiw= +github.com/yusufpapurcu/wmi v1.2.3/go.mod h1:SBZ9tNy3G9/m5Oi98Zks0QjeHVDvuK0qfxQmPyzfmi0= +github.com/zhufuyi/sponge v1.10.1 h1:feB75axQtMJ5EkC9M6WgUC+VIqZlB+K6zmNul9xlB0c= +github.com/zhufuyi/sponge v1.10.1/go.mod h1:g6oDmwPTUrCL9+RJbSbjQEtmc0yhJ1vUD5rCdF1QNG4= +go.etcd.io/etcd/api/v3 v3.5.4 h1:OHVyt3TopwtUQ2GKdd5wu3PmmipR4FTwCqoEjSyRdIc= +go.etcd.io/etcd/api/v3 v3.5.4/go.mod h1:5GB2vv4A4AOn3yk7MftYGHkUfGtDHnEraIjym4dYz5A= +go.etcd.io/etcd/client/pkg/v3 v3.5.4 h1:lrneYvz923dvC14R54XcA7FXoZ3mlGZAgmwhfm7HqOg= +go.etcd.io/etcd/client/pkg/v3 v3.5.4/go.mod h1:IJHfcCEKxYu1Os13ZdwCwIUTUVGYTSAM3YSwc9/Ac1g= +go.etcd.io/etcd/client/v3 v3.5.4 h1:p83BUL3tAYS0OT/r0qglgc3M1JjhM0diV8DSWAhVXv4= +go.etcd.io/etcd/client/v3 v3.5.4/go.mod h1:ZaRkVgBZC+L+dLCjTcF1hRXpgZXQPOvnA/Ak/gq3kiY= +go.mongodb.org/mongo-driver v1.8.3/go.mod h1:0sQWfOeY63QTntERDJJ/0SuKK0T1uVSgKCuAROlKEPY= +go.mongodb.org/mongo-driver v1.14.0 h1:P98w8egYRjYe3XDjxhYJagTokP/H6HzlsnojRgZRd80= +go.mongodb.org/mongo-driver v1.14.0/go.mod h1:Vzb0Mk/pa7e6cWw85R4F/endUC3u0U9jGcNU603k65c= +go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU= +go.opencensus.io v0.22.0/go.mod h1:+kGneAE2xo2IficOXnaByMWTGM9T73dGwxeWcUqIpI8= +go.opencensus.io v0.22.2/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= +go.opencensus.io v0.22.3/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= +go.opencensus.io v0.22.4/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= +go.opencensus.io v0.22.5/go.mod h1:5pWMHQbX5EPX2/62yrJeAkowc+lfs/XD7Uxpq3pI6kk= +go.opentelemetry.io/contrib v1.24.0 h1:Tfn7pP/482iIzeeba91tP52a1c1TEeqYc1saih+vBN8= +go.opentelemetry.io/contrib v1.24.0/go.mod h1:usW9bPlrjHiJFbK0a6yK/M5wNHs3nLmtrT3vzhoD3co= +go.opentelemetry.io/otel v1.24.0 h1:0LAOdjNmQeSTzGBzduGe/rU4tZhMwL5rWgtp9Ku5Jfo= +go.opentelemetry.io/otel v1.24.0/go.mod h1:W7b9Ozg4nkF5tWI5zsXkaKKDjdVjpD4oAt9Qi/MArHo= +go.opentelemetry.io/otel/exporters/jaeger v1.17.0 h1:D7UpUy2Xc2wsi1Ras6V40q806WM07rqoCWzXu7Sqy+4= +go.opentelemetry.io/otel/exporters/jaeger v1.17.0/go.mod h1:nPCqOnEH9rNLKqH/+rrUjiMzHJdV1BlpKcTwRTyKkKI= +go.opentelemetry.io/otel/exporters/stdout/stdouttrace v1.24.0 h1:s0PHtIkN+3xrbDOpt2M8OTG92cWqUESvzh2MxiR5xY8= +go.opentelemetry.io/otel/exporters/stdout/stdouttrace v1.24.0/go.mod h1:hZlFbDbRt++MMPCCfSJfmhkGIWnX1h3XjkfxZUjLrIA= +go.opentelemetry.io/otel/metric v1.24.0 h1:6EhoGWWK28x1fbpA4tYTOWBkPefTDQnb8WSGXlc88kI= +go.opentelemetry.io/otel/metric v1.24.0/go.mod h1:VYhLe1rFfxuTXLgj4CBiyz+9WYBA8pNGJgDcSFRKBco= +go.opentelemetry.io/otel/sdk v1.24.0 h1:YMPPDNymmQN3ZgczicBY3B6sf9n62Dlj9pWD3ucgoDw= +go.opentelemetry.io/otel/sdk 
v1.24.0/go.mod h1:KVrIYw6tEubO9E96HQpcmpTKDVn9gdv35HoYiQWGDFg= +go.opentelemetry.io/otel/trace v1.24.0 h1:CsKnnL4dUAr/0llH9FKuc698G04IrpWV0MQA/Y1YELI= +go.opentelemetry.io/otel/trace v1.24.0/go.mod h1:HPc3Xr/cOApsBI154IU0OI0HJexz+aw5uPdbs3UCjNU= +go.uber.org/atomic v1.7.0 h1:ADUqmZGgLDDfbSL9ZmPxKTybcoEYHgpYfELNoN+7hsw= +go.uber.org/atomic v1.7.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc= +go.uber.org/goleak v1.1.11-0.20210813005559-691160354723/go.mod h1:cwTWslyiVhfpKIDGSZEM2HlOvcqm+tG4zioyIeLoqMQ= +go.uber.org/goleak v1.2.1 h1:NBol2c7O1ZokfZ0LEU9K6Whx/KnwvepVetCUhtKja4A= +go.uber.org/goleak v1.2.1/go.mod h1:qlT2yGI9QafXHhZZLxlSuNsMw3FFLxBr+tBRlmO1xH4= +go.uber.org/multierr v1.6.0/go.mod h1:cdWPpRnG4AhwMwsgIHip0KRBQjJy5kYEpYjJxpXp9iU= +go.uber.org/multierr v1.9.0 h1:7fIwc/ZtS0q++VgcfqFDxSBZVv/Xo49/SYnDFupUwlI= +go.uber.org/multierr v1.9.0/go.mod h1:X2jQV1h+kxSjClGpnseKVIxpmcjrj7MNnI0bnlfKTVQ= +go.uber.org/zap v1.17.0/go.mod h1:MXVU+bhUf/A7Xi2HNOnopQOrmycQ5Ih87HtOu4q5SSo= +go.uber.org/zap v1.19.1/go.mod h1:j3DNczoxDZroyBnOT1L/Q79cfUMGZxlv/9dzN7SM1rI= +go.uber.org/zap v1.24.0 h1:FiJd5l1UOLj0wCgbSE0rwwXHzEdAZS6hiiSnxJN/D60= +go.uber.org/zap v1.24.0/go.mod h1:2kMP+WWQ8aoFoedH3T2sq6iJ2yDWpHbP0f6MQbS9Gkg= +golang.org/x/arch v0.0.0-20210923205945-b76863e36670/go.mod h1:5om86z9Hs0C8fWVUuoMHwpExlXzs5Tkyp9hOrfG7pp8= +golang.org/x/arch v0.3.0 h1:02VY4/ZcO/gBOH6PUaoiptASxtXU10jazRCP865E97k= +golang.org/x/arch v0.3.0/go.mod h1:5om86z9Hs0C8fWVUuoMHwpExlXzs5Tkyp9hOrfG7pp8= +golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= +golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= +golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20190923035154-9ee001bba392/go.mod h1:/lpIB1dKB+9EgE3H3cr1v9wB50oz8l4C4h62xy7jSTY= +golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20200510223506-06a226fb4e37/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/crypto v0.0.0-20201216223049-8b5274cf687f/go.mod h1:jdWPYTVW3xRLrWPugEBEK3UY2ZEsg3UU495nc5E+M+I= +golang.org/x/crypto v0.0.0-20210421170649-83a5a9bb288b/go.mod h1:T9bdIzuCu7OtxOm1hfPfRQxPLYneinmdGuTeoZ9dtd4= +golang.org/x/crypto v0.0.0-20210711020723-a769d52b0f97/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= +golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= +golang.org/x/crypto v0.0.0-20211108221036-ceb1ce70b4fa/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= +golang.org/x/crypto v0.26.0 h1:RrRspgV4mU+YwB4FYnuBoKsUapNIL5cohGAmSH3azsw= +golang.org/x/crypto v0.26.0/go.mod h1:GY7jblb9wI+FOo5y8/S2oY4zWP07AkOJ4+jxCqdqn54= +golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= +golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= +golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8= +golang.org/x/exp v0.0.0-20190829153037-c13cbed26979/go.mod h1:86+5VVa7VpoJ4kLfm080zCjGlMRFzhUhsZKEZO7MGek= +golang.org/x/exp 
v0.0.0-20191030013958-a1ab85dbe136/go.mod h1:JXzH8nQsPlswgeRAPE3MuO9GYsAcnJvJ4vnMwN/5qkY= +golang.org/x/exp v0.0.0-20191129062945-2f5052295587/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= +golang.org/x/exp v0.0.0-20191227195350-da58074b4299/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= +golang.org/x/exp v0.0.0-20200119233911-0405dc783f0a/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= +golang.org/x/exp v0.0.0-20200207192155-f17229e696bd/go.mod h1:J/WKrq2StrnmMY6+EHIKF9dgMWnmCNThgcyBT1FY9mM= +golang.org/x/exp v0.0.0-20200224162631-6cc2880d07d6/go.mod h1:3jZMyOhIsHpP37uCMkUooju7aAi5cS1Q23tOzKc+0MU= +golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js= +golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= +golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= +golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU= +golang.org/x/lint v0.0.0-20190301231843-5614ed5bae6f/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= +golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/lint v0.0.0-20190409202823-959b441ac422/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/lint v0.0.0-20190909230951-414d861bb4ac/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/lint v0.0.0-20190930215403-16217165b5de/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/lint v0.0.0-20191125180803-fdd1cda4f05f/go.mod h1:5qLYkcX4OjUUV8bRuDixDT3tpyyb+LUpUlRWLxfhWrs= +golang.org/x/lint v0.0.0-20200130185559-910be7a94367/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= +golang.org/x/lint v0.0.0-20200302205851-738671d3881b/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= +golang.org/x/lint v0.0.0-20201208152925-83fdc39ff7b5/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= +golang.org/x/lint v0.0.0-20210508222113-6edffad5e616/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= +golang.org/x/mobile v0.0.0-20190312151609-d3739f865fa6/go.mod h1:z+o9i4GpDbdi3rU15maQ/Ox0txvL9dWGYEHz965HBQE= +golang.org/x/mobile v0.0.0-20190719004257-d2bd2a29d028/go.mod h1:E/iHnbuqvinMTCcRqshq8CkpyQDoeVncDDYHnLhea+o= +golang.org/x/mod v0.0.0-20190513183733-4bf6d317e70e/go.mod h1:mXi4GBBbnImb6dmsKGUJ2LatrhH/nqhxcFungHvyanc= +golang.org/x/mod v0.1.0/go.mod h1:0QHyrYULN0/3qlju5TqG8bIK38QM8yzMo5ekMj3DlcY= +golang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg= +golang.org/x/mod v0.1.1-0.20191107180719-034126e5016b/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg= +golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.4.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.4.1/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= +golang.org/x/mod v0.17.0 h1:zY54UmvipHiNd+pm+m0x9KhZ9hl1/7QNMyxXbc6ICqA= +golang.org/x/mod v0.17.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c= +golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod 
h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190501004415-9ce7a6920f09/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190503192946-f4e77d36d62c/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190603091049-60506f45cf65/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks= +golang.org/x/net v0.0.0-20190613194153-d28f0bde5980/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20190628185345-da137c7871d7/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20190724013045-ca1201d0de80/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20190923162816-aa69164e4478/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20191209160850-c0dbc17a3553/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200114155413-6afb5195e5aa/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200202094626-16171245cfb2/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200222125558-5a598a2470a0/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200301022130-244492dfa37a/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200324143707-d3edc9973b7e/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200501053045-e0ff5e5a1de5/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200506145744-7e3656a0809f/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200513185701-a91f0712d120/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200520004742-59133d7f0dd7/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200520182314-0ba52f642ac2/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200625001655-4c5254603344/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= +golang.org/x/net v0.0.0-20200707034311-ab3426394381/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= +golang.org/x/net v0.0.0-20200822124328-c89045814202/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= +golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= +golang.org/x/net v0.0.0-20201031054903-ff519b6c9102/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= +golang.org/x/net 
v0.0.0-20201209123823-ac852fbbde11/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= +golang.org/x/net v0.0.0-20201224014010-6772e930b67b/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= +golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= +golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM= +golang.org/x/net v0.0.0-20210410081132-afb366fc7cd1/go.mod h1:9tjilg8BloeKEkVJvy7fQ90B1CfIiPueXVOjqfkSzI8= +golang.org/x/net v0.0.0-20210421230115-4e50805a0758/go.mod h1:72T/g9IO56b78aLF+1Kcs5dz7/ng1VjMUvfKvpfy+jM= +golang.org/x/net v0.0.0-20210428140749-89ef3d95e781/go.mod h1:OJAsFXCWl8Ukc7SiCT/9KSuxbyM7479/AVlXFRxuMCk= +golang.org/x/net v0.0.0-20210525063256-abc453219eb5/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= +golang.org/x/net v0.0.0-20210805182204-aaa1db679c0d/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= +golang.org/x/net v0.0.0-20220127200216-cd36cc0744dd/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= +golang.org/x/net v0.0.0-20220225172249-27dd8689420f/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= +golang.org/x/net v0.0.0-20220425223048-2871e0cb64e4/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= +golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= +golang.org/x/net v0.25.0 h1:d/OCCoBEUq33pjydKrGQhw7IlUPI2Oylr+8qLx49kac= +golang.org/x/net v0.25.0/go.mod h1:JkAGAh7GEvH74S6FOH42FLoXpXbE/aqXSrIQjXgsiwM= +golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= +golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/oauth2 v0.0.0-20191202225959-858c2ad4c8b6/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/oauth2 v0.0.0-20200902213428-5d25da1a8d43/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20201109201403-9fd604954f58/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20201208152858-08078c50e5b5/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20210218202405-ba52d332ba99/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20210514164344-f6687ab2804c/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20220223155221-ee480838109b/go.mod h1:DAh4E804XQdzx2j+YRIaUnCqCV2RuMz24cGBJ5QYIrc= +golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190227155943-e225da77a7e6/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20200317015054-43a5402ce75a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync 
v0.0.0-20200625203802-6e8e738ad208/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.8.0 h1:3NFvSEYkUoMifnESzZl15y791HH1qU2xm6eCJU5ZPXQ= +golang.org/x/sync v0.8.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= +golang.org/x/sys v0.0.0-20180823144017-11551d06cbcc/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20181116152217-5ac8a444bdc5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190204203706-41f3e6584952/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190312061237-fead79001313/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190422165155-953cdadca894/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190502145724-3ef323f4f1fd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190507160741-ecd444e8653b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190606165138-5da285871e9c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190624142023-c5567b49c5d0/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190726091711-fc99dfbffb4e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190813064441-fde4db37ae7a/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190904154756-749cb33beabd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190916202348-b4ddaad3f8a3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190922100055-0a153f010e69/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190924154521-2837fb4f24fe/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191001151750-bb3f8db39f24/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191005200804-aed5e4c7ecf9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191008105621-543471e840be/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191120155948-bd437916bb0e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191204072324-ce4227a45e2e/go.mod 
h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191228213918-04cbcbbfeed8/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200106162015-b016eb3dc98e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200113162924-86b910548bc1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200122134326-e047566fdf82/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200124204421-9fbb57f87de9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200202164722-d101bd2416d5/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200212091648-12a6c2dcc1e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200302150141-5c8b2ff67527/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200331124033-c3d80250170d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200501052902-10377860bb8e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200511232937-7e40ca221e25/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200515095857-1151b9dac4a9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200523222454-059865788121/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200615200032-f1bc736245b1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200625212154-ddb9806d33ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200803210538-64077c9b5642/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200905004654-be1d3432aa8f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20201201145000-ef89a241ccb3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20201204225414-ed752295db88/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210104204734-6f8348627aad/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210112080510-489259a85091/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210119212857-b64e53b001e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210124154548-22da62e12c0c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210225134936-a50acf3fe073/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210303074136-134d130e1a04/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210330210617-4fbd30eecc44/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210403161142-5e06dd20ab57/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys 
v0.0.0-20210420072515-93ed5bcd2bfe/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210423185535-09eb48e85fd7/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210510120138-977fb7262007/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210603081109-ebe580a85c40/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210806184541-e5e7981a1069/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210809222454-d867a43fc93e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210927094055-39ccf1dd6fa6/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20211007075335-d3039528d8ac/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20211216021012-1d35b9e2eb4e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220114195835-da31bd327af9/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220412211240-33da011f77ad/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220704084225-05e143d24a9e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20221010170243-090e33056c14/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.11.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.23.0 h1:YfKFowiIMvtgl1UERQoTPPToxltDeZfbj4H7dVUCwmM= +golang.org/x/sys v0.23.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/term v0.0.0-20201117132131-f5c789dd3221/go.mod h1:Nr5EML6q2oocZ2LXRh80K7BxOlk5/8JxuGnuhpl+muw= +golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= +golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= +golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= +golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.4/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.5/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= +golang.org/x/text v0.3.8/go.mod h1:E6s5w1FMmriuDzIBO73fBruAKo1PCIq6d2Q6DHfQ8WQ= +golang.org/x/text v0.17.0 
h1:XtiM5bkSOt+ewxlOE/aE/AKEHibwj/6gvWMl9Rsh0Qc= +golang.org/x/text v0.17.0/go.mod h1:BuEKDfySbSR4drPmRPG/7iBdf8hvFMuRexcpahXilzY= +golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/time v0.1.0 h1:xYY+Bajn2a7VBmTM5GikTmnK8ZuX8YgnQCqZpbBNtmA= +golang.org/x/time v0.1.0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY= +golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190312151545-0bb0c0a6e846/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190312170243-e65039ee4138/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190328211700-ab21143f2384/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190425150028-36563e24a262/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= +golang.org/x/tools v0.0.0-20190506145303-2d16b83fe98c/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= +golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= +golang.org/x/tools v0.0.0-20190531172133-b3315ee88b7d/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20190606124116-d0a3d012864b/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20190621195816-6e04913cbbac/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20190628153133-6cdbf07be9d0/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20190816200558-6889da9d5479/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20190907020128-2ca718005c18/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20190911174233-4f2ddba30aff/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191012152004-8de300cfc20a/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191113191852-77e3bb0ad9e7/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191115202509-3a792d9c32b2/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191125144606-a911d9008d1f/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191130070609-6e064ea0cf2d/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191216173652-a0e659d51361/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20191227053925-7b8e75db28f4/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200117161641-43d50277825c/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200122220014-bf1340f18c4a/go.mod 
h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200130002326-2f3ba24bd6e7/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200204074204-1cc6d1ef6c74/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200207183749-b753a1ba74fa/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200212150539-ea181f53ac56/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200224181240-023911ca70b2/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200227222343-706bc42d1f0d/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200304193943-95d2e580d8eb/go.mod h1:o4KQGtdN14AW+yjsvvwRTJJuXz8XRtIHtEnmAXLyFUw= +golang.org/x/tools v0.0.0-20200312045724-11d5b4c81c7d/go.mod h1:o4KQGtdN14AW+yjsvvwRTJJuXz8XRtIHtEnmAXLyFUw= +golang.org/x/tools v0.0.0-20200331025713-a30bf2db82d4/go.mod h1:Sl4aGygMT6LrqrWclx+PTx3U+LnKx/seiNR+3G19Ar8= +golang.org/x/tools v0.0.0-20200501065659-ab2804fb9c9d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200512131952-2bc93b1c0c88/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200515010526-7d3b6ebf133d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200618134242-20370b0cb4b2/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200619180055-7c47624df98f/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200729194436-6467de6f59a7/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= +golang.org/x/tools v0.0.0-20200804011535-6c149bb5ef0d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= +golang.org/x/tools v0.0.0-20200825202427-b303f430e36d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= +golang.org/x/tools v0.0.0-20200904185747-39188db58858/go.mod h1:Cj7w3i3Rnn0Xh82ur9kSqwfTHTeVxaDqrfMjpcNT6bE= +golang.org/x/tools v0.0.0-20201110124207-079ba7bd75cd/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.0.0-20201201161351-ac6f37ff4c2a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.0.0-20201208233053-a543418bbed2/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.0.0-20201224043029-2b0845dc783e/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.0.0-20210105154028-b0ab187a4818/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.0.0-20210108195828-e2f9c7f1fc8e/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.1.0/go.mod h1:xkSsbof2nBLbhDlRMhhhyNLN/zl3eTqcnHD5viDpcZ0= +golang.org/x/tools v0.1.1/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= +golang.org/x/tools v0.1.2/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= +golang.org/x/tools v0.1.5/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= +golang.org/x/tools v0.1.7/go.mod h1:LGqMHiF4EqQNHR1JncWGqT5BVaXmza+X+BDGol+dOxo= +golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= +golang.org/x/tools v0.21.1-0.20240508182429-e35e4ccd0d2d h1:vU5i/LfpvrRCpgM/VPfJLg5KjxD3E+hfT1SH+d9zLwg= +golang.org/x/tools v0.21.1-0.20240508182429-e35e4ccd0d2d/go.mod h1:aiJjzUbINMkxbQROHiO6hDPo2LHcIPhhQsa9DLh0yGk= +golang.org/x/xerrors 
v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +google.golang.org/api v0.4.0/go.mod h1:8k5glujaEP+g9n7WNsDg8QP6cUVNI86fCNMcbazEtwE= +google.golang.org/api v0.7.0/go.mod h1:WtwebWUNSVBH/HAw79HIFXZNqEvBhG+Ra+ax0hx3E3M= +google.golang.org/api v0.8.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg= +google.golang.org/api v0.9.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg= +google.golang.org/api v0.13.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= +google.golang.org/api v0.14.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= +google.golang.org/api v0.15.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= +google.golang.org/api v0.17.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.18.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.19.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.20.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.22.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.24.0/go.mod h1:lIXQywCXRcnZPGlsd8NbLnOjtAoL6em04bJ9+z0MncE= +google.golang.org/api v0.28.0/go.mod h1:lIXQywCXRcnZPGlsd8NbLnOjtAoL6em04bJ9+z0MncE= +google.golang.org/api v0.29.0/go.mod h1:Lcubydp8VUV7KeIHD9z2Bys/sm/vGKnG1UHuDBSrHWM= +google.golang.org/api v0.30.0/go.mod h1:QGmEvQ87FHZNiUVJkT14jQNYJ4ZJjdRF23ZXz5138Fc= +google.golang.org/api v0.35.0/go.mod h1:/XrVsuzM0rZmrsbjJutiuftIzeuTQcEeaYcSk/mQ1dg= +google.golang.org/api v0.36.0/go.mod h1:+z5ficQTmoYpPn8LCUNVpK5I7hwkpjbcgqA7I34qYtE= +google.golang.org/api v0.40.0/go.mod h1:fYKFpnQN0DsDSKRVRcQSDQNtqWPfM9i+zNPxepjRCQ8= +google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= +google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= +google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= +google.golang.org/appengine v1.6.1/go.mod h1:i06prIuMbXzDqacNJfV5OdTW448YApPu5ww/cMBSeb0= +google.golang.org/appengine v1.6.5/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= +google.golang.org/appengine v1.6.6/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= +google.golang.org/appengine v1.6.7/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= +google.golang.org/appengine v1.6.8 h1:IhEN5q69dyKagZPYMSdIjS2HqprW324FRQZJcGqPAsM= +google.golang.org/appengine v1.6.8/go.mod h1:1jJ3jBArFh5pcgW8gCtRJnepW8FzD1V44FJffLiz/Ds= +google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= +google.golang.org/genproto v0.0.0-20190307195333-5fe7a883aa19/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190418145605-e7d98fc518a7/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190425155659-357c62f0e4bb/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190502173448-54afdca5d873/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190801165951-fa694d86fc64/go.mod 
h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= +google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= +google.golang.org/genproto v0.0.0-20190911173649-1774047e7e51/go.mod h1:IbNlFCBrqXvoKpeg0TB2l7cyZUmoaFKYIwrEpbDKLA8= +google.golang.org/genproto v0.0.0-20191108220845-16a3f7862a1a/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20191115194625-c23dd37a84c9/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20191216164720-4f79533eabd1/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20191230161307-f3c370f40bfb/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20200115191322-ca5a22157cba/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20200122232147-0452cf42e150/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20200204135345-fa8e72b47b90/go.mod h1:GmwEX6Z4W5gMy59cAlVYjN9JhxgbQH6Gn+gFDQe2lzA= +google.golang.org/genproto v0.0.0-20200212174721-66ed5ce911ce/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200224152610-e50cd9704f63/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200228133532-8c2c7df3a383/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200305110556-506484158171/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200312145019-da6875a35672/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200331122359-1ee6d9798940/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200430143042-b979b6f78d84/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200511104702-f5ebc3bea380/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200513103714-09dca8ec2884/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200515170657-fc4c6c6a6587/go.mod h1:YsZOwe1myG/8QRHRsmBRE1LrgQY60beZKjly0O1fX9U= +google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo= +google.golang.org/genproto v0.0.0-20200618031413-b414f8b61790/go.mod h1:jDfRM7FcilCzHH/e9qn6dsT145K34l5v+OpcnNgKAAA= +google.golang.org/genproto v0.0.0-20200729003335-053ba62fc06f/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20200804131852-c06518451d9c/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20200825200019-8632dd797987/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20200904004341-0bd0a958aa1d/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20201109203340-2640f1f9cdfb/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20201201144952-b05cb90ed32e/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20201210142538-e3217bee35cc/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20201214200347-8c77b98c765d/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20210108203827-ffc7fda8c3d7/go.mod 
h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20210226172003-ab064af71705/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20210602131652-f16073e35f0c/go.mod h1:UODoCrxHCcBojKKwX1terBiRUaqAsFqJiF615XL43r0= +google.golang.org/genproto v0.0.0-20231106174013-bbf56f31fb17 h1:wpZ8pe2x1Q3f2KyT5f8oP/fa9rHAKgFPr/HZdNuS+PQ= +google.golang.org/genproto v0.0.0-20231106174013-bbf56f31fb17/go.mod h1:J7XzRzVy1+IPwWHZUzoD0IccYZIrXILAQpc+Qy9CMhY= +google.golang.org/genproto/googleapis/api v0.0.0-20231106174013-bbf56f31fb17 h1:JpwMPBpFN3uKhdaekDpiNlImDdkUAyiJ6ez/uxGaUSo= +google.golang.org/genproto/googleapis/api v0.0.0-20231106174013-bbf56f31fb17/go.mod h1:0xJLfVdJqpAPl8tDg1ujOCGzx6LFLttXT5NhllGOXY4= +google.golang.org/genproto/googleapis/rpc v0.0.0-20231106174013-bbf56f31fb17 h1:Jyp0Hsi0bmHXG6k9eATXoYtjd6e2UzZ1SCn/wIupY14= +google.golang.org/genproto/googleapis/rpc v0.0.0-20231106174013-bbf56f31fb17/go.mod h1:oQ5rr10WTTMvP4A36n8JpR1OrO1BEiV4f78CneXZxkA= +google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= +google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38= +google.golang.org/grpc v1.21.1/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM= +google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= +google.golang.org/grpc v1.25.1/go.mod h1:c3i+UQWmh7LiEpx4sFZnkU36qjEYZ0imhYfXVyQciAY= +google.golang.org/grpc v1.26.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= +google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= +google.golang.org/grpc v1.27.1/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= +google.golang.org/grpc v1.28.0/go.mod h1:rpkK4SK4GF4Ach/+MFLZUBavHOvF2JJB5uozKKal+60= +google.golang.org/grpc v1.29.1/go.mod h1:itym6AZVZYACWQqET3MqgPpjcuV5QH3BxFS3IjizoKk= +google.golang.org/grpc v1.30.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= +google.golang.org/grpc v1.31.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= +google.golang.org/grpc v1.31.1/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= +google.golang.org/grpc v1.33.1/go.mod h1:fr5YgcSWrqhRRxogOsw7RzIpsmvOZ6IcH4kBYTpR3n0= +google.golang.org/grpc v1.33.2/go.mod h1:JMHMWHQWaTccqQQlmk3MJZS+GWXOdAesneDmEnv2fbc= +google.golang.org/grpc v1.34.0/go.mod h1:WotjhfgOW/POjDeRt8vscBtXq+2VjORFy659qA51WJ8= +google.golang.org/grpc v1.35.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= +google.golang.org/grpc v1.38.0/go.mod h1:NREThFqKR1f3iQ6oBuvc5LadQuXVGo9rkm5ZGrQdJfM= +google.golang.org/grpc v1.61.0 h1:TOvOcuXn30kRao+gfcvsebNEa5iZIiLkisYEkf7R7o0= +google.golang.org/grpc v1.61.0/go.mod h1:VUbo7IFqmF1QtCAstipjG0GIoq49KvMe9+h1jFLBNJs= +google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= +google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= +google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM= +google.golang.org/protobuf v1.20.1-0.20200309200217-e05f789c0967/go.mod h1:A+miEFZTKqfCUM6K7xSMQL9OKL/b6hQv+e19PK+JZNE= +google.golang.org/protobuf v1.21.0/go.mod h1:47Nbq4nVaFHyn7ilMalzfO3qCViNmqZ2kzikPIcrTAo= +google.golang.org/protobuf v1.22.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= +google.golang.org/protobuf v1.23.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= +google.golang.org/protobuf 
v1.23.1-0.20200526195155-81db48ad09cc/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= +google.golang.org/protobuf v1.24.0/go.mod h1:r/3tXBNzIEhYS9I1OUVjXDlt8tc493IdKGjtUeSXeh4= +google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c= +google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= +google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= +google.golang.org/protobuf v1.28.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= +google.golang.org/protobuf v1.34.2 h1:6xV6lTsCfpGD21XK49h7MhtcApnLqkfYgPcdHftf6hg= +google.golang.org/protobuf v1.34.2/go.mod h1:qYOHts0dSfpeUzUFpOMr/WGzszTmLH+DiWniOlNbLDw= +gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= +gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= +gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= +gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys= +gopkg.in/go-playground/assert.v1 v1.2.1/go.mod h1:9RXL0bg/zibRAgZUYszZSwO/z8Y/a8bDuhia5mkpMnE= +gopkg.in/go-playground/validator.v9 v9.29.1/go.mod h1:+c9/zcJMFNgbLvly1L1V+PpxWdVbfP1avr/N00E2vyQ= +gopkg.in/ini.v1 v1.66.2/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= +gopkg.in/ini.v1 v1.66.4 h1:SsAcf+mM7mRZo2nJNGt8mZCjG8ZRaNGMURJw7BsIST4= +gopkg.in/ini.v1 v1.66.4/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= +gopkg.in/natefinch/lumberjack.v2 v2.0.0 h1:1Lc07Kr7qY4U2YPouBjpCLxpiyxIVoxqXgkXLknAOE8= +gopkg.in/natefinch/lumberjack.v2 v2.0.0/go.mod h1:l0ndWWf7gzL7RNwBG7wST/UCcT4T24xpD6X8LsfU/+k= +gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7 h1:uRGJdciOHaEIrze2W8Q3AKkepLTh2hOroT7a+7czfdQ= +gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWDmTeBkI65Dw0HsyUHuEVlX15mw= +gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.3/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.5/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= +gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= +gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gopkg.in/yaml.v3 v3.0.0-20200615113413-eeeca48fe776/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gopkg.in/yaml.v3 v3.0.1 
h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= +gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gorm.io/driver/mysql v1.4.3/go.mod h1:sSIebwZAVPiT+27jK9HIwvsqOGKx3YMPmrA3mBJR10c= +gorm.io/driver/mysql v1.5.2 h1:QC2HRskSE75wBuOxe0+iCkyJZ+RqpudsQtqkp+IMuXs= +gorm.io/driver/mysql v1.5.2/go.mod h1:pQLhh1Ut/WUAySdTHwBpBv6+JKcj+ua4ZFx1QQTBzb8= +gorm.io/driver/postgres v1.5.4 h1:Iyrp9Meh3GmbSuyIAGyjkN+n9K+GHX9b9MqsTL4EJCo= +gorm.io/driver/postgres v1.5.4/go.mod h1:Bgo89+h0CRcdA33Y6frlaHHVuTdOf87pmyzwW9C/BH0= +gorm.io/driver/sqlite v1.5.4 h1:IqXwXi8M/ZlPzH/947tn5uik3aYQslP9BVveoax0nV0= +gorm.io/driver/sqlite v1.5.4/go.mod h1:qxAuCol+2r6PannQDpOP1FP6ag3mKi4esLnB/jHed+4= +gorm.io/gorm v1.23.8/go.mod h1:l2lP/RyAtc1ynaTjFksBde/O8v9oOGIApu2/xRitmZk= +gorm.io/gorm v1.25.2-0.20230530020048-26663ab9bf55/go.mod h1:L4uxeKpfBml98NYqVqwAdmV1a2nBtAec/cf3fpucW/k= +gorm.io/gorm v1.25.2/go.mod h1:L4uxeKpfBml98NYqVqwAdmV1a2nBtAec/cf3fpucW/k= +gorm.io/gorm v1.25.5 h1:zR9lOiiYf09VNh5Q1gphfyia1JpiClIWG9hQaxB/mls= +gorm.io/gorm v1.25.5/go.mod h1:hbnx/Oo0ChWMn1BIhpy1oYozzpM15i4YPuHDmfYtwg8= +gorm.io/plugin/dbresolver v1.5.1 h1:s9Dj9f7r+1rE3nx/Ywzc85nXptUEaeOO0pt27xdopM8= +gorm.io/plugin/dbresolver v1.5.1/go.mod h1:l4Cn87EHLEYuqUncpEeTC2tTJQkjngPSD+lo8hIvcT0= +honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.0-20190418001031-e561f6794a2a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg= +honnef.co/go/tools v0.0.1-2020.1.3/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= +honnef.co/go/tools v0.0.1-2020.1.4/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= +rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8= +rsc.io/pdf v0.1.1/go.mod h1:n8OzWcQ6Sp37PL01nO98y4iUCRdTGarVfzxY20ICaU4= +rsc.io/quote/v3 v3.1.0/go.mod h1:yEA65RcK8LyAZtP9Kv3t0HmxON59tX3rD+tICJqUlj0= +rsc.io/sampler v1.3.0/go.mod h1:T1hPZKmBbMNahiBKFy5HrXp6adAjACjK9JXDnKaTXpA= +sigs.k8s.io/yaml v1.2.0/go.mod h1:yfXDCHCao9+ENCvLSE62v9VSji2MKu5jeNfTrofGhJc= diff --git a/_13_sponge-dtm-cache/http/internal/cache/stock.go b/_13_sponge-dtm-cache/http/internal/cache/stock.go new file mode 100644 index 0000000..2e56567 --- /dev/null +++ b/_13_sponge-dtm-cache/http/internal/cache/stock.go @@ -0,0 +1,149 @@ +package cache + +import ( + "context" + "strings" + "time" + + "github.com/zhufuyi/sponge/pkg/cache" + "github.com/zhufuyi/sponge/pkg/encoding" + "github.com/zhufuyi/sponge/pkg/utils" + + "stock/internal/model" +) + +const ( + // cache prefix key, must end with a colon + stockCachePrefixKey = "stock:" + // StockExpireTime expire time + StockExpireTime = 5 * time.Minute +) + +var _ StockCache = (*stockCache)(nil) + +// StockCache cache interface +type StockCache interface { + Set(ctx context.Context, id uint64, data *model.Stock, duration time.Duration) error + Get(ctx context.Context, id uint64) (*model.Stock, error) + MultiGet(ctx context.Context, ids []uint64) (map[uint64]*model.Stock, error) + MultiSet(ctx context.Context, data []*model.Stock, duration time.Duration) error + Del(ctx context.Context, id uint64) error + SetCacheWithNotFound(ctx context.Context, id uint64) error +} + +// stockCache 
define a cache struct +type stockCache struct { + cache cache.Cache +} + +// NewStockCache new a cache +func NewStockCache(cacheType *model.CacheType) StockCache { + jsonEncoding := encoding.JSONEncoding{} + cachePrefix := "" + + cType := strings.ToLower(cacheType.CType) + switch cType { + case "redis": + c := cache.NewRedisCache(cacheType.Rdb, cachePrefix, jsonEncoding, func() interface{} { + return &model.Stock{} + }) + return &stockCache{cache: c} + case "memory": + c := cache.NewMemoryCache(cachePrefix, jsonEncoding, func() interface{} { + return &model.Stock{} + }) + return &stockCache{cache: c} + } + + return nil // no cache +} + +// GetStockCacheKey cache key +func (c *stockCache) GetStockCacheKey(id uint64) string { + return stockCachePrefixKey + utils.Uint64ToStr(id) +} + +// Set write to cache +func (c *stockCache) Set(ctx context.Context, id uint64, data *model.Stock, duration time.Duration) error { + if data == nil || id == 0 { + return nil + } + cacheKey := c.GetStockCacheKey(id) + err := c.cache.Set(ctx, cacheKey, data, duration) + if err != nil { + return err + } + return nil +} + +// Get cache value +func (c *stockCache) Get(ctx context.Context, id uint64) (*model.Stock, error) { + var data *model.Stock + cacheKey := c.GetStockCacheKey(id) + err := c.cache.Get(ctx, cacheKey, &data) + if err != nil { + return nil, err + } + return data, nil +} + +// MultiSet multiple set cache +func (c *stockCache) MultiSet(ctx context.Context, data []*model.Stock, duration time.Duration) error { + valMap := make(map[string]interface{}) + for _, v := range data { + cacheKey := c.GetStockCacheKey(v.ID) + valMap[cacheKey] = v + } + + err := c.cache.MultiSet(ctx, valMap, duration) + if err != nil { + return err + } + + return nil +} + +// MultiGet multiple get cache, return key in map is id value +func (c *stockCache) MultiGet(ctx context.Context, ids []uint64) (map[uint64]*model.Stock, error) { + var keys []string + for _, v := range ids { + cacheKey := c.GetStockCacheKey(v) + keys = append(keys, cacheKey) + } + + itemMap := make(map[string]*model.Stock) + err := c.cache.MultiGet(ctx, keys, itemMap) + if err != nil { + return nil, err + } + + retMap := make(map[uint64]*model.Stock) + for _, id := range ids { + val, ok := itemMap[c.GetStockCacheKey(id)] + if ok { + retMap[id] = val + } + } + + return retMap, nil +} + +// Del delete cache +func (c *stockCache) Del(ctx context.Context, id uint64) error { + cacheKey := c.GetStockCacheKey(id) + err := c.cache.Del(ctx, cacheKey) + if err != nil { + return err + } + return nil +} + +// SetCacheWithNotFound set empty cache +func (c *stockCache) SetCacheWithNotFound(ctx context.Context, id uint64) error { + cacheKey := c.GetStockCacheKey(id) + err := c.cache.SetCacheWithNotFound(ctx, cacheKey) + if err != nil { + return err + } + return nil +} diff --git a/_13_sponge-dtm-cache/http/internal/cache/stock_test.go b/_13_sponge-dtm-cache/http/internal/cache/stock_test.go new file mode 100644 index 0000000..f280608 --- /dev/null +++ b/_13_sponge-dtm-cache/http/internal/cache/stock_test.go @@ -0,0 +1,144 @@ +package cache + +import ( + "testing" + "time" + + "github.com/stretchr/testify/assert" + + "github.com/zhufuyi/sponge/pkg/gotest" + "github.com/zhufuyi/sponge/pkg/utils" + + "stock/internal/model" +) + +func newStockCache() *gotest.Cache { + record1 := &model.Stock{} + record1.ID = 1 + record2 := &model.Stock{} + record2.ID = 2 + testData := map[string]interface{}{ + utils.Uint64ToStr(record1.ID): record1, + utils.Uint64ToStr(record2.ID): record2, + } + 
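// Usage sketch (not generated by sponge): a minimal cache-aside read built on the
// StockCache interface above. loadStock is an assumed caller-supplied loader (for
// example a dao query); model.ErrCacheNotFound is the miss error the dao layer
// checks for, and cache.StockExpireTime is the TTL defined in this package.
package cacheexample // hypothetical package name for this sketch

import (
	"context"
	"errors"

	"stock/internal/cache"
	"stock/internal/model"
)

// getStock returns the cached record when possible, otherwise loads it from the
// database and backfills the cache.
func getStock(ctx context.Context, c cache.StockCache, id uint64,
	loadStock func(ctx context.Context, id uint64) (*model.Stock, error)) (*model.Stock, error) {
	s, err := c.Get(ctx, id)
	if err == nil {
		return s, nil // cache hit
	}
	if !errors.Is(err, model.ErrCacheNotFound) {
		return nil, err // real cache failure: fail fast, as the dao layer does
	}
	s, err = loadStock(ctx, id) // cache miss: query the database
	if err != nil {
		return nil, err
	}
	_ = c.Set(ctx, id, s, cache.StockExpireTime) // best-effort backfill
	return s, nil
}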
+ c := gotest.NewCache(testData) + c.ICache = NewStockCache(&model.CacheType{ + CType: "redis", + Rdb: c.RedisClient, + }) + return c +} + +func Test_stockCache_Set(t *testing.T) { + c := newStockCache() + defer c.Close() + + record := c.TestDataSlice[0].(*model.Stock) + err := c.ICache.(StockCache).Set(c.Ctx, record.ID, record, time.Hour) + if err != nil { + t.Fatal(err) + } + + // nil data + err = c.ICache.(StockCache).Set(c.Ctx, 0, nil, time.Hour) + assert.NoError(t, err) +} + +func Test_stockCache_Get(t *testing.T) { + c := newStockCache() + defer c.Close() + + record := c.TestDataSlice[0].(*model.Stock) + err := c.ICache.(StockCache).Set(c.Ctx, record.ID, record, time.Hour) + if err != nil { + t.Fatal(err) + } + + got, err := c.ICache.(StockCache).Get(c.Ctx, record.ID) + if err != nil { + t.Fatal(err) + } + assert.Equal(t, record, got) + + // zero key error + _, err = c.ICache.(StockCache).Get(c.Ctx, 0) + assert.Error(t, err) +} + +func Test_stockCache_MultiGet(t *testing.T) { + c := newStockCache() + defer c.Close() + + var testData []*model.Stock + for _, data := range c.TestDataSlice { + testData = append(testData, data.(*model.Stock)) + } + + err := c.ICache.(StockCache).MultiSet(c.Ctx, testData, time.Hour) + if err != nil { + t.Fatal(err) + } + + got, err := c.ICache.(StockCache).MultiGet(c.Ctx, c.GetIDs()) + if err != nil { + t.Fatal(err) + } + + expected := c.GetTestData() + for k, v := range expected { + assert.Equal(t, got[utils.StrToUint64(k)], v.(*model.Stock)) + } +} + +func Test_stockCache_MultiSet(t *testing.T) { + c := newStockCache() + defer c.Close() + + var testData []*model.Stock + for _, data := range c.TestDataSlice { + testData = append(testData, data.(*model.Stock)) + } + + err := c.ICache.(StockCache).MultiSet(c.Ctx, testData, time.Hour) + if err != nil { + t.Fatal(err) + } +} + +func Test_stockCache_Del(t *testing.T) { + c := newStockCache() + defer c.Close() + + record := c.TestDataSlice[0].(*model.Stock) + err := c.ICache.(StockCache).Del(c.Ctx, record.ID) + if err != nil { + t.Fatal(err) + } +} + +func Test_stockCache_SetCacheWithNotFound(t *testing.T) { + c := newStockCache() + defer c.Close() + + record := c.TestDataSlice[0].(*model.Stock) + err := c.ICache.(StockCache).SetCacheWithNotFound(c.Ctx, record.ID) + if err != nil { + t.Fatal(err) + } +} + +func TestNewStockCache(t *testing.T) { + c := NewStockCache(&model.CacheType{ + CType: "", + }) + assert.Nil(t, c) + c = NewStockCache(&model.CacheType{ + CType: "memory", + }) + assert.NotNil(t, c) + c = NewStockCache(&model.CacheType{ + CType: "redis", + }) + assert.NotNil(t, c) +} diff --git a/b_sponge-dtm-msg/internal/config/transfer.go b/_13_sponge-dtm-cache/http/internal/config/stock.go similarity index 54% rename from b_sponge-dtm-msg/internal/config/transfer.go rename to _13_sponge-dtm-cache/http/internal/config/stock.go index a11c66b..9160c1d 100644 --- a/b_sponge-dtm-msg/internal/config/transfer.go +++ b/_13_sponge-dtm-cache/http/internal/config/stock.go @@ -19,7 +19,7 @@ func Show(hiddenFields ...string) string { func Get() *Config { if config == nil { - panic("config is nil") + panic("config is nil, please call config.Init() first") } return config } @@ -29,14 +29,16 @@ func Set(conf *Config) { } type Config struct { - App App `yaml:"app" json:"app"` - Consul Consul `yaml:"consul" json:"consul"` - Etcd Etcd `yaml:"etcd" json:"etcd"` - Grpc Grpc `yaml:"grpc" json:"grpc"` - GrpcClient []GrpcClient `yaml:"grpcClient" json:"grpcClient"` - Jaeger Jaeger `yaml:"jaeger" json:"jaeger"` - Logger Logger 
`yaml:"logger" json:"logger"` - NacosRd NacosRd `yaml:"nacosRd" json:"nacosRd"` + App App `yaml:"app" json:"app"` + Consul Consul `yaml:"consul" json:"consul"` + Database Database `yaml:"database" json:"database"` + Dtm Dtm `yaml:"dtm" json:"dtm"` + Etcd Etcd `yaml:"etcd" json:"etcd"` + HTTP HTTP `yaml:"http" json:"http"` + Jaeger Jaeger `yaml:"jaeger" json:"jaeger"` + Logger Logger `yaml:"logger" json:"logger"` + NacosRd NacosRd `yaml:"nacosRd" json:"nacosRd"` + Redis Redis `yaml:"redis" json:"redis"` } type Consul struct { @@ -52,13 +54,6 @@ type Jaeger struct { AgentPort int `yaml:"agentPort" json:"agentPort"` } -type ServerSecure struct { - CaFile string `yaml:"caFile" json:"caFile"` - CertFile string `yaml:"certFile" json:"certFile"` - KeyFile string `yaml:"keyFile" json:"keyFile"` - Type string `yaml:"type" json:"type"` -} - type App struct { CacheType string `yaml:"cacheType" json:"cacheType"` EnableCircuitBreaker bool `yaml:"enableCircuitBreaker" json:"enableCircuitBreaker"` @@ -75,21 +70,29 @@ type App struct { Version string `yaml:"version" json:"version"` } -type GrpcClient struct { - EnableLoadBalance bool `yaml:"enableLoadBalance" json:"enableLoadBalance"` - Host string `yaml:"host" json:"host"` - Name string `yaml:"name" json:"name"` - Port int `yaml:"port" json:"port"` - RegistryDiscoveryType string `yaml:"registryDiscoveryType" json:"registryDiscoveryType"` +type Dtm struct { + CallbackAddr CallbackAddr `yaml:"callbackAddr" json:"callbackAddr"` + Server string `yaml:"server" json:"server"` +} + +type Mysql struct { + ConnMaxLifetime int `yaml:"connMaxLifetime" json:"connMaxLifetime"` + Dsn string `yaml:"dsn" json:"dsn"` + EnableLog bool `yaml:"enableLog" json:"enableLog"` + MaxIdleConns int `yaml:"maxIdleConns" json:"maxIdleConns"` + MaxOpenConns int `yaml:"maxOpenConns" json:"maxOpenConns"` +} + +type Redis struct { + DialTimeout int `yaml:"dialTimeout" json:"dialTimeout"` + Dsn string `yaml:"dsn" json:"dsn"` + ReadTimeout int `yaml:"readTimeout" json:"readTimeout"` + WriteTimeout int `yaml:"writeTimeout" json:"writeTimeout"` } -type Grpc struct { - EnableToken bool `yaml:"enableToken" json:"enableToken"` - HTTPPort int `yaml:"httpPort" json:"httpPort"` - Port int `yaml:"port" json:"port"` - ReadTimeout int `yaml:"readTimeout" json:"readTimeout"` - ServerSecure ServerSecure `yaml:"serverSecure" json:"serverSecure"` - WriteTimeout int `yaml:"writeTimeout" json:"writeTimeout"` +type Database struct { + Driver string `yaml:"driver" json:"driver"` + Mysql Mysql `yaml:"mysql" json:"mysql"` } type Logger struct { @@ -98,8 +101,23 @@ type Logger struct { Level string `yaml:"level" json:"level"` } +type Stock struct { + Host string `yaml:"host" json:"host"` + Port int `yaml:"port" json:"port"` + Scheme string `yaml:"scheme" json:"scheme"` +} + type NacosRd struct { IPAddr string `yaml:"ipAddr" json:"ipAddr"` NamespaceID string `yaml:"namespaceID" json:"namespaceID"` Port int `yaml:"port" json:"port"` } + +type HTTP struct { + Port int `yaml:"port" json:"port"` + Timeout int `yaml:"timeout" json:"timeout"` +} + +type CallbackAddr struct { + Stock Stock `yaml:"stock" json:"stock"` +} diff --git a/_13_sponge-dtm-cache/http/internal/config/stock_cc.go b/_13_sponge-dtm-cache/http/internal/config/stock_cc.go new file mode 100644 index 0000000..59e4c88 --- /dev/null +++ b/_13_sponge-dtm-cache/http/internal/config/stock_cc.go @@ -0,0 +1,28 @@ +// code generated by https://github.com/zhufuyi/sponge + +package config + +import ( + "github.com/zhufuyi/sponge/pkg/conf" +) + +func 
NewCenter(configFile string) (*Center, error) { + nacosConf := &Center{} + err := conf.Parse(configFile, nacosConf) + return nacosConf, err +} + +type Center struct { + Nacos Nacos `yaml:"nacos" json:"nacos"` +} + +type Nacos struct { + ContextPath string `yaml:"contextPath" json:"contextPath"` + DataID string `yaml:"dataID" json:"dataID"` + Format string `yaml:"format" json:"format"` + Group string `yaml:"group" json:"group"` + IPAddr string `yaml:"ipAddr" json:"ipAddr"` + NamespaceID string `yaml:"namespaceID" json:"namespaceID"` + Port int `yaml:"port" json:"port"` + Scheme string `yaml:"scheme" json:"scheme"` +} diff --git a/b_sponge-dtm-msg/internal/config/transfer_test.go b/_13_sponge-dtm-cache/http/internal/config/stock_test.go similarity index 59% rename from b_sponge-dtm-msg/internal/config/transfer_test.go rename to _13_sponge-dtm-cache/http/internal/config/stock_test.go index 308610d..bba3f97 100644 --- a/b_sponge-dtm-msg/internal/config/transfer_test.go +++ b/_13_sponge-dtm-cache/http/internal/config/stock_test.go @@ -7,11 +7,11 @@ import ( "github.com/zhufuyi/sponge/pkg/gofile" - "transfer/configs" + "stock/configs" ) func TestInit(t *testing.T) { - configFile := configs.Path("transfer.yml") + configFile := configs.Path("stock.yml") err := Init(configFile) if gofile.IsExists(configFile) { assert.NoError(t, err) @@ -24,6 +24,7 @@ func TestInit(t *testing.T) { str := Show() assert.NotEmpty(t, str) + t.Log(str) // set nil Set(nil) @@ -32,3 +33,13 @@ func TestInit(t *testing.T) { }() Get() } + +func TestInitNacos(t *testing.T) { + configFile := configs.Path("stock_cc.yml") + _, err := NewCenter(configFile) + if gofile.IsExists(configFile) { + assert.NoError(t, err) + } else { + assert.Error(t, err) + } +} diff --git a/_13_sponge-dtm-cache/http/internal/dao/stock.go b/_13_sponge-dtm-cache/http/internal/dao/stock.go new file mode 100644 index 0000000..d20cdd7 --- /dev/null +++ b/_13_sponge-dtm-cache/http/internal/dao/stock.go @@ -0,0 +1,272 @@ +package dao + +import ( + "context" + "database/sql" + "errors" + "fmt" + "github.com/zhufuyi/sponge/pkg/logger" + "time" + + "golang.org/x/sync/singleflight" + "gorm.io/gorm" + + cacheBase "github.com/zhufuyi/sponge/pkg/cache" + "github.com/zhufuyi/sponge/pkg/ggorm/query" + "github.com/zhufuyi/sponge/pkg/utils" + + "stock/internal/cache" + "stock/internal/model" +) + +var _ StockDao = (*stockDao)(nil) + +// StockDao defining the dao interface +type StockDao interface { + Create(ctx context.Context, table *model.Stock) error + DeleteByID(ctx context.Context, id uint64) error + UpdateByID(ctx context.Context, table *model.Stock) error + GetByID(ctx context.Context, id uint64) (*model.Stock, error) + GetByColumns(ctx context.Context, params *query.Params) ([]*model.Stock, int64, error) + + CreateByTx(ctx context.Context, tx *gorm.DB, table *model.Stock) (uint64, error) + DeleteByTx(ctx context.Context, tx *gorm.DB, id uint64) error + UpdateByTx(ctx context.Context, tx *gorm.DB, table *model.Stock) error +} + +type stockDao struct { + db *gorm.DB + cache cache.StockCache // if nil, the cache is not used. + sfg *singleflight.Group // if cache is nil, the sfg is not used. 
+} + +// NewStockDao creating the dao interface +func NewStockDao(db *gorm.DB, xCache cache.StockCache) StockDao { + if xCache == nil { + return &stockDao{db: db} + } + return &stockDao{ + db: db, + cache: xCache, + sfg: new(singleflight.Group), + } +} + +func (d *stockDao) deleteCache(ctx context.Context, id uint64) error { + if d.cache != nil { + return d.cache.Del(ctx, id) + } + return nil +} + +// Create a record, insert the record and the id value is written back to the table +func (d *stockDao) Create(ctx context.Context, table *model.Stock) error { + return d.db.WithContext(ctx).Create(table).Error +} + +// DeleteByID delete a record by id +func (d *stockDao) DeleteByID(ctx context.Context, id uint64) error { + err := d.db.WithContext(ctx).Where("id = ?", id).Delete(&model.Stock{}).Error + if err != nil { + return err + } + + // delete cache + _ = d.deleteCache(ctx, id) + + return nil +} + +// UpdateByID update a record by id +func (d *stockDao) UpdateByID(ctx context.Context, table *model.Stock) error { + err := d.updateDataByID(ctx, d.db, table) + + // delete cache + _ = d.deleteCache(ctx, table.ID) + + return err +} + +func (d *stockDao) updateDataByID(ctx context.Context, db *gorm.DB, table *model.Stock) error { + if table.ID < 1 { + return errors.New("id cannot be 0") + } + + update := map[string]interface{}{} + + if table.ProductID != 0 { + update["product_id"] = table.ProductID + } + if table.Stock != 0 { + update["stock"] = table.Stock + } + + return db.WithContext(ctx).Model(table).Updates(update).Error +} + +// GetByID get a record by id +func (d *stockDao) GetByID(ctx context.Context, id uint64) (*model.Stock, error) { + // no cache + if d.cache == nil { + record := &model.Stock{} + err := d.db.WithContext(ctx).Where("id = ?", id).First(record).Error + return record, err + } + + // get from cache or database + record, err := d.cache.Get(ctx, id) + if err == nil { + return record, nil + } + + if errors.Is(err, model.ErrCacheNotFound) { + // for the same id, prevent high concurrent simultaneous access to database + val, err, _ := d.sfg.Do(utils.Uint64ToStr(id), func() (interface{}, error) { //nolint + table := &model.Stock{} + err = d.db.WithContext(ctx).Where("id = ?", id).First(table).Error + if err != nil { + // if data is empty, set not found cache to prevent cache penetration, default expiration time 10 minutes + if errors.Is(err, model.ErrRecordNotFound) { + err = d.cache.SetCacheWithNotFound(ctx, id) + if err != nil { + return nil, err + } + return nil, model.ErrRecordNotFound + } + return nil, err + } + // set cache + err = d.cache.Set(ctx, id, table, cache.StockExpireTime) + if err != nil { + return nil, fmt.Errorf("cache.Set error: %v, id=%d", err, id) + } + return table, nil + }) + if err != nil { + return nil, err + } + table, ok := val.(*model.Stock) + if !ok { + return nil, model.ErrRecordNotFound + } + return table, nil + } else if errors.Is(err, cacheBase.ErrPlaceholder) { + return nil, model.ErrRecordNotFound + } + + // fail fast, if cache error return, don't request to db + return nil, err +} + +// GetByColumns get paging records by column information, +// Note: query performance degrades when table rows are very large because of the use of offset. 
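// Sketch of the single-flight pattern used in GetByID above: concurrent callers
// asking for the same id share one database load instead of each hitting the
// database. slowLoad stands in for the gorm query inside sfg.Do, and the string
// key plays the role of utils.Uint64ToStr(id).
package main

import (
	"fmt"
	"sync"
	"time"

	"golang.org/x/sync/singleflight"
)

// slowLoad simulates the database lookup protected by the singleflight group.
func slowLoad(id uint64) (string, error) {
	time.Sleep(100 * time.Millisecond)
	return fmt.Sprintf("stock-row-%d", id), nil
}

func main() {
	var sfg singleflight.Group
	var wg sync.WaitGroup
	for i := 0; i < 5; i++ {
		wg.Add(1)
		go func() {
			defer wg.Done()
			// Do runs slowLoad at most once for concurrently arriving callers
			// with the same key; the others block and share that result.
			v, err, shared := sfg.Do("1", func() (interface{}, error) {
				return slowLoad(1)
			})
			fmt.Println(v, err, shared)
		}()
	}
	wg.Wait()
}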
+// +// params includes paging parameters and query parameters +// paging parameters (required): +// +// page: page number, starting from 0 +// limit: lines per page +// sort: sort fields, default is id backwards, you can add - sign before the field to indicate reverse order, no - sign to indicate ascending order, multiple fields separated by comma +// +// query parameters (not required): +// +// name: column name +// exp: expressions, which default is "=", support =, !=, >, >=, <, <=, like, in +// value: column value, if exp=in, multiple values are separated by commas +// logic: logical type, defaults to and when value is null, only &(and), ||(or) +// +// example: search for a male over 20 years of age +// +// params = &query.Params{ +// Page: 0, +// Limit: 20, +// Columns: []query.Column{ +// { +// Name: "age", +// Exp: ">", +// Value: 20, +// }, +// { +// Name: "gender", +// Value: "male", +// }, +// } +func (d *stockDao) GetByColumns(ctx context.Context, params *query.Params) ([]*model.Stock, int64, error) { + queryStr, args, err := params.ConvertToGormConditions() + if err != nil { + return nil, 0, errors.New("query params error: " + err.Error()) + } + + var total int64 + if params.Sort != "ignore count" { // determine if count is required + err = d.db.WithContext(ctx).Model(&model.Stock{}).Select([]string{"id"}).Where(queryStr, args...).Count(&total).Error + if err != nil { + return nil, 0, err + } + if total == 0 { + return nil, total, nil + } + } + + records := []*model.Stock{} + order, limit, offset := params.ConvertToPage() + err = d.db.WithContext(ctx).Order(order).Limit(limit).Offset(offset).Where(queryStr, args...).Find(&records).Error + if err != nil { + return nil, 0, err + } + + return records, total, err +} + +// CreateByTx create a record in the database using the provided transaction +func (d *stockDao) CreateByTx(ctx context.Context, tx *gorm.DB, table *model.Stock) (uint64, error) { + err := tx.WithContext(ctx).Create(table).Error + return table.ID, err +} + +// DeleteByTx delete a record by id in the database using the provided transaction +func (d *stockDao) DeleteByTx(ctx context.Context, tx *gorm.DB, id uint64) error { + err := tx.WithContext(ctx).Where("id = ?", id).Delete(&model.Stock{}).Error + if err != nil { + return err + } + + // delete cache + _ = d.deleteCache(ctx, id) + + return nil +} + +// UpdateByTx update a record by id in the database using the provided transaction +func (d *stockDao) UpdateByTx(ctx context.Context, tx *gorm.DB, table *model.Stock) error { + err := d.updateDataByID(ctx, tx, table) + + // delete cache + _ = d.deleteCache(ctx, table.ID) + + return err +} + +// ------------------------------------------------------------------------------------------ + +// UpdateStockInTx update the stock of a record +func UpdateStockInTx(tx *sql.Tx, table *model.Stock) error { + sqlStr := "update stock set stock=?, updated_at=? where id=?" + result, err := tx.Exec(sqlStr, table.Stock, time.Now(), table.ID) + rowCount, _ := result.RowsAffected() + logger.Info("[mysql] info", logger.String("sql", sqlStr), logger.Any("args", []interface{}{table.Stock, time.Now(), table.ID}), logger.Int64("rows", rowCount)) + return err +} + +func GetStockByID(db *sql.DB, id uint64) (string, error) { + sqlStr := "select stock from stock where id=?" 
+ row := db.QueryRow(sqlStr, id) + + var stock string + err := row.Scan(&stock) + if err != nil { + return "", err + } + logger.Info("[mysql] info", logger.String("sql", sqlStr), logger.Any("args", []interface{}{id})) + return stock, nil +} diff --git a/_13_sponge-dtm-cache/http/internal/dao/stock_test.go b/_13_sponge-dtm-cache/http/internal/dao/stock_test.go new file mode 100644 index 0000000..9690630 --- /dev/null +++ b/_13_sponge-dtm-cache/http/internal/dao/stock_test.go @@ -0,0 +1,232 @@ +package dao + +import ( + "context" + "testing" + + "github.com/DATA-DOG/go-sqlmock" + "github.com/stretchr/testify/assert" + "github.com/zhufuyi/sponge/pkg/ggorm/query" + "github.com/zhufuyi/sponge/pkg/gotest" + "github.com/zhufuyi/sponge/pkg/utils" + + "stock/internal/cache" + "stock/internal/model" +) + +func newStockDao() *gotest.Dao { + testData := &model.Stock{} + testData.ID = 1 + // you can set the other fields of testData here, such as: + //testData.CreatedAt = time.Now() + //testData.UpdatedAt = testData.CreatedAt + + // init mock cache + //c := gotest.NewCache(map[string]interface{}{"no cache": testData}) // to test mysql, disable caching + c := gotest.NewCache(map[string]interface{}{utils.Uint64ToStr(testData.ID): testData}) + c.ICache = cache.NewStockCache(&model.CacheType{ + CType: "redis", + Rdb: c.RedisClient, + }) + + // init mock dao + d := gotest.NewDao(c, testData) + d.IDao = NewStockDao(d.DB, c.ICache.(cache.StockCache)) + + return d +} + +func Test_stockDao_Create(t *testing.T) { + d := newStockDao() + defer d.Close() + testData := d.TestData.(*model.Stock) + + d.SQLMock.ExpectBegin() + d.SQLMock.ExpectExec("INSERT INTO .*"). + WithArgs(d.GetAnyArgs(testData)...). + WillReturnResult(sqlmock.NewResult(1, 1)) + d.SQLMock.ExpectCommit() + + err := d.IDao.(StockDao).Create(d.Ctx, testData) + if err != nil { + t.Fatal(err) + } +} + +func Test_stockDao_DeleteByID(t *testing.T) { + d := newStockDao() + defer d.Close() + testData := d.TestData.(*model.Stock) + expectedSQLForDeletion := "DELETE .*" + + d.SQLMock.ExpectBegin() + d.SQLMock.ExpectExec(expectedSQLForDeletion). + WithArgs(testData.ID). + WillReturnResult(sqlmock.NewResult(int64(testData.ID), 1)) + d.SQLMock.ExpectCommit() + + err := d.IDao.(StockDao).DeleteByID(d.Ctx, testData.ID) + if err != nil { + t.Fatal(err) + } + + // zero id error + err = d.IDao.(StockDao).DeleteByID(d.Ctx, 0) + assert.Error(t, err) +} + +func Test_stockDao_UpdateByID(t *testing.T) { + d := newStockDao() + defer d.Close() + testData := d.TestData.(*model.Stock) + + d.SQLMock.ExpectBegin() + d.SQLMock.ExpectExec("UPDATE .*"). + WithArgs(d.AnyTime, testData.ID). + WillReturnResult(sqlmock.NewResult(1, 1)) + d.SQLMock.ExpectCommit() + + err := d.IDao.(StockDao).UpdateByID(d.Ctx, testData) + if err != nil { + t.Fatal(err) + } + + // zero id error + err = d.IDao.(StockDao).UpdateByID(d.Ctx, &model.Stock{}) + assert.Error(t, err) + +} + +func Test_stockDao_GetByID(t *testing.T) { + d := newStockDao() + defer d.Close() + testData := d.TestData.(*model.Stock) + + // column names and corresponding data + rows := sqlmock.NewRows([]string{"id"}). + AddRow(testData.ID) + + d.SQLMock.ExpectQuery("SELECT .*"). + WithArgs(testData.ID). + WillReturnRows(rows) + + _, err := d.IDao.(StockDao).GetByID(d.Ctx, testData.ID) + if err != nil { + t.Fatal(err) + } + + err = d.SQLMock.ExpectationsWereMet() + if err != nil { + t.Fatal(err) + } + + // notfound error + d.SQLMock.ExpectQuery("SELECT .*"). + WithArgs(2). 
+ WillReturnRows(rows) + _, err = d.IDao.(StockDao).GetByID(d.Ctx, 2) + assert.Error(t, err) + + d.SQLMock.ExpectQuery("SELECT .*"). + WithArgs(3, 4). + WillReturnRows(rows) + _, err = d.IDao.(StockDao).GetByID(d.Ctx, 4) + assert.Error(t, err) +} + +func Test_stockDao_GetByColumns(t *testing.T) { + d := newStockDao() + defer d.Close() + testData := d.TestData.(*model.Stock) + + // column names and corresponding data + rows := sqlmock.NewRows([]string{"id"}). + AddRow(testData.ID) + + d.SQLMock.ExpectQuery("SELECT .*").WillReturnRows(rows) + + _, _, err := d.IDao.(StockDao).GetByColumns(d.Ctx, &query.Params{ + Page: 0, + Limit: 10, + Sort: "ignore count", // ignore test count(*) + }) + if err != nil { + t.Fatal(err) + } + + err = d.SQLMock.ExpectationsWereMet() + if err != nil { + t.Fatal(err) + } + + // err test + _, _, err = d.IDao.(StockDao).GetByColumns(d.Ctx, &query.Params{ + Page: 0, + Limit: 10, + Columns: []query.Column{ + { + Name: "id", + Exp: "<", + Value: 0, + }, + }, + }) + assert.Error(t, err) + + // error test + dao := &stockDao{} + _, _, err = dao.GetByColumns(context.Background(), &query.Params{Columns: []query.Column{{}}}) + t.Log(err) +} + +func Test_stockDao_CreateByTx(t *testing.T) { + d := newStockDao() + defer d.Close() + testData := d.TestData.(*model.Stock) + + d.SQLMock.ExpectBegin() + d.SQLMock.ExpectExec("INSERT INTO .*"). + WithArgs(d.GetAnyArgs(testData)...). + WillReturnResult(sqlmock.NewResult(1, 1)) + d.SQLMock.ExpectCommit() + + _, err := d.IDao.(StockDao).CreateByTx(d.Ctx, d.DB, testData) + if err != nil { + t.Fatal(err) + } +} + +func Test_stockDao_DeleteByTx(t *testing.T) { + d := newStockDao() + defer d.Close() + testData := d.TestData.(*model.Stock) + expectedSQLForDeletion := "DELETE .*" + + d.SQLMock.ExpectBegin() + d.SQLMock.ExpectExec(expectedSQLForDeletion). + WithArgs(testData.ID). + WillReturnResult(sqlmock.NewResult(int64(testData.ID), 1)) + d.SQLMock.ExpectCommit() + + err := d.IDao.(StockDao).DeleteByTx(d.Ctx, d.DB, testData.ID) + if err != nil { + t.Fatal(err) + } +} + +func Test_stockDao_UpdateByTx(t *testing.T) { + d := newStockDao() + defer d.Close() + testData := d.TestData.(*model.Stock) + + d.SQLMock.ExpectBegin() + d.SQLMock.ExpectExec("UPDATE .*"). + WithArgs(d.AnyTime, testData.ID). + WillReturnResult(sqlmock.NewResult(1, 1)) + d.SQLMock.ExpectCommit() + + err := d.IDao.(StockDao).UpdateByTx(d.Ctx, d.DB, testData) + if err != nil { + t.Fatal(err) + } +} diff --git a/_13_sponge-dtm-cache/http/internal/ecode/atomic_http.go b/_13_sponge-dtm-cache/http/internal/ecode/atomic_http.go new file mode 100644 index 0000000..9add88e --- /dev/null +++ b/_13_sponge-dtm-cache/http/internal/ecode/atomic_http.go @@ -0,0 +1,20 @@ +// Code generated by https://github.com/zhufuyi/sponge + +package ecode + +import ( + "github.com/zhufuyi/sponge/pkg/errcode" +) + +// atomic business-level http error codes. +// the atomicNO value range is 1~100, if the same error code is used, it will cause panic. 
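// The same error-code pattern applied to a hypothetical "order" entity: pick an
// unused NO in the 1~100 range, derive the base code with errcode.HCode, and
// number each error consecutively from baseCode+1. orderNO = 81 is only an
// example value and must not collide with the other business numbers.
package ecode

import (
	"github.com/zhufuyi/sponge/pkg/errcode"
)

var (
	orderNO       = 81
	orderName     = "order"
	orderBaseCode = errcode.HCode(orderNO)

	ErrCreateOrder  = errcode.NewError(orderBaseCode+1, "failed to create "+orderName)
	ErrGetByIDOrder = errcode.NewError(orderBaseCode+2, "failed to get "+orderName+" details")
)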
+var ( + atomicNO = 80 + atomicName = "atomic" + atomicBaseCode = errcode.HCode(atomicNO) + + ErrUpdateAtomic = errcode.NewError(atomicBaseCode+1, "failed to Update "+atomicName) + ErrQueryAtomic = errcode.NewError(atomicBaseCode+2, "failed to Query "+atomicName) + + // error codes are globally unique, adding 1 to the previous error code +) diff --git a/_13_sponge-dtm-cache/http/internal/ecode/callback_http.go b/_13_sponge-dtm-cache/http/internal/ecode/callback_http.go new file mode 100644 index 0000000..4058fe1 --- /dev/null +++ b/_13_sponge-dtm-cache/http/internal/ecode/callback_http.go @@ -0,0 +1,20 @@ +// Code generated by https://github.com/zhufuyi/sponge + +package ecode + +import ( + "github.com/zhufuyi/sponge/pkg/errcode" +) + +// callback business-level http error codes. +// the callbackNO value range is 1~100, if the same error code is used, it will cause panic. +var ( + callbackNO = 15 + callbackName = "callback" + callbackBaseCode = errcode.HCode(callbackNO) + + ErrQueryPreparedCallback = errcode.NewError(callbackBaseCode+1, "failed to QueryPrepared "+callbackName) + ErrDeleteCacheCallback = errcode.NewError(callbackBaseCode+2, "failed to DeleteCache "+callbackName) + + // error codes are globally unique, adding 1 to the previous error code +) diff --git a/_13_sponge-dtm-cache/http/internal/ecode/downgrade_http.go b/_13_sponge-dtm-cache/http/internal/ecode/downgrade_http.go new file mode 100644 index 0000000..e7b521f --- /dev/null +++ b/_13_sponge-dtm-cache/http/internal/ecode/downgrade_http.go @@ -0,0 +1,21 @@ +// Code generated by https://github.com/zhufuyi/sponge + +package ecode + +import ( + "github.com/zhufuyi/sponge/pkg/errcode" +) + +// downgrade business-level http error codes. +// the downgradeNO value range is 1~100, if the same error code is used, it will cause panic. +var ( + downgradeNO = 3 + downgradeName = "downgrade" + downgradeBaseCode = errcode.HCode(downgradeNO) + + ErrUpdateDowngrade = errcode.NewError(downgradeBaseCode+1, "failed to Update "+downgradeName) + ErrQueryDowngrade = errcode.NewError(downgradeBaseCode+2, "failed to Query "+downgradeName) + ErrDowngradeBranchDowngrade = errcode.NewError(downgradeBaseCode+3, "failed to DowngradeBranch "+downgradeName) + + // error codes are globally unique, adding 1 to the previous error code +) diff --git a/_13_sponge-dtm-cache/http/internal/ecode/final_http.go b/_13_sponge-dtm-cache/http/internal/ecode/final_http.go new file mode 100644 index 0000000..dae71f8 --- /dev/null +++ b/_13_sponge-dtm-cache/http/internal/ecode/final_http.go @@ -0,0 +1,20 @@ +// Code generated by https://github.com/zhufuyi/sponge + +package ecode + +import ( + "github.com/zhufuyi/sponge/pkg/errcode" +) + +// final business-level http error codes. +// the finalNO value range is 1~100, if the same error code is used, it will cause panic. +var ( + finalNO = 37 + finalName = "final" + finalBaseCode = errcode.HCode(finalNO) + + ErrUpdateFinal = errcode.NewError(finalBaseCode+1, "failed to Update "+finalName) + ErrQueryFinal = errcode.NewError(finalBaseCode+2, "failed to Query "+finalName) + + // error codes are globally unique, adding 1 to the previous error code +) diff --git a/_13_sponge-dtm-cache/http/internal/ecode/stock_http.go b/_13_sponge-dtm-cache/http/internal/ecode/stock_http.go new file mode 100644 index 0000000..f4c24b7 --- /dev/null +++ b/_13_sponge-dtm-cache/http/internal/ecode/stock_http.go @@ -0,0 +1,21 @@ +package ecode + +import ( + "github.com/zhufuyi/sponge/pkg/errcode" +) + +// stock business-level http error codes. 
+// the stockNO value range is 1~100, if the same error code is used, it will cause panic. +var ( + stockNO = 62 + stockName = "stock" + stockBaseCode = errcode.HCode(stockNO) + + ErrCreateStock = errcode.NewError(stockBaseCode+1, "failed to create "+stockName) + ErrDeleteByIDStock = errcode.NewError(stockBaseCode+2, "failed to delete "+stockName) + ErrUpdateByIDStock = errcode.NewError(stockBaseCode+3, "failed to update "+stockName) + ErrGetByIDStock = errcode.NewError(stockBaseCode+4, "failed to get "+stockName+" details") + ErrListStock = errcode.NewError(stockBaseCode+5, "failed to list of "+stockName) + + // error codes are globally unique, adding 1 to the previous error code +) diff --git a/_13_sponge-dtm-cache/http/internal/ecode/strong_http.go b/_13_sponge-dtm-cache/http/internal/ecode/strong_http.go new file mode 100644 index 0000000..90bf030 --- /dev/null +++ b/_13_sponge-dtm-cache/http/internal/ecode/strong_http.go @@ -0,0 +1,20 @@ +// Code generated by https://github.com/zhufuyi/sponge + +package ecode + +import ( + "github.com/zhufuyi/sponge/pkg/errcode" +) + +// strong business-level http error codes. +// the strongNO value range is 1~100, if the same error code is used, it will cause panic. +var ( + strongNO = 64 + strongName = "strong" + strongBaseCode = errcode.HCode(strongNO) + + ErrUpdateStrong = errcode.NewError(strongBaseCode+1, "failed to Update "+strongName) + ErrQueryStrong = errcode.NewError(strongBaseCode+2, "failed to Query "+strongName) + + // error codes are globally unique, adding 1 to the previous error code +) diff --git a/_13_sponge-dtm-cache/http/internal/ecode/systemCode_http.go b/_13_sponge-dtm-cache/http/internal/ecode/systemCode_http.go new file mode 100644 index 0000000..d5f749e --- /dev/null +++ b/_13_sponge-dtm-cache/http/internal/ecode/systemCode_http.go @@ -0,0 +1,39 @@ +// Package ecode is the package that unifies the definition of http error codes or grpc error codes here. 
+package ecode + +import ( + "github.com/zhufuyi/sponge/pkg/errcode" +) + +// http system level error code, error code range 10000~20000 +var ( + Success = errcode.Success + + InvalidParams = errcode.InvalidParams + Unauthorized = errcode.Unauthorized + InternalServerError = errcode.InternalServerError + NotFound = errcode.NotFound + Timeout = errcode.Timeout + TooManyRequests = errcode.TooManyRequests + Forbidden = errcode.Forbidden + LimitExceed = errcode.LimitExceed + Conflict = errcode.Conflict + TooEarly = errcode.TooEarly + + DeadlineExceeded = errcode.DeadlineExceeded + AccessDenied = errcode.AccessDenied + MethodNotAllowed = errcode.MethodNotAllowed + ServiceUnavailable = errcode.ServiceUnavailable + + Canceled = errcode.Canceled + Unknown = errcode.Unknown + PermissionDenied = errcode.PermissionDenied + ResourceExhausted = errcode.ResourceExhausted + FailedPrecondition = errcode.FailedPrecondition + Aborted = errcode.Aborted + OutOfRange = errcode.OutOfRange + Unimplemented = errcode.Unimplemented + DataLoss = errcode.DataLoss +) + +var SkipResponse = errcode.SkipResponse diff --git a/_13_sponge-dtm-cache/http/internal/handler/atomic.go b/_13_sponge-dtm-cache/http/internal/handler/atomic.go new file mode 100644 index 0000000..ebe76a4 --- /dev/null +++ b/_13_sponge-dtm-cache/http/internal/handler/atomic.go @@ -0,0 +1,103 @@ +// Code generated by https://github.com/zhufuyi/sponge + +package handler + +import ( + "context" + "database/sql" + "errors" + "time" + + "github.com/dtm-labs/dtmcli" + "github.com/dtm-labs/rockscache" + + "github.com/zhufuyi/sponge/pkg/gin/middleware" + "github.com/zhufuyi/sponge/pkg/logger" + "github.com/zhufuyi/sponge/pkg/utils" + + stockV1 "stock/api/stock/v1" + "stock/internal/config" + "stock/internal/dao" + "stock/internal/ecode" + "stock/internal/model" +) + +var _ stockV1.AtomicLogicer = (*atomicHandler)(nil) + +type atomicHandler struct { + db *sql.DB + cacheClient *rockscache.Client +} + +// NewAtomicHandler create a handler +func NewAtomicHandler() stockV1.AtomicLogicer { + return &atomicHandler{ + db: model.GetSDB(), + cacheClient: model.GetRockscacheClient(), + } +} + +// Update 更新数据,保证DB与缓存操作的原子性。 +func (h *atomicHandler) Update(ctx context.Context, req *stockV1.UpdateAtomicRequest) (*stockV1.UpdateAtomicRequestReply, error) { + err := req.Validate() + if err != nil { + logger.Warn("req.Validate error", logger.Err(err), logger.Any("req", req), middleware.CtxRequestIDField(ctx)) + return nil, adaptErr(err) + } + + gid := newGid() + callbackStockAddr := getCallbackStockAddr() + queryPreparedURL := callbackStockAddr + "/api/v1/stock/queryPrepared" + deleteCacheURL := callbackStockAddr + "/api/v1/stock/deleteCache" + deleteCacheBody := &stockV1.DeleteCacheRequest{ + Key: getStockCacheKey(req.Id), + } + stock := &model.Stock{ + ID: req.Id, + Stock: uint(req.Stock), + } + + msg := dtmcli.NewMsg(config.Get().Dtm.Server, gid) + msg.Add(deleteCacheURL, deleteCacheBody) + msg.TimeoutToFail = 3 + err = msg.DoAndSubmit(queryPreparedURL, func(bb *dtmcli.BranchBarrier) error { + return bb.CallWithDB(h.db, func(tx *sql.Tx) error { + return dao.UpdateStockInTx(tx, stock) + }) + }) + if err != nil { + logger.Warn("msg.DoAndSubmit error", logger.Err(err), logger.Any("req", req), middleware.CtxRequestIDField(ctx)) + return nil, adaptErr(err) + } + + logger.Info("更新数据,DB与缓存操作的原子性", logger.Any("dtm gid", gid)) + + return &stockV1.UpdateAtomicRequestReply{}, nil +} + +// Query 查询 +func (h *atomicHandler) Query(ctx context.Context, req *stockV1.QueryAtomicRequest) 
(*stockV1.QueryAtomicReply, error) { + err := req.Validate() + if err != nil { + logger.Warn("req.Validate error", logger.Err(err), logger.Any("req", req), middleware.CtxRequestIDField(ctx)) + return nil, ecode.InvalidParams.Err() + } + + key := getStockCacheKey(req.Id) + query := func() (string, error) { + return dao.GetStockByID(h.db, req.Id) + } + + value, err := h.cacheClient.Fetch(key, 300*time.Second, query) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, ecode.NotFound.Err() + } + logger.Warn("fetch cache error", logger.Err(err), middleware.CtxRequestIDField(ctx)) + return nil, ecode.InternalServerError.Err() + } + + return &stockV1.QueryAtomicReply{ + Stock: utils.StrToUint32(value), + }, nil +} diff --git a/_13_sponge-dtm-cache/http/internal/handler/callback.go b/_13_sponge-dtm-cache/http/internal/handler/callback.go new file mode 100644 index 0000000..0e6ada4 --- /dev/null +++ b/_13_sponge-dtm-cache/http/internal/handler/callback.go @@ -0,0 +1,93 @@ +// Code generated by https://github.com/zhufuyi/sponge + +package handler + +import ( + "context" + "database/sql" + "errors" + "github.com/dtm-labs/dtmcli" + "github.com/dtm-labs/rockscache" + "strconv" + + "github.com/zhufuyi/sponge/pkg/gin/middleware" + "github.com/zhufuyi/sponge/pkg/krand" + "github.com/zhufuyi/sponge/pkg/logger" + + stockV1 "stock/api/stock/v1" + "stock/internal/config" + "stock/internal/ecode" + "stock/internal/model" +) + +var _ stockV1.CallbackLogicer = (*callbackHandler)(nil) + +type callbackHandler struct { + db *sql.DB + cacheClient *rockscache.Client +} + +// NewCallbackHandler create a handler +func NewCallbackHandler() stockV1.CallbackLogicer { + return &callbackHandler{ + db: model.GetSDB(), + cacheClient: model.GetRockscacheClient(), + } +} + +// QueryPrepared 反查数据 +func (h *callbackHandler) QueryPrepared(ctx context.Context, req *stockV1.QueryPreparedRequest) (*stockV1.QueryPreparedReply, error) { + c, ctx := middleware.AdaptCtx(ctx) + bb, err := dtmcli.BarrierFromQuery(c.Request.URL.Query()) + if err != nil { + return nil, adaptErr(err) + } + + err = bb.QueryPrepared(h.db) + + return &stockV1.QueryPreparedReply{}, adaptErr(err) +} + +// DeleteCache 删除缓存 +func (h *callbackHandler) DeleteCache(ctx context.Context, req *stockV1.DeleteCacheRequest) (*stockV1.DeleteCacheReply, error) { + err := req.Validate() + if err != nil { + logger.Warn("req.Validate error", logger.Err(err), logger.Any("req", req), middleware.CtxRequestIDField(ctx)) + return nil, adaptErr(err) + } + + err = h.cacheClient.TagAsDeleted(req.Key) + + return &stockV1.DeleteCacheReply{}, adaptErr(err) +} + +func adaptErr(err error) error { + if err == nil { + return nil + } + + if errors.Is(err, dtmcli.ErrFailure) { + return ecode.Conflict.ErrToHTTP() + } else if errors.Is(err, dtmcli.ErrOngoing) { + return ecode.TooEarly.ErrToHTTP() + } + + return ecode.InternalServerError.ErrToHTTP() +} + +func newGid() string { + return krand.NewSeriesID() +} + +func getCallbackStockAddr() string { + port := config.Get().HTTP.Port + stockCfg := config.Get().Dtm.CallbackAddr.Stock + if stockCfg.Port != 0 { + port = stockCfg.Port + } + return stockCfg.Scheme + "://" + stockCfg.Host + ":" + strconv.Itoa(port) +} + +func getStockCacheKey(id uint64) string { + return "stock:" + strconv.FormatUint(id, 10) +} diff --git a/_13_sponge-dtm-cache/http/internal/handler/downgrade.go b/_13_sponge-dtm-cache/http/internal/handler/downgrade.go new file mode 100644 index 0000000..f609dc5 --- /dev/null +++ 
b/_13_sponge-dtm-cache/http/internal/handler/downgrade.go @@ -0,0 +1,143 @@ +// Code generated by https://github.com/zhufuyi/sponge + +package handler + +import ( + "context" + "database/sql" + "errors" + "time" + + "github.com/dtm-labs/dtmcli" + "github.com/dtm-labs/rockscache" + + "github.com/zhufuyi/sponge/pkg/gin/middleware" + "github.com/zhufuyi/sponge/pkg/logger" + "github.com/zhufuyi/sponge/pkg/utils" + + stockV1 "stock/api/stock/v1" + "stock/internal/config" + "stock/internal/dao" + "stock/internal/ecode" + "stock/internal/model" +) + +var _ stockV1.DowngradeLogicer = (*downgradeHandler)(nil) + +type downgradeHandler struct { + db *sql.DB + strongCacheClient *rockscache.Client +} + +// NewDowngradeHandler create a handler +func NewDowngradeHandler() stockV1.DowngradeLogicer { + return &downgradeHandler{ + db: model.GetSDB(), + strongCacheClient: model.GetStrongRockscacheClient(), + } +} + +// Update 更新数据,升降级中的DB和缓存强一致性 +func (h *downgradeHandler) Update(ctx context.Context, req *stockV1.UpdateDowngradeRequest) (*stockV1.UpdateDowngradeRequestReply, error) { + err := req.Validate() + if err != nil { + logger.Warn("req.Validate error", logger.Err(err), logger.Any("req", req), middleware.CtxRequestIDField(ctx)) + return nil, ecode.InvalidParams.Err() + } + + gid := newGid() + callbackStockAddr := getCallbackStockAddr() + downgradeBranchURL := callbackStockAddr + "/api/v1/stock/downgradeBranch" + downgradeBranchBody := &stockV1.DowngradeBranchRequest{ + Gid: gid, + Key: getStockCacheKey(req.Id), + Id: req.Id, + Stock: req.Stock, + } + + saga := dtmcli.NewSaga(config.Get().Dtm.Server, gid) + saga.Add(downgradeBranchURL, "", downgradeBranchBody) + saga.RetryInterval = 3 + err = saga.Submit() + if err != nil { + logger.Warn("saga.Submit error", logger.Err(err), logger.Any("req", req), middleware.CtxRequestIDField(ctx)) + return nil, adaptErr(err) + } + + logger.Info("更新数据,升降级中的DB和缓存强一致性", logger.Err(err), logger.Any("dtm gid", gid)) + + return &stockV1.UpdateDowngradeRequestReply{}, nil +} + +// Query 查询 +func (h *downgradeHandler) Query(ctx context.Context, req *stockV1.QueryDowngradeRequest) (*stockV1.QueryDowngradeReply, error) { + err := req.Validate() + if err != nil { + logger.Warn("req.Validate error", logger.Err(err), logger.Any("req", req), middleware.CtxRequestIDField(ctx)) + return nil, ecode.InvalidParams.Err() + } + + key := getStockCacheKey(req.Id) + query := func() (string, error) { + return dao.GetStockByID(h.db, req.Id) + } + + value, err := h.strongCacheClient.Fetch(key, 300*time.Second, query) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, ecode.NotFound.Err() + } + logger.Warn("fetch cache error", logger.Err(err), middleware.CtxRequestIDField(ctx)) + return nil, ecode.InternalServerError.Err() + } + + return &stockV1.QueryDowngradeReply{ + Stock: utils.StrToUint32(value), + }, nil +} + +// DowngradeBranch 升降级中的强一致性分支 +func (h *downgradeHandler) DowngradeBranch(ctx context.Context, req *stockV1.DowngradeBranchRequest) (*stockV1.DowngradeBranchReply, error) { + c, ctx := middleware.AdaptCtx(ctx) + + err := req.Validate() + if err != nil { + logger.Warn("req.Validate error", logger.Err(err), logger.Any("req", req), middleware.CtxRequestIDField(ctx)) + return nil, adaptErr(err) + } + stock := &model.Stock{ + ID: req.Id, + Stock: uint(req.Stock), + } + + ctx, _ = context.WithTimeout(ctx, 15*time.Second) + + err = h.strongCacheClient.LockForUpdate(ctx, req.Key, req.Gid) + if err != nil { + logger.Warn("h.strongCacheClient.LockForUpdate error", 
logger.Err(err), logger.Any("req", req), middleware.CtxRequestIDField(ctx)) + return nil, adaptErr(err) + } + + bb, err := dtmcli.BarrierFromQuery(c.Request.URL.Query()) + if err != nil { + logger.Warn("dtmcli.BarrierFromQuery error", logger.Err(err), logger.Any("req", req), middleware.CtxRequestIDField(ctx)) + return nil, adaptErr(err) + } + err = bb.CallWithDB(h.db, func(tx *sql.Tx) error { + // if business failed, user should return error dtmcli.ErrFailure + // other error will be retried + return dao.UpdateStockInTx(tx, stock) + }) + if err != nil && !errors.Is(err, dtmcli.ErrFailure) { + logger.Warn("bb.CallWithDB error", logger.Err(err), logger.Any("req", req), middleware.CtxRequestIDField(ctx)) + return nil, adaptErr(err) + } + + err = h.strongCacheClient.UnlockForUpdate(ctx, req.Key, req.Gid) + if err != nil { + logger.Warn("h.strongCacheClient.UnlockForUpdate error", logger.Err(err), logger.Any("req", req), middleware.CtxRequestIDField(ctx)) + return nil, adaptErr(err) + } + + return &stockV1.DowngradeBranchReply{}, nil +} diff --git a/_13_sponge-dtm-cache/http/internal/handler/final.go b/_13_sponge-dtm-cache/http/internal/handler/final.go new file mode 100644 index 0000000..3fa3627 --- /dev/null +++ b/_13_sponge-dtm-cache/http/internal/handler/final.go @@ -0,0 +1,103 @@ +// Code generated by https://github.com/zhufuyi/sponge + +package handler + +import ( + "context" + "database/sql" + "errors" + "time" + + "github.com/dtm-labs/dtmcli" + "github.com/dtm-labs/rockscache" + + "github.com/zhufuyi/sponge/pkg/gin/middleware" + "github.com/zhufuyi/sponge/pkg/logger" + "github.com/zhufuyi/sponge/pkg/utils" + + stockV1 "stock/api/stock/v1" + "stock/internal/config" + "stock/internal/dao" + "stock/internal/ecode" + "stock/internal/model" +) + +var _ stockV1.FinalLogicer = (*finalHandler)(nil) + +type finalHandler struct { + db *sql.DB + cacheClient *rockscache.Client +} + +// NewFinalHandler create a handler +func NewFinalHandler() stockV1.FinalLogicer { + return &finalHandler{ + db: model.GetSDB(), + cacheClient: model.GetRockscacheClient(), + } +} + +// Update 更新数据,DB和缓存最终一致性 +func (h *finalHandler) Update(ctx context.Context, req *stockV1.UpdateFinalRequest) (*stockV1.UpdateFinalRequestReply, error) { + err := req.Validate() + if err != nil { + logger.Warn("req.Validate error", logger.Err(err), logger.Any("req", req), middleware.CtxRequestIDField(ctx)) + return nil, adaptErr(err) + } + + gid := newGid() + callbackStockAddr := getCallbackStockAddr() + deleteCacheURL := callbackStockAddr + "/api/v1/stock/deleteCache" + queryPreparedURL := callbackStockAddr + "/api/v1/stock/queryPrepared" + deleteCacheBody := &stockV1.DeleteCacheRequest{ + Key: getStockCacheKey(req.Id), + } + stock := &model.Stock{ + ID: req.Id, + Stock: uint(req.Stock), + } + + msg := dtmcli.NewMsg(config.Get().Dtm.Server, gid) + msg.Add(deleteCacheURL, deleteCacheBody) + msg.WaitResult = true // when return success, the global transaction has finished + err = msg.DoAndSubmit(queryPreparedURL, func(bb *dtmcli.BranchBarrier) error { + return bb.CallWithDB(h.db, func(tx *sql.Tx) error { + return dao.UpdateStockInTx(tx, stock) + }) + }) + if err != nil { + logger.Warn("msg.DoAndSubmit error", logger.Err(err), logger.Any("req", req), middleware.CtxRequestIDField(ctx)) + return nil, adaptErr(err) + } + + logger.Info("更新数据,DB和缓存最终一致性", logger.Any("dtm gid", gid)) + + return &stockV1.UpdateFinalRequestReply{}, nil +} + +// Query 查询 +func (h *finalHandler) Query(ctx context.Context, req *stockV1.QueryFinalRequest) 
(*stockV1.QueryFinalReply, error) { + err := req.Validate() + if err != nil { + logger.Warn("req.Validate error", logger.Err(err), logger.Any("req", req), middleware.CtxRequestIDField(ctx)) + return nil, ecode.InvalidParams.Err() + } + + key := getStockCacheKey(req.Id) + query := func() (string, error) { + return dao.GetStockByID(h.db, req.Id) + } + + value, err := h.cacheClient.Fetch(key, 300*time.Second, query) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, ecode.NotFound.Err() + } + logger.Warn("fetch cache error", logger.Err(err), middleware.CtxRequestIDField(ctx)) + return nil, ecode.InternalServerError.Err() + } + + return &stockV1.QueryFinalReply{ + Stock: utils.StrToUint32(value), + }, nil +} diff --git a/_13_sponge-dtm-cache/http/internal/handler/stock.go b/_13_sponge-dtm-cache/http/internal/handler/stock.go new file mode 100644 index 0000000..609d55c --- /dev/null +++ b/_13_sponge-dtm-cache/http/internal/handler/stock.go @@ -0,0 +1,188 @@ +package handler + +import ( + "context" + "errors" + "strings" + "time" + + "github.com/jinzhu/copier" + + "github.com/zhufuyi/sponge/pkg/ggorm/query" + "github.com/zhufuyi/sponge/pkg/gin/middleware" + "github.com/zhufuyi/sponge/pkg/logger" + "github.com/zhufuyi/sponge/pkg/utils" + + stockV1 "stock/api/stock/v1" + "stock/internal/cache" + "stock/internal/dao" + "stock/internal/ecode" + "stock/internal/model" +) + +var _ stockV1.StockLogicer = (*stockHandler)(nil) +var _ time.Time + +type stockHandler struct { + stockDao dao.StockDao +} + +// NewStockHandler create a handler +func NewStockHandler() stockV1.StockLogicer { + return &stockHandler{ + stockDao: dao.NewStockDao( + model.GetDB(), + cache.NewStockCache(model.GetCacheType()), + ), + } +} + +// Create a record +func (h *stockHandler) Create(ctx context.Context, req *stockV1.CreateStockRequest) (*stockV1.CreateStockReply, error) { + err := req.Validate() + if err != nil { + logger.Warn("req.Validate error", logger.Err(err), logger.Any("req", req), middleware.CtxRequestIDField(ctx)) + return nil, ecode.InvalidParams.Err() + } + + stock := &model.Stock{} + err = copier.Copy(stock, req) + if err != nil { + return nil, ecode.ErrCreateStock.Err() + } + // Note: if copier.Copy cannot assign a value to a field, add it here + + err = h.stockDao.Create(ctx, stock) + if err != nil { + logger.Error("Create error", logger.Err(err), logger.Any("stock", stock), middleware.CtxRequestIDField(ctx)) + return nil, ecode.InternalServerError.Err() + } + + return &stockV1.CreateStockReply{Id: stock.ID}, nil +} + +// DeleteByID delete a record by id +func (h *stockHandler) DeleteByID(ctx context.Context, req *stockV1.DeleteStockByIDRequest) (*stockV1.DeleteStockByIDReply, error) { + err := req.Validate() + if err != nil { + logger.Warn("req.Validate error", logger.Err(err), logger.Any("req", req), middleware.CtxRequestIDField(ctx)) + return nil, ecode.InvalidParams.Err() + } + + err = h.stockDao.DeleteByID(ctx, req.Id) + if err != nil { + logger.Warn("DeleteByID error", logger.Err(err), middleware.CtxRequestIDField(ctx)) + return nil, ecode.InternalServerError.Err() + } + + return &stockV1.DeleteStockByIDReply{}, nil +} + +// UpdateByID update a record by id +func (h *stockHandler) UpdateByID(ctx context.Context, req *stockV1.UpdateStockByIDRequest) (*stockV1.UpdateStockByIDReply, error) { + err := req.Validate() + if err != nil { + logger.Warn("req.Validate error", logger.Err(err), logger.Any("req", req), middleware.CtxRequestIDField(ctx)) + return nil, ecode.InvalidParams.Err() + } + + 
stock := &model.Stock{} + err = copier.Copy(stock, req) + if err != nil { + return nil, ecode.ErrUpdateByIDStock.Err() + } + // Note: if copier.Copy cannot assign a value to a field, add it here + stock.ID = req.Id + + err = h.stockDao.UpdateByID(ctx, stock) + if err != nil { + logger.Error("UpdateByID error", logger.Err(err), logger.Any("stock", stock), middleware.CtxRequestIDField(ctx)) + return nil, ecode.InternalServerError.Err() + } + + return &stockV1.UpdateStockByIDReply{}, nil +} + +// GetByID get a record by id +func (h *stockHandler) GetByID(ctx context.Context, req *stockV1.GetStockByIDRequest) (*stockV1.GetStockByIDReply, error) { + err := req.Validate() + if err != nil { + logger.Warn("req.Validate error", logger.Err(err), logger.Any("req", req), middleware.CtxRequestIDField(ctx)) + return nil, ecode.InvalidParams.Err() + } + + record, err := h.stockDao.GetByID(ctx, req.Id) + if err != nil { + if errors.Is(err, model.ErrRecordNotFound) { + logger.Warn("GetByID error", logger.Err(err), logger.Any("id", req.Id), middleware.CtxRequestIDField(ctx)) + return nil, ecode.NotFound.Err() + } + logger.Error("GetByID error", logger.Err(err), logger.Any("id", req.Id), middleware.CtxRequestIDField(ctx)) + return nil, ecode.InternalServerError.Err() + } + + data, err := convertStock(record) + if err != nil { + logger.Warn("convertStock error", logger.Err(err), logger.Any("stock", record), middleware.CtxRequestIDField(ctx)) + return nil, ecode.ErrGetByIDStock.Err() + } + + return &stockV1.GetStockByIDReply{ + Stock: data, + }, nil +} + +// List of records by query parameters +func (h *stockHandler) List(ctx context.Context, req *stockV1.ListStockRequest) (*stockV1.ListStockReply, error) { + err := req.Validate() + if err != nil { + logger.Warn("req.Validate error", logger.Err(err), logger.Any("req", req), middleware.CtxRequestIDField(ctx)) + return nil, ecode.InvalidParams.Err() + } + + params := &query.Params{} + err = copier.Copy(params, req.Params) + if err != nil { + return nil, ecode.ErrListStock.Err() + } + // Note: if copier.Copy cannot assign a value to a field, add it here + + records, total, err := h.stockDao.GetByColumns(ctx, params) + if err != nil { + if strings.Contains(err.Error(), "query params error:") { + logger.Warn("GetByColumns error", logger.Err(err), logger.Any("params", params), middleware.CtxRequestIDField(ctx)) + return nil, ecode.InvalidParams.Err() + } + logger.Error("GetByColumns error", logger.Err(err), logger.Any("params", params), middleware.CtxRequestIDField(ctx)) + return nil, ecode.InternalServerError.Err() + } + + stocks := []*stockV1.Stock{} + for _, record := range records { + data, err := convertStock(record) + if err != nil { + logger.Warn("convertStock error", logger.Err(err), logger.Any("id", record.ID), middleware.CtxRequestIDField(ctx)) + continue + } + stocks = append(stocks, data) + } + + return &stockV1.ListStockReply{ + Total: total, + Stocks: stocks, + }, nil +} + +func convertStock(record *model.Stock) (*stockV1.Stock, error) { + value := &stockV1.Stock{} + err := copier.Copy(value, record) + if err != nil { + return nil, err + } + // Note: if copier.Copy cannot assign a value to a field, add it here, e.g. 
CreatedAt, UpdatedAt + value.Id = record.ID + value.CreatedAt = utils.FormatDateTimeRFC3339(*record.CreatedAt) + value.UpdatedAt = utils.FormatDateTimeRFC3339(*record.UpdatedAt) + + return value, nil +} diff --git a/_13_sponge-dtm-cache/http/internal/handler/stock_test.go b/_13_sponge-dtm-cache/http/internal/handler/stock_test.go new file mode 100644 index 0000000..51517cc --- /dev/null +++ b/_13_sponge-dtm-cache/http/internal/handler/stock_test.go @@ -0,0 +1,293 @@ +package handler + +import ( + "net/http" + "testing" + "time" + + "github.com/DATA-DOG/go-sqlmock" + "github.com/gin-gonic/gin" + "github.com/jinzhu/copier" + "github.com/stretchr/testify/assert" + "stock/api/types" + + "github.com/zhufuyi/sponge/pkg/gin/response" + "github.com/zhufuyi/sponge/pkg/gotest" + "github.com/zhufuyi/sponge/pkg/httpcli" + "github.com/zhufuyi/sponge/pkg/utils" + + stockV1 "stock/api/stock/v1" + "stock/internal/cache" + "stock/internal/dao" + "stock/internal/ecode" + "stock/internal/model" +) + +func newStockHandler() *gotest.Handler { + testData := &model.Stock{} + testData.ID = 1 + // you can set the other fields of testData here, such as: + //testData.CreatedAt = time.Now() + //testData.UpdatedAt = testData.CreatedAt + + // init mock cache + c := gotest.NewCache(map[string]interface{}{utils.Uint64ToStr(testData.ID): testData}) + c.ICache = cache.NewStockCache(&model.CacheType{ + CType: "redis", + Rdb: c.RedisClient, + }) + + // init mock dao + d := gotest.NewDao(c, testData) + d.IDao = dao.NewStockDao(d.DB, c.ICache.(cache.StockCache)) + + // init mock handler + h := gotest.NewHandler(d, testData) + h.IHandler = &stockHandler{stockDao: d.IDao.(dao.StockDao)} + iHandler := h.IHandler.(stockV1.StockLogicer) + + testFns := []gotest.RouterInfo{ + { + FuncName: "Create", + Method: http.MethodPost, + Path: "/stock", + HandlerFunc: func(c *gin.Context) { + req := &stockV1.CreateStockRequest{} + _ = c.ShouldBindJSON(req) + _, err := iHandler.Create(c, req) + if err != nil { + response.Error(c, ecode.ErrCreateStock) + return + } + response.Success(c) + }, + }, + { + FuncName: "DeleteByID", + Method: http.MethodDelete, + Path: "/stock/:id", + HandlerFunc: func(c *gin.Context) { + req := &stockV1.DeleteStockByIDRequest{ + Id: utils.StrToUint64(c.Param("id")), + } + _, err := iHandler.DeleteByID(c, req) + if err != nil { + response.Error(c, ecode.ErrDeleteByIDStock) + return + } + response.Success(c) + }, + }, + { + FuncName: "UpdateByID", + Method: http.MethodPut, + Path: "/stock/:id", + HandlerFunc: func(c *gin.Context) { + req := &stockV1.UpdateStockByIDRequest{} + _ = c.ShouldBindJSON(req) + req.Id = utils.StrToUint64(c.Param("id")) + _, err := iHandler.UpdateByID(c, req) + if err != nil { + response.Error(c, ecode.ErrUpdateByIDStock) + return + } + response.Success(c) + }, + }, + { + FuncName: "GetByID", + Method: http.MethodGet, + Path: "/stock/:id", + HandlerFunc: func(c *gin.Context) { + req := &stockV1.GetStockByIDRequest{ + Id: utils.StrToUint64(c.Param("id")), + } + _, err := iHandler.GetByID(c, req) + if err != nil { + response.Error(c, ecode.ErrGetByIDStock) + return + } + response.Success(c) + }, + }, + { + FuncName: "List", + Method: http.MethodPost, + Path: "/stock/list", + HandlerFunc: func(c *gin.Context) { + req := &stockV1.ListStockRequest{} + _ = c.ShouldBindJSON(req) + _, err := iHandler.List(c, req) + if err != nil { + response.Error(c, ecode.ErrListStock) + return + } + response.Success(c) + }, + }, + } + + h.GoRunHTTPServer(testFns) + + time.Sleep(time.Millisecond * 200) + return h +} + 
+func Test_stockHandler_Create(t *testing.T) { + h := newStockHandler() + defer h.Close() + testData := &stockV1.CreateStockRequest{} + _ = copier.Copy(testData, h.TestData.(*model.Stock)) + + h.MockDao.SQLMock.ExpectBegin() + args := h.MockDao.GetAnyArgs(h.TestData) + h.MockDao.SQLMock.ExpectExec("INSERT INTO .*"). + WithArgs(args[:len(args)-1]...). // adjusted for the amount of test data + WillReturnResult(sqlmock.NewResult(1, 1)) + h.MockDao.SQLMock.ExpectCommit() + + result := &httpcli.StdResult{} + err := httpcli.Post(result, h.GetRequestURL("Create"), testData) + if err != nil { + t.Fatal(err) + } + + t.Logf("%+v", result) + +} + +func Test_stockHandler_DeleteByID(t *testing.T) { + h := newStockHandler() + defer h.Close() + testData := h.TestData.(*model.Stock) + expectedSQLForDeletion := "DELETE .*" + + h.MockDao.SQLMock.ExpectBegin() + h.MockDao.SQLMock.ExpectExec(expectedSQLForDeletion). + WithArgs(testData.ID). // adjusted for the amount of test data + WillReturnResult(sqlmock.NewResult(int64(testData.ID), 1)) + h.MockDao.SQLMock.ExpectCommit() + + result := &httpcli.StdResult{} + err := httpcli.Delete(result, h.GetRequestURL("DeleteByID", testData.ID)) + if err != nil { + t.Fatal(err) + } + if result.Code != 0 { + t.Fatalf("%+v", result) + } + + // zero id error test + err = httpcli.Delete(result, h.GetRequestURL("DeleteByID", 0)) + assert.NoError(t, err) + + // delete error test + err = httpcli.Delete(result, h.GetRequestURL("DeleteByID", 111)) + assert.NoError(t, err) +} + +func Test_stockHandler_UpdateByID(t *testing.T) { + h := newStockHandler() + defer h.Close() + testData := &stockV1.UpdateStockByIDRequest{} + _ = copier.Copy(testData, h.TestData.(*model.Stock)) + testData.Id = h.TestData.(*model.Stock).ID + + h.MockDao.SQLMock.ExpectBegin() + h.MockDao.SQLMock.ExpectExec("UPDATE .*"). + WithArgs(h.MockDao.AnyTime, testData.Id). // adjusted for the amount of test data + WillReturnResult(sqlmock.NewResult(int64(testData.Id), 1)) + h.MockDao.SQLMock.ExpectCommit() + + result := &httpcli.StdResult{} + err := httpcli.Put(result, h.GetRequestURL("UpdateByID", testData.Id), testData) + if err != nil { + t.Fatal(err) + } + if result.Code != 0 { + t.Fatalf("%+v", result) + } + + // zero id error test + err = httpcli.Put(result, h.GetRequestURL("UpdateByID", 0), testData) + assert.NoError(t, err) + + // update error test + err = httpcli.Put(result, h.GetRequestURL("UpdateByID", 111), testData) + assert.NoError(t, err) +} + +func Test_stockHandler_GetByID(t *testing.T) { + h := newStockHandler() + defer h.Close() + testData := h.TestData.(*model.Stock) + + // column names and corresponding data + rows := sqlmock.NewRows([]string{"id"}). + AddRow(testData.ID) + + h.MockDao.SQLMock.ExpectQuery("SELECT .*"). + WithArgs(testData.ID). + WillReturnRows(rows) + + result := &httpcli.StdResult{} + err := httpcli.Get(result, h.GetRequestURL("GetByID", testData.ID)) + if err != nil { + t.Fatal(err) + } + if result.Code != 0 { + t.Fatalf("%+v", result) + } + + // zero id error test + err = httpcli.Get(result, h.GetRequestURL("GetByID", 0)) + assert.NoError(t, err) + + // get error test + err = httpcli.Get(result, h.GetRequestURL("GetByID", 111)) + assert.NoError(t, err) +} + +func Test_stockHandler_List(t *testing.T) { + h := newStockHandler() + defer h.Close() + testData := h.TestData.(*model.Stock) + + // column names and corresponding data + rows := sqlmock.NewRows([]string{"id"}). 
+ AddRow(testData.ID) + + h.MockDao.SQLMock.ExpectQuery("SELECT .*").WillReturnRows(rows) + + result := &httpcli.StdResult{} + err := httpcli.Post(result, h.GetRequestURL("List"), &stockV1.ListStockRequest{ + Params: &types.Params{ + Page: 0, + Limit: 10, + Sort: "ignore count", // ignore test count + }}) + if err != nil { + t.Fatal(err) + } + if result.Code != 0 { + t.Fatalf("%+v", result) + } + + // nil params error test + err = httpcli.Post(result, h.GetRequestURL("List"), &stockV1.ListStockRequest{}) + assert.NoError(t, err) + + // get error test + err = httpcli.Post(result, h.GetRequestURL("List"), &stockV1.ListStockRequest{Params: &types.Params{ + Page: 0, + Limit: 10, + }}) + assert.NoError(t, err) +} + +func TestNewStockHandler(t *testing.T) { + defer func() { + recover() + }() + _ = NewStockHandler() +} diff --git a/_13_sponge-dtm-cache/http/internal/handler/strong.go b/_13_sponge-dtm-cache/http/internal/handler/strong.go new file mode 100644 index 0000000..b7b07b4 --- /dev/null +++ b/_13_sponge-dtm-cache/http/internal/handler/strong.go @@ -0,0 +1,103 @@ +// Code generated by https://github.com/zhufuyi/sponge + +package handler + +import ( + "context" + "database/sql" + "errors" + "time" + + "github.com/dtm-labs/dtmcli" + "github.com/dtm-labs/rockscache" + "github.com/zhufuyi/sponge/pkg/gin/middleware" + "github.com/zhufuyi/sponge/pkg/logger" + "github.com/zhufuyi/sponge/pkg/utils" + + stockV1 "stock/api/stock/v1" + "stock/internal/config" + "stock/internal/dao" + "stock/internal/ecode" + "stock/internal/model" +) + +var _ stockV1.StrongLogicer = (*strongHandler)(nil) + +type strongHandler struct { + db *sql.DB + strongCacheClient *rockscache.Client +} + +// NewStrongHandler create a handler +func NewStrongHandler() stockV1.StrongLogicer { + return &strongHandler{ + db: model.GetSDB(), + strongCacheClient: model.GetStrongRockscacheClient(), + } +} + +// Update 更新数据,DB和缓存强一致性 +func (h *strongHandler) Update(ctx context.Context, req *stockV1.UpdateStrongRequest) (*stockV1.UpdateStrongRequestReply, error) { + err := req.Validate() + if err != nil { + logger.Warn("req.Validate error", logger.Err(err), logger.Any("req", req), middleware.CtxRequestIDField(ctx)) + return nil, adaptErr(err) + } + + gid := newGid() + callbackStockAddr := getCallbackStockAddr() + deleteCacheURL := callbackStockAddr + "/api/v1/stock/deleteCache" + queryPreparedURL := callbackStockAddr + "/api/v1/stock/queryPrepared" + deleteCacheBody := &stockV1.DeleteCacheRequest{ + Key: getStockCacheKey(req.Id), + } + stock := &model.Stock{ + ID: req.Id, + Stock: uint(req.Stock), + } + + // 创建二阶段消息事务 + msg := dtmcli.NewMsg(config.Get().Dtm.Server, gid) + msg.Add(deleteCacheURL, deleteCacheBody) + msg.WaitResult = false // when return success, the global transaction has finished + err = msg.DoAndSubmit(queryPreparedURL, func(bb *dtmcli.BranchBarrier) error { + return bb.CallWithDB(h.db, func(tx *sql.Tx) error { + return dao.UpdateStockInTx(tx, stock) + }) + }) + if err != nil { + logger.Warn("msg.DoAndSubmit error", logger.Err(err), logger.Any("req", req), middleware.CtxRequestIDField(ctx)) + return nil, adaptErr(err) + } + + logger.Info("更新数据,DB和缓存强一致性", logger.Err(err), logger.Any("dtm gid", gid)) + + return &stockV1.UpdateStrongRequestReply{}, nil +} + +// Query 查询 +func (h *strongHandler) Query(ctx context.Context, req *stockV1.QueryStrongRequest) (*stockV1.QueryStrongReply, error) { + err := req.Validate() + if err != nil { + logger.Warn("req.Validate error", logger.Err(err), logger.Any("req", req), 
middleware.CtxRequestIDField(ctx)) + return nil, ecode.InvalidParams.Err() + } + + key := getStockCacheKey(req.Id) + query := func() (string, error) { + return dao.GetStockByID(h.db, req.Id) + } + + value, err := h.strongCacheClient.Fetch(key, 300*time.Second, query) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, ecode.NotFound.Err() + } + logger.Warn("fetch cache error", logger.Err(err), middleware.CtxRequestIDField(ctx)) + return nil, ecode.InternalServerError.Err() + } + + return &stockV1.QueryStrongReply{ + Stock: utils.StrToUint32(value), + }, nil +} diff --git a/_13_sponge-dtm-cache/http/internal/model/init.go b/_13_sponge-dtm-cache/http/internal/model/init.go new file mode 100644 index 0000000..7087975 --- /dev/null +++ b/_13_sponge-dtm-cache/http/internal/model/init.go @@ -0,0 +1,234 @@ +// Package model is the initial database driver and define the data structures corresponding to the tables. +package model + +import ( + "database/sql" + "strings" + "sync" + "time" + + "github.com/dtm-labs/rockscache" + "github.com/redis/go-redis/v9" + "gorm.io/gorm" + + "github.com/zhufuyi/sponge/pkg/ggorm" + "github.com/zhufuyi/sponge/pkg/goredis" + "github.com/zhufuyi/sponge/pkg/logger" + "github.com/zhufuyi/sponge/pkg/utils" + + "stock/internal/config" +) + +var ( + // ErrCacheNotFound No hit cache + ErrCacheNotFound = redis.Nil + + // ErrRecordNotFound no records found + ErrRecordNotFound = gorm.ErrRecordNotFound +) + +var ( + db *gorm.DB + sdb *sql.DB + once1 sync.Once + + redisCli *redis.Client + once2 sync.Once + + cacheType *CacheType + once3 sync.Once +) + +// CacheType cache type +type CacheType struct { + CType string // cache type memory or redis + Rdb *redis.Client // if CType=redis, Rdb cannot be empty +} + +// InitCache initial cache +func InitCache(cType string) { + cacheType = &CacheType{ + CType: cType, + } + + if cType == "redis" { + cacheType.Rdb = GetRedisCli() + } +} + +// GetCacheType get cacheType +func GetCacheType() *CacheType { + if cacheType == nil { + once3.Do(func() { + InitCache(config.Get().App.CacheType) + }) + } + + return cacheType +} + +// InitRedis connect redis +func InitRedis() { + opts := []goredis.Option{ + goredis.WithDialTimeout(time.Duration(config.Get().Redis.DialTimeout) * time.Second), + goredis.WithReadTimeout(time.Duration(config.Get().Redis.ReadTimeout) * time.Second), + goredis.WithWriteTimeout(time.Duration(config.Get().Redis.WriteTimeout) * time.Second), + } + if config.Get().App.EnableTrace { + opts = append(opts, goredis.WithEnableTrace()) + } + + var err error + redisCli, err = goredis.Init(config.Get().Redis.Dsn, opts...) 
+ if err != nil { + panic("goredis.Init error: " + err.Error()) + } +} + +// GetRedisCli get redis client +func GetRedisCli() *redis.Client { + if redisCli == nil { + once2.Do(func() { + InitRedis() + }) + } + + return redisCli +} + +// CloseRedis close redis +func CloseRedis() error { + if redisCli == nil { + return nil + } + + err := redisCli.Close() + if err != nil && err.Error() != redis.ErrClosed.Error() { + return err + } + + return nil +} + +// ------------------------------------------------------------------------------------------ + +// InitDB connect database +func InitDB() { + switch strings.ToLower(config.Get().Database.Driver) { + case ggorm.DBDriverMysql, ggorm.DBDriverTidb: + InitMysql() + default: + panic("InitDB error, unsupported database driver: " + config.Get().Database.Driver) + } +} + +// InitMysql connect mysql +func InitMysql() { + opts := []ggorm.Option{ + ggorm.WithMaxIdleConns(config.Get().Database.Mysql.MaxIdleConns), + ggorm.WithMaxOpenConns(config.Get().Database.Mysql.MaxOpenConns), + ggorm.WithConnMaxLifetime(time.Duration(config.Get().Database.Mysql.ConnMaxLifetime) * time.Minute), + } + if config.Get().Database.Mysql.EnableLog { + opts = append(opts, + ggorm.WithLogging(logger.Get()), + ggorm.WithLogRequestIDKey("request_id"), + ) + } + + if config.Get().App.EnableTrace { + opts = append(opts, ggorm.WithEnableTrace()) + } + + // setting mysql slave and master dsn addresses, + // if there is no read/write separation, you can comment out the following piece of code + //opts = append(opts, ggorm.WithRWSeparation( + // config.Get().Database.Mysql.SlavesDsn, + // config.Get().Database.Mysql.MastersDsn..., + //)) + + // add custom gorm plugin + //opts = append(opts, ggorm.WithGormPlugin(yourPlugin)) + + var dsn = utils.AdaptiveMysqlDsn(config.Get().Database.Mysql.Dsn) + var err error + db, err = ggorm.InitMysql(dsn, opts...) 
+ if err != nil { + panic("InitMysql error: " + err.Error()) + } + sdb, err = db.DB() + if err != nil { + panic("InitMysql error: " + err.Error()) + } +} + +// GetDB get gorm db +func GetDB() *gorm.DB { + if db == nil { + once1.Do(func() { + InitDB() + }) + } + + return db +} + +// GetSDB get sql db +func GetSDB() *sql.DB { + if sdb == nil { + once1.Do(func() { + InitDB() + }) + } + + return sdb +} + +// CloseDB close db +func CloseDB() error { + return ggorm.CloseDB(db) +} + +// ------------------------------------------------------------------------------------------ + +var ( + cacheClient *rockscache.Client + cacheClientOnce sync.Once + + strongCacheClient *rockscache.Client + strongCacheClientOnce sync.Once +) + +// InitRockscache initial rockscache +func InitRockscache() { + cacheClientOnce.Do(func() { + rdb := GetRedisCli() + cacheClient = rockscache.NewClient(rdb, rockscache.NewDefaultOptions()) + }) +} + +// GetRockscacheClient get rockscache client +func GetRockscacheClient() *rockscache.Client { + if cacheClient == nil { + InitRockscache() + } + return cacheClient +} + +// InitStrongRockscache initial rockscache +func InitStrongRockscache() { + strongCacheClientOnce.Do(func() { + rdb := GetRedisCli() + options := rockscache.NewDefaultOptions() + options.StrongConsistency = true // enable strong consistency + strongCacheClient = rockscache.NewClient(rdb, options) + }) +} + +// GetStrongRockscacheClient get strong rockscache client +func GetStrongRockscacheClient() *rockscache.Client { + if strongCacheClient == nil { + InitStrongRockscache() + } + return strongCacheClient +} diff --git a/_13_sponge-dtm-cache/http/internal/model/stock.go b/_13_sponge-dtm-cache/http/internal/model/stock.go new file mode 100644 index 0000000..c815d6e --- /dev/null +++ b/_13_sponge-dtm-cache/http/internal/model/stock.go @@ -0,0 +1,19 @@ +package model + +import ( + "time" +) + +type Stock struct { + ID uint64 `gorm:"column:id;type:bigint(20) unsigned;primary_key;AUTO_INCREMENT" json:"id"` + ProductID uint64 `gorm:"column:product_id;type:bigint(20) unsigned;NOT NULL" json:"productId"` // 商品id + Stock uint `gorm:"column:stock;type:int(11) unsigned;NOT NULL" json:"stock"` // 库存 + CreatedAt *time.Time `gorm:"column:created_at;type:datetime" json:"createdAt"` + UpdatedAt *time.Time `gorm:"column:updated_at;type:datetime" json:"updatedAt"` + DeletedAt *time.Time `gorm:"column:deleted_at;type:datetime" json:"deletedAt"` +} + +// TableName table name +func (m *Stock) TableName() string { + return "stock" +} diff --git a/_13_sponge-dtm-cache/http/internal/routers/atomic_router.go b/_13_sponge-dtm-cache/http/internal/routers/atomic_router.go new file mode 100644 index 0000000..e85c2fa --- /dev/null +++ b/_13_sponge-dtm-cache/http/internal/routers/atomic_router.go @@ -0,0 +1,58 @@ +// Code generated by https://github.com/zhufuyi/sponge + +package routers + +import ( + "github.com/gin-gonic/gin" + + "github.com/zhufuyi/sponge/pkg/logger" + //"github.com/zhufuyi/sponge/pkg/middleware" + + stockV1 "stock/api/stock/v1" + "stock/internal/handler" +) + +func init() { + allMiddlewareFns = append(allMiddlewareFns, func(c *middlewareConfig) { + atomicMiddlewares(c) + }) + + allRouteFns = append(allRouteFns, + func(r *gin.Engine, groupPathMiddlewares map[string][]gin.HandlerFunc, singlePathMiddlewares map[string][]gin.HandlerFunc) { + atomicRouter(r, groupPathMiddlewares, singlePathMiddlewares, handler.NewAtomicHandler()) + }) +} + +func atomicRouter( + r *gin.Engine, + groupPathMiddlewares map[string][]gin.HandlerFunc, + 
singlePathMiddlewares map[string][]gin.HandlerFunc, + iService stockV1.AtomicLogicer) { + stockV1.RegisterAtomicRouter( + r, + groupPathMiddlewares, + singlePathMiddlewares, + iService, + stockV1.WithAtomicLogger(logger.Get()), + stockV1.WithAtomicHTTPResponse(), + stockV1.WithAtomicErrorToHTTPCode( + // Set some error codes to standard http return codes, + // by default there is already ecode.InternalServerError and ecode.ServiceUnavailable + // example: + // ecode.Forbidden, ecode.LimitExceed, + ), + ) +} + +// you can set the middleware of a route group, or set the middleware of a single route, +// or you can mix them, pay attention to the duplication of middleware when mixing them, +// it is recommended to set the middleware of a single route in preference +func atomicMiddlewares(c *middlewareConfig) { + // set up group route middleware, group path is left prefix rules, + // if the left prefix is hit, the middleware will take effect, e.g. group route is /api/v1, route /api/v1/atomic/:id will take effect + // c.setGroupPath("/api/v1/atomic", middleware.Auth()) + + // set up single route middleware, just uncomment the code and fill in the middlewares, nothing else needs to be changed + //c.setSinglePath("PUT", "/api/v1/stock/:id/atomic", middleware.Auth()) + //c.setSinglePath("GET", "/api/v1/stock/:id/atomic", middleware.Auth()) +} diff --git a/_13_sponge-dtm-cache/http/internal/routers/callback_router.go b/_13_sponge-dtm-cache/http/internal/routers/callback_router.go new file mode 100644 index 0000000..74bfda8 --- /dev/null +++ b/_13_sponge-dtm-cache/http/internal/routers/callback_router.go @@ -0,0 +1,58 @@ +// Code generated by https://github.com/zhufuyi/sponge + +package routers + +import ( + "github.com/gin-gonic/gin" + + "github.com/zhufuyi/sponge/pkg/logger" + //"github.com/zhufuyi/sponge/pkg/middleware" + + stockV1 "stock/api/stock/v1" + "stock/internal/handler" +) + +func init() { + allMiddlewareFns = append(allMiddlewareFns, func(c *middlewareConfig) { + callbackMiddlewares(c) + }) + + allRouteFns = append(allRouteFns, + func(r *gin.Engine, groupPathMiddlewares map[string][]gin.HandlerFunc, singlePathMiddlewares map[string][]gin.HandlerFunc) { + callbackRouter(r, groupPathMiddlewares, singlePathMiddlewares, handler.NewCallbackHandler()) + }) +} + +func callbackRouter( + r *gin.Engine, + groupPathMiddlewares map[string][]gin.HandlerFunc, + singlePathMiddlewares map[string][]gin.HandlerFunc, + iService stockV1.CallbackLogicer) { + stockV1.RegisterCallbackRouter( + r, + groupPathMiddlewares, + singlePathMiddlewares, + iService, + stockV1.WithCallbackLogger(logger.Get()), + stockV1.WithCallbackHTTPResponse(), + stockV1.WithCallbackErrorToHTTPCode( + // Set some error codes to standard http return codes, + // by default there is already ecode.InternalServerError and ecode.ServiceUnavailable + // example: + // ecode.Forbidden, ecode.LimitExceed, + ), + ) +} + +// you can set the middleware of a route group, or set the middleware of a single route, +// or you can mix them, pay attention to the duplication of middleware when mixing them, +// it is recommended to set the middleware of a single route in preference +func callbackMiddlewares(c *middlewareConfig) { + // set up group route middleware, group path is left prefix rules, + // if the left prefix is hit, the middleware will take effect, e.g. 
group route is /api/v1, route /api/v1/callback/:id will take effect + // c.setGroupPath("/api/v1/callback", middleware.Auth()) + + // set up single route middleware, just uncomment the code and fill in the middlewares, nothing else needs to be changed + //c.setSinglePath("GET", "/api/v1/stock/queryPrepared", middleware.Auth()) + //c.setSinglePath("POST", "/api/v1/stock/deleteCache", middleware.Auth()) +} diff --git a/_13_sponge-dtm-cache/http/internal/routers/downgrade_router.go b/_13_sponge-dtm-cache/http/internal/routers/downgrade_router.go new file mode 100644 index 0000000..388adc6 --- /dev/null +++ b/_13_sponge-dtm-cache/http/internal/routers/downgrade_router.go @@ -0,0 +1,59 @@ +// Code generated by https://github.com/zhufuyi/sponge + +package routers + +import ( + "github.com/gin-gonic/gin" + + "github.com/zhufuyi/sponge/pkg/logger" + //"github.com/zhufuyi/sponge/pkg/middleware" + + stockV1 "stock/api/stock/v1" + "stock/internal/handler" +) + +func init() { + allMiddlewareFns = append(allMiddlewareFns, func(c *middlewareConfig) { + downgradeMiddlewares(c) + }) + + allRouteFns = append(allRouteFns, + func(r *gin.Engine, groupPathMiddlewares map[string][]gin.HandlerFunc, singlePathMiddlewares map[string][]gin.HandlerFunc) { + downgradeRouter(r, groupPathMiddlewares, singlePathMiddlewares, handler.NewDowngradeHandler()) + }) +} + +func downgradeRouter( + r *gin.Engine, + groupPathMiddlewares map[string][]gin.HandlerFunc, + singlePathMiddlewares map[string][]gin.HandlerFunc, + iService stockV1.DowngradeLogicer) { + stockV1.RegisterDowngradeRouter( + r, + groupPathMiddlewares, + singlePathMiddlewares, + iService, + stockV1.WithDowngradeLogger(logger.Get()), + stockV1.WithDowngradeHTTPResponse(), + stockV1.WithDowngradeErrorToHTTPCode( + // Set some error codes to standard http return codes, + // by default there is already ecode.InternalServerError and ecode.ServiceUnavailable + // example: + // ecode.Forbidden, ecode.LimitExceed, + ), + ) +} + +// you can set the middleware of a route group, or set the middleware of a single route, +// or you can mix them, pay attention to the duplication of middleware when mixing them, +// it is recommended to set the middleware of a single route in preference +func downgradeMiddlewares(c *middlewareConfig) { + // set up group route middleware, group path is left prefix rules, + // if the left prefix is hit, the middleware will take effect, e.g. 
group route is /api/v1, route /api/v1/downgrade/:id will take effect + // c.setGroupPath("/api/v1/downgrade", middleware.Auth()) + + // set up single route middleware, just uncomment the code and fill in the middlewares, nothing else needs to be changed + //c.setSinglePath("PUT", "/api/v1/stock/:id/downgrade", middleware.Auth()) + //c.setSinglePath("GET", "/api/v1/stock/:id/downgrade", middleware.Auth()) + //c.setSinglePath("POST", "/api/v1/stock/downgradeBranch", middleware.Auth()) +} diff --git a/_13_sponge-dtm-cache/http/internal/routers/final_router.go b/_13_sponge-dtm-cache/http/internal/routers/final_router.go new file mode 100644 index 0000000..beddd0a --- /dev/null +++ b/_13_sponge-dtm-cache/http/internal/routers/final_router.go @@ -0,0 +1,58 @@ +// Code generated by https://github.com/zhufuyi/sponge + +package routers + +import ( + "github.com/gin-gonic/gin" + + "github.com/zhufuyi/sponge/pkg/logger" + //"github.com/zhufuyi/sponge/pkg/middleware" + + stockV1 "stock/api/stock/v1" + "stock/internal/handler" +) + +func init() { + allMiddlewareFns = append(allMiddlewareFns, func(c *middlewareConfig) { + finalMiddlewares(c) + }) + + allRouteFns = append(allRouteFns, + func(r *gin.Engine, groupPathMiddlewares map[string][]gin.HandlerFunc, singlePathMiddlewares map[string][]gin.HandlerFunc) { + finalRouter(r, groupPathMiddlewares, singlePathMiddlewares, handler.NewFinalHandler()) + }) +} + +func finalRouter( + r *gin.Engine, + groupPathMiddlewares map[string][]gin.HandlerFunc, + singlePathMiddlewares map[string][]gin.HandlerFunc, + iService stockV1.FinalLogicer) { + stockV1.RegisterFinalRouter( + r, + groupPathMiddlewares, + singlePathMiddlewares, + iService, + stockV1.WithFinalLogger(logger.Get()), + stockV1.WithFinalHTTPResponse(), + stockV1.WithFinalErrorToHTTPCode( + // Set some error codes to standard http return codes, + // by default there is already ecode.InternalServerError and ecode.ServiceUnavailable + // example: + // ecode.Forbidden, ecode.LimitExceed, + ), + ) +} + +// you can set the middleware of a route group, or set the middleware of a single route, +// or you can mix them, pay attention to the duplication of middleware when mixing them, +// it is recommended to set the middleware of a single route in preference +func finalMiddlewares(c *middlewareConfig) { + // set up group route middleware, group path is left prefix rules, + // if the left prefix is hit, the middleware will take effect, e.g. 
group route is /api/v1, route /api/v1/final/:id will take effect + // c.setGroupPath("/api/v1/final", middleware.Auth()) + + // set up single route middleware, just uncomment the code and fill in the middlewares, nothing else needs to be changed + //c.setSinglePath("PUT", "/api/v1/stock/:id/final", middleware.Auth()) + //c.setSinglePath("GET", "/api/v1/stock/:id/final", middleware.Auth()) +} diff --git a/_13_sponge-dtm-cache/http/internal/routers/routers.go b/_13_sponge-dtm-cache/http/internal/routers/routers.go new file mode 100644 index 0000000..253abb4 --- /dev/null +++ b/_13_sponge-dtm-cache/http/internal/routers/routers.go @@ -0,0 +1,167 @@ +package routers + +import ( + "net/http" + "strings" + "time" + + "github.com/gin-gonic/gin" + "github.com/gin-gonic/gin/binding" + + "github.com/zhufuyi/sponge/pkg/errcode" + "github.com/zhufuyi/sponge/pkg/gin/handlerfunc" + "github.com/zhufuyi/sponge/pkg/gin/middleware" + "github.com/zhufuyi/sponge/pkg/gin/middleware/metrics" + "github.com/zhufuyi/sponge/pkg/gin/prof" + "github.com/zhufuyi/sponge/pkg/gin/swagger" + "github.com/zhufuyi/sponge/pkg/gin/validator" + "github.com/zhufuyi/sponge/pkg/jwt" + "github.com/zhufuyi/sponge/pkg/logger" + + "stock/docs" + "stock/internal/config" +) + +type routeFns = []func(r *gin.Engine, groupPathMiddlewares map[string][]gin.HandlerFunc, singlePathMiddlewares map[string][]gin.HandlerFunc) + +var ( + // all route functions + allRouteFns = make(routeFns, 0) + // all middleware functions + allMiddlewareFns = []func(c *middlewareConfig){} +) + +// NewRouter create a new router +func NewRouter() *gin.Engine { //nolint + r := gin.New() + + r.Use(gin.Recovery()) + r.Use(middleware.Cors()) + + if config.Get().HTTP.Timeout > 0 { + // if you need more fine-grained control over your routes, set the timeout in your routes, unsetting the timeout globally here. 
+ r.Use(middleware.Timeout(time.Second * time.Duration(config.Get().HTTP.Timeout))) + } + + // request id middleware + r.Use(middleware.RequestID()) + + // logger middleware, to print simple messages, replace middleware.Logging with middleware.SimpleLog + r.Use(middleware.Logging( + middleware.WithLog(logger.Get()), + middleware.WithRequestIDFromContext(), + middleware.WithIgnoreRoutes("/metrics"), // ignore path + )) + + // init jwt middleware + jwt.Init( + //jwt.WithExpire(time.Hour*24), + //jwt.WithSigningKey("123456"), + //jwt.WithSigningMethod(jwt.HS384), + ) + + // metrics middleware + if config.Get().App.EnableMetrics { + r.Use(metrics.Metrics(r, + //metrics.WithMetricsPath("/metrics"), // default is /metrics + metrics.WithIgnoreStatusCodes(http.StatusNotFound), // ignore 404 status codes + )) + } + + // limit middleware + if config.Get().App.EnableLimit { + r.Use(middleware.RateLimit()) + } + + // circuit breaker middleware + if config.Get().App.EnableCircuitBreaker { + r.Use(middleware.CircuitBreaker( + // set http code for circuit breaker, default already includes 500 and 503 + middleware.WithValidCode(errcode.InternalServerError.Code()), + middleware.WithValidCode(errcode.ServiceUnavailable.Code()), + )) + } + + // trace middleware + if config.Get().App.EnableTrace { + r.Use(middleware.Tracing(config.Get().App.Name)) + } + + // profile performance analysis + if config.Get().App.EnableHTTPProfile { + prof.Register(r, prof.WithIOWaitTime()) + } + + // validator + binding.Validator = validator.Init() + + r.GET("/health", handlerfunc.CheckHealth) + r.GET("/ping", handlerfunc.Ping) + r.GET("/codes", handlerfunc.ListCodes) + r.GET("/config", gin.WrapF(errcode.ShowConfig([]byte(config.Show())))) + + // access path /apis/swagger/index.html + swagger.CustomRouter(r, "apis", docs.ApiDocs) + + c := newMiddlewareConfig() + + // set up all middlewares + for _, fn := range allMiddlewareFns { + fn(c) + } + + // register all routes + for _, fn := range allRouteFns { + fn(r, c.groupPathMiddlewares, c.singlePathMiddlewares) + } + + return r +} + +type middlewareConfig struct { + groupPathMiddlewares map[string][]gin.HandlerFunc // middleware functions corresponding to route group + singlePathMiddlewares map[string][]gin.HandlerFunc // middleware functions corresponding to a single route +} + +func newMiddlewareConfig() *middlewareConfig { + return &middlewareConfig{ + groupPathMiddlewares: make(map[string][]gin.HandlerFunc), + singlePathMiddlewares: make(map[string][]gin.HandlerFunc), + } +} + +func (c *middlewareConfig) setGroupPath(groupPath string, handlers ...gin.HandlerFunc) { //nolint + if groupPath == "" { + return + } + if groupPath[0] != '/' { + groupPath = "/" + groupPath + } + + handlerFns, ok := c.groupPathMiddlewares[groupPath] + if !ok { + c.groupPathMiddlewares[groupPath] = handlers + return + } + + c.groupPathMiddlewares[groupPath] = append(handlerFns, handlers...) +} + +func (c *middlewareConfig) setSinglePath(method string, singlePath string, handlers ...gin.HandlerFunc) { //nolint + if method == "" || singlePath == "" { + return + } + + key := getSinglePathKey(method, singlePath) + handlerFns, ok := c.singlePathMiddlewares[key] + if !ok { + c.singlePathMiddlewares[key] = handlers + return + } + + c.singlePathMiddlewares[key] = append(handlerFns, handlers...) 
+} + +func getSinglePathKey(method string, singlePath string) string { //nolint + return strings.ToUpper(method) + "->" + singlePath +} diff --git a/_13_sponge-dtm-cache/http/internal/routers/stock_router.go b/_13_sponge-dtm-cache/http/internal/routers/stock_router.go new file mode 100644 index 0000000..16df18a --- /dev/null +++ b/_13_sponge-dtm-cache/http/internal/routers/stock_router.go @@ -0,0 +1,61 @@ +// Code generated by https://github.com/zhufuyi/sponge + +package routers + +import ( + "github.com/gin-gonic/gin" + + "github.com/zhufuyi/sponge/pkg/logger" + //"github.com/zhufuyi/sponge/pkg/middleware" + + stockV1 "stock/api/stock/v1" + "stock/internal/handler" +) + +func init() { + allMiddlewareFns = append(allMiddlewareFns, func(c *middlewareConfig) { + stockMiddlewares(c) + }) + + allRouteFns = append(allRouteFns, + func(r *gin.Engine, groupPathMiddlewares map[string][]gin.HandlerFunc, singlePathMiddlewares map[string][]gin.HandlerFunc) { + stockRouter(r, groupPathMiddlewares, singlePathMiddlewares, handler.NewStockHandler()) + }) +} + +func stockRouter( + r *gin.Engine, + groupPathMiddlewares map[string][]gin.HandlerFunc, + singlePathMiddlewares map[string][]gin.HandlerFunc, + iService stockV1.StockLogicer) { + stockV1.RegisterStockRouter( + r, + groupPathMiddlewares, + singlePathMiddlewares, + iService, + stockV1.WithStockLogger(logger.Get()), + stockV1.WithStockHTTPResponse(), + stockV1.WithStockErrorToHTTPCode( + // Set some error codes to standard http return codes, + // by default there is already ecode.InternalServerError and ecode.ServiceUnavailable + // example: + // ecode.Forbidden, ecode.LimitExceed, + ), + ) +} + +// you can set the middleware of a route group, or set the middleware of a single route, +// or you can mix them, pay attention to the duplication of middleware when mixing them, +// it is recommended to set the middleware of a single route in preference +func stockMiddlewares(c *middlewareConfig) { + // set up group route middleware, group path is left prefix rules, + // if the left prefix is hit, the middleware will take effect, e.g. 
group route is /api/v1, route /api/v1/stock/:id will take effect + // c.setGroupPath("/api/v1/stock", middleware.Auth()) + + // set up single route middleware, just uncomment the code and fill in the middlewares, nothing else needs to be changed + //c.setSinglePath("POST", "/api/v1/stock", middleware.Auth()) + //c.setSinglePath("DELETE", "/api/v1/stock/:id", middleware.Auth()) + //c.setSinglePath("PUT", "/api/v1/stock/:id", middleware.Auth()) + //c.setSinglePath("GET", "/api/v1/stock/:id", middleware.Auth()) + //c.setSinglePath("POST", "/api/v1/stock/list", middleware.Auth()) +} diff --git a/_13_sponge-dtm-cache/http/internal/routers/strong_router.go b/_13_sponge-dtm-cache/http/internal/routers/strong_router.go new file mode 100644 index 0000000..bc1acbf --- /dev/null +++ b/_13_sponge-dtm-cache/http/internal/routers/strong_router.go @@ -0,0 +1,58 @@ +// Code generated by https://github.com/zhufuyi/sponge + +package routers + +import ( + "github.com/gin-gonic/gin" + + "github.com/zhufuyi/sponge/pkg/logger" + //"github.com/zhufuyi/sponge/pkg/middleware" + + stockV1 "stock/api/stock/v1" + "stock/internal/handler" +) + +func init() { + allMiddlewareFns = append(allMiddlewareFns, func(c *middlewareConfig) { + strongMiddlewares(c) + }) + + allRouteFns = append(allRouteFns, + func(r *gin.Engine, groupPathMiddlewares map[string][]gin.HandlerFunc, singlePathMiddlewares map[string][]gin.HandlerFunc) { + strongRouter(r, groupPathMiddlewares, singlePathMiddlewares, handler.NewStrongHandler()) + }) +} + +func strongRouter( + r *gin.Engine, + groupPathMiddlewares map[string][]gin.HandlerFunc, + singlePathMiddlewares map[string][]gin.HandlerFunc, + iService stockV1.StrongLogicer) { + stockV1.RegisterStrongRouter( + r, + groupPathMiddlewares, + singlePathMiddlewares, + iService, + stockV1.WithStrongLogger(logger.Get()), + stockV1.WithStrongHTTPResponse(), + stockV1.WithStrongErrorToHTTPCode( + // Set some error codes to standard http return codes, + // by default there is already ecode.InternalServerError and ecode.ServiceUnavailable + // example: + // ecode.Forbidden, ecode.LimitExceed, + ), + ) +} + +// you can set the middleware of a route group, or set the middleware of a single route, +// or you can mix them, pay attention to the duplication of middleware when mixing them, +// it is recommended to set the middleware of a single route in preference +func strongMiddlewares(c *middlewareConfig) { + // set up group route middleware, group path is left prefix rules, + // if the left prefix is hit, the middleware will take effect, e.g. 
group route is /api/v1, route /api/v1/strong/:id will take effect + // c.setGroupPath("/api/v1/strong", middleware.Auth()) + + // set up single route middleware, just uncomment the code and fill in the middlewares, nothing else needs to be changed + //c.setSinglePath("PUT", "/api/v1/stock/:id/strong", middleware.Auth()) + //c.setSinglePath("GET", "/api/v1/stock/:id/strong", middleware.Auth()) +} diff --git a/_13_sponge-dtm-cache/http/internal/server/http.go b/_13_sponge-dtm-cache/http/internal/server/http.go new file mode 100644 index 0000000..8151dd4 --- /dev/null +++ b/_13_sponge-dtm-cache/http/internal/server/http.go @@ -0,0 +1,88 @@ +package server + +import ( + "context" + "fmt" + "net/http" + "time" + + "github.com/gin-gonic/gin" + + "github.com/zhufuyi/sponge/pkg/app" + "github.com/zhufuyi/sponge/pkg/servicerd/registry" + + "stock/internal/routers" +) + +var _ app.IServer = (*httpServer)(nil) + +type httpServer struct { + addr string + server *http.Server + + instance *registry.ServiceInstance + iRegistry registry.Registry +} + +// Start http service +func (s *httpServer) Start() error { + if s.iRegistry != nil { + ctx, _ := context.WithTimeout(context.Background(), 5*time.Second) //nolint + if err := s.iRegistry.Register(ctx, s.instance); err != nil { + return err + } + } + + if err := s.server.ListenAndServe(); err != nil && err != http.ErrServerClosed { + return fmt.Errorf("listen server error: %v", err) + } + return nil +} + +// Stop http service +func (s *httpServer) Stop() error { + if s.iRegistry != nil { + ctx, cancel := context.WithTimeout(context.Background(), 2*time.Second) + go func() { + _ = s.iRegistry.Deregister(ctx, s.instance) + cancel() + }() + <-ctx.Done() + } + + ctx, _ := context.WithTimeout(context.Background(), 3*time.Second) //nolint + return s.server.Shutdown(ctx) +} + +// String comment +func (s *httpServer) String() string { + return "http service address " + s.addr +} + +// NewHTTPServer creates a new http server +func NewHTTPServer(addr string, opts ...HTTPOption) app.IServer { + o := defaultHTTPOptions() + o.apply(opts...) 
+ + if o.isProd { + gin.SetMode(gin.ReleaseMode) + } else { + gin.SetMode(gin.DebugMode) + } + + router := routers.NewRouter() + server := &http.Server{ + Addr: addr, + Handler: router, + //ReadTimeout: time.Second*30, + //WriteTimeout: time.Second*60, + MaxHeaderBytes: 1 << 20, + } + + return &httpServer{ + addr: addr, + server: server, + iRegistry: o.iRegistry, + instance: o.instance, + } +} diff --git a/_13_sponge-dtm-cache/http/internal/server/http_option.go b/_13_sponge-dtm-cache/http/internal/server/http_option.go new file mode 100644 index 0000000..3704401 --- /dev/null +++ b/_13_sponge-dtm-cache/http/internal/server/http_option.go @@ -0,0 +1,43 @@ +package server + +import ( + "github.com/zhufuyi/sponge/pkg/servicerd/registry" +) + +// HTTPOption setting up http +type HTTPOption func(*httpOptions) + +type httpOptions struct { + isProd bool + instance *registry.ServiceInstance + iRegistry registry.Registry +} + +func defaultHTTPOptions() *httpOptions { + return &httpOptions{ + isProd: false, + instance: nil, + iRegistry: nil, + } +} + +func (o *httpOptions) apply(opts ...HTTPOption) { + for _, opt := range opts { + opt(o) + } +} + +// WithHTTPIsProd setting up production environment markers +func WithHTTPIsProd(isProd bool) HTTPOption { + return func(o *httpOptions) { + o.isProd = isProd + } +} + +// WithHTTPRegistry registration services +func WithHTTPRegistry(iRegistry registry.Registry, instance *registry.ServiceInstance) HTTPOption { + return func(o *httpOptions) { + o.iRegistry = iRegistry + o.instance = instance + } +} diff --git a/_13_sponge-dtm-cache/http/internal/server/http_test.go b/_13_sponge-dtm-cache/http/internal/server/http_test.go new file mode 100644 index 0000000..28bdcce --- /dev/null +++ b/_13_sponge-dtm-cache/http/internal/server/http_test.go @@ -0,0 +1,115 @@ +package server + +import ( + "context" + "fmt" + "net/http" + "testing" + "time" + + "github.com/gin-gonic/gin" + "github.com/stretchr/testify/assert" + + "github.com/zhufuyi/sponge/pkg/servicerd/registry" + "github.com/zhufuyi/sponge/pkg/utils" + + "stock/configs" + "stock/internal/config" +) + +// need real database to test +func TestHTTPServer(t *testing.T) { + err := config.Init(configs.Path("stock.yml")) + if err != nil { + t.Fatal(err) + } + config.Get().App.EnableMetrics = true + config.Get().App.EnableTrace = true + config.Get().App.EnableHTTPProfile = true + config.Get().App.EnableLimit = true + config.Get().App.EnableCircuitBreaker = true + + port, _ := utils.GetAvailablePort() + addr := fmt.Sprintf(":%d", port) + gin.SetMode(gin.ReleaseMode) + + utils.SafeRunWithTimeout(time.Second*2, func(cancel context.CancelFunc) { + server := NewHTTPServer(addr, + WithHTTPIsProd(true), + WithHTTPRegistry(&iRegistry{}, ®istry.ServiceInstance{}), + ) + assert.NotNil(t, server) + cancel() + }) + utils.SafeRunWithTimeout(time.Second, func(cancel context.CancelFunc) { + server := NewHTTPServer(addr) + assert.NotNil(t, server) + cancel() + }) + + utils.SafeRunWithTimeout(time.Second*2, func(cancel context.CancelFunc) { + server := NewHTTPServer(addr, + WithHTTPIsProd(true), + WithHTTPRegistry(&iRegistry{}, ®istry.ServiceInstance{}), + ) + assert.NotNil(t, server) + cancel() + }) + utils.SafeRunWithTimeout(time.Second, func(cancel context.CancelFunc) { + server := NewHTTPServer(addr) + assert.NotNil(t, server) + cancel() + }) +} + +func TestHTTPServerMock(t *testing.T) { + err := config.Init(configs.Path("stock.yml")) + if err != nil { + t.Fatal(err) + } + config.Get().App.EnableMetrics = true + 
config.Get().App.EnableTrace = true + config.Get().App.EnableHTTPProfile = true + config.Get().App.EnableLimit = true + config.Get().App.EnableCircuitBreaker = true + + port, _ := utils.GetAvailablePort() + addr := fmt.Sprintf(":%d", port) + + o := defaultHTTPOptions() + if o.isProd { + gin.SetMode(gin.ReleaseMode) + } + s := &httpServer{ + addr: addr, + instance: ®istry.ServiceInstance{}, + iRegistry: &iRegistry{}, + } + s.server = &http.Server{ + Addr: addr, + Handler: http.NewServeMux(), + MaxHeaderBytes: 1 << 20, + } + + go func() { + time.Sleep(time.Second * 3) + _ = s.server.Shutdown(context.Background()) + }() + + str := s.String() + assert.NotEmpty(t, str) + err = s.Start() + assert.NoError(t, err) + err = s.Stop() + assert.NoError(t, err) +} + +type iRegistry struct{} + +func (i *iRegistry) Register(ctx context.Context, service *registry.ServiceInstance) error { + return nil +} + +func (i *iRegistry) Deregister(ctx context.Context, service *registry.ServiceInstance) error { + return nil +} diff --git a/_13_sponge-dtm-cache/http/scripts/binary-package.sh b/_13_sponge-dtm-cache/http/scripts/binary-package.sh new file mode 100644 index 0000000..33e8e25 --- /dev/null +++ b/_13_sponge-dtm-cache/http/scripts/binary-package.sh @@ -0,0 +1,24 @@ +#!/bin/bash + +serviceName="stock" + +mkdir -p ${serviceName}-binary/configs + +cp -f deployments/binary/run.sh ${serviceName}-binary +chmod +x ${serviceName}-binary/run.sh + +cp -f deployments/binary/deploy.sh ${serviceName}-binary +chmod +x ${serviceName}-binary/deploy.sh + +cp -f cmd/${serviceName}/${serviceName} ${serviceName}-binary +cp -f configs/${serviceName}.yml ${serviceName}-binary/configs +cp -f configs/${serviceName}_cc.yml ${serviceName}-binary/configs + +# compressing binary file +#upx -9 ${serviceName} + +tar zcvf ${serviceName}-binary.tar.gz ${serviceName}-binary +rm -rf ${serviceName}-binary + +echo "" +echo "package binary successfully, output file = ${serviceName}-binary.tar.gz" diff --git a/_13_sponge-dtm-cache/http/scripts/build/Dockerfile b/_13_sponge-dtm-cache/http/scripts/build/Dockerfile new file mode 100644 index 0000000..5c5aac7 --- /dev/null +++ b/_13_sponge-dtm-cache/http/scripts/build/Dockerfile @@ -0,0 +1,25 @@ +FROM alpine:latest +MAINTAINER zhufuyi "g.zhufuyi@gmail.com" + +# set the time zone to Shanghai +RUN apk add tzdata \ + && cp /usr/share/zoneinfo/Asia/Shanghai /etc/localtime \ + && echo "Asia/Shanghai" > /etc/timezone \ + && apk del tzdata + +# add curl, used for http service checking, can be installed without it if deployed in k8s +RUN apk add curl + +COPY configs/ /app/configs/ +COPY stock /app/stock +RUN chmod +x /app/stock + +# http port +EXPOSE 8080 + + +WORKDIR /app + +CMD ["./stock", "-c", "configs/stock.yml"] +# if you use the Configuration Center, stock.yml is changed to the Configuration Center configuration. +#CMD ["./stock", "-c", "configs/stock.yml", "-enable-cc"] diff --git a/_13_sponge-dtm-cache/http/scripts/build/Dockerfile_build b/_13_sponge-dtm-cache/http/scripts/build/Dockerfile_build new file mode 100644 index 0000000..00eb432 --- /dev/null +++ b/_13_sponge-dtm-cache/http/scripts/build/Dockerfile_build @@ -0,0 +1,41 @@ +# Need to package the code first `tar zcf stock.tar.gz $(ls)` and move it to the same directory as Dokerfile + +# Compile the go code, you can specify the golang version +FROM golang:1.21-alpine as build +COPY . 
/go/src/stock +WORKDIR /go/src/stock +RUN tar zxf stock.tar.gz +RUN go env -w GOPROXY=https://goproxy.cn,direct +RUN go mod download +RUN CGO_ENABLED=0 GOOS=linux GOARCH=amd64 go build -o /stock cmd/stock/main.go + +# compressing binary files +#cd / +#upx -9 stock + + +# building images with binary +FROM alpine:latest +MAINTAINER zhufuyi "g.zhufuyi@gmail.com" + +# set the time zone to Shanghai +RUN apk add tzdata \ + && cp /usr/share/zoneinfo/Asia/Shanghai /etc/localtime \ + && echo "Asia/Shanghai" > /etc/timezone \ + && apk del tzdata + +# add curl, used for http service checking, can be installed without it if deployed in k8s +RUN apk add curl + +COPY --from=build /stock /app/stock +COPY --from=build /go/src/stock/configs/stock.yml /app/configs/stock.yml + +# http port +EXPOSE 8080 + + +WORKDIR /app + +CMD ["./stock", "-c", "configs/stock.yml"] +# if you use the Configuration Center, stock.yml is changed to the Configuration Center configuration. +#CMD ["./stock", "-c", "configs/stock.yml", "-enable-cc"] diff --git a/_13_sponge-dtm-cache/http/scripts/build/Dockerfile_test b/_13_sponge-dtm-cache/http/scripts/build/Dockerfile_test new file mode 100644 index 0000000..c4f380f --- /dev/null +++ b/_13_sponge-dtm-cache/http/scripts/build/Dockerfile_test @@ -0,0 +1,16 @@ +# Need to package the code first `tar zcf stock.tar.gz $(ls)` and move it to the same directory as Dokerfile +# rpc server source code, used to test rpc methods +FROM golang:1.21-alpine +MAINTAINER zhufuyi "g.zhufuyi@gmail.com" + +# go test dependency packages +RUN apk add bash alpine-sdk build-base gcc + +COPY . /go/src/stock +WORKDIR /go/src/stock +RUN tar zxf stock.tar.gz +RUN go env -w GOPROXY=https://goproxy.cn,direct +RUN go mod download +RUN rm -f stock.tar.gz + +CMD ["sleep","86400"] diff --git a/_13_sponge-dtm-cache/http/scripts/build/README.md b/_13_sponge-dtm-cache/http/scripts/build/README.md new file mode 100644 index 0000000..ba0f3e8 --- /dev/null +++ b/_13_sponge-dtm-cache/http/scripts/build/README.md @@ -0,0 +1,4 @@ + +- `Dockerfile`: build the image by directly copying the compiled binaries, fast build speed. +- `Dockerfile_build`: two-stage build of the image, slower build speed, you can specify the golang version. +- `Dockerfile_test`: container for testing rpc services. 
diff --git a/_13_sponge-dtm-cache/http/scripts/deploy-binary.sh b/_13_sponge-dtm-cache/http/scripts/deploy-binary.sh new file mode 100644 index 0000000..4d3cf46 --- /dev/null +++ b/_13_sponge-dtm-cache/http/scripts/deploy-binary.sh @@ -0,0 +1,35 @@ +#!/usr/bin/expect + +set serviceName "stock" + +# parameters +set username [lindex $argv 0] +set password [lindex $argv 1] +set hostname [lindex $argv 2] + +set timeout 30 + +spawn scp -r ./${serviceName}-binary.tar.gz ${username}@${hostname}:/tmp/ +#expect "*yes/no*" +#send "yes\r" +expect "*password:*" +send "${password}\r" +expect eof + +spawn ssh ${username}@${hostname} +#expect "*yes/no*" +#send "yes\r" +expect "*password:*" +send "${password}\r" + +# execute a command or script +expect "*${username}@*" +send "cd /tmp && tar zxvf ${serviceName}-binary.tar.gz\r" +expect "*${username}@*" +send "bash /tmp/${serviceName}-binary/deploy.sh\r" + +# logging out of a session +expect "*${username}@*" +send "exit\r" + +expect eof diff --git a/_13_sponge-dtm-cache/http/scripts/deploy-docker.sh b/_13_sponge-dtm-cache/http/scripts/deploy-docker.sh new file mode 100644 index 0000000..ef8f27a --- /dev/null +++ b/_13_sponge-dtm-cache/http/scripts/deploy-docker.sh @@ -0,0 +1,32 @@ +#!/bin/bash + +dockerComposeFilePath="deployments/docker-compose" + +function checkResult() { + result=$1 + if [ ${result} -ne 0 ]; then + exit ${result} + fi +} + +mkdir -p ${dockerComposeFilePath}/configs +if [ ! -f "${dockerComposeFilePath}/configs/stock.yml" ];then + cp configs/stock.yml ${dockerComposeFilePath}/configs +fi + +# shellcheck disable=SC2164 +cd ${dockerComposeFilePath} + +docker-compose down +checkResult $? + +docker-compose up -d +checkResult $? + +colorCyan='\033[1;36m' +highBright='\033[1m' +markEnd='\033[0m' + +echo "" +echo -e "run service successfully, if you want to stop the service, go into the ${highBright}${dockerComposeFilePath}${markEnd} directory and execute the command ${colorCyan}docker-compose down${markEnd}." +echo "" diff --git a/_13_sponge-dtm-cache/http/scripts/deploy-k8s.sh b/_13_sponge-dtm-cache/http/scripts/deploy-k8s.sh new file mode 100644 index 0000000..8a84082 --- /dev/null +++ b/_13_sponge-dtm-cache/http/scripts/deploy-k8s.sh @@ -0,0 +1,31 @@ +#!/bin/bash + +SERVER_NAME="stock" +DEPLOY_FILE="deployments/kubernetes/${SERVER_NAME}-deployment.yml" + +function checkResult() { + result=$1 + if [ ${result} -ne 0 ]; then + exit ${result} + fi +} + +# Determining whether a file exists +if [ ! -f "${DEPLOY_FILE}" ];then + echo "Deployment file file ${DEPLOY_FILE} does not exist" + checkResult 1 +fi + +# Check if you are authorised to operate k8s +echo "kubectl version" +kubectl version +checkResult $? + +echo "kubectl delete -f ${DEPLOY_FILE} --ignore-not-found" +kubectl delete -f ${DEPLOY_FILE} --ignore-not-found +checkResult $? + +sleep 1 + +echo "kubectl apply -f ${DEPLOY_FILE}" +kubectl apply -f ${DEPLOY_FILE} diff --git a/_13_sponge-dtm-cache/http/scripts/image-build-local.sh b/_13_sponge-dtm-cache/http/scripts/image-build-local.sh new file mode 100644 index 0000000..68cd438 --- /dev/null +++ b/_13_sponge-dtm-cache/http/scripts/image-build-local.sh @@ -0,0 +1,38 @@ +#!/bin/bash + +# build the image for local docker, using the binaries, if you want to reduce the size of the image, +# use upx to compress the binaries before building the image. + +serverName="stock" +# image name of the service, prohibit uppercase letters in names. 
+IMAGE_NAME="eshop/stock" +# Dockerfile file directory +DOCKERFILE_PATH="scripts/build" +DOCKERFILE="${DOCKERFILE_PATH}/Dockerfile" + +mv -f cmd/${serverName}/${serverName} ${DOCKERFILE_PATH}/${serverName} + +# compressing binary file +#cd ${DOCKERFILE_PATH} +#upx -9 ${serverName} +#cd - + +mkdir -p ${DOCKERFILE_PATH}/configs && cp -f configs/${serverName}.yml ${DOCKERFILE_PATH}/configs/ +echo "docker build -f ${DOCKERFILE} -t ${IMAGE_NAME}:latest ${DOCKERFILE_PATH}" +docker build -f ${DOCKERFILE} -t ${IMAGE_NAME}:latest ${DOCKERFILE_PATH} + + +if [ -f "${DOCKERFILE_PATH}/${serverName}" ]; then + rm -f ${DOCKERFILE_PATH}/${serverName} +fi + +if [ -d "${DOCKERFILE_PATH}/configs" ]; then + rm -rf ${DOCKERFILE_PATH}/configs +fi + +# delete none image +noneImages=$(docker images | grep "" | awk '{print $3}') +if [ "X${noneImages}" != "X" ]; then + docker rmi ${noneImages} > /dev/null +fi +exit 0 diff --git a/_13_sponge-dtm-cache/http/scripts/image-build.sh b/_13_sponge-dtm-cache/http/scripts/image-build.sh new file mode 100644 index 0000000..7460f49 --- /dev/null +++ b/_13_sponge-dtm-cache/http/scripts/image-build.sh @@ -0,0 +1,58 @@ +#!/bin/bash + +# build the docker image using the binaries, if you want to reduce the size of the image, +# use upx to compress the binaries before building the image. + +serverName="stock" +# image name of the service, prohibit uppercase letters in names. +IMAGE_NAME="eshop/stock" +# Dockerfile file directory +DOCKERFILE_PATH="scripts/build" +DOCKERFILE="${DOCKERFILE_PATH}/Dockerfile" + +# image repo address, REPO_HOST="ip or domain", passed in via the first parameter +REPO_HOST=$1 +if [ "X${REPO_HOST}" = "X" ];then + echo "param 'repo host' cannot be empty, example: ./image-build.sh hub.docker.com v1.0.0" + exit 1 +fi +# the version tag, which defaults to latest if empty, is passed in via the second parameter +TAG=$2 +if [ "X${TAG}" = "X" ];then + TAG="latest" +fi +# image name and tag +IMAGE_NAME_TAG="${REPO_HOST}/${IMAGE_NAME}:${TAG}" + +# binary executable files +BIN_FILE="cmd/${serverName}/${serverName}" +# configuration file directory +CONFIG_PATH="configs" + +CGO_ENABLED=0 GOOS=linux GOARCH=amd64 go build -o ${BIN_FILE} cmd/${serverName}/*.go +mv -f ${BIN_FILE} ${DOCKERFILE_PATH} +mkdir -p ${DOCKERFILE_PATH}/${CONFIG_PATH} && cp -f ${CONFIG_PATH}/${serverName}.yml ${DOCKERFILE_PATH}/${CONFIG_PATH} + +# compressing binary file +#cd ${DOCKERFILE_PATH} +#upx -9 ${serverName} +#cd - + +echo "docker build -f ${DOCKERFILE} -t ${IMAGE_NAME_TAG} ${DOCKERFILE_PATH}" +docker build -f ${DOCKERFILE} -t ${IMAGE_NAME_TAG} ${DOCKERFILE_PATH} + + +if [ -f "${DOCKERFILE_PATH}/${serverName}" ]; then + rm -f ${DOCKERFILE_PATH}/${serverName} +fi + +if [ -d "${DOCKERFILE_PATH}/configs" ]; then + rm -rf ${DOCKERFILE_PATH}/configs +fi + +# delete none image +noneImages=$(docker images | grep "" | awk '{print $3}') +if [ "X${noneImages}" != "X" ]; then + docker rmi ${noneImages} > /dev/null +fi +exit 0 diff --git a/_13_sponge-dtm-cache/http/scripts/image-build2.sh b/_13_sponge-dtm-cache/http/scripts/image-build2.sh new file mode 100644 index 0000000..53f78aa --- /dev/null +++ b/_13_sponge-dtm-cache/http/scripts/image-build2.sh @@ -0,0 +1,38 @@ +#!/bin/bash + +# two-stage build docker image + +serverName="stock" +# image name of the service, prohibit uppercase letters in names. 
+IMAGE_NAME="eshop/stock" +# Dockerfile file directory +DOCKERFILE_PATH="scripts/build" +DOCKERFILE="${DOCKERFILE_PATH}/Dockerfile_build" + +# image repo address, REPO_HOST="ip or domain", passed in via the first parameter +REPO_HOST=$1 +if [ "X${REPO_HOST}" = "X" ];then + echo "param 'repo host' cannot be empty, example: ./image-build.sh hub.docker.com v1.0.0" + exit 1 +fi +# the version tag, which defaults to latest if empty, is passed in via the second parameter +TAG=$2 +if [ "X${TAG}" = "X" ];then + TAG="latest" +fi +# image name and tag +IMAGE_NAME_TAG="${REPO_HOST}/${IMAGE_NAME}:${TAG}" + +PROJECT_FILES=$(ls) +tar zcf ${serverName}.tar.gz ${PROJECT_FILES} +mv -f ${serverName}.tar.gz ${DOCKERFILE_PATH} +echo "docker build --force-rm -f ${DOCKERFILE} -t ${IMAGE_NAME_TAG} ${DOCKERFILE_PATH}" +docker build --force-rm -f ${DOCKERFILE} -t ${IMAGE_NAME_TAG} ${DOCKERFILE_PATH} +rm -rf ${DOCKERFILE_PATH}/${serverName}.tar.gz +# delete none image +noneImages=$(docker images | grep "" | awk '{print $3}') +if [ "X${noneImages}" != "X" ]; then + docker rmi ${noneImages} > /dev/null +fi +exit 0 + diff --git a/_13_sponge-dtm-cache/http/scripts/image-push.sh b/_13_sponge-dtm-cache/http/scripts/image-push.sh new file mode 100644 index 0000000..72e37d0 --- /dev/null +++ b/_13_sponge-dtm-cache/http/scripts/image-push.sh @@ -0,0 +1,53 @@ +#!/bin/bash + +# image name, prohibit uppercase letters in names. +IMAGE_NAME="eshop/stock" + +# image repo address, passed in via the first parameter +REPO_HOST=$1 +if [ "X${REPO_HOST}" = "X" ];then + echo "param 'repo host' cannot be empty, example: ./image-push.sh hub.docker.com v1.0.0" + exit 1 +fi + +# version tag, passed in via the second parameter, if empty, defaults to latest +TAG=$2 +if [ "X${TAG}" = "X" ];then + TAG="latest" +fi +# image name and tag +IMAGE_NAME_TAG="${REPO_HOST}/${IMAGE_NAME}:${TAG}" + +function checkResult() { + result=$1 + if [ ${result} -ne 0 ]; then + exit ${result} + fi +} + +# image repository host, https://index.docker.io/v1 is the official docker image repository +IMAGE_REPO_HOST="https://index.docker.io/v1" +# check if you are authorized to log into docker +function checkLogin() { + loginStatus=$(cat /root/.docker/config.json | grep "${IMAGE_REPO_HOST}") + if [ "X${loginStatus}" = "X" ];then + echo "docker is not logged into the image repository" + checkResult 1 + fi +} + +checkLogin + +# push image to image repository +echo "docker push ${IMAGE_NAME_TAG}" +docker push ${IMAGE_NAME_TAG} +checkResult $? +echo "docker push image success." + +sleep 1 + +# delete image +echo "docker rmi -f ${IMAGE_NAME_TAG}" +docker rmi -f ${IMAGE_NAME_TAG} +checkResult $? +echo "docker remove image success." diff --git a/_13_sponge-dtm-cache/http/scripts/image-rpc-test.sh b/_13_sponge-dtm-cache/http/scripts/image-rpc-test.sh new file mode 100644 index 0000000..b2e4e52 --- /dev/null +++ b/_13_sponge-dtm-cache/http/scripts/image-rpc-test.sh @@ -0,0 +1,33 @@ +#!/bin/bash + +# build rpc service test image + +serverName="stock" +# image name of the service, prohibit uppercase letters in names. 
+IMAGE_NAME="eshop/stock.rpc-test" +# Dockerfile file directory +DOCKERFILE_PATH="scripts/build" +DOCKERFILE="${DOCKERFILE_PATH}/Dockerfile_test" + +# image repo address, REPO_HOST="ip or domain", passed in via the first parameter +REPO_HOST=$1 +if [ "X${REPO_HOST}" = "X" ];then + echo "param 'repo host' cannot be empty, example: ./image-rpc-test.sh hub.docker.com v1.0.0" + exit 1 +fi +# the version tag, which defaults to latest if empty, is passed in via the second parameter +TAG=$2 +if [ "X${TAG}" = "X" ];then + TAG="latest" +fi +# image name and tag +IMAGE_NAME_TAG="${REPO_HOST}/${IMAGE_NAME}:${TAG}" + +PROJECT_FILES=$(ls) +tar zcf ${serverName}.tar.gz ${PROJECT_FILES} +mv -f ${serverName}.tar.gz ${DOCKERFILE_PATH} + +echo "docker build -f ${DOCKERFILE} -t ${IMAGE_NAME_TAG} ${DOCKERFILE_PATH}" +docker build --force-rm -f ${DOCKERFILE} -t ${IMAGE_NAME_TAG} ${DOCKERFILE_PATH} + +rm -rf ${DOCKERFILE_PATH}/${serverName}.tar.gz diff --git a/_13_sponge-dtm-cache/http/scripts/patch.sh b/_13_sponge-dtm-cache/http/scripts/patch.sh new file mode 100644 index 0000000..af7bbde --- /dev/null +++ b/_13_sponge-dtm-cache/http/scripts/patch.sh @@ -0,0 +1,75 @@ +#!/bin/bash + +patchType=$1 +typesPb="types-pb" +initMysql="init-mysql" +initMongodb="init-mongodb" +initTidb="init-tidb" +initPostgresql="init-postgresql" +initSqlite="init-sqlite" + +function checkResult() { + result=$1 + if [ ${result} -ne 0 ]; then + exit ${result} + fi +} + +function importPkg() { + go mod tidy +} + +function generateTypesPbCode() { + sponge patch gen-types-pb --out=./ + checkResult $? +} + +function generateInitMysqlCode() { + sponge patch gen-db-init --db-driver=mysql --out=./ + checkResult $? + importPkg +} + +function generateInitMongodbCode() { + sponge patch gen-db-init --db-driver=mongodb --out=./ + checkResult $? + importPkg +} + +function generateInitTidbCode() { + sponge patch gen-db-init --db-driver=tidb --out=./ + checkResult $? + importPkg +} + +function generateInitPostgresqlCode() { + sponge patch gen-db-init --db-driver=postgresql --out=./ + checkResult $? + importPkg +} + +function generateInitSqliteCode() { + sponge patch gen-db-init --db-driver=sqlite --out=./ + checkResult $? + importPkg +} + +if [ "$patchType" = "$typesPb" ]; then + generateTypesPbCode +elif [ "$patchType" = "$initMysql" ]; then + generateInitMysqlCode +elif [ "$patchType" = "$initMongodb" ]; then + generateInitMongodbCode +elif [ "$patchType" = "$initTidb" ]; then + generateInitTidbCode +elif [ "$patchType" = "$initPostgresql" ]; then + generateInitPostgresqlCode +elif [ "$patchType" = "$initSqlite" ]; then + generateInitSqliteCode +else + echo "invalid patch type: '$patchType'" + echo "supported types: $initMysql, $initMongodb, $initTidb, $initPostgresql, $initSqlite, $typesPb" + echo "e.g. 
make patch TYPE=init-mysql" + echo "" + exit 1 +fi diff --git a/_13_sponge-dtm-cache/http/scripts/proto-doc.sh b/_13_sponge-dtm-cache/http/scripts/proto-doc.sh new file mode 100644 index 0000000..2dc5c53 --- /dev/null +++ b/_13_sponge-dtm-cache/http/scripts/proto-doc.sh @@ -0,0 +1,43 @@ +#!/bin/bash + +# the directory where the proto files are located +protoBasePath="api" +allProtoFiles="" + +function checkResult() { + result=$1 + if [ ${result} -ne 0 ]; then + exit ${result} + fi +} + +function listFiles(){ + cd $1 + items=$(ls) + + for item in $items; do + if [ -d "$item" ]; then + listFiles $item + else + if [ "${item#*.}"x = "proto"x ];then + file=$(pwd)/${item} + protoFile="${protoBasePath}${file#*${protoBasePath}}" + allProtoFiles="${allProtoFiles} ${protoFile}" + fi + fi + done + cd .. +} + +# get all proto file paths +listFiles $protoBasePath + +protoc --proto_path=. --proto_path=./third_party \ + --doc_out=. --doc_opt=html,apis.html \ + $allProtoFiles + +checkResult $? + +mv -f apis.html docs/apis.html + +echo "generate proto doc file successfully, view in docs/apis.html" diff --git a/b_sponge-dtm-msg/scripts/protoc.sh b/_13_sponge-dtm-cache/http/scripts/protoc.sh similarity index 82% rename from b_sponge-dtm-msg/scripts/protoc.sh rename to _13_sponge-dtm-cache/http/scripts/protoc.sh index e123e11..fb196f8 100644 --- a/b_sponge-dtm-msg/scripts/protoc.sh +++ b/_13_sponge-dtm-cache/http/scripts/protoc.sh @@ -107,12 +107,6 @@ function generateByAllProto(){ checkResult $? - # generate files *_grpc_pb.go - protoc --proto_path=. --proto_path=./third_party \ - --go-grpc_out=. --go-grpc_opt=paths=source_relative \ - $allProtoFiles - - checkResult $? # generate the file *_pb.validate.go @@ -131,9 +125,9 @@ function generateByAllProto(){ } function generateBySpecifiedProto(){ - # get the proto file of the transfer server + # get the proto file of the stock server allProtoFiles="" - listProtoFiles ${protoBasePath}/transfer + listProtoFiles ${protoBasePath}/stock cd .. specifiedProtoFiles="" getSpecifiedProtoFiles @@ -154,27 +148,42 @@ function generateBySpecifiedProto(){ fi echo "generate template code by proto files: $specifiedProtoFiles" + # generate the swagger document and merge all files into docs/apis.swagger.json + protoc --proto_path=. --proto_path=./third_party \ + --openapiv2_out=. --openapiv2_opt=logtostderr=true --openapiv2_opt=allow_merge=true --openapiv2_opt=merge_file_name=docs/apis.json \ + $specifiedProtoFiles + + checkResult $? + + sponge web swagger --file=docs/apis.swagger.json + checkResult $? + moduleName=$(cat docs/gen.info | head -1 | cut -d , -f 1) serverName=$(cat docs/gen.info | head -1 | cut -d , -f 2) - # Generate 2 files, a logic code template file *.go (default save path in internal/service), a return error code template file *_rpc.go (default save path in internal/ecode) + suitedMonoRepo=$(cat docs/gen.info | head -1 | cut -d , -f 3) + protoc --proto_path=. --proto_path=./third_party \ - --go-rpc-tmpl_out=. --go-rpc-tmpl_opt=paths=source_relative \ - --go-rpc-tmpl_opt=moduleName=${moduleName} --go-rpc-tmpl_opt=serverName=${serverName} \ + --go-gin_out=. --go-gin_opt=paths=source_relative --go-gin_opt=plugin=handler \ + --go-gin_opt=moduleName=${moduleName} --go-gin_opt=serverName=${serverName} --go-gin_opt=suitedMonoRepo=${suitedMonoRepo} \ $specifiedProtoFiles checkResult $? - sponge merge rpc-pb + sponge merge http-pb checkResult $? 
- colorCyan='\e[1;36m' - highBright='\e[1m' - markEnd='\e[0m' + colorCyan='\033[1;36m' + highBright='\033[1m' + markEnd='\033[0m' echo "" - echo -e "${highBright}Tip:${markEnd} execute the command ${colorCyan}make run${markEnd} and then test grpc method is in the file ${colorCyan}internal/service/xxx_client_test.go${markEnd}." + echo -e "${highBright}Tip:${markEnd} execute the command ${colorCyan}make run${markEnd} and then visit ${colorCyan}http://localhost:8080/apis/swagger/index.html${markEnd} in your browser. " echo "" + + if [ "$suitedMonoRepo" == "true" ]; then + sponge patch adapt-mono-repo + fi } # generate pb.go by all proto files @@ -193,8 +202,5 @@ sponge patch del-omitempty --dir=$protoBasePath --suffix-name=pb.go > /dev/null sponge patch modify-dup-num --dir=internal/ecode sponge patch modify-dup-err-code --dir=internal/ecode -go mod tidy -checkResult $? - echo "generated code successfully." echo "" diff --git a/_13_sponge-dtm-cache/http/scripts/run-nohup.sh b/_13_sponge-dtm-cache/http/scripts/run-nohup.sh new file mode 100644 index 0000000..d0b97e2 --- /dev/null +++ b/_13_sponge-dtm-cache/http/scripts/run-nohup.sh @@ -0,0 +1,61 @@ +#!/bin/bash + +# chkconfig: - 85 15 +# description: stock + +serverName="stock" +cmdStr="cmd/${serverName}/${serverName}" + + +function checkResult() { + result=$1 + if [ ${result} -ne 0 ]; then + exit ${result} + fi +} + +stopService(){ + NAME=$1 + + ID=`ps -ef | grep "$NAME" | grep -v "$0" | grep -v "grep" | awk '{print $2}'` + if [ -n "$ID" ]; then + for id in $ID + do + kill -9 $id + echo "Stopped ${NAME} service successfully, process ID=${ID}" + done + fi +} + +startService() { + NAME=$1 + + if [ -f "${NAME}" ] ;then + rm "${NAME}" + fi + sleep 0.2 + go build -o ${cmdStr} cmd/${NAME}/main.go + checkResult $? + + nohup ${cmdStr} > ${NAME}.log 2>&1 & + sleep 1 + + ID=`ps -ef | grep "$NAME" | grep -v "$0" | grep -v "grep" | awk '{print $2}'` + if [ -n "$ID" ]; then + echo "Start the ${NAME} service successfully, process ID=${ID}" + else + echo "Failed to start ${NAME} service" + return 1 + fi + return 0 +} + + +stopService ${serverName} +if [ "$1"x != "stop"x ] ;then + sleep 1 + startService ${serverName} + checkResult $? +else + echo "Service ${serverName} has stopped" +fi diff --git a/b_sponge-dtm-msg/scripts/run.sh b/_13_sponge-dtm-cache/http/scripts/run.sh similarity index 93% rename from b_sponge-dtm-msg/scripts/run.sh rename to _13_sponge-dtm-cache/http/scripts/run.sh index 2057622..ff1d61e 100644 --- a/b_sponge-dtm-msg/scripts/run.sh +++ b/_13_sponge-dtm-cache/http/scripts/run.sh @@ -1,6 +1,6 @@ #!/bin/bash -serverName="transfer" +serverName="stock" binaryFile="cmd/${serverName}/${serverName}" diff --git a/_13_sponge-dtm-cache/http/scripts/swag-docs.sh b/_13_sponge-dtm-cache/http/scripts/swag-docs.sh new file mode 100644 index 0000000..5154b70 --- /dev/null +++ b/_13_sponge-dtm-cache/http/scripts/swag-docs.sh @@ -0,0 +1,36 @@ +#!/bin/bash + +HOST_ADDR=$1 + +function checkResult() { + result=$1 + if [ ${result} -ne 0 ]; then + exit ${result} + fi +} + +# change host addr +if [ "X${HOST_ADDR}" = "X" ];then + HOST_ADDR=$(cat cmd/stock/main.go | grep "@host" | awk '{print $3}') + HOST_ADDR=$(echo ${HOST_ADDR} | cut -d ':' -f 1) +else + sed -i "s/@host .*:8080/@host ${HOST_ADDR}:8080/g" cmd/stock/main.go +fi + +# generate api docs +swag init -g cmd/stock/main.go +checkResult $? 
+ +# modify duplicate numbers and error codes +sponge patch modify-dup-num --dir=internal/ecode +sponge patch modify-dup-err-code --dir=internal/ecode + +colorCyan='\033[1;36m' +highBright='\033[1m' +markEnd='\033[0m' + +echo "" +echo -e "${highBright}Tip:${markEnd} execute the command ${colorCyan}make run${markEnd} and then visit ${colorCyan}http://${HOST_ADDR}:8080/swagger/index.html${markEnd} in your browser." +echo "" +echo "generated api docs successfully." +echo "" diff --git a/_13_sponge-dtm-cache/http/test/stock.sql b/_13_sponge-dtm-cache/http/test/stock.sql new file mode 100644 index 0000000..0866d4f --- /dev/null +++ b/_13_sponge-dtm-cache/http/test/stock.sql @@ -0,0 +1,18 @@ +CREATE DATABASE IF NOT EXISTS eshop_stock DEFAULT CHARSET utf8mb4 COLLATE utf8mb4_unicode_ci; + +create table eshop_stock.stock +( + id bigint unsigned auto_increment + primary key, + product_id bigint unsigned not null comment 'product id', + stock int(11) unsigned not null comment 'stock quantity', + created_at datetime null, + updated_at datetime null, + deleted_at datetime null, + constraint product_id + unique (product_id) +) + charset = utf8mb4; + +create index deleted_at + on eshop_stock.stock (deleted_at); diff --git a/_13_sponge-dtm-cache/http/third_party/gogo/protobuf/gogoproto/gogo.proto b/_13_sponge-dtm-cache/http/third_party/gogo/protobuf/gogoproto/gogo.proto new file mode 100644 index 0000000..b80c856 --- /dev/null +++ b/_13_sponge-dtm-cache/http/third_party/gogo/protobuf/gogoproto/gogo.proto @@ -0,0 +1,144 @@ +// Protocol Buffers for Go with Gadgets +// +// Copyright (c) 2013, The GoGo Authors. All rights reserved. +// http://github.com/gogo/protobuf +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ +syntax = "proto2"; +package gogoproto; + +import "google/protobuf/descriptor.proto"; + +option java_package = "com.google.protobuf"; +option java_outer_classname = "GoGoProtos"; +option go_package = "github.com/gogo/protobuf/gogoproto"; + +extend google.protobuf.EnumOptions { + optional bool goproto_enum_prefix = 62001; + optional bool goproto_enum_stringer = 62021; + optional bool enum_stringer = 62022; + optional string enum_customname = 62023; + optional bool enumdecl = 62024; +} + +extend google.protobuf.EnumValueOptions { + optional string enumvalue_customname = 66001; +} + +extend google.protobuf.FileOptions { + optional bool goproto_getters_all = 63001; + optional bool goproto_enum_prefix_all = 63002; + optional bool goproto_stringer_all = 63003; + optional bool verbose_equal_all = 63004; + optional bool face_all = 63005; + optional bool gostring_all = 63006; + optional bool populate_all = 63007; + optional bool stringer_all = 63008; + optional bool onlyone_all = 63009; + + optional bool equal_all = 63013; + optional bool description_all = 63014; + optional bool testgen_all = 63015; + optional bool benchgen_all = 63016; + optional bool marshaler_all = 63017; + optional bool unmarshaler_all = 63018; + optional bool stable_marshaler_all = 63019; + + optional bool sizer_all = 63020; + + optional bool goproto_enum_stringer_all = 63021; + optional bool enum_stringer_all = 63022; + + optional bool unsafe_marshaler_all = 63023; + optional bool unsafe_unmarshaler_all = 63024; + + optional bool goproto_extensions_map_all = 63025; + optional bool goproto_unrecognized_all = 63026; + optional bool gogoproto_import = 63027; + optional bool protosizer_all = 63028; + optional bool compare_all = 63029; + optional bool typedecl_all = 63030; + optional bool enumdecl_all = 63031; + + optional bool goproto_registration = 63032; + optional bool messagename_all = 63033; + + optional bool goproto_sizecache_all = 63034; + optional bool goproto_unkeyed_all = 63035; +} + +extend google.protobuf.MessageOptions { + optional bool goproto_getters = 64001; + optional bool goproto_stringer = 64003; + optional bool verbose_equal = 64004; + optional bool face = 64005; + optional bool gostring = 64006; + optional bool populate = 64007; + optional bool stringer = 67008; + optional bool onlyone = 64009; + + optional bool equal = 64013; + optional bool description = 64014; + optional bool testgen = 64015; + optional bool benchgen = 64016; + optional bool marshaler = 64017; + optional bool unmarshaler = 64018; + optional bool stable_marshaler = 64019; + + optional bool sizer = 64020; + + optional bool unsafe_marshaler = 64023; + optional bool unsafe_unmarshaler = 64024; + + optional bool goproto_extensions_map = 64025; + optional bool goproto_unrecognized = 64026; + + optional bool protosizer = 64028; + optional bool compare = 64029; + + optional bool typedecl = 64030; + + optional bool messagename = 64033; + + optional bool goproto_sizecache = 64034; + optional bool goproto_unkeyed = 64035; +} + +extend google.protobuf.FieldOptions { + optional bool nullable = 65001; + optional bool embed = 65002; + optional string customtype = 65003; + optional string customname = 65004; + optional string jsontag = 65005; + optional string moretags = 65006; + optional string casttype = 65007; + optional string castkey = 65008; + optional string castvalue = 65009; + + optional bool stdtime = 65010; + optional bool stdduration = 65011; + optional bool wktpointer = 65012; + +} diff --git 
a/_13_sponge-dtm-cache/http/third_party/google/api/BUILD.bazel b/_13_sponge-dtm-cache/http/third_party/google/api/BUILD.bazel new file mode 100644 index 0000000..ec63010 --- /dev/null +++ b/_13_sponge-dtm-cache/http/third_party/google/api/BUILD.bazel @@ -0,0 +1,671 @@ +load("@rules_proto//proto:defs.bzl", "proto_library") + +# This is an API workspace, having public visibility by default makes perfect sense. +package(default_visibility = ["//visibility:public"]) + +############################################################################## +# Common +############################################################################## +proto_library( + name = "annotations_proto", + srcs = ["annotations.proto"], + deps = [ + ":http_proto", + "@com_google_protobuf//:descriptor_proto", + ], +) + +proto_library( + name = "auth_proto", + srcs = ["auth.proto"], + deps = [":annotations_proto"], +) + +proto_library( + name = "backend_proto", + srcs = ["backend.proto"], + visibility = ["//visibility:public"], +) + +proto_library( + name = "billing_proto", + srcs = ["billing.proto"], + deps = [ + ":annotations_proto", + ":metric_proto", + ], +) + +proto_library( + name = "client_proto", + srcs = ["client.proto"], + deps = [ + "@com_google_protobuf//:descriptor_proto", + ], +) + +proto_library( + name = "config_change_proto", + srcs = ["config_change.proto"], + visibility = ["//visibility:public"], +) + +proto_library( + name = "consumer_proto", + srcs = ["consumer.proto"], + visibility = ["//visibility:public"], +) + +proto_library( + name = "context_proto", + srcs = ["context.proto"], + visibility = ["//visibility:public"], +) + +proto_library( + name = "control_proto", + srcs = ["control.proto"], + visibility = ["//visibility:public"], +) + +proto_library( + name = "distribution_proto", + srcs = ["distribution.proto"], + deps = [ + ":annotations_proto", + "@com_google_protobuf//:any_proto", + "@com_google_protobuf//:timestamp_proto", + ], +) + +proto_library( + name = "documentation_proto", + srcs = ["documentation.proto"], + visibility = ["//visibility:public"], +) + +proto_library( + name = "endpoint_proto", + srcs = ["endpoint.proto"], + deps = [":annotations_proto"], +) + +proto_library( + name = "field_behavior_proto", + srcs = ["field_behavior.proto"], + deps = [ + "@com_google_protobuf//:descriptor_proto", + ], +) + +proto_library( + name = "http_proto", + srcs = ["http.proto"], + visibility = ["//visibility:public"], +) + +proto_library( + name = "httpbody_proto", + srcs = ["httpbody.proto"], + deps = ["@com_google_protobuf//:any_proto"], +) + +proto_library( + name = "label_proto", + srcs = ["label.proto"], + visibility = ["//visibility:public"], +) + +proto_library( + name = "launch_stage_proto", + srcs = ["launch_stage.proto"], +) + +proto_library( + name = "log_proto", + srcs = ["log.proto"], + deps = [":label_proto"], +) + +proto_library( + name = "logging_proto", + srcs = ["logging.proto"], + deps = [ + ":annotations_proto", + ":label_proto", + ], +) + +proto_library( + name = "metric_proto", + srcs = ["metric.proto"], + deps = [ + ":label_proto", + ":launch_stage_proto", + "@com_google_protobuf//:duration_proto", + ], +) + +proto_library( + name = "monitored_resource_proto", + srcs = ["monitored_resource.proto"], + deps = [ + ":label_proto", + ":launch_stage_proto", + "@com_google_protobuf//:struct_proto", + ], +) + +proto_library( + name = "monitoring_proto", + srcs = ["monitoring.proto"], + deps = [":annotations_proto"], +) + +proto_library( + name = "quota_proto", + srcs = 
["quota.proto"], + deps = [":annotations_proto"], +) + +proto_library( + name = "resource_proto", + srcs = ["resource.proto"], + deps = [ + "@com_google_protobuf//:descriptor_proto", + ], +) + +proto_library( + name = "service_proto", + srcs = ["service.proto"], + deps = [ + ":annotations_proto", + ":auth_proto", + ":backend_proto", + ":billing_proto", + ":context_proto", + ":control_proto", + ":documentation_proto", + ":endpoint_proto", + ":http_proto", + ":label_proto", + ":log_proto", + ":logging_proto", + ":metric_proto", + ":monitored_resource_proto", + ":monitoring_proto", + ":quota_proto", + ":resource_proto", + ":source_info_proto", + ":system_parameter_proto", + ":usage_proto", + "@com_google_protobuf//:any_proto", + "@com_google_protobuf//:api_proto", + "@com_google_protobuf//:type_proto", + "@com_google_protobuf//:wrappers_proto", + ], +) + +proto_library( + name = "source_info_proto", + srcs = ["source_info.proto"], + deps = ["@com_google_protobuf//:any_proto"], +) + +proto_library( + name = "system_parameter_proto", + srcs = ["system_parameter.proto"], + visibility = ["//visibility:public"], +) + +proto_library( + name = "usage_proto", + srcs = ["usage.proto"], + deps = [":annotations_proto"], +) + +############################################################################## +# Java +############################################################################## +load("@com_google_googleapis_imports//:imports.bzl", "java_proto_library") + +java_proto_library( + name = "api_java_proto", + deps = [ + "annotations_proto", + "auth_proto", + "backend_proto", + "billing_proto", + "client_proto", + "config_change_proto", + "consumer_proto", + "context_proto", + "control_proto", + "distribution_proto", + "documentation_proto", + "endpoint_proto", + "field_behavior_proto", + "http_proto", + "httpbody_proto", + "label_proto", + "launch_stage_proto", + "log_proto", + "logging_proto", + "metric_proto", + "monitored_resource_proto", + "monitoring_proto", + "quota_proto", + "resource_proto", + "service_proto", + "source_info_proto", + "system_parameter_proto", + "usage_proto", + ], +) + +############################################################################## +# Go +############################################################################## +load("@com_google_googleapis_imports//:imports.bzl", "go_proto_library") + +go_proto_library( + name = "annotations_go_proto", + importpath = "google.golang.org/genproto/googleapis/api/annotations", + protos = [ + ":annotations_proto", + ":http_proto", + ], +) + +go_proto_library( + name = "client_go_proto", + importpath = "google.golang.org/genproto/googleapis/api/annotations;annotations", + protos = [":client_proto"], +) + +go_proto_library( + name = "configchange_go_proto", + importpath = "google.golang.org/genproto/googleapis/api/configchange", + protos = [":config_change_proto"], +) + +go_proto_library( + name = "distribution_go_proto", + importpath = "google.golang.org/genproto/googleapis/api/distribution", + protos = [":distribution_proto"], +) + +go_proto_library( + name = "field_behavior_go_proto", + importpath = "google.golang.org/genproto/googleapis/api/annotations;annotations", + protos = [":field_behavior_proto"], +) + +go_proto_library( + name = "httpbody_go_proto", + importpath = "google.golang.org/genproto/googleapis/api/httpbody", + protos = [":httpbody_proto"], +) + +go_proto_library( + name = "label_go_proto", + importpath = "google.golang.org/genproto/googleapis/api/label", + protos = [":label_proto"], +) + 
+go_proto_library( + name = "api_go_proto", + importpath = "google.golang.org/genproto/googleapis/api", + protos = [ + ":launch_stage_proto", + ], + deps = [ + ":annotations_go_proto", + ], +) + +go_proto_library( + name = "metric_go_proto", + importpath = "google.golang.org/genproto/googleapis/api/metric", + protos = [":metric_proto"], + deps = [ + ":api_go_proto", + ":label_go_proto", + ], +) + +go_proto_library( + name = "monitoredres_go_proto", + importpath = "google.golang.org/genproto/googleapis/api/monitoredres", + protos = [":monitored_resource_proto"], + deps = [ + ":api_go_proto", + ":label_go_proto", + ], +) + +go_proto_library( + name = "resource_go_proto", + importpath = "google.golang.org/genproto/googleapis/api/annotations;annotations", + protos = [":resource_proto"], +) + +go_proto_library( + name = "serviceconfig_go_proto", + importpath = "google.golang.org/genproto/googleapis/api/serviceconfig", + protos = [ + ":auth_proto", + ":backend_proto", + ":billing_proto", + ":context_proto", + ":control_proto", + ":documentation_proto", + ":endpoint_proto", + ":log_proto", + ":logging_proto", + ":monitoring_proto", + ":quota_proto", + ":service_proto", + ":source_info_proto", + ":system_parameter_proto", + ":usage_proto", + ], + deps = [ + ":annotations_go_proto", + ":api_go_proto", + ":label_go_proto", + ":metric_go_proto", + ":monitoredres_go_proto", + ], +) + +############################################################################## +# C++ +############################################################################## +load("@com_google_googleapis_imports//:imports.bzl", "cc_proto_library") + +cc_proto_library( + name = "annotations_cc_proto", + deps = [":annotations_proto"], +) + +cc_proto_library( + name = "auth_cc_proto", + deps = [":auth_proto"], +) + +cc_proto_library( + name = "backend_cc_proto", + deps = [":backend_proto"], +) + +cc_proto_library( + name = "billing_cc_proto", + deps = [":billing_proto"], +) + +cc_proto_library( + name = "client_cc_proto", + deps = [":client_proto"], +) + +cc_proto_library( + name = "config_change_cc_proto", + deps = [":config_change_proto"], +) + +cc_proto_library( + name = "consumer_cc_proto", + deps = [":consumer_proto"], +) + +cc_proto_library( + name = "context_cc_proto", + deps = [":context_proto"], +) + +cc_proto_library( + name = "control_cc_proto", + deps = [":control_proto"], +) + +cc_proto_library( + name = "distribution_cc_proto", + deps = [":distribution_proto"], +) + +cc_proto_library( + name = "documentation_cc_proto", + deps = [":documentation_proto"], +) + +cc_proto_library( + name = "endpoint_cc_proto", + deps = [":endpoint_proto"], +) + +cc_proto_library( + name = "field_behavior_cc_proto", + deps = [":field_behavior_proto"], +) + +cc_proto_library( + name = "http_cc_proto", + deps = [":http_proto"], +) + +cc_proto_library( + name = "httpbody_cc_proto", + deps = [":httpbody_proto"], +) + +cc_proto_library( + name = "label_cc_proto", + deps = [":label_proto"], +) + +cc_proto_library( + name = "launch_stage_cc_proto", + deps = [":launch_stage_proto"], +) + +cc_proto_library( + name = "log_cc_proto", + deps = [":log_proto"], +) + +cc_proto_library( + name = "logging_cc_proto", + deps = [":logging_proto"], +) + +cc_proto_library( + name = "metric_cc_proto", + deps = [":metric_proto"], +) + +cc_proto_library( + name = "monitored_resource_cc_proto", + deps = [":monitored_resource_proto"], +) + +cc_proto_library( + name = "monitoring_cc_proto", + deps = ["monitoring_proto"], +) + +cc_proto_library( + name = 
"quota_cc_proto", + deps = ["quota_proto"], +) + +cc_proto_library( + name = "resource_cc_proto", + deps = [":resource_proto"], +) + +cc_proto_library( + name = "service_cc_proto", + deps = [":service_proto"], +) + +cc_proto_library( + name = "source_info_cc_proto", + deps = [":source_info_proto"], +) + +cc_proto_library( + name = "system_parameter_cc_proto", + deps = [":system_parameter_proto"], +) + +cc_proto_library( + name = "usage_cc_proto", + deps = [":usage_proto"], +) + +############################################################################## +# Python +############################################################################## +load("@com_google_googleapis_imports//:imports.bzl", "py_proto_library") + +py_proto_library( + name = "annotations_py_proto", + deps = [":annotations_proto"], +) + +py_proto_library( + name = "auth_py_proto", + deps = [":auth_proto"], +) + +py_proto_library( + name = "backend_py_proto", + deps = [":backend_proto"], +) + +py_proto_library( + name = "billing_py_proto", + deps = [":billing_proto"], +) + +py_proto_library( + name = "client_py_proto", + deps = [":client_proto"], +) + +py_proto_library( + name = "config_change_py_proto", + deps = [":config_change_proto"], +) + +py_proto_library( + name = "consumer_py_proto", + deps = [":consumer_proto"], +) + +py_proto_library( + name = "context_py_proto", + deps = [":context_proto"], +) + +py_proto_library( + name = "control_py_proto", + deps = [":control_proto"], +) + +py_proto_library( + name = "distribution_py_proto", + deps = [":distribution_proto"], +) + +py_proto_library( + name = "documentation_py_proto", + deps = [":documentation_proto"], +) + +py_proto_library( + name = "endpoint_py_proto", + deps = [":endpoint_proto"], +) + +py_proto_library( + name = "field_behavior_py_proto", + deps = [":field_behavior_proto"], +) + +py_proto_library( + name = "http_py_proto", + deps = [":http_proto"], +) + +py_proto_library( + name = "httpbody_py_proto", + deps = [":httpbody_proto"], +) + +py_proto_library( + name = "label_py_proto", + deps = [":label_proto"], +) + +py_proto_library( + name = "launch_stage_py_proto", + deps = [":launch_stage_proto"], +) + +py_proto_library( + name = "log_py_proto", + deps = [":log_proto"], +) + +py_proto_library( + name = "logging_py_proto", + deps = [":logging_proto"], +) + +py_proto_library( + name = "metric_py_proto", + deps = [":metric_proto"], +) + +py_proto_library( + name = "monitored_resource_py_proto", + deps = [":monitored_resource_proto"], +) + +py_proto_library( + name = "monitoring_py_proto", + deps = ["monitoring_proto"], +) + +py_proto_library( + name = "quota_py_proto", + deps = ["quota_proto"], +) + +py_proto_library( + name = "resource_py_proto", + deps = [":resource_proto"], +) + +py_proto_library( + name = "service_py_proto", + deps = [":service_proto"], +) + +py_proto_library( + name = "source_info_py_proto", + deps = [":source_info_proto"], +) + +py_proto_library( + name = "system_parameter_py_proto", + deps = [":system_parameter_proto"], +) + +py_proto_library( + name = "usage_py_proto", + deps = [":usage_proto"], +) diff --git a/_13_sponge-dtm-cache/http/third_party/google/api/README.md b/_13_sponge-dtm-cache/http/third_party/google/api/README.md new file mode 100644 index 0000000..eafe588 --- /dev/null +++ b/_13_sponge-dtm-cache/http/third_party/google/api/README.md @@ -0,0 +1,5 @@ +This folder contains the schema of the configuration model for the API services +platform. 
+ +**Note**: Protos under this directory are in Alpha status, and therefore are +subject to breaking changes. diff --git a/_13_sponge-dtm-cache/http/third_party/google/api/annotations.proto b/_13_sponge-dtm-cache/http/third_party/google/api/annotations.proto new file mode 100644 index 0000000..85c361b --- /dev/null +++ b/_13_sponge-dtm-cache/http/third_party/google/api/annotations.proto @@ -0,0 +1,31 @@ +// Copyright (c) 2015, Google Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.api; + +import "google/api/http.proto"; +import "google/protobuf/descriptor.proto"; + +option go_package = "google.golang.org/genproto/googleapis/api/annotations;annotations"; +option java_multiple_files = true; +option java_outer_classname = "AnnotationsProto"; +option java_package = "com.google.api"; +option objc_class_prefix = "GAPI"; + +extend google.protobuf.MethodOptions { + // See `HttpRule`. + HttpRule http = 72295728; +} diff --git a/_13_sponge-dtm-cache/http/third_party/google/api/auth.proto b/_13_sponge-dtm-cache/http/third_party/google/api/auth.proto new file mode 100644 index 0000000..3ff0eef --- /dev/null +++ b/_13_sponge-dtm-cache/http/third_party/google/api/auth.proto @@ -0,0 +1,228 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.api; + +option go_package = "google.golang.org/genproto/googleapis/api/serviceconfig;serviceconfig"; +option java_multiple_files = true; +option java_outer_classname = "AuthProto"; +option java_package = "com.google.api"; +option objc_class_prefix = "GAPI"; + +// `Authentication` defines the authentication configuration for an API. +// +// Example for an API targeted for external use: +// +// name: calendar.googleapis.com +// authentication: +// providers: +// - id: google_calendar_auth +// jwks_uri: https://www.googleapis.com/oauth2/v1/certs +// issuer: https://securetoken.google.com +// rules: +// - selector: "*" +// requirements: +// provider_id: google_calendar_auth +message Authentication { + // A list of authentication rules that apply to individual API methods. + // + // **NOTE:** All service configuration rules follow "last one wins" order. + repeated AuthenticationRule rules = 3; + + // Defines a set of authentication providers that a service supports. + repeated AuthProvider providers = 4; +} + +// Authentication rules for the service. 
+// +// By default, if a method has any authentication requirements, every request +// must include a valid credential matching one of the requirements. +// It's an error to include more than one kind of credential in a single +// request. +// +// If a method doesn't have any auth requirements, request credentials will be +// ignored. +message AuthenticationRule { + // Selects the methods to which this rule applies. + // + // Refer to [selector][google.api.DocumentationRule.selector] for syntax details. + string selector = 1; + + // The requirements for OAuth credentials. + OAuthRequirements oauth = 2; + + // If true, the service accepts API keys without any other credential. + // This flag only applies to HTTP and gRPC requests. + bool allow_without_credential = 5; + + // Requirements for additional authentication providers. + repeated AuthRequirement requirements = 7; +} + +// Specifies a location to extract JWT from an API request. +message JwtLocation { + oneof in { + // Specifies HTTP header name to extract JWT token. + string header = 1; + + // Specifies URL query parameter name to extract JWT token. + string query = 2; + } + + // The value prefix. The value format is "value_prefix{token}" + // Only applies to "in" header type. Must be empty for "in" query type. + // If not empty, the header value has to match (case sensitive) this prefix. + // If not matched, JWT will not be extracted. If matched, JWT will be + // extracted after the prefix is removed. + // + // For example, for "Authorization: Bearer {JWT}", + // value_prefix="Bearer " with a space at the end. + string value_prefix = 3; +} + +// Configuration for an authentication provider, including support for +// [JSON Web Token +// (JWT)](https://tools.ietf.org/html/draft-ietf-oauth-json-web-token-32). +message AuthProvider { + // The unique identifier of the auth provider. It will be referred to by + // `AuthRequirement.provider_id`. + // + // Example: "bookstore_auth". + string id = 1; + + // Identifies the principal that issued the JWT. See + // https://tools.ietf.org/html/draft-ietf-oauth-json-web-token-32#section-4.1.1 + // Usually a URL or an email address. + // + // Example: https://securetoken.google.com + // Example: 1234567-compute@developer.gserviceaccount.com + string issuer = 2; + + // URL of the provider's public key set to validate signature of the JWT. See + // [OpenID + // Discovery](https://openid.net/specs/openid-connect-discovery-1_0.html#ProviderMetadata). + // Optional if the key set document: + // - can be retrieved from + // [OpenID + // Discovery](https://openid.net/specs/openid-connect-discovery-1_0.html of + // the issuer. + // - can be inferred from the email domain of the issuer (e.g. a Google + // service account). + // + // Example: https://www.googleapis.com/oauth2/v1/certs + string jwks_uri = 3; + + // The list of JWT + // [audiences](https://tools.ietf.org/html/draft-ietf-oauth-json-web-token-32#section-4.1.3). + // that are allowed to access. A JWT containing any of these audiences will + // be accepted. When this setting is absent, JWTs with audiences: + // - "https://[service.name]/[google.protobuf.Api.name]" + // - "https://[service.name]/" + // will be accepted. 
+ // For example, if no audiences are in the setting, LibraryService API will + // accept JWTs with the following audiences: + // - + // https://library-example.googleapis.com/google.example.library.v1.LibraryService + // - https://library-example.googleapis.com/ + // + // Example: + // + // audiences: bookstore_android.apps.googleusercontent.com, + // bookstore_web.apps.googleusercontent.com + string audiences = 4; + + // Redirect URL if JWT token is required but not present or is expired. + // Implement authorizationUrl of securityDefinitions in OpenAPI spec. + string authorization_url = 5; + + // Defines the locations to extract the JWT. + // + // JWT locations can be either from HTTP headers or URL query parameters. + // The rule is that the first match wins. The checking order is: checking + // all headers first, then URL query parameters. + // + // If not specified, default to use following 3 locations: + // 1) Authorization: Bearer + // 2) x-goog-iap-jwt-assertion + // 3) access_token query parameter + // + // Default locations can be specified as followings: + // jwt_locations: + // - header: Authorization + // value_prefix: "Bearer " + // - header: x-goog-iap-jwt-assertion + // - query: access_token + repeated JwtLocation jwt_locations = 6; +} + +// OAuth scopes are a way to define data and permissions on data. For example, +// there are scopes defined for "Read-only access to Google Calendar" and +// "Access to Cloud Platform". Users can consent to a scope for an application, +// giving it permission to access that data on their behalf. +// +// OAuth scope specifications should be fairly coarse grained; a user will need +// to see and understand the text description of what your scope means. +// +// In most cases: use one or at most two OAuth scopes for an entire family of +// products. If your product has multiple APIs, you should probably be sharing +// the OAuth scope across all of those APIs. +// +// When you need finer grained OAuth consent screens: talk with your product +// management about how developers will use them in practice. +// +// Please note that even though each of the canonical scopes is enough for a +// request to be accepted and passed to the backend, a request can still fail +// due to the backend requiring additional scopes or permissions. +message OAuthRequirements { + // The list of publicly documented OAuth scopes that are allowed access. An + // OAuth token containing any of these scopes will be accepted. + // + // Example: + // + // canonical_scopes: https://www.googleapis.com/auth/calendar, + // https://www.googleapis.com/auth/calendar.read + string canonical_scopes = 1; +} + +// User-defined authentication requirements, including support for +// [JSON Web Token +// (JWT)](https://tools.ietf.org/html/draft-ietf-oauth-json-web-token-32). +message AuthRequirement { + // [id][google.api.AuthProvider.id] from authentication provider. + // + // Example: + // + // provider_id: bookstore_auth + string provider_id = 1; + + // NOTE: This will be deprecated soon, once AuthProvider.audiences is + // implemented and accepted in all the runtime components. + // + // The list of JWT + // [audiences](https://tools.ietf.org/html/draft-ietf-oauth-json-web-token-32#section-4.1.3). + // that are allowed to access. A JWT containing any of these audiences will + // be accepted. When this setting is absent, only JWTs with audience + // "https://[Service_name][google.api.Service.name]/[API_name][google.protobuf.Api.name]" + // will be accepted. 
For example, if no audiences are in the setting, + // LibraryService API will only accept JWTs with the following audience + // "https://library-example.googleapis.com/google.example.library.v1.LibraryService". + // + // Example: + // + // audiences: bookstore_android.apps.googleusercontent.com, + // bookstore_web.apps.googleusercontent.com + string audiences = 2; +} diff --git a/_13_sponge-dtm-cache/http/third_party/google/api/backend.proto b/_13_sponge-dtm-cache/http/third_party/google/api/backend.proto new file mode 100644 index 0000000..729bccd --- /dev/null +++ b/_13_sponge-dtm-cache/http/third_party/google/api/backend.proto @@ -0,0 +1,182 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.api; + +option go_package = "google.golang.org/genproto/googleapis/api/serviceconfig;serviceconfig"; +option java_multiple_files = true; +option java_outer_classname = "BackendProto"; +option java_package = "com.google.api"; +option objc_class_prefix = "GAPI"; + +// `Backend` defines the backend configuration for a service. +message Backend { + // A list of API backend rules that apply to individual API methods. + // + // **NOTE:** All service configuration rules follow "last one wins" order. + repeated BackendRule rules = 1; +} + +// A backend rule provides configuration for an individual API element. +message BackendRule { + // Path Translation specifies how to combine the backend address with the + // request path in order to produce the appropriate forwarding URL for the + // request. + // + // Path Translation is applicable only to HTTP-based backends. Backends which + // do not accept requests over HTTP/HTTPS should leave `path_translation` + // unspecified. + enum PathTranslation { + PATH_TRANSLATION_UNSPECIFIED = 0; + + // Use the backend address as-is, with no modification to the path. If the + // URL pattern contains variables, the variable names and values will be + // appended to the query string. If a query string parameter and a URL + // pattern variable have the same name, this may result in duplicate keys in + // the query string. + // + // # Examples + // + // Given the following operation config: + // + // Method path: /api/company/{cid}/user/{uid} + // Backend address: https://example.cloudfunctions.net/getUser + // + // Requests to the following request paths will call the backend at the + // translated path: + // + // Request path: /api/company/widgetworks/user/johndoe + // Translated: + // https://example.cloudfunctions.net/getUser?cid=widgetworks&uid=johndoe + // + // Request path: /api/company/widgetworks/user/johndoe?timezone=EST + // Translated: + // https://example.cloudfunctions.net/getUser?timezone=EST&cid=widgetworks&uid=johndoe + CONSTANT_ADDRESS = 1; + + // The request path will be appended to the backend address. 
+ // + // # Examples + // + // Given the following operation config: + // + // Method path: /api/company/{cid}/user/{uid} + // Backend address: https://example.appspot.com + // + // Requests to the following request paths will call the backend at the + // translated path: + // + // Request path: /api/company/widgetworks/user/johndoe + // Translated: + // https://example.appspot.com/api/company/widgetworks/user/johndoe + // + // Request path: /api/company/widgetworks/user/johndoe?timezone=EST + // Translated: + // https://example.appspot.com/api/company/widgetworks/user/johndoe?timezone=EST + APPEND_PATH_TO_ADDRESS = 2; + } + + // Selects the methods to which this rule applies. + // + // Refer to [selector][google.api.DocumentationRule.selector] for syntax details. + string selector = 1; + + // The address of the API backend. + // + // The scheme is used to determine the backend protocol and security. + // The following schemes are accepted: + // + // SCHEME PROTOCOL SECURITY + // http:// HTTP None + // https:// HTTP TLS + // grpc:// gRPC None + // grpcs:// gRPC TLS + // + // It is recommended to explicitly include a scheme. Leaving out the scheme + // may cause constrasting behaviors across platforms. + // + // If the port is unspecified, the default is: + // - 80 for schemes without TLS + // - 443 for schemes with TLS + // + // For HTTP backends, use [protocol][google.api.BackendRule.protocol] + // to specify the protocol version. + string address = 2; + + // The number of seconds to wait for a response from a request. The default + // varies based on the request protocol and deployment environment. + double deadline = 3; + + // Minimum deadline in seconds needed for this method. Calls having deadline + // value lower than this will be rejected. + double min_deadline = 4; + + // The number of seconds to wait for the completion of a long running + // operation. The default is no deadline. + double operation_deadline = 5; + + PathTranslation path_translation = 6; + + // Authentication settings used by the backend. + // + // These are typically used to provide service management functionality to + // a backend served on a publicly-routable URL. The `authentication` + // details should match the authentication behavior used by the backend. + // + // For example, specifying `jwt_audience` implies that the backend expects + // authentication via a JWT. + // + // When authentication is unspecified, the resulting behavior is the same + // as `disable_auth` set to `true`. + // + // Refer to https://developers.google.com/identity/protocols/OpenIDConnect for + // JWT ID token. + oneof authentication { + // The JWT audience is used when generating a JWT ID token for the backend. + // This ID token will be added in the HTTP "authorization" header, and sent + // to the backend. + string jwt_audience = 7; + + // When disable_auth is true, a JWT ID token won't be generated and the + // original "Authorization" HTTP header will be preserved. If the header is + // used to carry the original token and is expected by the backend, this + // field must be set to true to preserve the header. + bool disable_auth = 8; + } + + // The protocol used for sending a request to the backend. + // The supported values are "http/1.1" and "h2". 
+ // + // The default value is inferred from the scheme in the + // [address][google.api.BackendRule.address] field: + // + // SCHEME PROTOCOL + // http:// http/1.1 + // https:// http/1.1 + // grpc:// h2 + // grpcs:// h2 + // + // For secure HTTP backends (https://) that support HTTP/2, set this field + // to "h2" for improved performance. + // + // Configuring this field to non-default values is only supported for secure + // HTTP backends. This field will be ignored for all other backends. + // + // See + // https://www.iana.org/assignments/tls-extensiontype-values/tls-extensiontype-values.xhtml#alpn-protocol-ids + // for more details on the supported values. + string protocol = 9; +} diff --git a/_13_sponge-dtm-cache/http/third_party/google/api/billing.proto b/_13_sponge-dtm-cache/http/third_party/google/api/billing.proto new file mode 100644 index 0000000..8e70388 --- /dev/null +++ b/_13_sponge-dtm-cache/http/third_party/google/api/billing.proto @@ -0,0 +1,77 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.api; + +import "google/api/metric.proto"; + +option go_package = "google.golang.org/genproto/googleapis/api/serviceconfig;serviceconfig"; +option java_multiple_files = true; +option java_outer_classname = "BillingProto"; +option java_package = "com.google.api"; +option objc_class_prefix = "GAPI"; + +// Billing related configuration of the service. +// +// The following example shows how to configure monitored resources and metrics +// for billing, `consumer_destinations` is the only supported destination and +// the monitored resources need at least one label key +// `cloud.googleapis.com/location` to indicate the location of the billing +// usage, using different monitored resources between monitoring and billing is +// recommended so they can be evolved independently: +// +// +// monitored_resources: +// - type: library.googleapis.com/billing_branch +// labels: +// - key: cloud.googleapis.com/location +// description: | +// Predefined label to support billing location restriction. +// - key: city +// description: | +// Custom label to define the city where the library branch is located +// in. +// - key: name +// description: Custom label to define the name of the library branch. +// metrics: +// - name: library.googleapis.com/book/borrowed_count +// metric_kind: DELTA +// value_type: INT64 +// unit: "1" +// billing: +// consumer_destinations: +// - monitored_resource: library.googleapis.com/billing_branch +// metrics: +// - library.googleapis.com/book/borrowed_count +message Billing { + // Configuration of a specific billing destination (Currently only support + // bill against consumer project). + message BillingDestination { + // The monitored resource type. The type must be defined in + // [Service.monitored_resources][google.api.Service.monitored_resources] section. + string monitored_resource = 1; + + // Names of the metrics to report to this billing destination. 
+ // Each name must be defined in [Service.metrics][google.api.Service.metrics] section. + repeated string metrics = 2; + } + + // Billing configurations for sending metrics to the consumer project. + // There can be multiple consumer destinations per service, each one must have + // a different monitored resource type. A metric can be used in at most + // one consumer destination. + repeated BillingDestination consumer_destinations = 8; +} diff --git a/_13_sponge-dtm-cache/http/third_party/google/api/client.proto b/_13_sponge-dtm-cache/http/third_party/google/api/client.proto new file mode 100644 index 0000000..2102623 --- /dev/null +++ b/_13_sponge-dtm-cache/http/third_party/google/api/client.proto @@ -0,0 +1,99 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.api; + +import "google/protobuf/descriptor.proto"; + +option go_package = "google.golang.org/genproto/googleapis/api/annotations;annotations"; +option java_multiple_files = true; +option java_outer_classname = "ClientProto"; +option java_package = "com.google.api"; +option objc_class_prefix = "GAPI"; + +extend google.protobuf.MethodOptions { + // A definition of a client library method signature. + // + // In client libraries, each proto RPC corresponds to one or more methods + // which the end user is able to call, and calls the underlying RPC. + // Normally, this method receives a single argument (a struct or instance + // corresponding to the RPC request object). Defining this field will + // add one or more overloads providing flattened or simpler method signatures + // in some languages. + // + // The fields on the method signature are provided as a comma-separated + // string. + // + // For example, the proto RPC and annotation: + // + // rpc CreateSubscription(CreateSubscriptionRequest) + // returns (Subscription) { + // option (google.api.method_signature) = "name,topic"; + // } + // + // Would add the following Java overload (in addition to the method accepting + // the request object): + // + // public final Subscription createSubscription(String name, String topic) + // + // The following backwards-compatibility guidelines apply: + // + // * Adding this annotation to an unannotated method is backwards + // compatible. + // * Adding this annotation to a method which already has existing + // method signature annotations is backwards compatible if and only if + // the new method signature annotation is last in the sequence. + // * Modifying or removing an existing method signature annotation is + // a breaking change. + // * Re-ordering existing method signature annotations is a breaking + // change. + repeated string method_signature = 1051; +} + +extend google.protobuf.ServiceOptions { + // The hostname for this service. + // This should be specified with no prefix or protocol. + // + // Example: + // + // service Foo { + // option (google.api.default_host) = "foo.googleapi.com"; + // ... 
+ // } + string default_host = 1049; + + // OAuth scopes needed for the client. + // + // Example: + // + // service Foo { + // option (google.api.oauth_scopes) = \ + // "https://www.googleapis.com/auth/cloud-platform"; + // ... + // } + // + // If there is more than one scope, use a comma-separated string: + // + // Example: + // + // service Foo { + // option (google.api.oauth_scopes) = \ + // "https://www.googleapis.com/auth/cloud-platform," + // "https://www.googleapis.com/auth/monitoring"; + // ... + // } + string oauth_scopes = 1050; +} diff --git a/_13_sponge-dtm-cache/http/third_party/google/api/config_change.proto b/_13_sponge-dtm-cache/http/third_party/google/api/config_change.proto new file mode 100644 index 0000000..953bb13 --- /dev/null +++ b/_13_sponge-dtm-cache/http/third_party/google/api/config_change.proto @@ -0,0 +1,84 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.api; + +option go_package = "google.golang.org/genproto/googleapis/api/configchange;configchange"; +option java_multiple_files = true; +option java_outer_classname = "ConfigChangeProto"; +option java_package = "com.google.api"; +option objc_class_prefix = "GAPI"; + +// Output generated from semantically comparing two versions of a service +// configuration. +// +// Includes detailed information about a field that have changed with +// applicable advice about potential consequences for the change, such as +// backwards-incompatibility. +message ConfigChange { + // Object hierarchy path to the change, with levels separated by a '.' + // character. For repeated fields, an applicable unique identifier field is + // used for the index (usually selector, name, or id). For maps, the term + // 'key' is used. If the field has no unique identifier, the numeric index + // is used. + // Examples: + // - visibility.rules[selector=="google.LibraryService.ListBooks"].restriction + // - quota.metric_rules[selector=="google"].metric_costs[key=="reads"].value + // - logging.producer_destinations[0] + string element = 1; + + // Value of the changed object in the old Service configuration, + // in JSON format. This field will not be populated if ChangeType == ADDED. + string old_value = 2; + + // Value of the changed object in the new Service configuration, + // in JSON format. This field will not be populated if ChangeType == REMOVED. + string new_value = 3; + + // The type for this change, either ADDED, REMOVED, or MODIFIED. + ChangeType change_type = 4; + + // Collection of advice provided for this change, useful for determining the + // possible impact of this change. + repeated Advice advices = 5; +} + +// Generated advice about this change, used for providing more +// information about how a change will affect the existing service. +message Advice { + // Useful description for why this advice was applied and what actions should + // be taken to mitigate any implied risks. 
+ string description = 2; +} + +// Classifies set of possible modifications to an object in the service +// configuration. +enum ChangeType { + // No value was provided. + CHANGE_TYPE_UNSPECIFIED = 0; + + // The changed object exists in the 'new' service configuration, but not + // in the 'old' service configuration. + ADDED = 1; + + // The changed object exists in the 'old' service configuration, but not + // in the 'new' service configuration. + REMOVED = 2; + + // The changed object exists in both service configurations, but its value + // is different. + MODIFIED = 3; +} diff --git a/_13_sponge-dtm-cache/http/third_party/google/api/consumer.proto b/_13_sponge-dtm-cache/http/third_party/google/api/consumer.proto new file mode 100644 index 0000000..0facc2e --- /dev/null +++ b/_13_sponge-dtm-cache/http/third_party/google/api/consumer.proto @@ -0,0 +1,82 @@ +// Copyright 2016 Google Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.api; + +option go_package = "google.golang.org/genproto/googleapis/api/serviceconfig;serviceconfig"; +option java_multiple_files = true; +option java_outer_classname = "ConsumerProto"; +option java_package = "com.google.api"; + +// A descriptor for defining project properties for a service. One service may +// have many consumer projects, and the service may want to behave differently +// depending on some properties on the project. For example, a project may be +// associated with a school, or a business, or a government agency, a business +// type property on the project may affect how a service responds to the client. +// This descriptor defines which properties are allowed to be set on a project. +// +// Example: +// +// project_properties: +// properties: +// - name: NO_WATERMARK +// type: BOOL +// description: Allows usage of the API without watermarks. +// - name: EXTENDED_TILE_CACHE_PERIOD +// type: INT64 +message ProjectProperties { + // List of per consumer project-specific properties. + repeated Property properties = 1; +} + +// Defines project properties. +// +// API services can define properties that can be assigned to consumer projects +// so that backends can perform response customization without having to make +// additional calls or maintain additional storage. For example, Maps API +// defines properties that controls map tile cache period, or whether to embed a +// watermark in a result. +// +// These values can be set via API producer console. Only API providers can +// define and set these properties. +message Property { + // Supported data type of the property values + enum PropertyType { + // The type is unspecified, and will result in an error. + UNSPECIFIED = 0; + + // The type is `int64`. + INT64 = 1; + + // The type is `bool`. + BOOL = 2; + + // The type is `string`. + STRING = 3; + + // The type is 'double'. + DOUBLE = 4; + } + + // The name of the property (a.k.a key). + string name = 1; + + // The type of this property. 
+ PropertyType type = 2; + + // The description of the property + string description = 3; +} diff --git a/_13_sponge-dtm-cache/http/third_party/google/api/context.proto b/_13_sponge-dtm-cache/http/third_party/google/api/context.proto new file mode 100644 index 0000000..20c919f --- /dev/null +++ b/_13_sponge-dtm-cache/http/third_party/google/api/context.proto @@ -0,0 +1,89 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.api; + +option go_package = "google.golang.org/genproto/googleapis/api/serviceconfig;serviceconfig"; +option java_multiple_files = true; +option java_outer_classname = "ContextProto"; +option java_package = "com.google.api"; +option objc_class_prefix = "GAPI"; + +// `Context` defines which contexts an API requests. +// +// Example: +// +// context: +// rules: +// - selector: "*" +// requested: +// - google.rpc.context.ProjectContext +// - google.rpc.context.OriginContext +// +// The above specifies that all methods in the API request +// `google.rpc.context.ProjectContext` and +// `google.rpc.context.OriginContext`. +// +// Available context types are defined in package +// `google.rpc.context`. +// +// This also provides mechanism to allowlist any protobuf message extension that +// can be sent in grpc metadata using "x-goog-ext--bin" and +// "x-goog-ext--jspb" format. For example, list any service +// specific protobuf types that can appear in grpc metadata as follows in your +// yaml file: +// +// Example: +// +// context: +// rules: +// - selector: "google.example.library.v1.LibraryService.CreateBook" +// allowed_request_extensions: +// - google.foo.v1.NewExtension +// allowed_response_extensions: +// - google.foo.v1.NewExtension +// +// You can also specify extension ID instead of fully qualified extension name +// here. +message Context { + // A list of RPC context rules that apply to individual API methods. + // + // **NOTE:** All service configuration rules follow "last one wins" order. + repeated ContextRule rules = 1; +} + +// A context rule provides information about the context for an individual API +// element. +message ContextRule { + // Selects the methods to which this rule applies. + // + // Refer to [selector][google.api.DocumentationRule.selector] for syntax details. + string selector = 1; + + // A list of full type names of requested contexts. + repeated string requested = 2; + + // A list of full type names of provided contexts. + repeated string provided = 3; + + // A list of full type names or extension IDs of extensions allowed in grpc + // side channel from client to backend. + repeated string allowed_request_extensions = 4; + + // A list of full type names or extension IDs of extensions allowed in grpc + // side channel from backend to client. 
+ repeated string allowed_response_extensions = 5; +} diff --git a/_13_sponge-dtm-cache/http/third_party/google/api/control.proto b/_13_sponge-dtm-cache/http/third_party/google/api/control.proto new file mode 100644 index 0000000..9fdc881 --- /dev/null +++ b/_13_sponge-dtm-cache/http/third_party/google/api/control.proto @@ -0,0 +1,32 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.api; + +option go_package = "google.golang.org/genproto/googleapis/api/serviceconfig;serviceconfig"; +option java_multiple_files = true; +option java_outer_classname = "ControlProto"; +option java_package = "com.google.api"; +option objc_class_prefix = "GAPI"; + +// Selects and configures the service controller used by the service. The +// service controller handles features like abuse, quota, billing, logging, +// monitoring, etc. +message Control { + // The service control environment to use. If empty, no control plane + // feature (like quota and billing) will be enabled. + string environment = 1; +} diff --git a/_13_sponge-dtm-cache/http/third_party/google/api/distribution.proto b/_13_sponge-dtm-cache/http/third_party/google/api/distribution.proto new file mode 100644 index 0000000..b6f55ce --- /dev/null +++ b/_13_sponge-dtm-cache/http/third_party/google/api/distribution.proto @@ -0,0 +1,211 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.api; + +import "google/protobuf/any.proto"; +import "google/protobuf/timestamp.proto"; + +option go_package = "google.golang.org/genproto/googleapis/api/distribution;distribution"; +option java_multiple_files = true; +option java_outer_classname = "DistributionProto"; +option java_package = "com.google.api"; +option objc_class_prefix = "GAPI"; + +// `Distribution` contains summary statistics for a population of values. It +// optionally contains a histogram representing the distribution of those values +// across a set of buckets. +// +// The summary statistics are the count, mean, sum of the squared deviation from +// the mean, the minimum, and the maximum of the set of population of values. +// The histogram is based on a sequence of buckets and gives a count of values +// that fall into each bucket. The boundaries of the buckets are given either +// explicitly or by formulas for buckets of fixed or exponentially increasing +// widths. 
+// +// Although it is not forbidden, it is generally a bad idea to include +// non-finite values (infinities or NaNs) in the population of values, as this +// will render the `mean` and `sum_of_squared_deviation` fields meaningless. +message Distribution { + // The range of the population values. + message Range { + // The minimum of the population values. + double min = 1; + + // The maximum of the population values. + double max = 2; + } + + // `BucketOptions` describes the bucket boundaries used to create a histogram + // for the distribution. The buckets can be in a linear sequence, an + // exponential sequence, or each bucket can be specified explicitly. + // `BucketOptions` does not include the number of values in each bucket. + // + // A bucket has an inclusive lower bound and exclusive upper bound for the + // values that are counted for that bucket. The upper bound of a bucket must + // be strictly greater than the lower bound. The sequence of N buckets for a + // distribution consists of an underflow bucket (number 0), zero or more + // finite buckets (number 1 through N - 2) and an overflow bucket (number N - + // 1). The buckets are contiguous: the lower bound of bucket i (i > 0) is the + // same as the upper bound of bucket i - 1. The buckets span the whole range + // of finite values: lower bound of the underflow bucket is -infinity and the + // upper bound of the overflow bucket is +infinity. The finite buckets are + // so-called because both bounds are finite. + message BucketOptions { + // Specifies a linear sequence of buckets that all have the same width + // (except overflow and underflow). Each bucket represents a constant + // absolute uncertainty on the specific value in the bucket. + // + // There are `num_finite_buckets + 2` (= N) buckets. Bucket `i` has the + // following boundaries: + // + // Upper bound (0 <= i < N-1): offset + (width * i). + // Lower bound (1 <= i < N): offset + (width * (i - 1)). + message Linear { + // Must be greater than 0. + int32 num_finite_buckets = 1; + + // Must be greater than 0. + double width = 2; + + // Lower bound of the first bucket. + double offset = 3; + } + + // Specifies an exponential sequence of buckets that have a width that is + // proportional to the value of the lower bound. Each bucket represents a + // constant relative uncertainty on a specific value in the bucket. + // + // There are `num_finite_buckets + 2` (= N) buckets. Bucket `i` has the + // following boundaries: + // + // Upper bound (0 <= i < N-1): scale * (growth_factor ^ i). + // Lower bound (1 <= i < N): scale * (growth_factor ^ (i - 1)). + message Exponential { + // Must be greater than 0. + int32 num_finite_buckets = 1; + + // Must be greater than 1. + double growth_factor = 2; + + // Must be greater than 0. + double scale = 3; + } + + // Specifies a set of buckets with arbitrary widths. + // + // There are `size(bounds) + 1` (= N) buckets. Bucket `i` has the following + // boundaries: + // + // Upper bound (0 <= i < N-1): bounds[i] + // Lower bound (1 <= i < N); bounds[i - 1] + // + // The `bounds` field must contain at least one element. If `bounds` has + // only one element, then there are no finite buckets, and that single + // element is the common boundary of the overflow and underflow buckets. + message Explicit { + // The values must be monotonically increasing. + repeated double bounds = 1; + } + + // Exactly one of these three fields must be set. + oneof options { + // The linear bucket. 
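+      //
+      // Illustrative sketch (values invented for this note, not taken from the
+      // upstream file): with `num_finite_buckets = 2`, `width = 10`, and
+      // `offset = 0`, the `Linear` formulas above yield N = 4 buckets:
+      //
+      //     (-inf, 0), [0, 10), [10, 20), [20, +inf)
+      //
+      // that is, one underflow bucket, two finite buckets of equal width,
+      // and one overflow bucket.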
+ Linear linear_buckets = 1; + + // The exponential buckets. + Exponential exponential_buckets = 2; + + // The explicit buckets. + Explicit explicit_buckets = 3; + } + } + + // Exemplars are example points that may be used to annotate aggregated + // distribution values. They are metadata that gives information about a + // particular value added to a Distribution bucket, such as a trace ID that + // was active when a value was added. They may contain further information, + // such as a example values and timestamps, origin, etc. + message Exemplar { + // Value of the exemplar point. This value determines to which bucket the + // exemplar belongs. + double value = 1; + + // The observation (sampling) time of the above value. + google.protobuf.Timestamp timestamp = 2; + + // Contextual information about the example value. Examples are: + // + // Trace: type.googleapis.com/google.monitoring.v3.SpanContext + // + // Literal string: type.googleapis.com/google.protobuf.StringValue + // + // Labels dropped during aggregation: + // type.googleapis.com/google.monitoring.v3.DroppedLabels + // + // There may be only a single attachment of any given message type in a + // single exemplar, and this is enforced by the system. + repeated google.protobuf.Any attachments = 3; + } + + // The number of values in the population. Must be non-negative. This value + // must equal the sum of the values in `bucket_counts` if a histogram is + // provided. + int64 count = 1; + + // The arithmetic mean of the values in the population. If `count` is zero + // then this field must be zero. + double mean = 2; + + // The sum of squared deviations from the mean of the values in the + // population. For values x_i this is: + // + // Sum[i=1..n]((x_i - mean)^2) + // + // Knuth, "The Art of Computer Programming", Vol. 2, page 232, 3rd edition + // describes Welford's method for accumulating this sum in one pass. + // + // If `count` is zero then this field must be zero. + double sum_of_squared_deviation = 3; + + // If specified, contains the range of the population values. The field + // must not be present if the `count` is zero. + Range range = 4; + + // Defines the histogram bucket boundaries. If the distribution does not + // contain a histogram, then omit this field. + BucketOptions bucket_options = 6; + + // The number of values in each bucket of the histogram, as described in + // `bucket_options`. If the distribution does not have a histogram, then omit + // this field. If there is a histogram, then the sum of the values in + // `bucket_counts` must equal the value in the `count` field of the + // distribution. + // + // If present, `bucket_counts` should contain N values, where N is the number + // of buckets specified in `bucket_options`. If you supply fewer than N + // values, the remaining values are assumed to be 0. + // + // The order of the values in `bucket_counts` follows the bucket numbering + // schemes described for the three bucket types. The first value must be the + // count for the underflow bucket (number 0). The next N-2 values are the + // counts for the finite buckets (number 1 through N-2). The N'th value in + // `bucket_counts` is the count for the overflow bucket (number N-1). + repeated int64 bucket_counts = 7; + + // Must be in increasing order of `value` field. 
+ repeated Exemplar exemplars = 10; +} diff --git a/_13_sponge-dtm-cache/http/third_party/google/api/documentation.proto b/_13_sponge-dtm-cache/http/third_party/google/api/documentation.proto new file mode 100644 index 0000000..2334b52 --- /dev/null +++ b/_13_sponge-dtm-cache/http/third_party/google/api/documentation.proto @@ -0,0 +1,162 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.api; + +option go_package = "google.golang.org/genproto/googleapis/api/serviceconfig;serviceconfig"; +option java_multiple_files = true; +option java_outer_classname = "DocumentationProto"; +option java_package = "com.google.api"; +option objc_class_prefix = "GAPI"; + +// `Documentation` provides the information for describing a service. +// +// Example: +//
+// documentation:
+//   summary: >
+//     The Google Calendar API gives access
+//     to most calendar features.
+//   pages:
+//   - name: Overview
+//     content: (== include google/foo/overview.md ==)
+//   - name: Tutorial
+//     content: (== include google/foo/tutorial.md ==)
+//     subpages:
+//     - name: Java
+//       content: (== include google/foo/tutorial_java.md ==)
+//   rules:
+//   - selector: google.calendar.Calendar.Get
+//     description: >
+//       ...
+//   - selector: google.calendar.Calendar.Put
+//     description: >
+//       ...
+// 
+// Documentation is provided in markdown syntax. In addition to +// standard markdown features, definition lists, tables and fenced +// code blocks are supported. Section headers can be provided and are +// interpreted relative to the section nesting of the context where +// a documentation fragment is embedded. +// +// Documentation from the IDL is merged with documentation defined +// via the config at normalization time, where documentation provided +// by config rules overrides IDL provided. +// +// A number of constructs specific to the API platform are supported +// in documentation text. +// +// In order to reference a proto element, the following +// notation can be used: +//
+//     [fully.qualified.proto.name][]
+// To override the display text used for the link, this can be used: +//
+//     [display text][fully.qualified.proto.name]
+// Text can be excluded from doc using the following notation: +//
+//     (-- internal comment --)
+// +// A few directives are available in documentation. Note that +// directives must appear on a single line to be properly +// identified. The `include` directive includes a markdown file from +// an external source: +//
+//     (== include path/to/file ==)
+// The `resource_for` directive marks a message to be the resource of +// a collection in REST view. If it is not specified, tools attempt +// to infer the resource from the operations in a collection: +//
+//     (== resource_for v1.shelves.books ==)
+// The directive `suppress_warning` does not directly affect documentation +// and is documented together with service config validation. +message Documentation { + // A short summary of what the service does. Can only be provided by + // plain text. + string summary = 1; + + // The top level pages for the documentation set. + repeated Page pages = 5; + + // A list of documentation rules that apply to individual API elements. + // + // **NOTE:** All service configuration rules follow "last one wins" order. + repeated DocumentationRule rules = 3; + + // The URL to the root of documentation. + string documentation_root_url = 4; + + // Specifies the service root url if the default one (the service name + // from the yaml file) is not suitable. This can be seen in any fully + // specified service urls as well as sections that show a base that other + // urls are relative to. + string service_root_url = 6; + + // Declares a single overview page. For example: + //
+  // documentation:
+  //   summary: ...
+  //   overview: (== include overview.md ==)
+  // 
+ // This is a shortcut for the following declaration (using pages style): + //
+  // documentation:
+  //   summary: ...
+  //   pages:
+  //   - name: Overview
+  //     content: (== include overview.md ==)
+  // 
+ // Note: you cannot specify both `overview` field and `pages` field. + string overview = 2; +} + +// A documentation rule provides information about individual API elements. +message DocumentationRule { + // The selector is a comma-separated list of patterns. Each pattern is a + // qualified name of the element which may end in "*", indicating a wildcard. + // Wildcards are only allowed at the end and for a whole component of the + // qualified name, i.e. "foo.*" is ok, but not "foo.b*" or "foo.*.bar". A + // wildcard will match one or more components. To specify a default for all + // applicable elements, the whole pattern "*" is used. + string selector = 1; + + // Description of the selected API(s). + string description = 2; + + // Deprecation description of the selected element(s). It can be provided if + // an element is marked as `deprecated`. + string deprecation_description = 3; +} + +// Represents a documentation page. A page can contain subpages to represent +// nested documentation set structure. +message Page { + // The name of the page. It will be used as an identity of the page to + // generate URI of the page, text of the link to this page in navigation, + // etc. The full page name (start from the root page name to this page + // concatenated with `.`) can be used as reference to the page in your + // documentation. For example: + //
+  // pages:
+  // - name: Tutorial
+  //   content: (== include tutorial.md ==)
+  //   subpages:
+  //   - name: Java
+  //     content: (== include tutorial_java.md ==)
+  // 
+ // You can reference `Java` page using Markdown reference link syntax: + // `[Java][Tutorial.Java]`. + string name = 1; + + // The Markdown content of the page. You can use (== include {path} + // ==) to include content from a Markdown file. + string content = 2; + + // Subpages of this page. The order of subpages specified here will be + // honored in the generated docset. + repeated Page subpages = 3; +} diff --git a/_13_sponge-dtm-cache/http/third_party/google/api/endpoint.proto b/_13_sponge-dtm-cache/http/third_party/google/api/endpoint.proto new file mode 100644 index 0000000..6843a88 --- /dev/null +++ b/_13_sponge-dtm-cache/http/third_party/google/api/endpoint.proto @@ -0,0 +1,66 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.api; + +option go_package = "google.golang.org/genproto/googleapis/api/serviceconfig;serviceconfig"; +option java_multiple_files = true; +option java_outer_classname = "EndpointProto"; +option java_package = "com.google.api"; +option objc_class_prefix = "GAPI"; + +// `Endpoint` describes a network endpoint that serves a set of APIs. +// A service may expose any number of endpoints, and all endpoints share the +// same service configuration, such as quota configuration and monitoring +// configuration. +// +// Example service configuration: +// +// name: library-example.googleapis.com +// endpoints: +// # Below entry makes 'google.example.library.v1.Library' +// # API be served from endpoint address library-example.googleapis.com. +// # It also allows HTTP OPTIONS calls to be passed to the backend, for +// # it to decide whether the subsequent cross-origin request is +// # allowed to proceed. +// - name: library-example.googleapis.com +// allow_cors: true +message Endpoint { + // The canonical name of this endpoint. + string name = 1; + + // DEPRECATED: This field is no longer supported. Instead of using aliases, + // please specify multiple [google.api.Endpoint][google.api.Endpoint] for each of the intended + // aliases. + // + // Additional names that this endpoint will be hosted on. + repeated string aliases = 2 [deprecated = true]; + + // The specification of an Internet routable address of API frontend that will + // handle requests to this [API + // Endpoint](https://cloud.google.com/apis/design/glossary). It should be + // either a valid IPv4 address or a fully-qualified domain name. For example, + // "8.8.8.8" or "myservice.appspot.com". + string target = 101; + + // Allowing + // [CORS](https://en.wikipedia.org/wiki/Cross-origin_resource_sharing), aka + // cross-domain traffic, would allow the backends served from this endpoint to + // receive and respond to HTTP OPTIONS requests. The response will be used by + // the browser to determine whether the subsequent cross-origin request is + // allowed to proceed. 
+ bool allow_cors = 5; +} diff --git a/_13_sponge-dtm-cache/http/third_party/google/api/expr/BUILD.bazel b/_13_sponge-dtm-cache/http/third_party/google/api/expr/BUILD.bazel new file mode 100644 index 0000000..a87c57f --- /dev/null +++ b/_13_sponge-dtm-cache/http/third_party/google/api/expr/BUILD.bazel @@ -0,0 +1 @@ +exports_files(glob(["*.yaml"])) diff --git a/_13_sponge-dtm-cache/http/third_party/google/api/expr/cel.yaml b/_13_sponge-dtm-cache/http/third_party/google/api/expr/cel.yaml new file mode 100644 index 0000000..bbe7fbd --- /dev/null +++ b/_13_sponge-dtm-cache/http/third_party/google/api/expr/cel.yaml @@ -0,0 +1,61 @@ +type: google.api.Service +config_version: 3 +name: cel.googleapis.com +title: Common Expression Language + +apis: +- name: google.api.expr.v1alpha1.ConformanceService +- name: google.api.expr.v1alpha1.CelService + +documentation: + summary: Defines common types for the Common Expression Language. + overview: |- + # Common Expression Language + + The Common Expression Language (CEL) implements common semantics for + expression evaluation, enabling different applications to more easily + interoperate. + + Key Applications + + * Security policy: organization have complex infrastructure and need + common tooling to reason about the system as a whole * Protocols: + expressions are a useful data type and require interoperability across + programming languages and platforms. + + + + Guiding philosophy: + + 1. Keep it small & fast. * CEL evaluates in linear time, is mutation + free, and not Turing-complete. This limitation is a feature of the language + design, which allows the implementation to evaluate orders of magnitude + faster than equivalently sandboxed JavaScript. 2. Make it extensible. * + CEL is designed to be embedded in applications, and allows for extensibility + via its context which allows for functions and data to be provided by the + software that embeds it. 3. Developer-friendly * The language is + approachable to developers. The initial spec was based on the experience of + developing Firebase Rules and usability testing many prior iterations. * + The library itself and accompanying toolings should be easy to adopt by + teams that seek to integrate CEL into their platforms. + + The required components of a system that supports CEL are: + + * The textual representation of an expression as written by a developer. + It is of similar syntax of expressions in C/C++/Java/JavaScript * A binary + representation of an expression. It is an abstract syntax tree (AST). * A + compiler library that converts the textual representation to the binary + representation. This can be done ahead of time (in the control plane) or + just before evaluation (in the data plane). * A context containing one or + more typed variables, often protobuf messages. Most use-case will use + attribute_context.proto * An evaluator library that takes the binary + format in the context and produces a result, usually a Boolean. 
+ + Example of boolean conditions and object construction: + + ``` c // Condition account.balance >= transaction.withdrawal || + (account.overdraftProtection && account.overdraftLimit >= + transaction.withdrawal - account.balance) + + // Object construction common.GeoPoint{ latitude: 10.0, longitude: -5.5 } + ``` diff --git a/_13_sponge-dtm-cache/http/third_party/google/api/expr/v1alpha1/BUILD.bazel b/_13_sponge-dtm-cache/http/third_party/google/api/expr/v1alpha1/BUILD.bazel new file mode 100644 index 0000000..c2e842b --- /dev/null +++ b/_13_sponge-dtm-cache/http/third_party/google/api/expr/v1alpha1/BUILD.bazel @@ -0,0 +1,314 @@ +# This file was automatically generated by BuildFileGenerator + +# This is an API workspace, having public visibility by default makes perfect sense. +package(default_visibility = ["//visibility:public"]) + +############################################################################## +# Common +############################################################################## +load("@rules_proto//proto:defs.bzl", "proto_library") + +proto_library( + name = "expr_proto", + srcs = [ + "checked.proto", + "eval.proto", + "explain.proto", + "syntax.proto", + "value.proto", + ], + deps = [ + "//google/rpc:status_proto", + "@com_google_protobuf//:any_proto", + "@com_google_protobuf//:duration_proto", + "@com_google_protobuf//:empty_proto", + "@com_google_protobuf//:struct_proto", + "@com_google_protobuf//:timestamp_proto", + ], +) + +proto_library( + name = "conformance_service_proto", + srcs = [ + "conformance_service.proto", + ], + deps = [ + ":checked_proto", + ":eval_proto", + ":syntax_proto", + "//google/rpc:status_proto", + ], +) + +proto_library( + name = "checked_proto", + srcs = ["checked.proto"], + deps = [ + ":syntax_proto", + "@com_google_protobuf//:empty_proto", + "@com_google_protobuf//:struct_proto", + ], +) + +proto_library( + name = "eval_proto", + srcs = ["eval.proto"], + deps = [ + ":value_proto", + "//google/rpc:status_proto", + ], +) + +proto_library( + name = "explain_proto", + srcs = ["explain.proto"], + deps = [ + ":value_proto", + ], +) + +proto_library( + name = "syntax_proto", + srcs = ["syntax.proto"], + deps = [ + "@com_google_protobuf//:duration_proto", + "@com_google_protobuf//:struct_proto", + "@com_google_protobuf//:timestamp_proto", + ], +) + +proto_library( + name = "value_proto", + srcs = ["value.proto"], + deps = [ + "@com_google_protobuf//:any_proto", + "@com_google_protobuf//:struct_proto", + ], +) + +############################################################################## +# Java +############################################################################## +load( + "@com_google_googleapis_imports//:imports.bzl", + "java_grpc_library", + "java_proto_library", +) + +java_proto_library( + name = "expr_java_proto", + deps = [":expr_proto"], +) + +java_proto_library( + name = "conformance_service_java_proto", + deps = [":conformance_service_proto"], +) + +java_grpc_library( + name = "conformance_service_java_grpc", + srcs = [":conformance_service_proto"], + deps = [":conformance_service_java_proto"], +) + +############################################################################## +# Go +############################################################################## +load( + "@com_google_googleapis_imports//:imports.bzl", + "go_proto_library", +) + +go_proto_library( + name = "expr_go_proto", + compilers = ["@io_bazel_rules_go//proto:go_grpc"], + importpath = "google.golang.org/genproto/googleapis/api/expr/v1alpha1", + protos = 
[":expr_proto"], + deps = [ + "//google/rpc:status_go_proto", + ], +) + +go_proto_library( + name = "conformance_service_go_proto", + compilers = ["@io_bazel_rules_go//proto:go_grpc"], + importpath = "google.golang.org/genproto/googleapis/api/expr/conformance/v1alpha1", + protos = [":conformance_service_proto"], + deps = [ + ":expr_go_proto", + "//google/rpc:status_go_proto", + ], +) + + +############################################################################## +# Python +############################################################################## +load( + "@com_google_googleapis_imports//:imports.bzl", + "moved_proto_library", + "py_grpc_library", + "py_proto_library", +) + +moved_proto_library( + name = "expr_moved_proto", + srcs = [":expr_proto"], + deps = [ + "//google/rpc:status_proto", + "@com_google_protobuf//:any_proto", + "@com_google_protobuf//:duration_proto", + "@com_google_protobuf//:empty_proto", + "@com_google_protobuf//:struct_proto", + "@com_google_protobuf//:timestamp_proto", + ], +) + +moved_proto_library( + name = "conformance_service_moved_proto", + srcs = [":conformance_service_proto"], + deps = [ + ":expr_moved_proto", + "//google/rpc:status_proto", + ], +) + +py_proto_library( + name = "expr_py_proto", + plugin = "@protoc_docs_plugin//:docs_plugin", + deps = [":expr_moved_proto"], +) + +py_proto_library( + name = "conformance_service_py_proto", + plugin = "@protoc_docs_plugin//:docs_plugin", + deps = [":conformance_service_moved_proto"], +) + +py_grpc_library( + name = "conformance_service_py_grpc", + srcs = [":conformance_service_moved_proto"], + deps = [":conformance_service_py_proto"], +) + +############################################################################## +# PHP +############################################################################## +load( + "@com_google_googleapis_imports//:imports.bzl", + "php_grpc_library", + "php_proto_library", +) + +php_proto_library( + name = "expr_php_proto", + deps = [":expr_proto"], +) + +php_proto_library( + name = "conformance_service_php_proto", + deps = [":conformance_service_proto"], +) + +php_grpc_library( + name = "conformance_service_php_grpc", + srcs = [":conformance_service_proto"], + deps = [":conformance_service_php_proto"], +) + +############################################################################## +# Ruby +############################################################################## +load( + "@com_google_googleapis_imports//:imports.bzl", + "ruby_grpc_library", + "ruby_proto_library", +) + +ruby_proto_library( + name = "expr_ruby_proto", + deps = [":expr_proto"], +) + +ruby_proto_library( + name = "conformance_service_ruby_proto", + deps = [":conformance_service_proto"], +) + +ruby_grpc_library( + name = "conformance_service_ruby_grpc", + srcs = [":conformance_service_proto"], + deps = [":conformance_service_ruby_proto"], +) + +############################################################################## +# C# +############################################################################## +load( + "@com_google_googleapis_imports//:imports.bzl", + "csharp_grpc_library", + "csharp_proto_library", +) + +csharp_proto_library( + name = "expr_csharp_proto", + deps = [":expr_proto"], +) + +csharp_proto_library( + name = "conformance_service_csharp_proto", + deps = [":conformance_service_proto"], +) + +csharp_grpc_library( + name = "conformance_service_csharp_grpc", + srcs = [":conformance_service_proto"], + deps = [":conformance_service_csharp_proto"], +) + 
+############################################################################## +# C++ +############################################################################## +load( + "@com_google_googleapis_imports//:imports.bzl", + "cc_grpc_library", + "cc_proto_library", +) + +cc_proto_library( + name = "checked_cc_proto", + deps = [":checked_proto"], +) + +cc_proto_library( + name = "conformance_service_cc_proto", + deps = [":conformance_service_proto"], +) + +cc_grpc_library( + name = "conformance_service_cc_grpc", + srcs = [":conformance_service_proto"], + generate_mocks = True, + grpc_only = True, + deps = [":conformance_service_cc_proto"], +) + +cc_proto_library( + name = "eval_cc_proto", + deps = [":eval_proto"], +) + +cc_proto_library( + name = "explain_cc_proto", + deps = [":explain_proto"], +) + +cc_proto_library( + name = "syntax_cc_proto", + deps = [":syntax_proto"], +) + +cc_proto_library( + name = "value_cc_proto", + deps = [":value_proto"], +) diff --git a/_13_sponge-dtm-cache/http/third_party/google/api/expr/v1alpha1/checked.proto b/_13_sponge-dtm-cache/http/third_party/google/api/expr/v1alpha1/checked.proto new file mode 100644 index 0000000..e6333b0 --- /dev/null +++ b/_13_sponge-dtm-cache/http/third_party/google/api/expr/v1alpha1/checked.proto @@ -0,0 +1,330 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.api.expr.v1alpha1; + +import "google/api/expr/v1alpha1/syntax.proto"; +import "google/protobuf/empty.proto"; +import "google/protobuf/struct.proto"; + +option cc_enable_arenas = true; +option go_package = "google.golang.org/genproto/googleapis/api/expr/v1alpha1;expr"; +option java_multiple_files = true; +option java_outer_classname = "DeclProto"; +option java_package = "com.google.api.expr.v1alpha1"; + +// Protos for representing CEL declarations and typed checked expressions. + +// A CEL expression which has been successfully type checked. +message CheckedExpr { + // A map from expression ids to resolved references. + // + // The following entries are in this table: + // + // - An Ident or Select expression is represented here if it resolves to a + // declaration. For instance, if `a.b.c` is represented by + // `select(select(id(a), b), c)`, and `a.b` resolves to a declaration, + // while `c` is a field selection, then the reference is attached to the + // nested select expression (but not to the id or or the outer select). + // In turn, if `a` resolves to a declaration and `b.c` are field selections, + // the reference is attached to the ident expression. + // - Every Call expression has an entry here, identifying the function being + // called. + // - Every CreateStruct expression for a message has an entry, identifying + // the message. + map reference_map = 2; + + // A map from expression ids to types. + // + // Every expression node which has a type different than DYN has a mapping + // here. If an expression has type DYN, it is omitted from this map to save + // space. 
+ map type_map = 3; + + // The source info derived from input that generated the parsed `expr` and + // any optimizations made during the type-checking pass. + SourceInfo source_info = 5; + + // The checked expression. Semantically equivalent to the parsed `expr`, but + // may have structural differences. + Expr expr = 4; +} + +// Represents a CEL type. +message Type { + // List type with typed elements, e.g. `list`. + message ListType { + // The element type. + Type elem_type = 1; + } + + // Map type with parameterized key and value types, e.g. `map`. + message MapType { + // The type of the key. + Type key_type = 1; + + // The type of the value. + Type value_type = 2; + } + + // Function type with result and arg types. + message FunctionType { + // Result type of the function. + Type result_type = 1; + + // Argument types of the function. + repeated Type arg_types = 2; + } + + // Application defined abstract type. + message AbstractType { + // The fully qualified name of this abstract type. + string name = 1; + + // Parameter types for this abstract type. + repeated Type parameter_types = 2; + } + + // CEL primitive types. + enum PrimitiveType { + // Unspecified type. + PRIMITIVE_TYPE_UNSPECIFIED = 0; + + // Boolean type. + BOOL = 1; + + // Int64 type. + // + // Proto-based integer values are widened to int64. + INT64 = 2; + + // Uint64 type. + // + // Proto-based unsigned integer values are widened to uint64. + UINT64 = 3; + + // Double type. + // + // Proto-based float values are widened to double values. + DOUBLE = 4; + + // String type. + STRING = 5; + + // Bytes type. + BYTES = 6; + } + + // Well-known protobuf types treated with first-class support in CEL. + enum WellKnownType { + // Unspecified type. + WELL_KNOWN_TYPE_UNSPECIFIED = 0; + + // Well-known protobuf.Any type. + // + // Any types are a polymorphic message type. During type-checking they are + // treated like `DYN` types, but at runtime they are resolved to a specific + // message type specified at evaluation time. + ANY = 1; + + // Well-known protobuf.Timestamp type, internally referenced as `timestamp`. + TIMESTAMP = 2; + + // Well-known protobuf.Duration type, internally referenced as `duration`. + DURATION = 3; + } + + // The kind of type. + oneof type_kind { + // Dynamic type. + google.protobuf.Empty dyn = 1; + + // Null value. + google.protobuf.NullValue null = 2; + + // Primitive types: `true`, `1u`, `-2.0`, `'string'`, `b'bytes'`. + PrimitiveType primitive = 3; + + // Wrapper of a primitive type, e.g. `google.protobuf.Int64Value`. + PrimitiveType wrapper = 4; + + // Well-known protobuf type such as `google.protobuf.Timestamp`. + WellKnownType well_known = 5; + + // Parameterized list with elements of `list_type`, e.g. `list`. + ListType list_type = 6; + + // Parameterized map with typed keys and values. + MapType map_type = 7; + + // Function type. + FunctionType function = 8; + + // Protocol buffer message type. + // + // The `message_type` string specifies the qualified message type name. For + // example, `google.plus.Profile`. + string message_type = 9; + + // Type param type. + // + // The `type_param` string specifies the type parameter name, e.g. `list` + // would be a `list_type` whose element type was a `type_param` type + // named `E`. + string type_param = 10; + + // Type type. + // + // The `type` value specifies the target type. e.g. int is type with a + // target type of `Primitive.INT`. + Type type = 11; + + // Error type. 
+ // + // During type-checking if an expression is an error, its type is propagated + // as the `ERROR` type. This permits the type-checker to discover other + // errors present in the expression. + google.protobuf.Empty error = 12; + + // Abstract, application defined type. + AbstractType abstract_type = 14; + } +} + +// Represents a declaration of a named value or function. +// +// A declaration is part of the contract between the expression, the agent +// evaluating that expression, and the caller requesting evaluation. +message Decl { + // Identifier declaration which specifies its type and optional `Expr` value. + // + // An identifier without a value is a declaration that must be provided at + // evaluation time. An identifier with a value should resolve to a constant, + // but may be used in conjunction with other identifiers bound at evaluation + // time. + message IdentDecl { + // Required. The type of the identifier. + Type type = 1; + + // The constant value of the identifier. If not specified, the identifier + // must be supplied at evaluation time. + Constant value = 2; + + // Documentation string for the identifier. + string doc = 3; + } + + // Function declaration specifies one or more overloads which indicate the + // function's parameter types and return type, and may optionally specify a + // function definition in terms of CEL expressions. + // + // Functions have no observable side-effects (there may be side-effects like + // logging which are not observable from CEL). + message FunctionDecl { + // An overload indicates a function's parameter types and return type, and + // may optionally include a function body described in terms of [Expr][google.api.expr.v1alpha1.Expr] + // values. + // + // Functions overloads are declared in either a function or method + // call-style. For methods, the `params[0]` is the expected type of the + // target receiver. + // + // Overloads must have non-overlapping argument types after erasure of all + // parameterized type variables (similar as type erasure in Java). + message Overload { + // Required. Globally unique overload name of the function which reflects + // the function name and argument types. + // + // This will be used by a [Reference][google.api.expr.v1alpha1.Reference] to indicate the `overload_id` that + // was resolved for the function `name`. + string overload_id = 1; + + // List of function parameter [Type][google.api.expr.v1alpha1.Type] values. + // + // Param types are disjoint after generic type parameters have been + // replaced with the type `DYN`. Since the `DYN` type is compatible with + // any other type, this means that if `A` is a type parameter, the + // function types `int` and `int` are not disjoint. Likewise, + // `map` is not disjoint from `map`. + // + // When the `result_type` of a function is a generic type param, the + // type param name also appears as the `type` of on at least one params. + repeated Type params = 2; + + // The type param names associated with the function declaration. + // + // For example, `function ex(K key, map map) : V` would yield + // the type params of `K, V`. + repeated string type_params = 3; + + // Required. The result type of the function. For example, the operator + // `string.isEmpty()` would have `result_type` of `kind: BOOL`. + Type result_type = 4; + + // Whether the function is to be used in a method call-style `x.f(...)` + // of a function call-style `f(x, ...)`. 
+ // + // For methods, the first parameter declaration, `params[0]` is the + // expected type of the target receiver. + bool is_instance_function = 5; + + // Documentation string for the overload. + string doc = 6; + } + + // Required. List of function overloads, must contain at least one overload. + repeated Overload overloads = 1; + } + + // The fully qualified name of the declaration. + // + // Declarations are organized in containers and this represents the full path + // to the declaration in its container, as in `google.api.expr.Decl`. + // + // Declarations used as [FunctionDecl.Overload][google.api.expr.v1alpha1.Decl.FunctionDecl.Overload] parameters may or may not + // have a name depending on whether the overload is function declaration or a + // function definition containing a result [Expr][google.api.expr.v1alpha1.Expr]. + string name = 1; + + // Required. The declaration kind. + oneof decl_kind { + // Identifier declaration. + IdentDecl ident = 2; + + // Function declaration. + FunctionDecl function = 3; + } +} + +// Describes a resolved reference to a declaration. +message Reference { + // The fully qualified name of the declaration. + string name = 1; + + // For references to functions, this is a list of `Overload.overload_id` + // values which match according to typing rules. + // + // If the list has more than one element, overload resolution among the + // presented candidates must happen at runtime because of dynamic types. The + // type checker attempts to narrow down this list as much as possible. + // + // Empty if this is not a reference to a [Decl.FunctionDecl][google.api.expr.v1alpha1.Decl.FunctionDecl]. + repeated string overload_id = 3; + + // For references to constants, this may contain the value of the + // constant if known at compile time. + Constant value = 4; +} diff --git a/_13_sponge-dtm-cache/http/third_party/google/api/expr/v1alpha1/conformance_service.proto b/_13_sponge-dtm-cache/http/third_party/google/api/expr/v1alpha1/conformance_service.proto new file mode 100644 index 0000000..8dfa9d7 --- /dev/null +++ b/_13_sponge-dtm-cache/http/third_party/google/api/expr/v1alpha1/conformance_service.proto @@ -0,0 +1,164 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.api.expr.v1alpha1; + +import "google/api/expr/v1alpha1/checked.proto"; +import "google/api/expr/v1alpha1/eval.proto"; +import "google/api/expr/v1alpha1/syntax.proto"; +import "google/rpc/status.proto"; + +option cc_enable_arenas = true; +option go_package = "google.golang.org/genproto/googleapis/api/expr/conformance/v1alpha1;confpb"; +option java_multiple_files = true; +option java_outer_classname = "ConformanceServiceProto"; +option java_package = "com.google.api.expr.v1alpha1"; + +// Access a CEL implementation from another process or machine. +// A CEL implementation is decomposed as a parser, a static checker, +// and an evaluator. Every CEL implementation is expected to provide +// a server for this API. 
The API will be used for conformance testing +// and other utilities. +service ConformanceService { + + // Transforms CEL source text into a parsed representation. + rpc Parse(ParseRequest) returns (ParseResponse) { + } + + // Runs static checks on a parsed CEL representation and return + // an annotated representation, or a set of issues. + rpc Check(CheckRequest) returns (CheckResponse) { + } + + // Evaluates a parsed or annotation CEL representation given + // values of external bindings. + rpc Eval(EvalRequest) returns (EvalResponse) { + } +} + +// Request message for the Parse method. +message ParseRequest { + // Required. Source text in CEL syntax. + string cel_source = 1; + + // Tag for version of CEL syntax, for future use. + string syntax_version = 2; + + // File or resource for source text, used in [SourceInfo][google.api.expr.v1alpha1.SourceInfo]. + string source_location = 3; + + // Prevent macro expansion. See "Macros" in Language Defiinition. + bool disable_macros = 4; +} + +// Response message for the Parse method. +message ParseResponse { + // The parsed representation, or unset if parsing failed. + ParsedExpr parsed_expr = 1; + + // Any number of issues with [StatusDetails][] as the details. + repeated google.rpc.Status issues = 2; +} + +// Request message for the Check method. +message CheckRequest { + // Required. The parsed representation of the CEL program. + ParsedExpr parsed_expr = 1; + + // Declarations of types for external variables and functions. + // Required if program uses external variables or functions + // not in the default environment. + repeated Decl type_env = 2; + + // The protocol buffer context. See "Name Resolution" in the + // Language Definition. + string container = 3; + + // If true, use only the declarations in [type_env][google.api.expr.v1alpha1.CheckRequest.type_env]. If false (default), + // add declarations for the standard definitions to the type environment. See + // "Standard Definitions" in the Language Definition. + bool no_std_env = 4; +} + +// Response message for the Check method. +message CheckResponse { + // The annotated representation, or unset if checking failed. + CheckedExpr checked_expr = 1; + + // Any number of issues with [StatusDetails][] as the details. + repeated google.rpc.Status issues = 2; +} + +// Request message for the Eval method. +message EvalRequest { + // Required. Either the parsed or annotated representation of the CEL program. + oneof expr_kind { + // Evaluate based on the parsed representation. + ParsedExpr parsed_expr = 1; + + // Evaluate based on the checked representation. + CheckedExpr checked_expr = 2; + } + + // Bindings for the external variables. The types SHOULD be compatible + // with the type environment in [CheckRequest][google.api.expr.v1alpha1.CheckRequest], if checked. + map bindings = 3; + + // SHOULD be the same container as used in [CheckRequest][google.api.expr.v1alpha1.CheckRequest], if checked. + string container = 4; +} + +// Response message for the Eval method. +message EvalResponse { + // The execution result, or unset if execution couldn't start. + ExprValue result = 1; + + // Any number of issues with [StatusDetails][] as the details. + // Note that CEL execution errors are reified into [ExprValue][google.api.expr.v1alpha1.ExprValue]. + // Nevertheless, we'll allow out-of-band issues to be raised, + // which also makes the replies more regular. 
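For orientation, a minimal Go client sketch for the ConformanceService defined above. This is illustrative only and not part of the vendored file; it assumes the stubs generated from this proto are importable under the confpb package named in its go_package option, and that a conformance server is listening on a placeholder address.

package main

import (
	"context"
	"fmt"
	"log"

	"google.golang.org/grpc"
	"google.golang.org/grpc/credentials/insecure"

	confpb "google.golang.org/genproto/googleapis/api/expr/conformance/v1alpha1"
)

func main() {
	// Placeholder address of a conformance server.
	conn, err := grpc.Dial("localhost:50051", grpc.WithTransportCredentials(insecure.NewCredentials()))
	if err != nil {
		log.Fatal(err)
	}
	defer conn.Close()
	client := confpb.NewConformanceServiceClient(conn)
	ctx := context.Background()

	// Parse CEL source text into a ParsedExpr.
	parsed, err := client.Parse(ctx, &confpb.ParseRequest{CelSource: "1 + 2 == 3"})
	if err != nil {
		log.Fatal(err)
	}

	// Evaluate the parsed representation; this expression needs no external bindings.
	evaled, err := client.Eval(ctx, &confpb.EvalRequest{
		ExprKind: &confpb.EvalRequest_ParsedExpr{ParsedExpr: parsed.GetParsedExpr()},
	})
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(evaled.GetResult())
}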
+ repeated google.rpc.Status issues = 2; +} + +// Warnings or errors in service execution are represented by +// [google.rpc.Status][google.rpc.Status] messages, with the following message +// in the details field. +message IssueDetails { + // Severities of issues. + enum Severity { + // An unspecified severity. + SEVERITY_UNSPECIFIED = 0; + + // Deprecation issue for statements and method that may no longer be + // supported or maintained. + DEPRECATION = 1; + + // Warnings such as: unused variables. + WARNING = 2; + + // Errors such as: unmatched curly braces or variable redefinition. + ERROR = 3; + } + + // The severity of the issue. + Severity severity = 1; + + // Position in the source, if known. + SourcePosition position = 2; + + // Expression ID from [Expr][google.api.expr.v1alpha1.Expr], 0 if unknown. + int64 id = 3; +} diff --git a/_13_sponge-dtm-cache/http/third_party/google/api/expr/v1alpha1/eval.proto b/_13_sponge-dtm-cache/http/third_party/google/api/expr/v1alpha1/eval.proto new file mode 100644 index 0000000..c095068 --- /dev/null +++ b/_13_sponge-dtm-cache/http/third_party/google/api/expr/v1alpha1/eval.proto @@ -0,0 +1,118 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.api.expr.v1alpha1; + +import "google/api/expr/v1alpha1/value.proto"; +import "google/rpc/status.proto"; + +option cc_enable_arenas = true; +option go_package = "google.golang.org/genproto/googleapis/api/expr/v1alpha1;expr"; +option java_multiple_files = true; +option java_outer_classname = "EvalProto"; +option java_package = "com.google.api.expr.v1alpha1"; + +// The state of an evaluation. +// +// Can represent an inital, partial, or completed state of evaluation. +message EvalState { + // A single evalution result. + message Result { + // The id of the expression this result if for. + int64 expr = 1; + + // The index in `values` of the resulting value. + int64 value = 2; + } + + // The unique values referenced in this message. + repeated ExprValue values = 1; + + // An ordered list of results. + // + // Tracks the flow of evaluation through the expression. + // May be sparse. + repeated Result results = 3; +} + +// The value of an evaluated expression. +message ExprValue { + // An expression can resolve to a value, error or unknown. + oneof kind { + // A concrete value. + Value value = 1; + + // The set of errors in the critical path of evalution. + // + // Only errors in the critical path are included. For example, + // `( || true) && ` will only result in ``, + // while ` || ` will result in both `` and + // ``. + // + // Errors cause by the presence of other errors are not included in the + // set. For example `.foo`, `foo()`, and ` + 1` will + // only result in ``. + // + // Multiple errors *might* be included when evaluation could result + // in different errors. For example ` + ` and + // `foo(, )` may result in ``, `` or both. 
+ // The exact subset of errors included for this case is unspecified and + // depends on the implementation details of the evaluator. + ErrorSet error = 2; + + // The set of unknowns in the critical path of evaluation. + // + // Unknown behaves identically to Error with regards to propagation. + // Specifically, only unknowns in the critical path are included, unknowns + // caused by the presence of other unknowns are not included, and multiple + // unknowns *might* be included included when evaluation could result in + // different unknowns. For example: + // + // ( || true) && -> + // || -> + // .foo -> + // foo() -> + // + -> or + // + // Unknown takes precidence over Error in cases where a `Value` can short + // circuit the result: + // + // || -> + // && -> + // + // Errors take precidence in all other cases: + // + // + -> + // foo(, ) -> + UnknownSet unknown = 3; + } +} + +// A set of errors. +// +// The errors included depend on the context. See `ExprValue.error`. +message ErrorSet { + // The errors in the set. + repeated google.rpc.Status errors = 1; +} + +// A set of expressions for which the value is unknown. +// +// The unknowns included depend on the context. See `ExprValue.unknown`. +message UnknownSet { + // The ids of the expressions with unknown values. + repeated int64 exprs = 1; +} diff --git a/_13_sponge-dtm-cache/http/third_party/google/api/expr/v1alpha1/explain.proto b/_13_sponge-dtm-cache/http/third_party/google/api/expr/v1alpha1/explain.proto new file mode 100644 index 0000000..5e1bc94 --- /dev/null +++ b/_13_sponge-dtm-cache/http/third_party/google/api/expr/v1alpha1/explain.proto @@ -0,0 +1,53 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.api.expr.v1alpha1; + +import "google/api/expr/v1alpha1/value.proto"; + +option cc_enable_arenas = true; +option go_package = "google.golang.org/genproto/googleapis/api/expr/v1alpha1;expr"; +option java_multiple_files = true; +option java_outer_classname = "ExplainProto"; +option java_package = "com.google.api.expr.v1alpha1"; + +// Values of intermediate expressions produced when evaluating expression. +// Deprecated, use `EvalState` instead. +message Explain { + option deprecated = true; + + // ID and value index of one step. + message ExprStep { + // ID of corresponding Expr node. + int64 id = 1; + + // Index of the value in the values list. + int32 value_index = 2; + } + + // All of the observed values. + // + // The field value_index is an index in the values list. + // Separating values from steps is needed to remove redundant values. + repeated Value values = 1; + + // List of steps. + // + // Repeated evaluations of the same expression generate new ExprStep + // instances. The order of such ExprStep instances matches the order of + // elements returned by Comprehension.iter_range. 
+ repeated ExprStep expr_steps = 2; +} diff --git a/_13_sponge-dtm-cache/http/third_party/google/api/expr/v1alpha1/syntax.proto b/_13_sponge-dtm-cache/http/third_party/google/api/expr/v1alpha1/syntax.proto new file mode 100644 index 0000000..3f7652f --- /dev/null +++ b/_13_sponge-dtm-cache/http/third_party/google/api/expr/v1alpha1/syntax.proto @@ -0,0 +1,327 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.api.expr.v1alpha1; + +import "google/protobuf/duration.proto"; +import "google/protobuf/struct.proto"; +import "google/protobuf/timestamp.proto"; + +option cc_enable_arenas = true; +option go_package = "google.golang.org/genproto/googleapis/api/expr/v1alpha1;expr"; +option java_multiple_files = true; +option java_outer_classname = "SyntaxProto"; +option java_package = "com.google.api.expr.v1alpha1"; + +// A representation of the abstract syntax of the Common Expression Language. + +// An expression together with source information as returned by the parser. +message ParsedExpr { + // The parsed expression. + Expr expr = 2; + + // The source info derived from input that generated the parsed `expr`. + SourceInfo source_info = 3; +} + +// An abstract representation of a common expression. +// +// Expressions are abstractly represented as a collection of identifiers, +// select statements, function calls, literals, and comprehensions. All +// operators with the exception of the '.' operator are modelled as function +// calls. This makes it easy to represent new operators into the existing AST. +// +// All references within expressions must resolve to a [Decl][google.api.expr.v1alpha1.Decl] provided at +// type-check for an expression to be valid. A reference may either be a bare +// identifier `name` or a qualified identifier `google.api.name`. References +// may either refer to a value or a function declaration. +// +// For example, the expression `google.api.name.startsWith('expr')` references +// the declaration `google.api.name` within a [Expr.Select][google.api.expr.v1alpha1.Expr.Select] expression, and +// the function declaration `startsWith`. +message Expr { + // An identifier expression. e.g. `request`. + message Ident { + // Required. Holds a single, unqualified identifier, possibly preceded by a + // '.'. + // + // Qualified names are represented by the [Expr.Select][google.api.expr.v1alpha1.Expr.Select] expression. + string name = 1; + } + + // A field selection expression. e.g. `request.auth`. + message Select { + // Required. The target of the selection expression. + // + // For example, in the select expression `request.auth`, the `request` + // portion of the expression is the `operand`. + Expr operand = 1; + + // Required. The name of the field to select. + // + // For example, in the select expression `request.auth`, the `auth` portion + // of the expression would be the `field`. + string field = 2; + + // Whether the select is to be interpreted as a field presence test. 
+ // + // This results from the macro `has(request.auth)`. + bool test_only = 3; + } + + // A call expression, including calls to predefined functions and operators. + // + // For example, `value == 10`, `size(map_value)`. + message Call { + // The target of an method call-style expression. For example, `x` in + // `x.f()`. + Expr target = 1; + + // Required. The name of the function or method being called. + string function = 2; + + // The arguments. + repeated Expr args = 3; + } + + // A list creation expression. + // + // Lists may either be homogenous, e.g. `[1, 2, 3]`, or heterogenous, e.g. + // `dyn([1, 'hello', 2.0])` + message CreateList { + // The elements part of the list. + repeated Expr elements = 1; + } + + // A map or message creation expression. + // + // Maps are constructed as `{'key_name': 'value'}`. Message construction is + // similar, but prefixed with a type name and composed of field ids: + // `types.MyType{field_id: 'value'}`. + message CreateStruct { + // Represents an entry. + message Entry { + // Required. An id assigned to this node by the parser which is unique + // in a given expression tree. This is used to associate type + // information and other attributes to the node. + int64 id = 1; + + // The `Entry` key kinds. + oneof key_kind { + // The field key for a message creator statement. + string field_key = 2; + + // The key expression for a map creation statement. + Expr map_key = 3; + } + + // Required. The value assigned to the key. + Expr value = 4; + } + + // The type name of the message to be created, empty when creating map + // literals. + string message_name = 1; + + // The entries in the creation expression. + repeated Entry entries = 2; + } + + // A comprehension expression applied to a list or map. + // + // Comprehensions are not part of the core syntax, but enabled with macros. + // A macro matches a specific call signature within a parsed AST and replaces + // the call with an alternate AST block. Macro expansion happens at parse + // time. + // + // The following macros are supported within CEL: + // + // Aggregate type macros may be applied to all elements in a list or all keys + // in a map: + // + // * `all`, `exists`, `exists_one` - test a predicate expression against + // the inputs and return `true` if the predicate is satisfied for all, + // any, or only one value `list.all(x, x < 10)`. + // * `filter` - test a predicate expression against the inputs and return + // the subset of elements which satisfy the predicate: + // `payments.filter(p, p > 1000)`. + // * `map` - apply an expression to all elements in the input and return the + // output aggregate type: `[1, 2, 3].map(i, i * i)`. + // + // The `has(m.x)` macro tests whether the property `x` is present in struct + // `m`. The semantics of this macro depend on the type of `m`. For proto2 + // messages `has(m.x)` is defined as 'defined, but not set`. For proto3, the + // macro tests whether the property is set to its default. For map and struct + // types, the macro tests whether the property `x` is defined on `m`. + message Comprehension { + // The name of the iteration variable. + string iter_var = 1; + + // The range over which var iterates. + Expr iter_range = 2; + + // The name of the variable used for accumulation of the result. + string accu_var = 3; + + // The initial value of the accumulator. + Expr accu_init = 4; + + // An expression which can contain iter_var and accu_var. 
+ // + // Returns false when the result has been computed and may be used as + // a hint to short-circuit the remainder of the comprehension. + Expr loop_condition = 5; + + // An expression which can contain iter_var and accu_var. + // + // Computes the next value of accu_var. + Expr loop_step = 6; + + // An expression which can contain accu_var. + // + // Computes the result. + Expr result = 7; + } + + // Required. An id assigned to this node by the parser which is unique in a + // given expression tree. This is used to associate type information and other + // attributes to a node in the parse tree. + int64 id = 2; + + // Required. Variants of expressions. + oneof expr_kind { + // A literal expression. + Constant const_expr = 3; + + // An identifier expression. + Ident ident_expr = 4; + + // A field selection expression, e.g. `request.auth`. + Select select_expr = 5; + + // A call expression, including calls to predefined functions and operators. + Call call_expr = 6; + + // A list creation expression. + CreateList list_expr = 7; + + // A map or message creation expression. + CreateStruct struct_expr = 8; + + // A comprehension expression. + Comprehension comprehension_expr = 9; + } +} + +// Represents a primitive literal. +// +// Named 'Constant' here for backwards compatibility. +// +// This is similar as the primitives supported in the well-known type +// `google.protobuf.Value`, but richer so it can represent CEL's full range of +// primitives. +// +// Lists and structs are not included as constants as these aggregate types may +// contain [Expr][google.api.expr.v1alpha1.Expr] elements which require evaluation and are thus not constant. +// +// Examples of literals include: `"hello"`, `b'bytes'`, `1u`, `4.2`, `-2`, +// `true`, `null`. +message Constant { + // Required. The valid constant kinds. + oneof constant_kind { + // null value. + google.protobuf.NullValue null_value = 1; + + // boolean value. + bool bool_value = 2; + + // int64 value. + int64 int64_value = 3; + + // uint64 value. + uint64 uint64_value = 4; + + // double value. + double double_value = 5; + + // string value. + string string_value = 6; + + // bytes value. + bytes bytes_value = 7; + + // protobuf.Duration value. + // + // Deprecated: duration is no longer considered a builtin cel type. + google.protobuf.Duration duration_value = 8 [deprecated = true]; + + // protobuf.Timestamp value. + // + // Deprecated: timestamp is no longer considered a builtin cel type. + google.protobuf.Timestamp timestamp_value = 9 [deprecated = true]; + } +} + +// Source information collected at parse time. +message SourceInfo { + // The syntax version of the source, e.g. `cel1`. + string syntax_version = 1; + + // The location name. All position information attached to an expression is + // relative to this location. + // + // The location could be a file, UI element, or similar. For example, + // `acme/app/AnvilPolicy.cel`. + string location = 2; + + // Monotonically increasing list of character offsets where newlines appear. + // + // The line number of a given position is the index `i` where for a given + // `id` the `line_offsets[i] < id_positions[id] < line_offsets[i+1]`. The + // column may be derivd from `id_positions[id] - line_offsets[i]`. + repeated int32 line_offsets = 3; + + // A map from the parse node id (e.g. `Expr.id`) to the character offset + // within source. + map positions = 4; + + // A map from the parse node id where a macro replacement was made to the + // call `Expr` that resulted in a macro expansion. 
+ // + // For example, `has(value.field)` is a function call that is replaced by a + // `test_only` field selection in the AST. Likewise, the call + // `list.exists(e, e > 10)` translates to a comprehension expression. The key + // in the map corresponds to the expression id of the expanded macro, and the + // value is the call `Expr` that was replaced. + map macro_calls = 5; +} + +// A specific position in source. +message SourcePosition { + // The soucre location name (e.g. file name). + string location = 1; + + // The character offset. + int32 offset = 2; + + // The 1-based index of the starting line in the source text + // where the issue occurs, or 0 if unknown. + int32 line = 3; + + // The 0-based index of the starting position within the line of source text + // where the issue occurs. Only meaningful if line is nonzero. + int32 column = 4; +} diff --git a/_13_sponge-dtm-cache/http/third_party/google/api/expr/v1alpha1/value.proto b/_13_sponge-dtm-cache/http/third_party/google/api/expr/v1alpha1/value.proto new file mode 100644 index 0000000..3f2d250 --- /dev/null +++ b/_13_sponge-dtm-cache/http/third_party/google/api/expr/v1alpha1/value.proto @@ -0,0 +1,115 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.api.expr.v1alpha1; + +import "google/protobuf/any.proto"; +import "google/protobuf/struct.proto"; + +option cc_enable_arenas = true; +option go_package = "google.golang.org/genproto/googleapis/api/expr/v1alpha1;expr"; +option java_multiple_files = true; +option java_outer_classname = "ValueProto"; +option java_package = "com.google.api.expr.v1alpha1"; + +// Contains representations for CEL runtime values. + +// Represents a CEL value. +// +// This is similar to `google.protobuf.Value`, but can represent CEL's full +// range of values. +message Value { + // Required. The valid kinds of values. + oneof kind { + // Null value. + google.protobuf.NullValue null_value = 1; + + // Boolean value. + bool bool_value = 2; + + // Signed integer value. + int64 int64_value = 3; + + // Unsigned integer value. + uint64 uint64_value = 4; + + // Floating point value. + double double_value = 5; + + // UTF-8 string value. + string string_value = 6; + + // Byte string value. + bytes bytes_value = 7; + + // An enum value. + EnumValue enum_value = 9; + + // The proto message backing an object value. + google.protobuf.Any object_value = 10; + + // Map value. + MapValue map_value = 11; + + // List value. + ListValue list_value = 12; + + // Type value. + string type_value = 15; + } +} + +// An enum value. +message EnumValue { + // The fully qualified name of the enum type. + string type = 1; + + // The value of the enum. + int32 value = 2; +} + +// A list. +// +// Wrapped in a message so 'not set' and empty can be differentiated, which is +// required for use in a 'oneof'. +message ListValue { + // The ordered values in the list. + repeated Value values = 1; +} + +// A map. 
+// +// Wrapped in a message so 'not set' and empty can be differentiated, which is +// required for use in a 'oneof'. +message MapValue { + // An entry in the map. + message Entry { + // The key. + // + // Must be unique with in the map. + // Currently only boolean, int, uint, and string values can be keys. + Value key = 1; + + // The value. + Value value = 2; + } + + // The set of map entries. + // + // CEL has fewer restrictions on keys, so a protobuf map represenation + // cannot be used. + repeated Entry entries = 1; +} diff --git a/_13_sponge-dtm-cache/http/third_party/google/api/expr/v1beta1/BUILD.bazel b/_13_sponge-dtm-cache/http/third_party/google/api/expr/v1beta1/BUILD.bazel new file mode 100644 index 0000000..819e136 --- /dev/null +++ b/_13_sponge-dtm-cache/http/third_party/google/api/expr/v1beta1/BUILD.bazel @@ -0,0 +1,91 @@ +load("@rules_proto//proto:defs.bzl", "proto_library") + +# This is an API workspace, having public visibility by default makes perfect sense. +package(default_visibility = ["//visibility:public"]) + +proto_library( + name = "decl_proto", + srcs = ["decl.proto"], + deps = [ + ":expr_proto", + ], +) + +proto_library( + name = "eval_proto", + srcs = ["eval.proto"], + deps = [ + ":value_proto", + "//google/rpc:status_proto", + ], +) + +proto_library( + name = "expr_proto", + srcs = ["expr.proto"], + deps = [ + ":source_proto", + "@com_google_protobuf//:struct_proto", + ], +) + +proto_library( + name = "source_proto", + srcs = ["source.proto"], +) + +proto_library( + name = "value_proto", + srcs = ["value.proto"], + deps = [ + "@com_google_protobuf//:any_proto", + "@com_google_protobuf//:struct_proto", + ], +) + +proto_library( + name = "cel_proto", + deps = [ + ":decl_proto", + ":eval_proto", + ":expr_proto", + ":source_proto", + ":value_proto", + "//google/rpc:status_proto", + "@com_google_protobuf//:any_proto", + "@com_google_protobuf//:struct_proto", + ], +) + +############################################################################## +# C++ +############################################################################## +load( + "@com_google_googleapis_imports//:imports.bzl", + "cc_proto_library", +) + +cc_proto_library( + name = "decl_cc_proto", + deps = [":decl_proto"], +) + +cc_proto_library( + name = "eval_cc_proto", + deps = [":eval_proto"], +) + +cc_proto_library( + name = "expr_cc_proto", + deps = [":expr_proto"], +) + +cc_proto_library( + name = "source_cc_proto", + deps = [":source_proto"], +) + +cc_proto_library( + name = "value_cc_proto", + deps = [":value_proto"], +) diff --git a/_13_sponge-dtm-cache/http/third_party/google/api/expr/v1beta1/decl.proto b/_13_sponge-dtm-cache/http/third_party/google/api/expr/v1beta1/decl.proto new file mode 100644 index 0000000..d3d748b --- /dev/null +++ b/_13_sponge-dtm-cache/http/third_party/google/api/expr/v1beta1/decl.proto @@ -0,0 +1,84 @@ +// Copyright 2019 Google LLC. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
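As a small illustration of the Value and MapValue messages from value.proto above, a sketch that builds the map value {"retries": 3} with the Go types generated into google.golang.org/genproto/googleapis/api/expr/v1alpha1; the exprpb alias is only a convention assumed here, not part of the patch.

package main

import (
	"fmt"

	exprpb "google.golang.org/genproto/googleapis/api/expr/v1alpha1"
)

func main() {
	// MapValue wraps an explicit entry list instead of a protobuf map so that
	// "unset" and "empty" stay distinguishable inside the Value oneof, and so
	// keys are not limited to protobuf map key types.
	v := &exprpb.Value{
		Kind: &exprpb.Value_MapValue{
			MapValue: &exprpb.MapValue{
				Entries: []*exprpb.MapValue_Entry{
					{
						Key:   &exprpb.Value{Kind: &exprpb.Value_StringValue{StringValue: "retries"}},
						Value: &exprpb.Value{Kind: &exprpb.Value_Int64Value{Int64Value: 3}},
					},
				},
			},
		},
	}
	fmt.Println(v)
}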
+// + +syntax = "proto3"; + +package google.api.expr.v1beta1; + +import "google/api/expr/v1beta1/expr.proto"; + +option cc_enable_arenas = true; +option go_package = "google.golang.org/genproto/googleapis/api/expr/v1beta1;expr"; +option java_multiple_files = true; +option java_outer_classname = "DeclProto"; +option java_package = "com.google.api.expr.v1beta1"; + +// A declaration. +message Decl { + // The id of the declaration. + int32 id = 1; + + // The name of the declaration. + string name = 2; + + // The documentation string for the declaration. + string doc = 3; + + // The kind of declaration. + oneof kind { + // An identifier declaration. + IdentDecl ident = 4; + + // A function declaration. + FunctionDecl function = 5; + } +} + +// The declared type of a variable. +// +// Extends runtime type values with extra information used for type checking +// and dispatching. +message DeclType { + // The expression id of the declared type, if applicable. + int32 id = 1; + + // The type name, e.g. 'int', 'my.type.Type' or 'T' + string type = 2; + + // An ordered list of type parameters, e.g. ``. + // Only applies to a subset of types, e.g. `map`, `list`. + repeated DeclType type_params = 4; +} + +// An identifier declaration. +message IdentDecl { + // Optional type of the identifier. + DeclType type = 3; + + // Optional value of the identifier. + Expr value = 4; +} + +// A function declaration. +message FunctionDecl { + // The function arguments. + repeated IdentDecl args = 1; + + // Optional declared return type. + DeclType return_type = 2; + + // If the first argument of the function is the receiver. + bool receiver_function = 3; +} diff --git a/_13_sponge-dtm-cache/http/third_party/google/api/expr/v1beta1/eval.proto b/_13_sponge-dtm-cache/http/third_party/google/api/expr/v1beta1/eval.proto new file mode 100644 index 0000000..0c6c4d9 --- /dev/null +++ b/_13_sponge-dtm-cache/http/third_party/google/api/expr/v1beta1/eval.proto @@ -0,0 +1,125 @@ +// Copyright 2019 Google LLC. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// + +syntax = "proto3"; + +package google.api.expr.v1beta1; + +import "google/api/expr/v1beta1/value.proto"; +import "google/rpc/status.proto"; + +option cc_enable_arenas = true; +option go_package = "google.golang.org/genproto/googleapis/api/expr/v1beta1;expr"; +option java_multiple_files = true; +option java_outer_classname = "EvalProto"; +option java_package = "com.google.api.expr.v1beta1"; + +// The state of an evaluation. +// +// Can represent an initial, partial, or completed state of evaluation. +message EvalState { + // A single evaluation result. + message Result { + // The expression this result is for. + IdRef expr = 1; + + // The index in `values` of the resulting value. + int32 value = 2; + } + + // The unique values referenced in this message. + repeated ExprValue values = 1; + + // An ordered list of results. + // + // Tracks the flow of evaluation through the expression. + // May be sparse. 
+ repeated Result results = 3; +} + +// The value of an evaluated expression. +message ExprValue { + // An expression can resolve to a value, error or unknown. + oneof kind { + // A concrete value. + Value value = 1; + + // The set of errors in the critical path of evalution. + // + // Only errors in the critical path are included. For example, + // `( || true) && ` will only result in ``, + // while ` || ` will result in both `` and + // ``. + // + // Errors cause by the presence of other errors are not included in the + // set. For example `.foo`, `foo()`, and ` + 1` will + // only result in ``. + // + // Multiple errors *might* be included when evaluation could result + // in different errors. For example ` + ` and + // `foo(, )` may result in ``, `` or both. + // The exact subset of errors included for this case is unspecified and + // depends on the implementation details of the evaluator. + ErrorSet error = 2; + + // The set of unknowns in the critical path of evaluation. + // + // Unknown behaves identically to Error with regards to propagation. + // Specifically, only unknowns in the critical path are included, unknowns + // caused by the presence of other unknowns are not included, and multiple + // unknowns *might* be included included when evaluation could result in + // different unknowns. For example: + // + // ( || true) && -> + // || -> + // .foo -> + // foo() -> + // + -> or + // + // Unknown takes precidence over Error in cases where a `Value` can short + // circuit the result: + // + // || -> + // && -> + // + // Errors take precidence in all other cases: + // + // + -> + // foo(, ) -> + UnknownSet unknown = 3; + } +} + +// A set of errors. +// +// The errors included depend on the context. See `ExprValue.error`. +message ErrorSet { + // The errors in the set. + repeated google.rpc.Status errors = 1; +} + +// A set of expressions for which the value is unknown. +// +// The unknowns included depend on the context. See `ExprValue.unknown`. +message UnknownSet { + // The ids of the expressions with unknown values. + repeated IdRef exprs = 1; +} + +// A reference to an expression id. +message IdRef { + // The expression id. + int32 id = 1; +} diff --git a/_13_sponge-dtm-cache/http/third_party/google/api/expr/v1beta1/expr.proto b/_13_sponge-dtm-cache/http/third_party/google/api/expr/v1beta1/expr.proto new file mode 100644 index 0000000..77249ba --- /dev/null +++ b/_13_sponge-dtm-cache/http/third_party/google/api/expr/v1beta1/expr.proto @@ -0,0 +1,265 @@ +// Copyright 2019 Google LLC. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
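The declaration machinery above (Decl, IdentDecl, FunctionDecl) is what a CEL environment is populated with before type checking. A minimal sketch using the cel-go library, which is an assumption of this example rather than part of the patch, with a reasonably recent version of its API: a variable is declared, the checker resolves the identifier and the startsWith overload against the environment, and the variable's value is supplied at evaluation time.

package main

import (
	"fmt"
	"log"

	"github.com/google/cel-go/cel"
)

func main() {
	// Declare the identifier "name" so references to it type-check.
	env, err := cel.NewEnv(cel.Variable("name", cel.StringType))
	if err != nil {
		log.Fatal(err)
	}

	// Parse and check; the checker resolves `name` and the `startsWith`
	// overload against the declarations in the environment.
	ast, iss := env.Compile(`name.startsWith("expr")`)
	if iss != nil && iss.Err() != nil {
		log.Fatal(iss.Err())
	}

	prg, err := env.Program(ast)
	if err != nil {
		log.Fatal(err)
	}

	// Supply the declared-but-unbound identifier at evaluation time.
	out, _, err := prg.Eval(map[string]interface{}{"name": "expression"})
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(out.Value()) // true
}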
+// + +syntax = "proto3"; + +package google.api.expr.v1beta1; + +import "google/api/expr/v1beta1/source.proto"; +import "google/protobuf/struct.proto"; + +option cc_enable_arenas = true; +option go_package = "google.golang.org/genproto/googleapis/api/expr/v1beta1;expr"; +option java_multiple_files = true; +option java_outer_classname = "ExprProto"; +option java_package = "com.google.api.expr.v1beta1"; + +// An expression together with source information as returned by the parser. +message ParsedExpr { + // The parsed expression. + Expr expr = 2; + + // The source info derived from input that generated the parsed `expr`. + SourceInfo source_info = 3; + + // The syntax version of the source, e.g. `cel1`. + string syntax_version = 4; +} + +// An abstract representation of a common expression. +// +// Expressions are abstractly represented as a collection of identifiers, +// select statements, function calls, literals, and comprehensions. All +// operators with the exception of the '.' operator are modelled as function +// calls. This makes it easy to represent new operators into the existing AST. +// +// All references within expressions must resolve to a [Decl][google.api.expr.v1beta1.Decl] provided at +// type-check for an expression to be valid. A reference may either be a bare +// identifier `name` or a qualified identifier `google.api.name`. References +// may either refer to a value or a function declaration. +// +// For example, the expression `google.api.name.startsWith('expr')` references +// the declaration `google.api.name` within a [Expr.Select][google.api.expr.v1beta1.Expr.Select] expression, and +// the function declaration `startsWith`. +message Expr { + // An identifier expression. e.g. `request`. + message Ident { + // Required. Holds a single, unqualified identifier, possibly preceded by a + // '.'. + // + // Qualified names are represented by the [Expr.Select][google.api.expr.v1beta1.Expr.Select] expression. + string name = 1; + } + + // A field selection expression. e.g. `request.auth`. + message Select { + // Required. The target of the selection expression. + // + // For example, in the select expression `request.auth`, the `request` + // portion of the expression is the `operand`. + Expr operand = 1; + + // Required. The name of the field to select. + // + // For example, in the select expression `request.auth`, the `auth` portion + // of the expression would be the `field`. + string field = 2; + + // Whether the select is to be interpreted as a field presence test. + // + // This results from the macro `has(request.auth)`. + bool test_only = 3; + } + + // A call expression, including calls to predefined functions and operators. + // + // For example, `value == 10`, `size(map_value)`. + message Call { + // The target of an method call-style expression. For example, `x` in + // `x.f()`. + Expr target = 1; + + // Required. The name of the function or method being called. + string function = 2; + + // The arguments. + repeated Expr args = 3; + } + + // A list creation expression. + // + // Lists may either be homogenous, e.g. `[1, 2, 3]`, or heterogenous, e.g. + // `dyn([1, 'hello', 2.0])` + message CreateList { + // The elements part of the list. + repeated Expr elements = 1; + } + + // A map or message creation expression. + // + // Maps are constructed as `{'key_name': 'value'}`. Message construction is + // similar, but prefixed with a type name and composed of field ids: + // `types.MyType{field_id: 'value'}`. + message CreateStruct { + // Represents an entry. 
+ message Entry { + // Required. An id assigned to this node by the parser which is unique + // in a given expression tree. This is used to associate type + // information and other attributes to the node. + int32 id = 1; + + // The `Entry` key kinds. + oneof key_kind { + // The field key for a message creator statement. + string field_key = 2; + + // The key expression for a map creation statement. + Expr map_key = 3; + } + + // Required. The value assigned to the key. + Expr value = 4; + } + + // The type name of the message to be created, empty when creating map + // literals. + string type = 1; + + // The entries in the creation expression. + repeated Entry entries = 2; + } + + // A comprehension expression applied to a list or map. + // + // Comprehensions are not part of the core syntax, but enabled with macros. + // A macro matches a specific call signature within a parsed AST and replaces + // the call with an alternate AST block. Macro expansion happens at parse + // time. + // + // The following macros are supported within CEL: + // + // Aggregate type macros may be applied to all elements in a list or all keys + // in a map: + // + // * `all`, `exists`, `exists_one` - test a predicate expression against + // the inputs and return `true` if the predicate is satisfied for all, + // any, or only one value `list.all(x, x < 10)`. + // * `filter` - test a predicate expression against the inputs and return + // the subset of elements which satisfy the predicate: + // `payments.filter(p, p > 1000)`. + // * `map` - apply an expression to all elements in the input and return the + // output aggregate type: `[1, 2, 3].map(i, i * i)`. + // + // The `has(m.x)` macro tests whether the property `x` is present in struct + // `m`. The semantics of this macro depend on the type of `m`. For proto2 + // messages `has(m.x)` is defined as 'defined, but not set`. For proto3, the + // macro tests whether the property is set to its default. For map and struct + // types, the macro tests whether the property `x` is defined on `m`. + message Comprehension { + // The name of the iteration variable. + string iter_var = 1; + + // The range over which var iterates. + Expr iter_range = 2; + + // The name of the variable used for accumulation of the result. + string accu_var = 3; + + // The initial value of the accumulator. + Expr accu_init = 4; + + // An expression which can contain iter_var and accu_var. + // + // Returns false when the result has been computed and may be used as + // a hint to short-circuit the remainder of the comprehension. + Expr loop_condition = 5; + + // An expression which can contain iter_var and accu_var. + // + // Computes the next value of accu_var. + Expr loop_step = 6; + + // An expression which can contain accu_var. + // + // Computes the result. + Expr result = 7; + } + + // Required. An id assigned to this node by the parser which is unique in a + // given expression tree. This is used to associate type information and other + // attributes to a node in the parse tree. + int32 id = 2; + + // Required. Variants of expressions. + oneof expr_kind { + // A literal expression. + Literal literal_expr = 3; + + // An identifier expression. + Ident ident_expr = 4; + + // A field selection expression, e.g. `request.auth`. + Select select_expr = 5; + + // A call expression, including calls to predefined functions and operators. + Call call_expr = 6; + + // A list creation expression. + CreateList list_expr = 7; + + // A map or object creation expression. 
+ CreateStruct struct_expr = 8; + + // A comprehension expression. + Comprehension comprehension_expr = 9; + } +} + +// Represents a primitive literal. +// +// This is similar to the primitives supported in the well-known type +// `google.protobuf.Value`, but richer so it can represent CEL's full range of +// primitives. +// +// Lists and structs are not included as constants as these aggregate types may +// contain [Expr][google.api.expr.v1beta1.Expr] elements which require evaluation and are thus not constant. +// +// Examples of literals include: `"hello"`, `b'bytes'`, `1u`, `4.2`, `-2`, +// `true`, `null`. +message Literal { + // Required. The valid constant kinds. + oneof constant_kind { + // null value. + google.protobuf.NullValue null_value = 1; + + // boolean value. + bool bool_value = 2; + + // int64 value. + int64 int64_value = 3; + + // uint64 value. + uint64 uint64_value = 4; + + // double value. + double double_value = 5; + + // string value. + string string_value = 6; + + // bytes value. + bytes bytes_value = 7; + } +} diff --git a/_13_sponge-dtm-cache/http/third_party/google/api/expr/v1beta1/source.proto b/_13_sponge-dtm-cache/http/third_party/google/api/expr/v1beta1/source.proto new file mode 100644 index 0000000..78bb0a0 --- /dev/null +++ b/_13_sponge-dtm-cache/http/third_party/google/api/expr/v1beta1/source.proto @@ -0,0 +1,62 @@ +// Copyright 2019 Google LLC. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// + +syntax = "proto3"; + +package google.api.expr.v1beta1; + +option cc_enable_arenas = true; +option go_package = "google.golang.org/genproto/googleapis/api/expr/v1beta1;expr"; +option java_multiple_files = true; +option java_outer_classname = "SourceProto"; +option java_package = "com.google.api.expr.v1beta1"; + +// Source information collected at parse time. +message SourceInfo { + // The location name. All position information attached to an expression is + // relative to this location. + // + // The location could be a file, UI element, or similar. For example, + // `acme/app/AnvilPolicy.cel`. + string location = 2; + + // Monotonically increasing list of character offsets where newlines appear. + // + // The line number of a given position is the index `i` where for a given + // `id` the `line_offsets[i] < id_positions[id] < line_offsets[i+1]`. The + // column may be derivd from `id_positions[id] - line_offsets[i]`. + repeated int32 line_offsets = 3; + + // A map from the parse node id (e.g. `Expr.id`) to the character offset + // within source. + map positions = 4; +} + +// A specific position in source. +message SourcePosition { + // The soucre location name (e.g. file name). + string location = 1; + + // The character offset. + int32 offset = 2; + + // The 1-based index of the starting line in the source text + // where the issue occurs, or 0 if unknown. + int32 line = 3; + + // The 0-based index of the starting position within the line of source text + // where the issue occurs. Only meaningful if line is nonzer.. 
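The line_offsets description in SourceInfo above amounts to a small search. A self-contained sketch of deriving a 1-based line and 0-based column from a character offset, assuming line_offsets stores, for each newline, the offset of the character immediately after it (an assumption consistent with the column formula above):

package main

import (
	"fmt"
	"sort"
)

// lineColumn derives a 1-based line and 0-based column from a character
// offset and a SourceInfo-style line_offsets list (offsets of the characters
// immediately following each newline, in increasing order).
func lineColumn(lineOffsets []int32, offset int32) (line, column int32) {
	// The number of newline offsets at or before the position gives the
	// zero-based line index.
	i := sort.Search(len(lineOffsets), func(i int) bool { return lineOffsets[i] > offset })
	line = int32(i) + 1
	if i == 0 {
		column = offset
	} else {
		column = offset - lineOffsets[i-1]
	}
	return line, column
}

func main() {
	// Source "a\nbc\nd": newlines end at offsets 2 and 5, so line_offsets = [2, 5].
	offsets := []int32{2, 5}
	fmt.Println(lineColumn(offsets, 3)) // character 'c' -> line 2, column 1
}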
+ int32 column = 4; +} diff --git a/_13_sponge-dtm-cache/http/third_party/google/api/expr/v1beta1/value.proto b/_13_sponge-dtm-cache/http/third_party/google/api/expr/v1beta1/value.proto new file mode 100644 index 0000000..0978228 --- /dev/null +++ b/_13_sponge-dtm-cache/http/third_party/google/api/expr/v1beta1/value.proto @@ -0,0 +1,114 @@ +// Copyright 2019 Google LLC. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// + +syntax = "proto3"; + +package google.api.expr.v1beta1; + +import "google/protobuf/any.proto"; +import "google/protobuf/struct.proto"; + +option cc_enable_arenas = true; +option go_package = "google.golang.org/genproto/googleapis/api/expr/v1beta1;expr"; +option java_multiple_files = true; +option java_outer_classname = "ValueProto"; +option java_package = "com.google.api.expr.v1beta1"; + +// Represents a CEL value. +// +// This is similar to `google.protobuf.Value`, but can represent CEL's full +// range of values. +message Value { + // Required. The valid kinds of values. + oneof kind { + // Null value. + google.protobuf.NullValue null_value = 1; + + // Boolean value. + bool bool_value = 2; + + // Signed integer value. + int64 int64_value = 3; + + // Unsigned integer value. + uint64 uint64_value = 4; + + // Floating point value. + double double_value = 5; + + // UTF-8 string value. + string string_value = 6; + + // Byte string value. + bytes bytes_value = 7; + + // An enum value. + EnumValue enum_value = 9; + + // The proto message backing an object value. + google.protobuf.Any object_value = 10; + + // Map value. + MapValue map_value = 11; + + // List value. + ListValue list_value = 12; + + // A Type value represented by the fully qualified name of the type. + string type_value = 15; + } +} + +// An enum value. +message EnumValue { + // The fully qualified name of the enum type. + string type = 1; + + // The value of the enum. + int32 value = 2; +} + +// A list. +// +// Wrapped in a message so 'not set' and empty can be differentiated, which is +// required for use in a 'oneof'. +message ListValue { + // The ordered values in the list. + repeated Value values = 1; +} + +// A map. +// +// Wrapped in a message so 'not set' and empty can be differentiated, which is +// required for use in a 'oneof'. +message MapValue { + // An entry in the map. + message Entry { + // The key. + // + // Must be unique with in the map. + // Currently only boolean, int, uint, and string values can be keys. + Value key = 1; + + // The value. + Value value = 2; + } + + // The set of map entries. + // + // CEL has fewer restrictions on keys, so a protobuf map represenation + // cannot be used. 
+ repeated Entry entries = 1; +} diff --git a/_13_sponge-dtm-cache/http/third_party/google/api/field_behavior.proto b/_13_sponge-dtm-cache/http/third_party/google/api/field_behavior.proto new file mode 100644 index 0000000..aa7127b --- /dev/null +++ b/_13_sponge-dtm-cache/http/third_party/google/api/field_behavior.proto @@ -0,0 +1,78 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.api; + +import "google/protobuf/descriptor.proto"; + +option go_package = "google.golang.org/genproto/googleapis/api/annotations;annotations"; +option java_multiple_files = true; +option java_outer_classname = "FieldBehaviorProto"; +option java_package = "com.google.api"; +option objc_class_prefix = "GAPI"; + +extend google.protobuf.FieldOptions { + // A designation of a specific field behavior (required, output only, etc.) + // in protobuf messages. + // + // Examples: + // + // string name = 1 [(google.api.field_behavior) = REQUIRED]; + // State state = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; + // google.protobuf.Duration ttl = 1 + // [(google.api.field_behavior) = INPUT_ONLY]; + // google.protobuf.Timestamp expire_time = 1 + // [(google.api.field_behavior) = OUTPUT_ONLY, + // (google.api.field_behavior) = IMMUTABLE]; + repeated google.api.FieldBehavior field_behavior = 1052; +} + +// An indicator of the behavior of a given field (for example, that a field +// is required in requests, or given as output but ignored as input). +// This **does not** change the behavior in protocol buffers itself; it only +// denotes the behavior and may affect how API tooling handles the field. +// +// Note: This enum **may** receive new values in the future. +enum FieldBehavior { + // Conventional default for enums. Do not use this. + FIELD_BEHAVIOR_UNSPECIFIED = 0; + + // Specifically denotes a field as optional. + // While all fields in protocol buffers are optional, this may be specified + // for emphasis if appropriate. + OPTIONAL = 1; + + // Denotes a field as required. + // This indicates that the field **must** be provided as part of the request, + // and failure to do so will cause an error (usually `INVALID_ARGUMENT`). + REQUIRED = 2; + + // Denotes a field as output only. + // This indicates that the field is provided in responses, but including the + // field in a request does nothing (the server *must* ignore it and + // *must not* throw an error as a result of the field's presence). + OUTPUT_ONLY = 3; + + // Denotes a field as input only. + // This indicates that the field is provided in requests, and the + // corresponding field is not included in output. + INPUT_ONLY = 4; + + // Denotes a field as immutable. + // This indicates that the field may be set once in a request to create a + // resource, but may not be changed thereafter. 
+ IMMUTABLE = 5; +} diff --git a/_13_sponge-dtm-cache/http/third_party/google/api/http.proto b/_13_sponge-dtm-cache/http/third_party/google/api/http.proto new file mode 100644 index 0000000..69460cf --- /dev/null +++ b/_13_sponge-dtm-cache/http/third_party/google/api/http.proto @@ -0,0 +1,375 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.api; + +option cc_enable_arenas = true; +option go_package = "google.golang.org/genproto/googleapis/api/annotations;annotations"; +option java_multiple_files = true; +option java_outer_classname = "HttpProto"; +option java_package = "com.google.api"; +option objc_class_prefix = "GAPI"; + +// Defines the HTTP configuration for an API service. It contains a list of +// [HttpRule][google.api.HttpRule], each specifying the mapping of an RPC method +// to one or more HTTP REST API methods. +message Http { + // A list of HTTP configuration rules that apply to individual API methods. + // + // **NOTE:** All service configuration rules follow "last one wins" order. + repeated HttpRule rules = 1; + + // When set to true, URL path parameters will be fully URI-decoded except in + // cases of single segment matches in reserved expansion, where "%2F" will be + // left encoded. + // + // The default behavior is to not decode RFC 6570 reserved characters in multi + // segment matches. + bool fully_decode_reserved_expansion = 2; +} + +// # gRPC Transcoding +// +// gRPC Transcoding is a feature for mapping between a gRPC method and one or +// more HTTP REST endpoints. It allows developers to build a single API service +// that supports both gRPC APIs and REST APIs. Many systems, including [Google +// APIs](https://github.com/googleapis/googleapis), +// [Cloud Endpoints](https://cloud.google.com/endpoints), [gRPC +// Gateway](https://github.com/grpc-ecosystem/grpc-gateway), +// and [Envoy](https://github.com/envoyproxy/envoy) proxy support this feature +// and use it for large scale production services. +// +// `HttpRule` defines the schema of the gRPC/REST mapping. The mapping specifies +// how different portions of the gRPC request message are mapped to the URL +// path, URL query parameters, and HTTP request body. It also controls how the +// gRPC response message is mapped to the HTTP response body. `HttpRule` is +// typically specified as an `google.api.http` annotation on the gRPC method. +// +// Each mapping specifies a URL path template and an HTTP method. The path +// template may refer to one or more fields in the gRPC request message, as long +// as each field is a non-repeated field with a primitive (non-message) type. +// The path template controls how fields of the request message are mapped to +// the URL path. +// +// Example: +// +// service Messaging { +// rpc GetMessage(GetMessageRequest) returns (Message) { +// option (google.api.http) = { +// get: "/v1/{name=messages/*}" +// }; +// } +// } +// message GetMessageRequest { +// string name = 1; // Mapped to URL path. 
+// } +// message Message { +// string text = 1; // The resource content. +// } +// +// This enables an HTTP REST to gRPC mapping as below: +// +// HTTP | gRPC +// -----|----- +// `GET /v1/messages/123456` | `GetMessage(name: "messages/123456")` +// +// Any fields in the request message which are not bound by the path template +// automatically become HTTP query parameters if there is no HTTP request body. +// For example: +// +// service Messaging { +// rpc GetMessage(GetMessageRequest) returns (Message) { +// option (google.api.http) = { +// get:"/v1/messages/{message_id}" +// }; +// } +// } +// message GetMessageRequest { +// message SubMessage { +// string subfield = 1; +// } +// string message_id = 1; // Mapped to URL path. +// int64 revision = 2; // Mapped to URL query parameter `revision`. +// SubMessage sub = 3; // Mapped to URL query parameter `sub.subfield`. +// } +// +// This enables a HTTP JSON to RPC mapping as below: +// +// HTTP | gRPC +// -----|----- +// `GET /v1/messages/123456?revision=2&sub.subfield=foo` | +// `GetMessage(message_id: "123456" revision: 2 sub: SubMessage(subfield: +// "foo"))` +// +// Note that fields which are mapped to URL query parameters must have a +// primitive type or a repeated primitive type or a non-repeated message type. +// In the case of a repeated type, the parameter can be repeated in the URL +// as `...?param=A¶m=B`. In the case of a message type, each field of the +// message is mapped to a separate parameter, such as +// `...?foo.a=A&foo.b=B&foo.c=C`. +// +// For HTTP methods that allow a request body, the `body` field +// specifies the mapping. Consider a REST update method on the +// message resource collection: +// +// service Messaging { +// rpc UpdateMessage(UpdateMessageRequest) returns (Message) { +// option (google.api.http) = { +// patch: "/v1/messages/{message_id}" +// body: "message" +// }; +// } +// } +// message UpdateMessageRequest { +// string message_id = 1; // mapped to the URL +// Message message = 2; // mapped to the body +// } +// +// The following HTTP JSON to RPC mapping is enabled, where the +// representation of the JSON in the request body is determined by +// protos JSON encoding: +// +// HTTP | gRPC +// -----|----- +// `PATCH /v1/messages/123456 { "text": "Hi!" }` | `UpdateMessage(message_id: +// "123456" message { text: "Hi!" })` +// +// The special name `*` can be used in the body mapping to define that +// every field not bound by the path template should be mapped to the +// request body. This enables the following alternative definition of +// the update method: +// +// service Messaging { +// rpc UpdateMessage(Message) returns (Message) { +// option (google.api.http) = { +// patch: "/v1/messages/{message_id}" +// body: "*" +// }; +// } +// } +// message Message { +// string message_id = 1; +// string text = 2; +// } +// +// +// The following HTTP JSON to RPC mapping is enabled: +// +// HTTP | gRPC +// -----|----- +// `PATCH /v1/messages/123456 { "text": "Hi!" }` | `UpdateMessage(message_id: +// "123456" text: "Hi!")` +// +// Note that when using `*` in the body mapping, it is not possible to +// have HTTP parameters, as all fields not bound by the path end in +// the body. This makes this option more rarely used in practice when +// defining REST APIs. The common usage of `*` is in custom methods +// which don't use the URL at all for transferring data. +// +// It is possible to define multiple HTTP methods for one RPC by using +// the `additional_bindings` option. 
Example: +// +// service Messaging { +// rpc GetMessage(GetMessageRequest) returns (Message) { +// option (google.api.http) = { +// get: "/v1/messages/{message_id}" +// additional_bindings { +// get: "/v1/users/{user_id}/messages/{message_id}" +// } +// }; +// } +// } +// message GetMessageRequest { +// string message_id = 1; +// string user_id = 2; +// } +// +// This enables the following two alternative HTTP JSON to RPC mappings: +// +// HTTP | gRPC +// -----|----- +// `GET /v1/messages/123456` | `GetMessage(message_id: "123456")` +// `GET /v1/users/me/messages/123456` | `GetMessage(user_id: "me" message_id: +// "123456")` +// +// ## Rules for HTTP mapping +// +// 1. Leaf request fields (recursive expansion nested messages in the request +// message) are classified into three categories: +// - Fields referred by the path template. They are passed via the URL path. +// - Fields referred by the [HttpRule.body][google.api.HttpRule.body]. They are passed via the HTTP +// request body. +// - All other fields are passed via the URL query parameters, and the +// parameter name is the field path in the request message. A repeated +// field can be represented as multiple query parameters under the same +// name. +// 2. If [HttpRule.body][google.api.HttpRule.body] is "*", there is no URL query parameter, all fields +// are passed via URL path and HTTP request body. +// 3. If [HttpRule.body][google.api.HttpRule.body] is omitted, there is no HTTP request body, all +// fields are passed via URL path and URL query parameters. +// +// ### Path template syntax +// +// Template = "/" Segments [ Verb ] ; +// Segments = Segment { "/" Segment } ; +// Segment = "*" | "**" | LITERAL | Variable ; +// Variable = "{" FieldPath [ "=" Segments ] "}" ; +// FieldPath = IDENT { "." IDENT } ; +// Verb = ":" LITERAL ; +// +// The syntax `*` matches a single URL path segment. The syntax `**` matches +// zero or more URL path segments, which must be the last part of the URL path +// except the `Verb`. +// +// The syntax `Variable` matches part of the URL path as specified by its +// template. A variable template must not contain other variables. If a variable +// matches a single path segment, its template may be omitted, e.g. `{var}` +// is equivalent to `{var=*}`. +// +// The syntax `LITERAL` matches literal text in the URL path. If the `LITERAL` +// contains any reserved character, such characters should be percent-encoded +// before the matching. +// +// If a variable contains exactly one path segment, such as `"{var}"` or +// `"{var=*}"`, when such a variable is expanded into a URL path on the client +// side, all characters except `[-_.~0-9a-zA-Z]` are percent-encoded. The +// server side does the reverse decoding. Such variables show up in the +// [Discovery +// Document](https://developers.google.com/discovery/v1/reference/apis) as +// `{var}`. +// +// If a variable contains multiple path segments, such as `"{var=foo/*}"` +// or `"{var=**}"`, when such a variable is expanded into a URL path on the +// client side, all characters except `[-_.~/0-9a-zA-Z]` are percent-encoded. +// The server side does the reverse decoding, except "%2F" and "%2f" are left +// unchanged. Such variables show up in the +// [Discovery +// Document](https://developers.google.com/discovery/v1/reference/apis) as +// `{+var}`. +// +// ## Using gRPC API Service Configuration +// +// gRPC API Service Configuration (service config) is a configuration language +// for configuring a gRPC service to become a user-facing product. 
The +// service config is simply the YAML representation of the `google.api.Service` +// proto message. +// +// As an alternative to annotating your proto file, you can configure gRPC +// transcoding in your service config YAML files. You do this by specifying a +// `HttpRule` that maps the gRPC method to a REST endpoint, achieving the same +// effect as the proto annotation. This can be particularly useful if you +// have a proto that is reused in multiple services. Note that any transcoding +// specified in the service config will override any matching transcoding +// configuration in the proto. +// +// Example: +// +// http: +// rules: +// # Selects a gRPC method and applies HttpRule to it. +// - selector: example.v1.Messaging.GetMessage +// get: /v1/messages/{message_id}/{sub.subfield} +// +// ## Special notes +// +// When gRPC Transcoding is used to map a gRPC to JSON REST endpoints, the +// proto to JSON conversion must follow the [proto3 +// specification](https://developers.google.com/protocol-buffers/docs/proto3#json). +// +// While the single segment variable follows the semantics of +// [RFC 6570](https://tools.ietf.org/html/rfc6570) Section 3.2.2 Simple String +// Expansion, the multi segment variable **does not** follow RFC 6570 Section +// 3.2.3 Reserved Expansion. The reason is that the Reserved Expansion +// does not expand special characters like `?` and `#`, which would lead +// to invalid URLs. As the result, gRPC Transcoding uses a custom encoding +// for multi segment variables. +// +// The path variables **must not** refer to any repeated or mapped field, +// because client libraries are not capable of handling such variable expansion. +// +// The path variables **must not** capture the leading "/" character. The reason +// is that the most common use case "{var}" does not capture the leading "/" +// character. For consistency, all path variables must share the same behavior. +// +// Repeated message fields must not be mapped to URL query parameters, because +// no client library can support such complicated mapping. +// +// If an API needs to use a JSON array for request or response body, it can map +// the request or response body to a repeated field. However, some gRPC +// Transcoding implementations may not support this feature. +message HttpRule { + // Selects a method to which this rule applies. + // + // Refer to [selector][google.api.DocumentationRule.selector] for syntax details. + string selector = 1; + + // Determines the URL pattern is matched by this rules. This pattern can be + // used with any of the {get|put|post|delete|patch} methods. A custom method + // can be defined using the 'custom' field. + oneof pattern { + // Maps to HTTP GET. Used for listing and getting information about + // resources. + string get = 2; + + // Maps to HTTP PUT. Used for replacing a resource. + string put = 3; + + // Maps to HTTP POST. Used for creating a resource or performing an action. + string post = 4; + + // Maps to HTTP DELETE. Used for deleting a resource. + string delete = 5; + + // Maps to HTTP PATCH. Used for updating a resource. + string patch = 6; + + // The custom pattern is used for specifying an HTTP method that is not + // included in the `pattern` field, such as HEAD, or "*" to leave the + // HTTP method unspecified for this rule. The wild-card rule is useful + // for services that provide content to Web (HTML) clients. 
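+    //
+    // A minimal sketch of a custom binding (the HEAD method and the path
+    // below are illustrative only, not prescribed by this file):
+    //
+    //     option (google.api.http) = {
+    //       custom: {
+    //         kind: "HEAD"
+    //         path: "/v1/messages/{message_id}"
+    //       }
+    //     };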
+ CustomHttpPattern custom = 8; + } + + // The name of the request field whose value is mapped to the HTTP request + // body, or `*` for mapping all request fields not captured by the path + // pattern to the HTTP body, or omitted for not having any HTTP request body. + // + // NOTE: the referred field must be present at the top-level of the request + // message type. + string body = 7; + + // Optional. The name of the response field whose value is mapped to the HTTP + // response body. When omitted, the entire response message will be used + // as the HTTP response body. + // + // NOTE: The referred field must be present at the top-level of the response + // message type. + string response_body = 12; + + // Additional HTTP bindings for the selector. Nested bindings must + // not contain an `additional_bindings` field themselves (that is, + // the nesting may only be one level deep). + repeated HttpRule additional_bindings = 11; +} + +// A custom pattern is used for defining custom HTTP verb. +message CustomHttpPattern { + // The name of this custom HTTP verb. + string kind = 1; + + // The path matched by this custom verb. + string path = 2; +} diff --git a/_13_sponge-dtm-cache/http/third_party/google/api/httpbody.proto b/_13_sponge-dtm-cache/http/third_party/google/api/httpbody.proto new file mode 100644 index 0000000..1a5bb78 --- /dev/null +++ b/_13_sponge-dtm-cache/http/third_party/google/api/httpbody.proto @@ -0,0 +1,77 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.api; + +import "google/protobuf/any.proto"; + +option cc_enable_arenas = true; +option go_package = "google.golang.org/genproto/googleapis/api/httpbody;httpbody"; +option java_multiple_files = true; +option java_outer_classname = "HttpBodyProto"; +option java_package = "com.google.api"; +option objc_class_prefix = "GAPI"; + +// Message that represents an arbitrary HTTP body. It should only be used for +// payload formats that can't be represented as JSON, such as raw binary or +// an HTML page. +// +// +// This message can be used both in streaming and non-streaming API methods in +// the request as well as the response. +// +// It can be used as a top-level request field, which is convenient if one +// wants to extract parameters from either the URL or HTTP template into the +// request fields and also want access to the raw HTTP body. +// +// Example: +// +// message GetResourceRequest { +// // A unique request id. +// string request_id = 1; +// +// // The raw HTTP body is bound to this field. 
+// google.api.HttpBody http_body = 2; +// } +// +// service ResourceService { +// rpc GetResource(GetResourceRequest) returns (google.api.HttpBody); +// rpc UpdateResource(google.api.HttpBody) returns +// (google.protobuf.Empty); +// } +// +// Example with streaming methods: +// +// service CaldavService { +// rpc GetCalendar(stream google.api.HttpBody) +// returns (stream google.api.HttpBody); +// rpc UpdateCalendar(stream google.api.HttpBody) +// returns (stream google.api.HttpBody); +// } +// +// Use of this type only changes how the request and response bodies are +// handled, all other features will continue to work unchanged. +message HttpBody { + // The HTTP Content-Type header value specifying the content type of the body. + string content_type = 1; + + // The HTTP request/response body as raw binary. + bytes data = 2; + + // Application specific response metadata. Must be set in the first response + // for streaming APIs. + repeated google.protobuf.Any extensions = 3; +} diff --git a/_13_sponge-dtm-cache/http/third_party/google/api/label.proto b/_13_sponge-dtm-cache/http/third_party/google/api/label.proto new file mode 100644 index 0000000..62f6cfa --- /dev/null +++ b/_13_sponge-dtm-cache/http/third_party/google/api/label.proto @@ -0,0 +1,48 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.api; + +option cc_enable_arenas = true; +option go_package = "google.golang.org/genproto/googleapis/api/label;label"; +option java_multiple_files = true; +option java_outer_classname = "LabelProto"; +option java_package = "com.google.api"; +option objc_class_prefix = "GAPI"; + +// A description of a label. +message LabelDescriptor { + // Value types that can be used as label values. + enum ValueType { + // A variable-length string. This is the default. + STRING = 0; + + // Boolean; true or false. + BOOL = 1; + + // A 64-bit signed integer. + INT64 = 2; + } + + // The label key. + string key = 1; + + // The type of data that can be assigned to the label. + ValueType value_type = 2; + + // A human-readable description for the label. + string description = 3; +} diff --git a/_13_sponge-dtm-cache/http/third_party/google/api/launch_stage.proto b/_13_sponge-dtm-cache/http/third_party/google/api/launch_stage.proto new file mode 100644 index 0000000..f5827cc --- /dev/null +++ b/_13_sponge-dtm-cache/http/third_party/google/api/launch_stage.proto @@ -0,0 +1,72 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.api; + +option go_package = "google.golang.org/genproto/googleapis/api;api"; +option java_multiple_files = true; +option java_outer_classname = "LaunchStageProto"; +option java_package = "com.google.api"; +option objc_class_prefix = "GAPI"; + +// The launch stage as defined by [Google Cloud Platform +// Launch Stages](http://cloud.google.com/terms/launch-stages). +enum LaunchStage { + // Do not use this default value. + LAUNCH_STAGE_UNSPECIFIED = 0; + + // The feature is not yet implemented. Users can not use it. + UNIMPLEMENTED = 6; + + // Prelaunch features are hidden from users and are only visible internally. + PRELAUNCH = 7; + + // Early Access features are limited to a closed group of testers. To use + // these features, you must sign up in advance and sign a Trusted Tester + // agreement (which includes confidentiality provisions). These features may + // be unstable, changed in backward-incompatible ways, and are not + // guaranteed to be released. + EARLY_ACCESS = 1; + + // Alpha is a limited availability test for releases before they are cleared + // for widespread use. By Alpha, all significant design issues are resolved + // and we are in the process of verifying functionality. Alpha customers + // need to apply for access, agree to applicable terms, and have their + // projects allowlisted. Alpha releases don’t have to be feature complete, + // no SLAs are provided, and there are no technical support obligations, but + // they will be far enough along that customers can actually use them in + // test environments or for limited-use tests -- just like they would in + // normal production cases. + ALPHA = 2; + + // Beta is the point at which we are ready to open a release for any + // customer to use. There are no SLA or technical support obligations in a + // Beta release. Products will be complete from a feature perspective, but + // may have some open outstanding issues. Beta releases are suitable for + // limited production use cases. + BETA = 3; + + // GA features are open to all developers and are considered stable and + // fully qualified for production use. + GA = 4; + + // Deprecated features are scheduled to be shut down and removed. For more + // information, see the "Deprecation Policy" section of our [Terms of + // Service](https://cloud.google.com/terms/) + // and the [Google Cloud Platform Subject to the Deprecation + // Policy](https://cloud.google.com/terms/deprecation) documentation. + DEPRECATED = 5; +} diff --git a/_13_sponge-dtm-cache/http/third_party/google/api/log.proto b/_13_sponge-dtm-cache/http/third_party/google/api/log.proto new file mode 100644 index 0000000..12e8c1c --- /dev/null +++ b/_13_sponge-dtm-cache/http/third_party/google/api/log.proto @@ -0,0 +1,54 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +syntax = "proto3"; + +package google.api; + +import "google/api/label.proto"; + +option go_package = "google.golang.org/genproto/googleapis/api/serviceconfig;serviceconfig"; +option java_multiple_files = true; +option java_outer_classname = "LogProto"; +option java_package = "com.google.api"; +option objc_class_prefix = "GAPI"; + +// A description of a log type. Example in YAML format: +// +// - name: library.googleapis.com/activity_history +// description: The history of borrowing and returning library items. +// display_name: Activity +// labels: +// - key: /customer_id +// description: Identifier of a library customer +message LogDescriptor { + // The name of the log. It must be less than 512 characters long and can + // include the following characters: upper- and lower-case alphanumeric + // characters [A-Za-z0-9], and punctuation characters including + // slash, underscore, hyphen, period [/_-.]. + string name = 1; + + // The set of labels that are available to describe a specific log entry. + // Runtime requests that contain labels not specified here are + // considered invalid. + repeated LabelDescriptor labels = 2; + + // A human-readable description of this log. This information appears in + // the documentation and can contain details. + string description = 3; + + // The human-readable name for this log. This information appears on + // the user interface and should be concise. + string display_name = 4; +} diff --git a/_13_sponge-dtm-cache/http/third_party/google/api/logging.proto b/_13_sponge-dtm-cache/http/third_party/google/api/logging.proto new file mode 100644 index 0000000..e9ae9bf --- /dev/null +++ b/_13_sponge-dtm-cache/http/third_party/google/api/logging.proto @@ -0,0 +1,80 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.api; + +option go_package = "google.golang.org/genproto/googleapis/api/serviceconfig;serviceconfig"; +option java_multiple_files = true; +option java_outer_classname = "LoggingProto"; +option java_package = "com.google.api"; +option objc_class_prefix = "GAPI"; + +// Logging configuration of the service. +// +// The following example shows how to configure logs to be sent to the +// producer and consumer projects. In the example, the `activity_history` +// log is sent to both the producer and consumer projects, whereas the +// `purchase_history` log is only sent to the producer project. +// +// monitored_resources: +// - type: library.googleapis.com/branch +// labels: +// - key: /city +// description: The city where the library branch is located in. +// - key: /name +// description: The name of the branch. 
+// logs: +// - name: activity_history +// labels: +// - key: /customer_id +// - name: purchase_history +// logging: +// producer_destinations: +// - monitored_resource: library.googleapis.com/branch +// logs: +// - activity_history +// - purchase_history +// consumer_destinations: +// - monitored_resource: library.googleapis.com/branch +// logs: +// - activity_history +message Logging { + // Configuration of a specific logging destination (the producer project + // or the consumer project). + message LoggingDestination { + // The monitored resource type. The type must be defined in the + // [Service.monitored_resources][google.api.Service.monitored_resources] section. + string monitored_resource = 3; + + // Names of the logs to be sent to this destination. Each name must + // be defined in the [Service.logs][google.api.Service.logs] section. If the log name is + // not a domain scoped name, it will be automatically prefixed with + // the service name followed by "/". + repeated string logs = 1; + } + + // Logging configurations for sending logs to the producer project. + // There can be multiple producer destinations, each one must have a + // different monitored resource type. A log can be used in at most + // one producer destination. + repeated LoggingDestination producer_destinations = 1; + + // Logging configurations for sending logs to the consumer project. + // There can be multiple consumer destinations, each one must have a + // different monitored resource type. A log can be used in at most + // one consumer destination. + repeated LoggingDestination consumer_destinations = 2; +} diff --git a/_13_sponge-dtm-cache/http/third_party/google/api/metric.proto b/_13_sponge-dtm-cache/http/third_party/google/api/metric.proto new file mode 100644 index 0000000..a830405 --- /dev/null +++ b/_13_sponge-dtm-cache/http/third_party/google/api/metric.proto @@ -0,0 +1,264 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.api; + +import "google/api/label.proto"; +import "google/api/launch_stage.proto"; +import "google/protobuf/duration.proto"; + +option go_package = "google.golang.org/genproto/googleapis/api/metric;metric"; +option java_multiple_files = true; +option java_outer_classname = "MetricProto"; +option java_package = "com.google.api"; +option objc_class_prefix = "GAPI"; + +// Defines a metric type and its schema. Once a metric descriptor is created, +// deleting or altering it stops data collection and makes the metric type's +// existing data unusable. +// +message MetricDescriptor { + // Additional annotations that can be used to guide the usage of a metric. + message MetricDescriptorMetadata { + // Deprecated. Must use the [MetricDescriptor.launch_stage][google.api.MetricDescriptor.launch_stage] instead. + LaunchStage launch_stage = 1 [deprecated = true]; + + // The sampling period of metric data points. 
For metrics which are written + // periodically, consecutive data points are stored at this time interval, + // excluding data loss due to errors. Metrics with a higher granularity have + // a smaller sampling period. + google.protobuf.Duration sample_period = 2; + + // The delay of data points caused by ingestion. Data points older than this + // age are guaranteed to be ingested and available to be read, excluding + // data loss due to errors. + google.protobuf.Duration ingest_delay = 3; + } + + // The kind of measurement. It describes how the data is reported. + // For information on setting the start time and end time based on + // the MetricKind, see [TimeInterval][google.monitoring.v3.TimeInterval]. + enum MetricKind { + // Do not use this default value. + METRIC_KIND_UNSPECIFIED = 0; + + // An instantaneous measurement of a value. + GAUGE = 1; + + // The change in a value during a time interval. + DELTA = 2; + + // A value accumulated over a time interval. Cumulative + // measurements in a time series should have the same start time + // and increasing end times, until an event resets the cumulative + // value to zero and sets a new start time for the following + // points. + CUMULATIVE = 3; + } + + // The value type of a metric. + enum ValueType { + // Do not use this default value. + VALUE_TYPE_UNSPECIFIED = 0; + + // The value is a boolean. + // This value type can be used only if the metric kind is `GAUGE`. + BOOL = 1; + + // The value is a signed 64-bit integer. + INT64 = 2; + + // The value is a double precision floating point number. + DOUBLE = 3; + + // The value is a text string. + // This value type can be used only if the metric kind is `GAUGE`. + STRING = 4; + + // The value is a [`Distribution`][google.api.Distribution]. + DISTRIBUTION = 5; + + // The value is money. + MONEY = 6; + } + + // The resource name of the metric descriptor. + string name = 1; + + // The metric type, including its DNS name prefix. The type is not + // URL-encoded. All user-defined metric types have the DNS name + // `custom.googleapis.com` or `external.googleapis.com`. Metric types should + // use a natural hierarchical grouping. For example: + // + // "custom.googleapis.com/invoice/paid/amount" + // "external.googleapis.com/prometheus/up" + // "appengine.googleapis.com/http/server/response_latencies" + string type = 8; + + // The set of labels that can be used to describe a specific + // instance of this metric type. For example, the + // `appengine.googleapis.com/http/server/response_latencies` metric + // type has a label for the HTTP response code, `response_code`, so + // you can look at latencies for successful responses or just + // for responses that failed. + repeated LabelDescriptor labels = 2; + + // Whether the metric records instantaneous values, changes to a value, etc. + // Some combinations of `metric_kind` and `value_type` might not be supported. + MetricKind metric_kind = 3; + + // Whether the measurement is an integer, a floating-point number, etc. + // Some combinations of `metric_kind` and `value_type` might not be supported. + ValueType value_type = 4; + + // The units in which the metric value is reported. It is only applicable + // if the `value_type` is `INT64`, `DOUBLE`, or `DISTRIBUTION`. The `unit` + // defines the representation of the stored metric values. + // + // Different systems may scale the values to be more easily displayed (so a + // value of `0.02KBy` _might_ be displayed as `20By`, and a value of + // `3523KBy` _might_ be displayed as `3.5MBy`). 
However, if the `unit` is + // `KBy`, then the value of the metric is always in thousands of bytes, no + // matter how it may be displayed.. + // + // If you want a custom metric to record the exact number of CPU-seconds used + // by a job, you can create an `INT64 CUMULATIVE` metric whose `unit` is + // `s{CPU}` (or equivalently `1s{CPU}` or just `s`). If the job uses 12,005 + // CPU-seconds, then the value is written as `12005`. + // + // Alternatively, if you want a custom metric to record data in a more + // granular way, you can create a `DOUBLE CUMULATIVE` metric whose `unit` is + // `ks{CPU}`, and then write the value `12.005` (which is `12005/1000`), + // or use `Kis{CPU}` and write `11.723` (which is `12005/1024`). + // + // The supported units are a subset of [The Unified Code for Units of + // Measure](http://unitsofmeasure.org/ucum.html) standard: + // + // **Basic units (UNIT)** + // + // * `bit` bit + // * `By` byte + // * `s` second + // * `min` minute + // * `h` hour + // * `d` day + // * `1` dimensionless + // + // **Prefixes (PREFIX)** + // + // * `k` kilo (10^3) + // * `M` mega (10^6) + // * `G` giga (10^9) + // * `T` tera (10^12) + // * `P` peta (10^15) + // * `E` exa (10^18) + // * `Z` zetta (10^21) + // * `Y` yotta (10^24) + // + // * `m` milli (10^-3) + // * `u` micro (10^-6) + // * `n` nano (10^-9) + // * `p` pico (10^-12) + // * `f` femto (10^-15) + // * `a` atto (10^-18) + // * `z` zepto (10^-21) + // * `y` yocto (10^-24) + // + // * `Ki` kibi (2^10) + // * `Mi` mebi (2^20) + // * `Gi` gibi (2^30) + // * `Ti` tebi (2^40) + // * `Pi` pebi (2^50) + // + // **Grammar** + // + // The grammar also includes these connectors: + // + // * `/` division or ratio (as an infix operator). For examples, + // `kBy/{email}` or `MiBy/10ms` (although you should almost never + // have `/s` in a metric `unit`; rates should always be computed at + // query time from the underlying cumulative or delta value). + // * `.` multiplication or composition (as an infix operator). For + // examples, `GBy.d` or `k{watt}.h`. + // + // The grammar for a unit is as follows: + // + // Expression = Component { "." Component } { "/" Component } ; + // + // Component = ( [ PREFIX ] UNIT | "%" ) [ Annotation ] + // | Annotation + // | "1" + // ; + // + // Annotation = "{" NAME "}" ; + // + // Notes: + // + // * `Annotation` is just a comment if it follows a `UNIT`. If the annotation + // is used alone, then the unit is equivalent to `1`. For examples, + // `{request}/s == 1/s`, `By{transmitted}/s == By/s`. + // * `NAME` is a sequence of non-blank printable ASCII characters not + // containing `{` or `}`. + // * `1` represents a unitary [dimensionless + // unit](https://en.wikipedia.org/wiki/Dimensionless_quantity) of 1, such + // as in `1/s`. It is typically used when none of the basic units are + // appropriate. For example, "new users per day" can be represented as + // `1/d` or `{new-users}/d` (and a metric value `5` would mean "5 new + // users). Alternatively, "thousands of page views per day" would be + // represented as `1000/d` or `k1/d` or `k{page_views}/d` (and a metric + // value of `5.3` would mean "5300 page views per day"). + // * `%` represents dimensionless value of 1/100, and annotates values giving + // a percentage (so the metric values are typically in the range of 0..100, + // and a metric value `3` means "3 percent"). 
+ // * `10^2.%` indicates a metric contains a ratio, typically in the range + // 0..1, that will be multiplied by 100 and displayed as a percentage + // (so a metric value `0.03` means "3 percent"). + string unit = 5; + + // A detailed description of the metric, which can be used in documentation. + string description = 6; + + // A concise name for the metric, which can be displayed in user interfaces. + // Use sentence case without an ending period, for example "Request count". + // This field is optional but it is recommended to be set for any metrics + // associated with user-visible concepts, such as Quota. + string display_name = 7; + + // Optional. Metadata which can be used to guide usage of the metric. + MetricDescriptorMetadata metadata = 10; + + // Optional. The launch stage of the metric definition. + LaunchStage launch_stage = 12; + + // Read-only. If present, then a [time + // series][google.monitoring.v3.TimeSeries], which is identified partially by + // a metric type and a [MonitoredResourceDescriptor][google.api.MonitoredResourceDescriptor], that is associated + // with this metric type can only be associated with one of the monitored + // resource types listed here. + repeated string monitored_resource_types = 13; +} + +// A specific metric, identified by specifying values for all of the +// labels of a [`MetricDescriptor`][google.api.MetricDescriptor]. +message Metric { + // An existing metric type, see [google.api.MetricDescriptor][google.api.MetricDescriptor]. + // For example, `custom.googleapis.com/invoice/paid/amount`. + string type = 3; + + // The set of label values that uniquely identify this metric. All + // labels listed in the `MetricDescriptor` must be assigned values. + map labels = 2; +} diff --git a/_13_sponge-dtm-cache/http/third_party/google/api/monitored_resource.proto b/_13_sponge-dtm-cache/http/third_party/google/api/monitored_resource.proto new file mode 100644 index 0000000..05ed6e8 --- /dev/null +++ b/_13_sponge-dtm-cache/http/third_party/google/api/monitored_resource.proto @@ -0,0 +1,118 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.api; + +import "google/api/label.proto"; +import "google/api/launch_stage.proto"; +import "google/protobuf/struct.proto"; + +option cc_enable_arenas = true; +option go_package = "google.golang.org/genproto/googleapis/api/monitoredres;monitoredres"; +option java_multiple_files = true; +option java_outer_classname = "MonitoredResourceProto"; +option java_package = "com.google.api"; +option objc_class_prefix = "GAPI"; + +// An object that describes the schema of a [MonitoredResource][google.api.MonitoredResource] object using a +// type name and a set of labels. For example, the monitored resource +// descriptor for Google Compute Engine VM instances has a type of +// `"gce_instance"` and specifies the use of the labels `"instance_id"` and +// `"zone"` to identify particular VM instances. 
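+//
+// As a minimal sketch (assuming the service-config YAML form used by the
+// other examples in these files; the descriptions are illustrative only),
+// such a descriptor could be declared as:
+//
+//     monitored_resources:
+//     - type: "gce_instance"
+//       display_name: "GCE VM Instance"
+//       labels:
+//       - key: "instance_id"
+//         description: "The numeric VM instance identifier."
+//       - key: "zone"
+//         description: "The zone in which the VM is running."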
+// +// Different APIs can support different monitored resource types. APIs generally +// provide a `list` method that returns the monitored resource descriptors used +// by the API. +// +message MonitoredResourceDescriptor { + // Optional. The resource name of the monitored resource descriptor: + // `"projects/{project_id}/monitoredResourceDescriptors/{type}"` where + // {type} is the value of the `type` field in this object and + // {project_id} is a project ID that provides API-specific context for + // accessing the type. APIs that do not use project information can use the + // resource name format `"monitoredResourceDescriptors/{type}"`. + string name = 5; + + // Required. The monitored resource type. For example, the type + // `"cloudsql_database"` represents databases in Google Cloud SQL. + string type = 1; + + // Optional. A concise name for the monitored resource type that might be + // displayed in user interfaces. It should be a Title Cased Noun Phrase, + // without any article or other determiners. For example, + // `"Google Cloud SQL Database"`. + string display_name = 2; + + // Optional. A detailed description of the monitored resource type that might + // be used in documentation. + string description = 3; + + // Required. A set of labels used to describe instances of this monitored + // resource type. For example, an individual Google Cloud SQL database is + // identified by values for the labels `"database_id"` and `"zone"`. + repeated LabelDescriptor labels = 4; + + // Optional. The launch stage of the monitored resource definition. + LaunchStage launch_stage = 7; +} + +// An object representing a resource that can be used for monitoring, logging, +// billing, or other purposes. Examples include virtual machine instances, +// databases, and storage devices such as disks. The `type` field identifies a +// [MonitoredResourceDescriptor][google.api.MonitoredResourceDescriptor] object that describes the resource's +// schema. Information in the `labels` field identifies the actual resource and +// its attributes according to the schema. For example, a particular Compute +// Engine VM instance could be represented by the following object, because the +// [MonitoredResourceDescriptor][google.api.MonitoredResourceDescriptor] for `"gce_instance"` has labels +// `"instance_id"` and `"zone"`: +// +// { "type": "gce_instance", +// "labels": { "instance_id": "12345678901234", +// "zone": "us-central1-a" }} +message MonitoredResource { + // Required. The monitored resource type. This field must match + // the `type` field of a [MonitoredResourceDescriptor][google.api.MonitoredResourceDescriptor] object. For + // example, the type of a Compute Engine VM instance is `gce_instance`. + string type = 1; + + // Required. Values for all of the labels listed in the associated monitored + // resource descriptor. For example, Compute Engine VM instances use the + // labels `"project_id"`, `"instance_id"`, and `"zone"`. + map labels = 2; +} + +// Auxiliary metadata for a [MonitoredResource][google.api.MonitoredResource] object. +// [MonitoredResource][google.api.MonitoredResource] objects contain the minimum set of information to +// uniquely identify a monitored resource instance. There is some other useful +// auxiliary metadata. Monitoring and Logging use an ingestion +// pipeline to extract metadata for cloud resources of all types, and store +// the metadata in this message. +message MonitoredResourceMetadata { + // Output only. Values for predefined system metadata labels. 
+ // System labels are a kind of metadata extracted by Google, including + // "machine_image", "vpc", "subnet_id", + // "security_group", "name", etc. + // System label values can be only strings, Boolean values, or a list of + // strings. For example: + // + // { "name": "my-test-instance", + // "security_group": ["a", "b", "c"], + // "spot_instance": false } + google.protobuf.Struct system_labels = 1; + + // Output only. A map of user-defined metadata labels. + map user_labels = 2; +} diff --git a/_13_sponge-dtm-cache/http/third_party/google/api/monitoring.proto b/_13_sponge-dtm-cache/http/third_party/google/api/monitoring.proto new file mode 100644 index 0000000..0acbfc9 --- /dev/null +++ b/_13_sponge-dtm-cache/http/third_party/google/api/monitoring.proto @@ -0,0 +1,105 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.api; + +option go_package = "google.golang.org/genproto/googleapis/api/serviceconfig;serviceconfig"; +option java_multiple_files = true; +option java_outer_classname = "MonitoringProto"; +option java_package = "com.google.api"; +option objc_class_prefix = "GAPI"; + +// Monitoring configuration of the service. +// +// The example below shows how to configure monitored resources and metrics +// for monitoring. In the example, a monitored resource and two metrics are +// defined. The `library.googleapis.com/book/returned_count` metric is sent +// to both producer and consumer projects, whereas the +// `library.googleapis.com/book/num_overdue` metric is only sent to the +// consumer project. +// +// monitored_resources: +// - type: library.googleapis.com/Branch +// display_name: "Library Branch" +// description: "A branch of a library." +// launch_stage: GA +// labels: +// - key: resource_container +// description: "The Cloud container (ie. project id) for the Branch." +// - key: location +// description: "The location of the library branch." +// - key: branch_id +// description: "The id of the branch." +// metrics: +// - name: library.googleapis.com/book/returned_count +// display_name: "Books Returned" +// description: "The count of books that have been returned." +// launch_stage: GA +// metric_kind: DELTA +// value_type: INT64 +// unit: "1" +// labels: +// - key: customer_id +// description: "The id of the customer." +// - name: library.googleapis.com/book/num_overdue +// display_name: "Books Overdue" +// description: "The current number of overdue books." +// launch_stage: GA +// metric_kind: GAUGE +// value_type: INT64 +// unit: "1" +// labels: +// - key: customer_id +// description: "The id of the customer." 
+// monitoring: +// producer_destinations: +// - monitored_resource: library.googleapis.com/Branch +// metrics: +// - library.googleapis.com/book/returned_count +// consumer_destinations: +// - monitored_resource: library.googleapis.com/Branch +// metrics: +// - library.googleapis.com/book/returned_count +// - library.googleapis.com/book/num_overdue +message Monitoring { + // Configuration of a specific monitoring destination (the producer project + // or the consumer project). + message MonitoringDestination { + // The monitored resource type. The type must be defined in + // [Service.monitored_resources][google.api.Service.monitored_resources] section. + string monitored_resource = 1; + + // Types of the metrics to report to this monitoring destination. + // Each type must be defined in [Service.metrics][google.api.Service.metrics] section. + repeated string metrics = 2; + } + + // Monitoring configurations for sending metrics to the producer project. + // There can be multiple producer destinations. A monitored resource type may + // appear in multiple monitoring destinations if different aggregations are + // needed for different sets of metrics associated with that monitored + // resource type. A monitored resource and metric pair may only be used once + // in the Monitoring configuration. + repeated MonitoringDestination producer_destinations = 1; + + // Monitoring configurations for sending metrics to the consumer project. + // There can be multiple consumer destinations. A monitored resource type may + // appear in multiple monitoring destinations if different aggregations are + // needed for different sets of metrics associated with that monitored + // resource type. A monitored resource and metric pair may only be used once + // in the Monitoring configuration. + repeated MonitoringDestination consumer_destinations = 2; +} diff --git a/_13_sponge-dtm-cache/http/third_party/google/api/quota.proto b/_13_sponge-dtm-cache/http/third_party/google/api/quota.proto new file mode 100644 index 0000000..c9c7899 --- /dev/null +++ b/_13_sponge-dtm-cache/http/third_party/google/api/quota.proto @@ -0,0 +1,183 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.api; + +option go_package = "google.golang.org/genproto/googleapis/api/serviceconfig;serviceconfig"; +option java_multiple_files = true; +option java_outer_classname = "QuotaProto"; +option java_package = "com.google.api"; +option objc_class_prefix = "GAPI"; + +// Quota configuration helps to achieve fairness and budgeting in service +// usage. +// +// The metric based quota configuration works this way: +// - The service configuration defines a set of metrics. +// - For API calls, the quota.metric_rules maps methods to metrics with +// corresponding costs. +// - The quota.limits defines limits on the metrics, which will be used for +// quota checks at runtime. 
+// +// An example quota configuration in yaml format: +// +// quota: +// limits: +// +// - name: apiWriteQpsPerProject +// metric: library.googleapis.com/write_calls +// unit: "1/min/{project}" # rate limit for consumer projects +// values: +// STANDARD: 10000 +// +// +// # The metric rules bind all methods to the read_calls metric, +// # except for the UpdateBook and DeleteBook methods. These two methods +// # are mapped to the write_calls metric, with the UpdateBook method +// # consuming at twice rate as the DeleteBook method. +// metric_rules: +// - selector: "*" +// metric_costs: +// library.googleapis.com/read_calls: 1 +// - selector: google.example.library.v1.LibraryService.UpdateBook +// metric_costs: +// library.googleapis.com/write_calls: 2 +// - selector: google.example.library.v1.LibraryService.DeleteBook +// metric_costs: +// library.googleapis.com/write_calls: 1 +// +// Corresponding Metric definition: +// +// metrics: +// - name: library.googleapis.com/read_calls +// display_name: Read requests +// metric_kind: DELTA +// value_type: INT64 +// +// - name: library.googleapis.com/write_calls +// display_name: Write requests +// metric_kind: DELTA +// value_type: INT64 +// +// +message Quota { + // List of `QuotaLimit` definitions for the service. + repeated QuotaLimit limits = 3; + + // List of `MetricRule` definitions, each one mapping a selected method to one + // or more metrics. + repeated MetricRule metric_rules = 4; +} + +// Bind API methods to metrics. Binding a method to a metric causes that +// metric's configured quota behaviors to apply to the method call. +message MetricRule { + // Selects the methods to which this rule applies. + // + // Refer to [selector][google.api.DocumentationRule.selector] for syntax details. + string selector = 1; + + // Metrics to update when the selected methods are called, and the associated + // cost applied to each metric. + // + // The key of the map is the metric name, and the values are the amount + // increased for the metric against which the quota limits are defined. + // The value must not be negative. + map metric_costs = 2; +} + +// `QuotaLimit` defines a specific limit that applies over a specified duration +// for a limit type. There can be at most one limit for a duration and limit +// type combination defined within a `QuotaGroup`. +message QuotaLimit { + // Name of the quota limit. + // + // The name must be provided, and it must be unique within the service. The + // name can only include alphanumeric characters as well as '-'. + // + // The maximum length of the limit name is 64 characters. + string name = 6; + + // Optional. User-visible, extended description for this quota limit. + // Should be used only when more context is needed to understand this limit + // than provided by the limit's display name (see: `display_name`). + string description = 2; + + // Default number of tokens that can be consumed during the specified + // duration. This is the number of tokens assigned when a client + // application developer activates the service for his/her project. + // + // Specifying a value of 0 will block all requests. This can be used if you + // are provisioning quota to selected consumers and blocking others. + // Similarly, a value of -1 will indicate an unlimited quota. No other + // negative values are allowed. + // + // Used by group-based quotas only. + int64 default_limit = 3; + + // Maximum number of tokens that can be consumed during the specified + // duration. 
Client application developers can override the default limit up + // to this maximum. If specified, this value cannot be set to a value less + // than the default limit. If not specified, it is set to the default limit. + // + // To allow clients to apply overrides with no upper bound, set this to -1, + // indicating unlimited maximum quota. + // + // Used by group-based quotas only. + int64 max_limit = 4; + + // Free tier value displayed in the Developers Console for this limit. + // The free tier is the number of tokens that will be subtracted from the + // billed amount when billing is enabled. + // This field can only be set on a limit with duration "1d", in a billable + // group; it is invalid on any other limit. If this field is not set, it + // defaults to 0, indicating that there is no free tier for this service. + // + // Used by group-based quotas only. + int64 free_tier = 7; + + // Duration of this limit in textual notation. Must be "100s" or "1d". + // + // Used by group-based quotas only. + string duration = 5; + + // The name of the metric this quota limit applies to. The quota limits with + // the same metric will be checked together during runtime. The metric must be + // defined within the service config. + string metric = 8; + + // Specify the unit of the quota limit. It uses the same syntax as + // [Metric.unit][]. The supported unit kinds are determined by the quota + // backend system. + // + // Here are some examples: + // * "1/min/{project}" for quota per minute per project. + // + // Note: the order of unit components is insignificant. + // The "1" at the beginning is required to follow the metric unit syntax. + string unit = 9; + + // Tiered limit values. You must specify this as a key:value pair, with an + // integer value that is the maximum number of requests allowed for the + // specified unit. Currently only STANDARD is supported. + map values = 10; + + // User-visible display name for this limit. + // Optional. If not set, the UI will provide a default display name based on + // the quota configuration. This field can be used to override the default + // display name generated from the configuration. + string display_name = 12; +} diff --git a/_13_sponge-dtm-cache/http/third_party/google/api/resource.proto b/_13_sponge-dtm-cache/http/third_party/google/api/resource.proto new file mode 100644 index 0000000..fd9ee66 --- /dev/null +++ b/_13_sponge-dtm-cache/http/third_party/google/api/resource.proto @@ -0,0 +1,299 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +syntax = "proto3"; + +package google.api; + +import "google/protobuf/descriptor.proto"; + +option cc_enable_arenas = true; +option go_package = "google.golang.org/genproto/googleapis/api/annotations;annotations"; +option java_multiple_files = true; +option java_outer_classname = "ResourceProto"; +option java_package = "com.google.api"; +option objc_class_prefix = "GAPI"; + +extend google.protobuf.FieldOptions { + // An annotation that describes a resource reference, see + // [ResourceReference][]. + google.api.ResourceReference resource_reference = 1055; +} + +extend google.protobuf.FileOptions { + // An annotation that describes a resource definition without a corresponding + // message; see [ResourceDescriptor][]. + repeated google.api.ResourceDescriptor resource_definition = 1053; +} + +extend google.protobuf.MessageOptions { + // An annotation that describes a resource definition, see + // [ResourceDescriptor][]. + google.api.ResourceDescriptor resource = 1053; +} + +// A simple descriptor of a resource type. +// +// ResourceDescriptor annotates a resource message (either by means of a +// protobuf annotation or use in the service config), and associates the +// resource's schema, the resource type, and the pattern of the resource name. +// +// Example: +// +// message Topic { +// // Indicates this message defines a resource schema. +// // Declares the resource type in the format of {service}/{kind}. +// // For Kubernetes resources, the format is {api group}/{kind}. +// option (google.api.resource) = { +// type: "pubsub.googleapis.com/Topic" +// name_descriptor: { +// pattern: "projects/{project}/topics/{topic}" +// parent_type: "cloudresourcemanager.googleapis.com/Project" +// parent_name_extractor: "projects/{project}" +// } +// }; +// } +// +// The ResourceDescriptor Yaml config will look like: +// +// resources: +// - type: "pubsub.googleapis.com/Topic" +// name_descriptor: +// - pattern: "projects/{project}/topics/{topic}" +// parent_type: "cloudresourcemanager.googleapis.com/Project" +// parent_name_extractor: "projects/{project}" +// +// Sometimes, resources have multiple patterns, typically because they can +// live under multiple parents. 
+// +// Example: +// +// message LogEntry { +// option (google.api.resource) = { +// type: "logging.googleapis.com/LogEntry" +// name_descriptor: { +// pattern: "projects/{project}/logs/{log}" +// parent_type: "cloudresourcemanager.googleapis.com/Project" +// parent_name_extractor: "projects/{project}" +// } +// name_descriptor: { +// pattern: "folders/{folder}/logs/{log}" +// parent_type: "cloudresourcemanager.googleapis.com/Folder" +// parent_name_extractor: "folders/{folder}" +// } +// name_descriptor: { +// pattern: "organizations/{organization}/logs/{log}" +// parent_type: "cloudresourcemanager.googleapis.com/Organization" +// parent_name_extractor: "organizations/{organization}" +// } +// name_descriptor: { +// pattern: "billingAccounts/{billing_account}/logs/{log}" +// parent_type: "billing.googleapis.com/BillingAccount" +// parent_name_extractor: "billingAccounts/{billing_account}" +// } +// }; +// } +// +// The ResourceDescriptor Yaml config will look like: +// +// resources: +// - type: 'logging.googleapis.com/LogEntry' +// name_descriptor: +// - pattern: "projects/{project}/logs/{log}" +// parent_type: "cloudresourcemanager.googleapis.com/Project" +// parent_name_extractor: "projects/{project}" +// - pattern: "folders/{folder}/logs/{log}" +// parent_type: "cloudresourcemanager.googleapis.com/Folder" +// parent_name_extractor: "folders/{folder}" +// - pattern: "organizations/{organization}/logs/{log}" +// parent_type: "cloudresourcemanager.googleapis.com/Organization" +// parent_name_extractor: "organizations/{organization}" +// - pattern: "billingAccounts/{billing_account}/logs/{log}" +// parent_type: "billing.googleapis.com/BillingAccount" +// parent_name_extractor: "billingAccounts/{billing_account}" +// +// For flexible resources, the resource name doesn't contain parent names, but +// the resource itself has parents for policy evaluation. +// +// Example: +// +// message Shelf { +// option (google.api.resource) = { +// type: "library.googleapis.com/Shelf" +// name_descriptor: { +// pattern: "shelves/{shelf}" +// parent_type: "cloudresourcemanager.googleapis.com/Project" +// } +// name_descriptor: { +// pattern: "shelves/{shelf}" +// parent_type: "cloudresourcemanager.googleapis.com/Folder" +// } +// }; +// } +// +// The ResourceDescriptor Yaml config will look like: +// +// resources: +// - type: 'library.googleapis.com/Shelf' +// name_descriptor: +// - pattern: "shelves/{shelf}" +// parent_type: "cloudresourcemanager.googleapis.com/Project" +// - pattern: "shelves/{shelf}" +// parent_type: "cloudresourcemanager.googleapis.com/Folder" +message ResourceDescriptor { + // A description of the historical or future-looking state of the + // resource pattern. + enum History { + // The "unset" value. + HISTORY_UNSPECIFIED = 0; + + // The resource originally had one pattern and launched as such, and + // additional patterns were added later. + ORIGINALLY_SINGLE_PATTERN = 1; + + // The resource has one pattern, but the API owner expects to add more + // later. (This is the inverse of ORIGINALLY_SINGLE_PATTERN, and prevents + // that from being necessary once there are multiple patterns.) + FUTURE_MULTI_PATTERN = 2; + } + + // A flag representing a specific style that a resource claims to conform to. + enum Style { + // The unspecified value. Do not use. + STYLE_UNSPECIFIED = 0; + + // This resource is intended to be "declarative-friendly". 
+ // + // Declarative-friendly resources must be more strictly consistent, and + // setting this to true communicates to tools that this resource should + // adhere to declarative-friendly expectations. + // + // Note: This is used by the API linter (linter.aip.dev) to enable + // additional checks. + DECLARATIVE_FRIENDLY = 1; + } + + // The resource type. It must be in the format of + // {service_name}/{resource_type_kind}. The `resource_type_kind` must be + // singular and must not include version numbers. + // + // Example: `storage.googleapis.com/Bucket` + // + // The value of the resource_type_kind must follow the regular expression + // /[A-Za-z][a-zA-Z0-9]+/. It should start with an upper case character and + // should use PascalCase (UpperCamelCase). The maximum number of + // characters allowed for the `resource_type_kind` is 100. + string type = 1; + + // Optional. The relative resource name pattern associated with this resource + // type. The DNS prefix of the full resource name shouldn't be specified here. + // + // The path pattern must follow the syntax, which aligns with HTTP binding + // syntax: + // + // Template = Segment { "/" Segment } ; + // Segment = LITERAL | Variable ; + // Variable = "{" LITERAL "}" ; + // + // Examples: + // + // - "projects/{project}/topics/{topic}" + // - "projects/{project}/knowledgeBases/{knowledge_base}" + // + // The components in braces correspond to the IDs for each resource in the + // hierarchy. It is expected that, if multiple patterns are provided, + // the same component name (e.g. "project") refers to IDs of the same + // type of resource. + repeated string pattern = 2; + + // Optional. The field on the resource that designates the resource name + // field. If omitted, this is assumed to be "name". + string name_field = 3; + + // Optional. The historical or future-looking state of the resource pattern. + // + // Example: + // + // // The InspectTemplate message originally only supported resource + // // names with organization, and project was added later. + // message InspectTemplate { + // option (google.api.resource) = { + // type: "dlp.googleapis.com/InspectTemplate" + // pattern: + // "organizations/{organization}/inspectTemplates/{inspect_template}" + // pattern: "projects/{project}/inspectTemplates/{inspect_template}" + // history: ORIGINALLY_SINGLE_PATTERN + // }; + // } + History history = 4; + + // The plural name used in the resource name and permission names, such as + // 'projects' for the resource name of 'projects/{project}' and the permission + // name of 'cloudresourcemanager.googleapis.com/projects.get'. It is the same + // concept of the `plural` field in k8s CRD spec + // https://kubernetes.io/docs/tasks/access-kubernetes-api/custom-resources/custom-resource-definitions/ + // + // Note: The plural form is required even for singleton resources. See + // https://aip.dev/156 + string plural = 5; + + // The same concept of the `singular` field in k8s CRD spec + // https://kubernetes.io/docs/tasks/access-kubernetes-api/custom-resources/custom-resource-definitions/ + // Such as "project" for the `resourcemanager.googleapis.com/Project` type. + string singular = 6; + + // Style flag(s) for this resource. + // These indicate that a resource is expected to conform to a given + // style. See the specific style flags for additional information. + repeated Style style = 10; +} + +// Defines a proto annotation that describes a string field that refers to +// an API resource. 
+message ResourceReference { + // The resource type that the annotated field references. + // + // Example: + // + // message Subscription { + // string topic = 2 [(google.api.resource_reference) = { + // type: "pubsub.googleapis.com/Topic" + // }]; + // } + // + // Occasionally, a field may reference an arbitrary resource. In this case, + // APIs use the special value * in their resource reference. + // + // Example: + // + // message GetIamPolicyRequest { + // string resource = 2 [(google.api.resource_reference) = { + // type: "*" + // }]; + // } + string type = 1; + + // The resource type of a child collection that the annotated field + // references. This is useful for annotating the `parent` field that + // doesn't have a fixed resource type. + // + // Example: + // + // message ListLogEntriesRequest { + // string parent = 1 [(google.api.resource_reference) = { + // child_type: "logging.googleapis.com/LogEntry" + // }; + // } + string child_type = 2; +} diff --git a/_13_sponge-dtm-cache/http/third_party/google/api/service.proto b/_13_sponge-dtm-cache/http/third_party/google/api/service.proto new file mode 100644 index 0000000..eccd1ca --- /dev/null +++ b/_13_sponge-dtm-cache/http/third_party/google/api/service.proto @@ -0,0 +1,173 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.api; + +import "google/api/auth.proto"; +import "google/api/backend.proto"; +import "google/api/billing.proto"; +import "google/api/context.proto"; +import "google/api/control.proto"; +import "google/api/documentation.proto"; +import "google/api/endpoint.proto"; +import "google/api/http.proto"; +import "google/api/label.proto"; +import "google/api/log.proto"; +import "google/api/logging.proto"; +import "google/api/metric.proto"; +import "google/api/monitored_resource.proto"; +import "google/api/monitoring.proto"; +import "google/api/quota.proto"; +import "google/api/resource.proto"; +import "google/api/source_info.proto"; +import "google/api/system_parameter.proto"; +import "google/api/usage.proto"; +import "google/protobuf/any.proto"; +import "google/protobuf/api.proto"; +import "google/protobuf/type.proto"; +import "google/protobuf/wrappers.proto"; + +option go_package = "google.golang.org/genproto/googleapis/api/serviceconfig;serviceconfig"; +option java_multiple_files = true; +option java_outer_classname = "ServiceProto"; +option java_package = "com.google.api"; +option objc_class_prefix = "GAPI"; + +// `Service` is the root object of Google service configuration schema. It +// describes basic information about a service, such as the name and the +// title, and delegates other aspects to sub-sections. Each sub-section is +// either a proto message or a repeated proto message that configures a +// specific aspect, such as auth. See each proto message definition for details. 
+// +// Example: +// +// type: google.api.Service +// config_version: 3 +// name: calendar.googleapis.com +// title: Google Calendar API +// apis: +// - name: google.calendar.v3.Calendar +// authentication: +// providers: +// - id: google_calendar_auth +// jwks_uri: https://www.googleapis.com/oauth2/v1/certs +// issuer: https://securetoken.google.com +// rules: +// - selector: "*" +// requirements: +// provider_id: google_calendar_auth +message Service { + // This field is obsolete. Its value must be set to `3`. + google.protobuf.UInt32Value config_version = 20; + + // The service name, which is a DNS-like logical identifier for the + // service, such as `calendar.googleapis.com`. The service name + // typically goes through DNS verification to make sure the owner + // of the service also owns the DNS name. + string name = 1; + + // A unique ID for a specific instance of this message, typically assigned + // by the client for tracking purpose. Must be no longer than 63 characters + // and only lower case letters, digits, '.', '_' and '-' are allowed. If + // empty, the server may choose to generate one instead. + string id = 33; + + // The product title for this service. + string title = 2; + + // The Google project that owns this service. + string producer_project_id = 22; + + // A list of API interfaces exported by this service. Only the `name` field + // of the [google.protobuf.Api][google.protobuf.Api] needs to be provided by the configuration + // author, as the remaining fields will be derived from the IDL during the + // normalization process. It is an error to specify an API interface here + // which cannot be resolved against the associated IDL files. + repeated google.protobuf.Api apis = 3; + + // A list of all proto message types included in this API service. + // Types referenced directly or indirectly by the `apis` are + // automatically included. Messages which are not referenced but + // shall be included, such as types used by the `google.protobuf.Any` type, + // should be listed here by name. Example: + // + // types: + // - name: google.protobuf.Int32 + repeated google.protobuf.Type types = 4; + + // A list of all enum types included in this API service. Enums + // referenced directly or indirectly by the `apis` are automatically + // included. Enums which are not referenced but shall be included + // should be listed here by name. Example: + // + // enums: + // - name: google.someapi.v1.SomeEnum + repeated google.protobuf.Enum enums = 5; + + // Additional API documentation. + Documentation documentation = 6; + + // API backend configuration. + Backend backend = 8; + + // HTTP configuration. + Http http = 9; + + // Quota configuration. + Quota quota = 10; + + // Auth configuration. + Authentication authentication = 11; + + // Context configuration. + Context context = 12; + + // Configuration controlling usage of this service. + Usage usage = 15; + + // Configuration for network endpoints. If this is empty, then an endpoint + // with the same name as the service is automatically generated to service all + // defined APIs. + repeated Endpoint endpoints = 18; + + // Configuration for the service control plane. + Control control = 21; + + // Defines the logs used by this service. + repeated LogDescriptor logs = 23; + + // Defines the metrics used by this service. + repeated MetricDescriptor metrics = 24; + + // Defines the monitored resources used by this service. 
This is required + // by the [Service.monitoring][google.api.Service.monitoring] and [Service.logging][google.api.Service.logging] configurations. + repeated MonitoredResourceDescriptor monitored_resources = 25; + + // Billing configuration. + Billing billing = 26; + + // Logging configuration. + Logging logging = 27; + + // Monitoring configuration. + Monitoring monitoring = 28; + + // System parameter configuration. + SystemParameters system_parameters = 29; + + // Output only. The source information for this configuration if available. + SourceInfo source_info = 37; +} diff --git a/_13_sponge-dtm-cache/http/third_party/google/api/serviceconfig.yaml b/_13_sponge-dtm-cache/http/third_party/google/api/serviceconfig.yaml new file mode 100644 index 0000000..6d883d4 --- /dev/null +++ b/_13_sponge-dtm-cache/http/third_party/google/api/serviceconfig.yaml @@ -0,0 +1,24 @@ +type: google.api.Service +config_version: 1 +name: serviceconfig.googleapis.com +title: Service Config API + +types: +- name: google.api.ConfigChange +- name: google.api.Distribution +- name: google.api.DocumentationRule +- name: google.api.HttpBody +- name: google.api.LabelDescriptor +- name: google.api.Metric +- name: google.api.MonitoredResource +- name: google.api.MonitoredResourceDescriptor +- name: google.api.MonitoredResourceMetadata +- name: google.api.ResourceDescriptor +- name: google.api.ResourceReference +- name: google.api.Service + +enums: +- name: google.api.FieldBehavior + +documentation: + summary: Lets you define and config your API service. diff --git a/_13_sponge-dtm-cache/http/third_party/google/api/servicecontrol/BUILD.bazel b/_13_sponge-dtm-cache/http/third_party/google/api/servicecontrol/BUILD.bazel new file mode 100644 index 0000000..e69de29 diff --git a/_13_sponge-dtm-cache/http/third_party/google/api/servicecontrol/README.md b/_13_sponge-dtm-cache/http/third_party/google/api/servicecontrol/README.md new file mode 100644 index 0000000..3d9590e --- /dev/null +++ b/_13_sponge-dtm-cache/http/third_party/google/api/servicecontrol/README.md @@ -0,0 +1,126 @@ +Google Service Control provides control plane functionality to managed services, +such as logging, monitoring, and status checks. This page provides an overview +of what it does and how it works. + +## Why use Service Control? + +When you develop a cloud service, you typically start with the business +requirements and the architecture design, then proceed with API definition +and implementation. Before you put your service into production, you +need to deal with many control plane issues: + +* How to control access to your service. +* How to send logging and monitoring data to both consumers and producers. +* How to create and manage dashboards to visualize this data. +* How to automatically scale the control plane components with your service. + +Service Control is a mature and feature-rich control plane provider +that addresses these needs with high efficiency, high scalability, +and high availability. It provides a simple public API that can be accessed +from anywhere using JSON REST and gRPC clients, so when you move your service +from on-premise to a cloud provider, or from one cloud provider to another, +you don't need to change the control plane provider. + +Services built using Google Cloud Endpoints already take advantage of +Service Control. Cloud Endpoints sends logging and monitoring data +through Google Service Control for every request arriving at its +proxy. 
If you need to report any additional logging and monitoring data for +your Cloud Endpoints service, you can call the Service Control API directly +from your service. + +The Service Control API definition is open sourced and available on +[GitHub](https://github.com/googleapis/googleapis/tree/master/google/api/servicecontrol). +By changing the DNS name, you can easily use alternative implementations of +the Service Control API. + +## Architecture + +Google Service Control works with a set of *managed services* and their +*operations* (activities), *checks* whether an operation is allowed to proceed, +and *reports* completed operations. Behind the scenes, it leverages other +Google Cloud services, such as +[Google Service Management](/service-management), +[Stackdriver Logging](/logging), and [Stackdriver Monitoring](/monitoring), +while hiding their complexity from service producers. It enables service +producers to send telemetry data to their consumers. It uses caching, +batching, aggregation, and retries to deliver higher performance and +availability than the individual backend systems it encapsulates. + +
+*Figure 1: Using Google Service Control. The overall architecture of a service that uses Google Service Control.*
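+
+The two methods described below, `services.check` and `services.report`, can be called directly from a service backend. The following is only an illustrative Go sketch of that check-then-report flow; it assumes the generated gRPC client from `google.golang.org/genproto/googleapis/api/servicecontrol/v1` (built from the protos in this directory), uses placeholder service and consumer names, and omits OAuth credential setup:
+
+```go
+package main
+
+import (
+	"context"
+	"log"
+	"time"
+
+	servicecontrol "google.golang.org/genproto/googleapis/api/servicecontrol/v1"
+	"google.golang.org/grpc"
+	"google.golang.org/grpc/credentials"
+	"google.golang.org/protobuf/types/known/timestamppb"
+)
+
+func main() {
+	// Connect to the Service Control endpoint. Real calls also require
+	// OAuth2 credentials with the servicecontrol scope (omitted here).
+	conn, err := grpc.Dial("servicecontrol.googleapis.com:443",
+		grpc.WithTransportCredentials(credentials.NewClientTLSFromCert(nil, "")))
+	if err != nil {
+		log.Fatal(err)
+	}
+	defer conn.Close()
+
+	client := servicecontrol.NewServiceControllerClient(conn)
+	ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
+	defer cancel()
+
+	// Describe the operation to be checked and later reported. The values
+	// below are placeholders for a hypothetical service and consumer.
+	op := &servicecontrol.Operation{
+		OperationId:   "e7b5c25e-0000-4000-8000-000000000000", // UUID v4 recommended
+		OperationName: "example.googleapis.com/ExampleMethod",
+		ConsumerId:    "project:my-project",
+		StartTime:     timestamppb.Now(),
+	}
+
+	// services.check: ask whether the operation may proceed.
+	checkResp, err := client.Check(ctx, &servicecontrol.CheckRequest{
+		ServiceName: "example.googleapis.com",
+		Operation:   op,
+	})
+	if err != nil || len(checkResp.GetCheckErrors()) > 0 {
+		log.Fatalf("operation not allowed: %v %v", err, checkResp.GetCheckErrors())
+	}
+
+	// ... execute the operation itself, then report the completed operation ...
+	op.EndTime = timestamppb.Now()
+
+	// services.report: send logs and metrics for the completed operation.
+	if _, err := client.Report(ctx, &servicecontrol.ReportRequest{
+		ServiceName: "example.googleapis.com",
+		Operations:  []*servicecontrol.Operation{op},
+	}); err != nil {
+		log.Fatalf("report failed: %v", err)
+	}
+}
+```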
+ +The Service Control API provides two methods: + +* [`services.check`](/service-control/reference/rest/v1/services/check), used for: + * Ensuring valid consumer status + * Validating API keys +* [`services.report`](/service-control/reference/rest/v1/services/report), used for: + * Sending logs to Stackdriver Logging + * Sending metrics to Stackdriver Monitoring + +We’ll look at these in more detail in the rest of this overview. + +## Managed services + +A [managed service](/service-management/reference/rest/v1/services) is +a network service managed by +[Google Service Management](/service-management). Each managed service has a +unique name, such as `example.googleapis.com`, which must be a valid +fully-qualified DNS name, as per RFC 1035. + +For example: + +* Google Cloud Pub/Sub (`pubsub.googleapis.com`) +* Google Cloud Vision (`vision.googleapis.com`) +* Google Cloud Bigtable (`bigtable.googleapis.com`) +* Google Cloud Datastore (`datastore.googleapis.com`) + +Google Service Management manages the lifecycle of each service’s +configuration, which is used to customize Google Service Control's behavior. +Service configurations are also used by Google Cloud Console +for displaying APIs and their settings, enabling/disabling APIs, and more. + +## Operations + +Google Service Control uses the generic concept of an *operation* +to represent the +activities of a managed service, such as API calls and resource usage. Each +operation is associated with a managed service and a specific service +consumer, and has a set of properties that describe the operation, such as +the API method name and resource usage amount. For more information, see the +[Operation definition](/service-control/rest/v1/Operation). + +## Check + +The [`services.check`](/service-control/reference/rest/v1/services/check) +method determines whether an operation should be allowed to proceed +for a managed service. + +For example: + +* Check if the consumer is still active. +* Check if the consumer has enabled the service. +* Check if the API key is still valid. + +By performing multiple checks within a single method call, it provides +better performance, higher reliability, and reduced development cost to +service producers compared to checking with multiple backend systems. + +## Report + +The [`services.report`](/service-control/reference/rest/v1/services/report) +method reports completed operations for +a managed service to backend systems, such as logging and monitoring. The +reported data can be seen in Google API Console and Google Cloud Console, +and retrieved with appropriate APIs, such as the Stackdriver Logging and +Stackdriver Monitoring APIs. + +## Next steps + +* Read our [Getting Started guide](/service-control/getting-started) to find out + how to set up and use the Google Service Control API. diff --git a/_13_sponge-dtm-cache/http/third_party/google/api/servicecontrol/v1/BUILD.bazel b/_13_sponge-dtm-cache/http/third_party/google/api/servicecontrol/v1/BUILD.bazel new file mode 100644 index 0000000..5d5326e --- /dev/null +++ b/_13_sponge-dtm-cache/http/third_party/google/api/servicecontrol/v1/BUILD.bazel @@ -0,0 +1,45 @@ +# This file was automatically generated by BuildFileGenerator + +# This is an API workspace, having public visibility by default makes perfect sense. 
+package(default_visibility = ["//visibility:public"]) + +############################################################################## +# Common +############################################################################## +load("@rules_proto//proto:defs.bzl", "proto_library") + +proto_library( + name = "servicecontrol_proto", + srcs = [ + "check_error.proto", + "distribution.proto", + "http_request.proto", + "log_entry.proto", + "metric_value.proto", + "operation.proto", + "quota_controller.proto", + "service_controller.proto", + ], + deps = [ + "//google/api:annotations_proto", + "//google/api:client_proto", + "//google/logging/type:type_proto", + "//google/rpc:status_proto", + "@com_google_protobuf//:any_proto", + "@com_google_protobuf//:duration_proto", + "@com_google_protobuf//:struct_proto", + "@com_google_protobuf//:timestamp_proto", + ], +) + +############################################################################## +# C++ +############################################################################## +# Put your C++ code here +load("@com_google_googleapis_imports//:imports.bzl", "cc_proto_library") +cc_proto_library( + name = "servicecontrol_cc_proto", + deps = [ + ":servicecontrol_proto", + ], +) diff --git a/_13_sponge-dtm-cache/http/third_party/google/api/servicecontrol/v1/check_error.proto b/_13_sponge-dtm-cache/http/third_party/google/api/servicecontrol/v1/check_error.proto new file mode 100644 index 0000000..b165ca7 --- /dev/null +++ b/_13_sponge-dtm-cache/http/third_party/google/api/servicecontrol/v1/check_error.proto @@ -0,0 +1,124 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.api.servicecontrol.v1; + +import "google/rpc/status.proto"; + +option cc_enable_arenas = true; +option csharp_namespace = "Google.Cloud.ServiceControl.V1"; +option go_package = "google.golang.org/genproto/googleapis/api/servicecontrol/v1;servicecontrol"; +option java_multiple_files = true; +option java_outer_classname = "CheckErrorProto"; +option java_package = "com.google.api.servicecontrol.v1"; +option php_namespace = "Google\\Cloud\\ServiceControl\\V1"; +option ruby_package = "Google::Cloud::ServiceControl::V1"; + +// Defines the errors to be returned in +// [google.api.servicecontrol.v1.CheckResponse.check_errors][google.api.servicecontrol.v1.CheckResponse.check_errors]. +message CheckError { + // Error codes for Check responses. + enum Code { + // This is never used in `CheckResponse`. + ERROR_CODE_UNSPECIFIED = 0; + + // The consumer's project id, network container, or resource container was + // not found. Same as [google.rpc.Code.NOT_FOUND][google.rpc.Code.NOT_FOUND]. + NOT_FOUND = 5; + + // The consumer doesn't have access to the specified resource. + // Same as [google.rpc.Code.PERMISSION_DENIED][google.rpc.Code.PERMISSION_DENIED]. + PERMISSION_DENIED = 7; + + // Quota check failed. Same as [google.rpc.Code.RESOURCE_EXHAUSTED][google.rpc.Code.RESOURCE_EXHAUSTED]. 
+ RESOURCE_EXHAUSTED = 8; + + // The consumer hasn't activated the service. + SERVICE_NOT_ACTIVATED = 104; + + // The consumer cannot access the service because billing is disabled. + BILLING_DISABLED = 107; + + // The consumer's project has been marked as deleted (soft deletion). + PROJECT_DELETED = 108; + + // The consumer's project number or id does not represent a valid project. + PROJECT_INVALID = 114; + + // The input consumer info does not represent a valid consumer folder or + // organization. + CONSUMER_INVALID = 125; + + // The IP address of the consumer is invalid for the specific consumer + // project. + IP_ADDRESS_BLOCKED = 109; + + // The referer address of the consumer request is invalid for the specific + // consumer project. + REFERER_BLOCKED = 110; + + // The client application of the consumer request is invalid for the + // specific consumer project. + CLIENT_APP_BLOCKED = 111; + + // The API targeted by this request is invalid for the specified consumer + // project. + API_TARGET_BLOCKED = 122; + + // The consumer's API key is invalid. + API_KEY_INVALID = 105; + + // The consumer's API Key has expired. + API_KEY_EXPIRED = 112; + + // The consumer's API Key was not found in config record. + API_KEY_NOT_FOUND = 113; + + // The credential in the request can not be verified. + INVALID_CREDENTIAL = 123; + + // The backend server for looking up project id/number is unavailable. + NAMESPACE_LOOKUP_UNAVAILABLE = 300; + + // The backend server for checking service status is unavailable. + SERVICE_STATUS_UNAVAILABLE = 301; + + // The backend server for checking billing status is unavailable. + BILLING_STATUS_UNAVAILABLE = 302; + + // Cloud Resource Manager backend server is unavailable. + CLOUD_RESOURCE_MANAGER_BACKEND_UNAVAILABLE = 305; + } + + // The error code. + Code code = 1; + + // Subject to whom this error applies. See the specific code enum for more + // details on this field. For example: + // + // - "project:" + // - "folder:" + // - "organization:" + string subject = 4; + + // Free-form text providing details on the error cause of the error. + string detail = 2; + + // Contains public information about the check error. If available, + // `status.code` will be non zero and client can propagate it out as public + // error. + google.rpc.Status status = 3; +} diff --git a/_13_sponge-dtm-cache/http/third_party/google/api/servicecontrol/v1/distribution.proto b/_13_sponge-dtm-cache/http/third_party/google/api/servicecontrol/v1/distribution.proto new file mode 100644 index 0000000..91f8338 --- /dev/null +++ b/_13_sponge-dtm-cache/http/third_party/google/api/servicecontrol/v1/distribution.proto @@ -0,0 +1,161 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +syntax = "proto3"; + +package google.api.servicecontrol.v1; + +option cc_enable_arenas = true; +option csharp_namespace = "Google.Cloud.ServiceControl.V1"; +option go_package = "google.golang.org/genproto/googleapis/api/servicecontrol/v1;servicecontrol"; +option java_multiple_files = true; +option java_outer_classname = "DistributionProto"; +option java_package = "com.google.api.servicecontrol.v1"; +option php_namespace = "Google\\Cloud\\ServiceControl\\V1"; +option ruby_package = "Google::Cloud::ServiceControl::V1"; + +// Distribution represents a frequency distribution of double-valued sample +// points. It contains the size of the population of sample points plus +// additional optional information: +// +// - the arithmetic mean of the samples +// - the minimum and maximum of the samples +// - the sum-squared-deviation of the samples, used to compute variance +// - a histogram of the values of the sample points +message Distribution { + // Describing buckets with constant width. + message LinearBuckets { + // The number of finite buckets. With the underflow and overflow buckets, + // the total number of buckets is `num_finite_buckets` + 2. + // See comments on `bucket_options` for details. + int32 num_finite_buckets = 1; + + // The i'th linear bucket covers the interval + // [offset + (i-1) * width, offset + i * width) + // where i ranges from 1 to num_finite_buckets, inclusive. + // Must be strictly positive. + double width = 2; + + // The i'th linear bucket covers the interval + // [offset + (i-1) * width, offset + i * width) + // where i ranges from 1 to num_finite_buckets, inclusive. + double offset = 3; + } + + // Describing buckets with exponentially growing width. + message ExponentialBuckets { + // The number of finite buckets. With the underflow and overflow buckets, + // the total number of buckets is `num_finite_buckets` + 2. + // See comments on `bucket_options` for details. + int32 num_finite_buckets = 1; + + // The i'th exponential bucket covers the interval + // [scale * growth_factor^(i-1), scale * growth_factor^i) + // where i ranges from 1 to num_finite_buckets inclusive. + // Must be larger than 1.0. + double growth_factor = 2; + + // The i'th exponential bucket covers the interval + // [scale * growth_factor^(i-1), scale * growth_factor^i) + // where i ranges from 1 to num_finite_buckets inclusive. + // Must be > 0. + double scale = 3; + } + + // Describing buckets with arbitrary user-provided width. + message ExplicitBuckets { + // 'bound' is a list of strictly increasing boundaries between + // buckets. Note that a list of length N-1 defines N buckets because + // of fenceposting. See comments on `bucket_options` for details. + // + // The i'th finite bucket covers the interval + // [bound[i-1], bound[i]) + // where i ranges from 1 to bound_size() - 1. Note that there are no + // finite buckets at all if 'bound' only contains a single element; in + // that special case the single bound defines the boundary between the + // underflow and overflow buckets. + // + // bucket number lower bound upper bound + // i == 0 (underflow) -inf bound[i] + // 0 < i < bound_size() bound[i-1] bound[i] + // i == bound_size() (overflow) bound[i-1] +inf + repeated double bounds = 1; + } + + // The total number of samples in the distribution. Must be >= 0. + int64 count = 1; + + // The arithmetic mean of the samples in the distribution. If `count` is + // zero then this field must be zero. + double mean = 2; + + // The minimum of the population of values. Ignored if `count` is zero. 
+ double minimum = 3; + + // The maximum of the population of values. Ignored if `count` is zero. + double maximum = 4; + + // The sum of squared deviations from the mean: + // Sum[i=1..count]((x_i - mean)^2) + // where each x_i is a sample values. If `count` is zero then this field + // must be zero, otherwise validation of the request fails. + double sum_of_squared_deviation = 5; + + // The number of samples in each histogram bucket. `bucket_counts` are + // optional. If present, they must sum to the `count` value. + // + // The buckets are defined below in `bucket_option`. There are N buckets. + // `bucket_counts[0]` is the number of samples in the underflow bucket. + // `bucket_counts[1]` to `bucket_counts[N-1]` are the numbers of samples + // in each of the finite buckets. And `bucket_counts[N] is the number + // of samples in the overflow bucket. See the comments of `bucket_option` + // below for more details. + // + // Any suffix of trailing zeros may be omitted. + repeated int64 bucket_counts = 6; + + // Defines the buckets in the histogram. `bucket_option` and `bucket_counts` + // must be both set, or both unset. + // + // Buckets are numbered in the range of [0, N], with a total of N+1 buckets. + // There must be at least two buckets (a single-bucket histogram gives + // no information that isn't already provided by `count`). + // + // The first bucket is the underflow bucket which has a lower bound + // of -inf. The last bucket is the overflow bucket which has an + // upper bound of +inf. All other buckets (if any) are called "finite" + // buckets because they have finite lower and upper bounds. As described + // below, there are three ways to define the finite buckets. + // + // (1) Buckets with constant width. + // (2) Buckets with exponentially growing widths. + // (3) Buckets with arbitrary user-provided widths. + // + // In all cases, the buckets cover the entire real number line (-inf, + // +inf). Bucket upper bounds are exclusive and lower bounds are + // inclusive. The upper bound of the underflow bucket is equal to the + // lower bound of the smallest finite bucket; the lower bound of the + // overflow bucket is equal to the upper bound of the largest finite + // bucket. + oneof bucket_option { + // Buckets with constant width. + LinearBuckets linear_buckets = 7; + + // Buckets with exponentially growing width. + ExponentialBuckets exponential_buckets = 8; + + // Buckets with arbitrary user-provided width. + ExplicitBuckets explicit_buckets = 9; + } +} diff --git a/_13_sponge-dtm-cache/http/third_party/google/api/servicecontrol/v1/http_request.proto b/_13_sponge-dtm-cache/http/third_party/google/api/servicecontrol/v1/http_request.proto new file mode 100644 index 0000000..ce1d5fb --- /dev/null +++ b/_13_sponge-dtm-cache/http/third_party/google/api/servicecontrol/v1/http_request.proto @@ -0,0 +1,93 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +syntax = "proto3"; + +package google.api.servicecontrol.v1; + +import "google/protobuf/duration.proto"; + +option csharp_namespace = "Google.Cloud.ServiceControl.V1"; +option go_package = "google.golang.org/genproto/googleapis/api/servicecontrol/v1;servicecontrol"; +option java_multiple_files = true; +option java_outer_classname = "HttpRequestProto"; +option java_package = "com.google.api.servicecontrol.v1"; +option php_namespace = "Google\\Cloud\\ServiceControl\\V1"; +option ruby_package = "Google::Cloud::ServiceControl::V1"; + +// A common proto for logging HTTP requests. Only contains semantics +// defined by the HTTP specification. Product-specific logging +// information MUST be defined in a separate message. +message HttpRequest { + // The request method. Examples: `"GET"`, `"HEAD"`, `"PUT"`, `"POST"`. + string request_method = 1; + + // The scheme (http, https), the host name, the path, and the query + // portion of the URL that was requested. + // Example: `"http://example.com/some/info?color=red"`. + string request_url = 2; + + // The size of the HTTP request message in bytes, including the request + // headers and the request body. + int64 request_size = 3; + + // The response code indicating the status of the response. + // Examples: 200, 404. + int32 status = 4; + + // The size of the HTTP response message sent back to the client, in bytes, + // including the response headers and the response body. + int64 response_size = 5; + + // The user agent sent by the client. Example: + // `"Mozilla/4.0 (compatible; MSIE 6.0; Windows 98; Q312461; .NET + // CLR 1.0.3705)"`. + string user_agent = 6; + + // The IP address (IPv4 or IPv6) of the client that issued the HTTP + // request. Examples: `"192.168.1.1"`, `"FE80::0202:B3FF:FE1E:8329"`. + string remote_ip = 7; + + // The IP address (IPv4 or IPv6) of the origin server that the request was + // sent to. + string server_ip = 13; + + // The referer URL of the request, as defined in + // [HTTP/1.1 Header Field + // Definitions](http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html). + string referer = 8; + + // The request processing latency on the server, from the time the request was + // received until the response was sent. + google.protobuf.Duration latency = 14; + + // Whether or not a cache lookup was attempted. + bool cache_lookup = 11; + + // Whether or not an entity was served from cache + // (with or without validation). + bool cache_hit = 9; + + // Whether or not the response was validated with the origin server before + // being served from cache. This field is only meaningful if `cache_hit` is + // True. + bool cache_validated_with_origin_server = 10; + + // The number of HTTP response bytes inserted into cache. Set only when a + // cache fill was attempted. + int64 cache_fill_bytes = 12; + + // Protocol used for the request. Examples: "HTTP/1.1", "HTTP/2", "websocket" + string protocol = 15; +} diff --git a/_13_sponge-dtm-cache/http/third_party/google/api/servicecontrol/v1/log_entry.proto b/_13_sponge-dtm-cache/http/third_party/google/api/servicecontrol/v1/log_entry.proto new file mode 100644 index 0000000..c89f461 --- /dev/null +++ b/_13_sponge-dtm-cache/http/third_party/google/api/servicecontrol/v1/log_entry.proto @@ -0,0 +1,126 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.api.servicecontrol.v1; + +import "google/api/servicecontrol/v1/http_request.proto"; +import "google/logging/type/log_severity.proto"; +import "google/protobuf/any.proto"; +import "google/protobuf/struct.proto"; +import "google/protobuf/timestamp.proto"; + +option csharp_namespace = "Google.Cloud.ServiceControl.V1"; +option go_package = "google.golang.org/genproto/googleapis/api/servicecontrol/v1;servicecontrol"; +option java_multiple_files = true; +option java_outer_classname = "LogEntryProto"; +option java_package = "com.google.api.servicecontrol.v1"; +option php_namespace = "Google\\Cloud\\ServiceControl\\V1"; +option ruby_package = "Google::Cloud::ServiceControl::V1"; + +// An individual log entry. +message LogEntry { + // Required. The log to which this log entry belongs. Examples: `"syslog"`, + // `"book_log"`. + string name = 10; + + // The time the event described by the log entry occurred. If + // omitted, defaults to operation start time. + google.protobuf.Timestamp timestamp = 11; + + // The severity of the log entry. The default value is + // `LogSeverity.DEFAULT`. + google.logging.type.LogSeverity severity = 12; + + // Optional. Information about the HTTP request associated with this + // log entry, if applicable. + HttpRequest http_request = 14; + + // Optional. Resource name of the trace associated with the log entry, if any. + // If this field contains a relative resource name, you can assume the name is + // relative to `//tracing.googleapis.com`. Example: + // `projects/my-projectid/traces/06796866738c859f2f19b7cfb3214824` + string trace = 15; + + // A unique ID for the log entry used for deduplication. If omitted, + // the implementation will generate one based on operation_id. + string insert_id = 4; + + // A set of user-defined (key, value) data that provides additional + // information about the log entry. + map labels = 13; + + // The log entry payload, which can be one of multiple types. + oneof payload { + // The log entry payload, represented as a protocol buffer that is + // expressed as a JSON object. The only accepted type currently is + // [AuditLog][google.cloud.audit.AuditLog]. + google.protobuf.Any proto_payload = 2; + + // The log entry payload, represented as a Unicode string (UTF-8). + string text_payload = 3; + + // The log entry payload, represented as a structure that + // is expressed as a JSON object. + google.protobuf.Struct struct_payload = 6; + } + + // Optional. Information about an operation associated with the log entry, if + // applicable. + LogEntryOperation operation = 16; + + // Optional. Source code location information associated with the log entry, + // if any. + LogEntrySourceLocation source_location = 17; +} + +// Additional information about a potentially long-running operation with which +// a log entry is associated. +message LogEntryOperation { + // Optional. An arbitrary operation identifier. Log entries with the + // same identifier are assumed to be part of the same operation. + string id = 1; + + // Optional. An arbitrary producer identifier. 
The combination of + // `id` and `producer` must be globally unique. Examples for `producer`: + // `"MyDivision.MyBigCompany.com"`, `"github.com/MyProject/MyApplication"`. + string producer = 2; + + // Optional. Set this to True if this is the first log entry in the operation. + bool first = 3; + + // Optional. Set this to True if this is the last log entry in the operation. + bool last = 4; +} + +// Additional information about the source code location that produced the log +// entry. +message LogEntrySourceLocation { + // Optional. Source file name. Depending on the runtime environment, this + // might be a simple name or a fully-qualified name. + string file = 1; + + // Optional. Line within the source file. 1-based; 0 indicates no line number + // available. + int64 line = 2; + + // Optional. Human-readable name of the function or method being invoked, with + // optional context such as the class or package name. This information may be + // used in contexts such as the logs viewer, where a file and line number are + // less meaningful. The format can vary by language. For example: + // `qual.if.ied.Class.method` (Java), `dir/package.func` (Go), `function` + // (Python). + string function = 3; +} diff --git a/_13_sponge-dtm-cache/http/third_party/google/api/servicecontrol/v1/metric_value.proto b/_13_sponge-dtm-cache/http/third_party/google/api/servicecontrol/v1/metric_value.proto new file mode 100644 index 0000000..e160e79 --- /dev/null +++ b/_13_sponge-dtm-cache/http/third_party/google/api/servicecontrol/v1/metric_value.proto @@ -0,0 +1,79 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.api.servicecontrol.v1; + +import "google/api/servicecontrol/v1/distribution.proto"; +import "google/protobuf/timestamp.proto"; + +option cc_enable_arenas = true; +option csharp_namespace = "Google.Cloud.ServiceControl.V1"; +option go_package = "google.golang.org/genproto/googleapis/api/servicecontrol/v1;servicecontrol"; +option java_multiple_files = true; +option java_outer_classname = "MetricValueSetProto"; +option java_package = "com.google.api.servicecontrol.v1"; +option php_namespace = "Google\\Cloud\\ServiceControl\\V1"; +option ruby_package = "Google::Cloud::ServiceControl::V1"; + +// Represents a single metric value. +message MetricValue { + // The labels describing the metric value. + // See comments on [google.api.servicecontrol.v1.Operation.labels][google.api.servicecontrol.v1.Operation.labels] for + // the overriding relationship. + // Note that this map must not contain monitored resource labels. + map labels = 1; + + // The start of the time period over which this metric value's measurement + // applies. The time period has different semantics for different metric + // types (cumulative, delta, and gauge). See the metric definition + // documentation in the service configuration for details. 
+ google.protobuf.Timestamp start_time = 2; + + // The end of the time period over which this metric value's measurement + // applies. + google.protobuf.Timestamp end_time = 3; + + // The value. The type of value used in the request must + // agree with the metric definition in the service configuration, otherwise + // the MetricValue is rejected. + oneof value { + // A boolean value. + bool bool_value = 4; + + // A signed 64-bit integer value. + int64 int64_value = 5; + + // A double precision floating point value. + double double_value = 6; + + // A text string value. + string string_value = 7; + + // A distribution value. + Distribution distribution_value = 8; + } +} + +// Represents a set of metric values in the same metric. +// Each metric value in the set should have a unique combination of start time, +// end time, and label values. +message MetricValueSet { + // The metric name defined in the service configuration. + string metric_name = 1; + + // The values in this metric. + repeated MetricValue metric_values = 2; +} diff --git a/_13_sponge-dtm-cache/http/third_party/google/api/servicecontrol/v1/operation.proto b/_13_sponge-dtm-cache/http/third_party/google/api/servicecontrol/v1/operation.proto new file mode 100644 index 0000000..4bc6717 --- /dev/null +++ b/_13_sponge-dtm-cache/http/third_party/google/api/servicecontrol/v1/operation.proto @@ -0,0 +1,121 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.api.servicecontrol.v1; + +import "google/api/servicecontrol/v1/log_entry.proto"; +import "google/api/servicecontrol/v1/metric_value.proto"; +import "google/protobuf/any.proto"; +import "google/protobuf/timestamp.proto"; + +option cc_enable_arenas = true; +option csharp_namespace = "Google.Cloud.ServiceControl.V1"; +option go_package = "google.golang.org/genproto/googleapis/api/servicecontrol/v1;servicecontrol"; +option java_multiple_files = true; +option java_outer_classname = "OperationProto"; +option java_package = "com.google.api.servicecontrol.v1"; +option php_namespace = "Google\\Cloud\\ServiceControl\\V1"; +option ruby_package = "Google::Cloud::ServiceControl::V1"; + +// Represents information regarding an operation. +message Operation { + // Defines the importance of the data contained in the operation. + enum Importance { + // The API implementation may cache and aggregate the data. + // The data may be lost when rare and unexpected system failures occur. + LOW = 0; + + // The API implementation doesn't cache and aggregate the data. + // If the method returns successfully, it's guaranteed that the data has + // been persisted in durable storage. + HIGH = 1; + } + + // Identity of the operation. This must be unique within the scope of the + // service that generated the operation. If the service calls + // Check() and Report() on the same operation, the two calls should carry + // the same id. + // + // UUID version 4 is recommended, though not required. 
+ // In scenarios where an operation is computed from existing information + // and an idempotent id is desirable for deduplication purpose, UUID version 5 + // is recommended. See RFC 4122 for details. + string operation_id = 1; + + // Fully qualified name of the operation. Reserved for future use. + string operation_name = 2; + + // Identity of the consumer who is using the service. + // This field should be filled in for the operations initiated by a + // consumer, but not for service-initiated operations that are + // not related to a specific consumer. + // + // - This can be in one of the following formats: + // - project:PROJECT_ID, + // - project`_`number:PROJECT_NUMBER, + // - projects/PROJECT_ID or PROJECT_NUMBER, + // - folders/FOLDER_NUMBER, + // - organizations/ORGANIZATION_NUMBER, + // - api`_`key:API_KEY. + string consumer_id = 3; + + // Required. Start time of the operation. + google.protobuf.Timestamp start_time = 4; + + // End time of the operation. + // Required when the operation is used in [ServiceController.Report][google.api.servicecontrol.v1.ServiceController.Report], + // but optional when the operation is used in [ServiceController.Check][google.api.servicecontrol.v1.ServiceController.Check]. + google.protobuf.Timestamp end_time = 5; + + // Labels describing the operation. Only the following labels are allowed: + // + // - Labels describing monitored resources as defined in + // the service configuration. + // - Default labels of metric values. When specified, labels defined in the + // metric value override these default. + // - The following labels defined by Google Cloud Platform: + // - `cloud.googleapis.com/location` describing the location where the + // operation happened, + // - `servicecontrol.googleapis.com/user_agent` describing the user agent + // of the API request, + // - `servicecontrol.googleapis.com/service_agent` describing the service + // used to handle the API request (e.g. ESP), + // - `servicecontrol.googleapis.com/platform` describing the platform + // where the API is served, such as App Engine, Compute Engine, or + // Kubernetes Engine. + map labels = 6; + + // Represents information about this operation. Each MetricValueSet + // corresponds to a metric defined in the service configuration. + // The data type used in the MetricValueSet must agree with + // the data type specified in the metric definition. + // + // Within a single operation, it is not allowed to have more than one + // MetricValue instances that have the same metric names and identical + // label value combinations. If a request has such duplicated MetricValue + // instances, the entire request is rejected with + // an invalid argument error. + repeated MetricValueSet metric_value_sets = 7; + + // Represents information to be logged. + repeated LogEntry log_entries = 8; + + // DO NOT USE. This is an experimental field. + Importance importance = 11; + + // Unimplemented. + repeated google.protobuf.Any extensions = 16; +} diff --git a/_13_sponge-dtm-cache/http/third_party/google/api/servicecontrol/v1/quota_controller.proto b/_13_sponge-dtm-cache/http/third_party/google/api/servicecontrol/v1/quota_controller.proto new file mode 100644 index 0000000..8882c52 --- /dev/null +++ b/_13_sponge-dtm-cache/http/third_party/google/api/servicecontrol/v1/quota_controller.proto @@ -0,0 +1,239 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.api.servicecontrol.v1; + +import "google/api/annotations.proto"; +import "google/api/servicecontrol/v1/metric_value.proto"; +import "google/api/client.proto"; + +option cc_enable_arenas = true; +option csharp_namespace = "Google.Cloud.ServiceControl.V1"; +option go_package = "google.golang.org/genproto/googleapis/api/servicecontrol/v1;servicecontrol"; +option java_multiple_files = true; +option java_outer_classname = "QuotaControllerProto"; +option java_package = "com.google.api.servicecontrol.v1"; +option php_namespace = "Google\\Cloud\\ServiceControl\\V1"; +option ruby_package = "Google::Cloud::ServiceControl::V1"; + +// [Google Quota Control API](/service-control/overview) +// +// Allows clients to allocate and release quota against a [managed +// service](https://cloud.google.com/service-management/reference/rpc/google.api/servicemanagement.v1#google.api.servicemanagement.v1.ManagedService). +service QuotaController { + option (google.api.default_host) = "servicecontrol.googleapis.com"; + option (google.api.oauth_scopes) = + "https://www.googleapis.com/auth/cloud-platform," + "https://www.googleapis.com/auth/servicecontrol"; + + // Attempts to allocate quota for the specified consumer. It should be called + // before the operation is executed. + // + // This method requires the `servicemanagement.services.quota` + // permission on the specified service. For more information, see + // [Cloud IAM](https://cloud.google.com/iam). + // + // **NOTE:** The client **must** fail-open on server errors `INTERNAL`, + // `UNKNOWN`, `DEADLINE_EXCEEDED`, and `UNAVAILABLE`. To ensure system + // reliability, the server may inject these errors to prohibit any hard + // dependency on the quota functionality. + rpc AllocateQuota(AllocateQuotaRequest) returns (AllocateQuotaResponse) { + option (google.api.http) = { + post: "/v1/services/{service_name}:allocateQuota" + body: "*" + }; + } +} + +// Request message for the AllocateQuota method. +message AllocateQuotaRequest { + // Name of the service as specified in the service configuration. For example, + // `"pubsub.googleapis.com"`. + // + // See [google.api.Service][google.api.Service] for the definition of a service name. + string service_name = 1; + + // Operation that describes the quota allocation. + QuotaOperation allocate_operation = 2; + + // Specifies which version of service configuration should be used to process + // the request. If unspecified or no matching version can be found, the latest + // one will be used. + string service_config_id = 4; +} + +// Represents information regarding a quota operation. +message QuotaOperation { + // Supported quota modes. + enum QuotaMode { + // Guard against implicit default. Must not be used. + UNSPECIFIED = 0; + + // For AllocateQuota request, allocates quota for the amount specified in + // the service configuration or specified using the quota metrics. If the + // amount is higher than the available quota, allocation error will be + // returned and no quota will be allocated. 
+ // If multiple quotas are part of the request, and one fails, none of the + // quotas are allocated or released. + NORMAL = 1; + + // The operation allocates quota for the amount specified in the service + // configuration or specified using the quota metrics. If the amount is + // higher than the available quota, request does not fail but all available + // quota will be allocated. + // For rate quota, BEST_EFFORT will continue to deduct from other groups + // even if one does not have enough quota. For allocation, it will find the + // minimum available amount across all groups and deduct that amount from + // all the affected groups. + BEST_EFFORT = 2; + + // For AllocateQuota request, only checks if there is enough quota + // available and does not change the available quota. No lock is placed on + // the available quota either. + CHECK_ONLY = 3; + + // Unimplemented. When used in AllocateQuotaRequest, this returns the + // effective quota limit(s) in the response, and no quota check will be + // performed. Not supported for other requests, and even for + // AllocateQuotaRequest, this is currently supported only for whitelisted + // services. + QUERY_ONLY = 4; + + // The operation allocates quota for the amount specified in the service + // configuration or specified using the quota metrics. If the requested + // amount is higher than the available quota, request does not fail and + // remaining quota would become negative (going over the limit) + // Not supported for Rate Quota. + ADJUST_ONLY = 5; + } + + // Identity of the operation. This is expected to be unique within the scope + // of the service that generated the operation, and guarantees idempotency in + // case of retries. + // + // In order to ensure best performance and latency in the Quota backends, + // operation_ids are optimally associated with time, so that related + // operations can be accessed fast in storage. For this reason, the + // recommended token for services that intend to operate at a high QPS is + // Unix time in nanos + UUID + string operation_id = 1; + + // Fully qualified name of the API method for which this quota operation is + // requested. This name is used for matching quota rules or metric rules and + // billing status rules defined in service configuration. + // + // This field should not be set if any of the following is true: + // (1) the quota operation is performed on non-API resources. + // (2) quota_metrics is set because the caller is doing quota override. + // + // Example of an RPC method name: + // google.example.library.v1.LibraryService.CreateShelf + string method_name = 2; + + // Identity of the consumer for whom this quota operation is being performed. + // + // This can be in one of the following formats: + // project:, + // project_number:, + // api_key:. + string consumer_id = 3; + + // Labels describing the operation. + map labels = 4; + + // Represents information about this operation. Each MetricValueSet + // corresponds to a metric defined in the service configuration. + // The data type used in the MetricValueSet must agree with + // the data type specified in the metric definition. + // + // Within a single operation, it is not allowed to have more than one + // MetricValue instances that have the same metric names and identical + // label value combinations. If a request has such duplicated MetricValue + // instances, the entire request is rejected with + // an invalid argument error. + // + // This field is mutually exclusive with method_name. 
+ repeated MetricValueSet quota_metrics = 5; + + // Quota mode for this operation. + QuotaMode quota_mode = 6; +} + +// Response message for the AllocateQuota method. +message AllocateQuotaResponse { + // The same operation_id value used in the AllocateQuotaRequest. Used for + // logging and diagnostics purposes. + string operation_id = 1; + + // Indicates the decision of the allocate. + repeated QuotaError allocate_errors = 2; + + // Quota metrics to indicate the result of allocation. Depending on the + // request, one or more of the following metrics will be included: + // + // 1. Per quota group or per quota metric incremental usage will be specified + // using the following delta metric : + // "serviceruntime.googleapis.com/api/consumer/quota_used_count" + // + // 2. The quota limit reached condition will be specified using the following + // boolean metric : + // "serviceruntime.googleapis.com/quota/exceeded" + repeated MetricValueSet quota_metrics = 3; + + // ID of the actual config used to process the request. + string service_config_id = 4; +} + +// Represents error information for [QuotaOperation][google.api.servicecontrol.v1.QuotaOperation]. +message QuotaError { + // Error codes related to project config validations are deprecated since the + // quota controller methods do not perform these validations. Instead services + // have to call the Check method, without quota_properties field, to perform + // these validations before calling the quota controller methods. These + // methods check only for project deletion to be wipe out compliant. + enum Code { + // This is never used. + UNSPECIFIED = 0; + + // Quota allocation failed. + // Same as [google.rpc.Code.RESOURCE_EXHAUSTED][google.rpc.Code.RESOURCE_EXHAUSTED]. + RESOURCE_EXHAUSTED = 8; + + // Consumer cannot access the service because the service requires active + // billing. + BILLING_NOT_ACTIVE = 107; + + // Consumer's project has been marked as deleted (soft deletion). + PROJECT_DELETED = 108; + + // Specified API key is invalid. + API_KEY_INVALID = 105; + + // Specified API Key has expired. + API_KEY_EXPIRED = 112; + } + + // Error code. + Code code = 1; + + // Subject to whom this error applies. See the specific enum for more details + // on this field. For example, "clientip:" or + // "project:". + string subject = 2; + + // Free-form text that provides details on the cause of the error. + string description = 3; +} diff --git a/_13_sponge-dtm-cache/http/third_party/google/api/servicecontrol/v1/service_controller.proto b/_13_sponge-dtm-cache/http/third_party/google/api/servicecontrol/v1/service_controller.proto new file mode 100644 index 0000000..4dbe7c7 --- /dev/null +++ b/_13_sponge-dtm-cache/http/third_party/google/api/servicecontrol/v1/service_controller.proto @@ -0,0 +1,249 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +syntax = "proto3"; + +package google.api.servicecontrol.v1; + +import "google/api/annotations.proto"; +import "google/api/servicecontrol/v1/check_error.proto"; +import "google/api/servicecontrol/v1/operation.proto"; +import "google/protobuf/timestamp.proto"; +import "google/rpc/status.proto"; +import "google/api/client.proto"; + +option cc_enable_arenas = true; +option csharp_namespace = "Google.Cloud.ServiceControl.V1"; +option go_package = "google.golang.org/genproto/googleapis/api/servicecontrol/v1;servicecontrol"; +option java_multiple_files = true; +option java_outer_classname = "ServiceControllerProto"; +option java_package = "com.google.api.servicecontrol.v1"; +option objc_class_prefix = "GASC"; +option php_namespace = "Google\\Cloud\\ServiceControl\\V1"; +option ruby_package = "Google::Cloud::ServiceControl::V1"; + +// [Google Service Control API](/service-control/overview) +// +// Lets clients check and report operations against a [managed +// service](https://cloud.google.com/service-management/reference/rpc/google.api/servicemanagement.v1#google.api.servicemanagement.v1.ManagedService). +service ServiceController { + option (google.api.default_host) = "servicecontrol.googleapis.com"; + option (google.api.oauth_scopes) = + "https://www.googleapis.com/auth/cloud-platform," + "https://www.googleapis.com/auth/servicecontrol"; + + // Checks whether an operation on a service should be allowed to proceed + // based on the configuration of the service and related policies. It must be + // called before the operation is executed. + // + // If feasible, the client should cache the check results and reuse them for + // 60 seconds. In case of any server errors, the client should rely on the + // cached results for much longer time to avoid outage. + // WARNING: There is general 60s delay for the configuration and policy + // propagation, therefore callers MUST NOT depend on the `Check` method having + // the latest policy information. + // + // NOTE: the [CheckRequest][google.api.servicecontrol.v1.CheckRequest] has the size limit of 64KB. + // + // This method requires the `servicemanagement.services.check` permission + // on the specified service. For more information, see + // [Cloud IAM](https://cloud.google.com/iam). + rpc Check(CheckRequest) returns (CheckResponse) { + option (google.api.http) = { + post: "/v1/services/{service_name}:check" + body: "*" + }; + } + + // Reports operation results to Google Service Control, such as logs and + // metrics. It should be called after an operation is completed. + // + // If feasible, the client should aggregate reporting data for up to 5 + // seconds to reduce API traffic. Limiting aggregation to 5 seconds is to + // reduce data loss during client crashes. Clients should carefully choose + // the aggregation time window to avoid data loss risk more than 0.01% + // for business and compliance reasons. + // + // NOTE: the [ReportRequest][google.api.servicecontrol.v1.ReportRequest] has the size limit (wire-format byte size) of + // 1MB. + // + // This method requires the `servicemanagement.services.report` permission + // on the specified service. For more information, see + // [Google Cloud IAM](https://cloud.google.com/iam). + rpc Report(ReportRequest) returns (ReportResponse) { + option (google.api.http) = { + post: "/v1/services/{service_name}:report" + body: "*" + }; + } +} + +// Request message for the Check method. +message CheckRequest { + // The service name as specified in its service configuration. 
For example, + // `"pubsub.googleapis.com"`. + // + // See + // [google.api.Service](https://cloud.google.com/service-management/reference/rpc/google.api#google.api.Service) + // for the definition of a service name. + string service_name = 1; + + // The operation to be checked. + Operation operation = 2; + + // Specifies which version of service configuration should be used to process + // the request. + // + // If unspecified or no matching version can be found, the + // latest one will be used. + string service_config_id = 4; +} + +// Response message for the Check method. +message CheckResponse { + // Contains additional information about the check operation. + message CheckInfo { + // A list of fields and label keys that are ignored by the server. + // The client doesn't need to send them for following requests to improve + // performance and allow better aggregation. + repeated string unused_arguments = 1; + + // Consumer info of this check. + ConsumerInfo consumer_info = 2; + } + + // `ConsumerInfo` provides information about the consumer. + message ConsumerInfo { + // The type of the consumer as defined in + // [Google Resource Manager](https://cloud.google.com/resource-manager/). + enum ConsumerType { + // This is never used. + CONSUMER_TYPE_UNSPECIFIED = 0; + + // The consumer is a Google Cloud Project. + PROJECT = 1; + + // The consumer is a Google Cloud Folder. + FOLDER = 2; + + // The consumer is a Google Cloud Organization. + ORGANIZATION = 3; + + // Service-specific resource container which is defined by the service + // producer to offer their users the ability to manage service control + // functionalities at a finer level of granularity than the PROJECT. + SERVICE_SPECIFIC = 4; + } + + // The Google cloud project number, e.g. 1234567890. A value of 0 indicates + // no project number is found. + // + // NOTE: This field is deprecated after we support flexible consumer + // id. New code should not depend on this field anymore. + int64 project_number = 1; + + // The type of the consumer which should have been defined in + // [Google Resource Manager](https://cloud.google.com/resource-manager/). + ConsumerType type = 2; + + // The consumer identity number, can be Google cloud project number, folder + // number or organization number e.g. 1234567890. A value of 0 indicates no + // consumer number is found. + int64 consumer_number = 3; + } + + // The same operation_id value used in the [CheckRequest][google.api.servicecontrol.v1.CheckRequest]. + // Used for logging and diagnostics purposes. + string operation_id = 1; + + // Indicate the decision of the check. + // + // If no check errors are present, the service should process the operation. + // Otherwise the service should use the list of errors to determine the + // appropriate action. + repeated CheckError check_errors = 2; + + // The actual config id used to process the request. + string service_config_id = 5; + + // The current service rollout id used to process the request. + string service_rollout_id = 11; + + // Feedback data returned from the server during processing a Check request. + CheckInfo check_info = 6; +} + +// Request message for the Report method. +message ReportRequest { + // The service name as specified in its service configuration. For example, + // `"pubsub.googleapis.com"`. + // + // See + // [google.api.Service](https://cloud.google.com/service-management/reference/rpc/google.api#google.api.Service) + // for the definition of a service name. 
+ string service_name = 1; + + // Operations to be reported. + // + // Typically the service should report one operation per request. + // Putting multiple operations into a single request is allowed, but should + // be used only when multiple operations are natually available at the time + // of the report. + // + // There is no limit on the number of operations in the same ReportRequest, + // however the ReportRequest size should be no larger than 1MB. See + // [ReportResponse.report_errors][google.api.servicecontrol.v1.ReportResponse.report_errors] for partial failure behavior. + repeated Operation operations = 2; + + // Specifies which version of service config should be used to process the + // request. + // + // If unspecified or no matching version can be found, the + // latest one will be used. + string service_config_id = 3; +} + +// Response message for the Report method. +message ReportResponse { + // Represents the processing error of one [Operation][google.api.servicecontrol.v1.Operation] in the request. + message ReportError { + // The [Operation.operation_id][google.api.servicecontrol.v1.Operation.operation_id] value from the request. + string operation_id = 1; + + // Details of the error when processing the [Operation][google.api.servicecontrol.v1.Operation]. + google.rpc.Status status = 2; + } + + // Partial failures, one for each `Operation` in the request that failed + // processing. There are three possible combinations of the RPC status: + // + // 1. The combination of a successful RPC status and an empty `report_errors` + // list indicates a complete success where all `Operations` in the + // request are processed successfully. + // 2. The combination of a successful RPC status and a non-empty + // `report_errors` list indicates a partial success where some + // `Operations` in the request succeeded. Each + // `Operation` that failed processing has a corresponding item + // in this list. + // 3. A failed RPC status indicates a general non-deterministic failure. + // When this happens, it's impossible to know which of the + // 'Operations' in the request succeeded or failed. + repeated ReportError report_errors = 1; + + // The actual config id used to process the request. + string service_config_id = 2; + + // The current service rollout id used to process the request. + string service_rollout_id = 4; +} diff --git a/_13_sponge-dtm-cache/http/third_party/google/api/servicecontrol/v1/servicecontrol.yaml b/_13_sponge-dtm-cache/http/third_party/google/api/servicecontrol/v1/servicecontrol.yaml new file mode 100644 index 0000000..a1cad56 --- /dev/null +++ b/_13_sponge-dtm-cache/http/third_party/google/api/servicecontrol/v1/servicecontrol.yaml @@ -0,0 +1,178 @@ +type: google.api.Service +config_version: 2 +name: servicecontrol.googleapis.com +title: Service Control API + +apis: +- name: google.api.servicecontrol.v1.QuotaController +- name: google.api.servicecontrol.v1.ServiceController + +documentation: + summary: |- + Provides control plane functionality to managed services, such as logging, + monitoring, and status checks. + overview: |- + Google Service Control provides control plane functionality to managed + services, such as logging, monitoring, and status checks. This page + provides an overview of what it does and how it works. + + ## Why use Service Control? + + When you develop a cloud service, you typically start with the business + requirements and the architecture design, then proceed with API definition + and implementation. 
Before you put your service into production, you + need to deal with many control plane issues: + + * How to control access to your service. + * How to send logging and monitoring data to both consumers and producers. + * How to create and manage dashboards to visualize this data. + * How to automatically scale the control plane components with your + service. + + Service Control is a mature and feature-rich control plane provider + that addresses these needs with high efficiency, high scalability, + and high availability. It provides a simple public API that can be + accessed from anywhere using JSON REST and gRPC clients, so when you move + your service from on-premise to a cloud provider, or from one cloud + provider to another, you don't need to change the control plane provider. + + Services built using Google Cloud Endpoints already take advantage of + Service Control. Cloud Endpoints sends logging and monitoring data + through Google Service Control for every request arriving at its + proxy. If you need to report any additional logging and monitoring data + for your Cloud Endpoints service, you can call the Service Control API + directly from your service. + + The Service Control API definition is open sourced and available on + [GitHub](https://github.com/googleapis/googleapis/tree/master/google/api/servicecontrol). By + changing the DNS name, you can easily use alternative implementations + of the Service Control API. + + ## Architecture + + Google Service Control works with a set of *managed services* and their + *operations* (activities), *checks* whether an operation is allowed to + proceed, and *reports* completed operations. Behind the scenes, it + leverages other + Google Cloud services, such as + [Google Service + Management](/service-infrastructure/docs/service-management/getting-started), [Stackdriver + Logging](/logging), and [Stackdriver Monitoring](/monitoring), while + hiding their complexity from service producers. It enables service + producers to send telemetry data to their consumers. It uses caching, + batching, aggregation, and retries to deliver higher performance and + availability than the individual backend systems it encapsulates. + +
+    [Figure 1: Using Google Service Control. The overall architecture of a service that uses Google Service Control.]
+ + The Service Control API provides two methods: + + * + [`services.check`](/service-infrastructure/docs/service-control/reference/rest/v1/services/check), + used for: + * Ensuring valid consumer status + * Validating API keys + * + [`services.report`](/service-infrastructure/docs/service-control/reference/rest/v1/services/report), + used for: + * Sending logs to Stackdriver Logging + * Sending metrics to Stackdriver Monitoring + + We'll look at these in more detail in the rest of this overview. + + ## Managed services + + A [managed + service](/service-infrastructure/docs/service-management/reference/rest/v1/services) is + a network service managed by + [Google Service + Management](/service-infrastructure/docs/service-management/getting-started). Each + managed service has a unique name, such as `example.googleapis.com`, + which must be a valid fully-qualified DNS name, as per RFC 1035. + + For example: + + * Google Cloud Pub/Sub (`pubsub.googleapis.com`) + * Google Cloud Vision (`vision.googleapis.com`) + * Google Cloud Bigtable (`bigtable.googleapis.com`) + * Google Cloud Datastore (`datastore.googleapis.com`) + + Google Service Management manages the lifecycle of each service's + configuration, which is used to customize Google Service Control's + behavior. Service configurations are also used by Google Cloud Console for + displaying APIs and their settings, enabling/disabling APIs, and more. + + ## Operations + + Google Service Control uses the generic concept of an *operation* + to represent the activities of a managed service, such as API calls and + resource usage. Each operation is associated with a managed service and a + specific service consumer, and has a set of properties that describe the + operation, such as the API method name and resource usage amount. For more + information, see the + [Operation + definition](/service-infrastructure/docs/service-control/reference/rest/v1/Operation). ## + Check + + The + [`services.check`](/service-infrastructure/docs/service-control/reference/rest/v1/services/check) method + determines whether an operation should be allowed to proceed for a + managed service. + + For example: + + * Check if the consumer is still active. + * Check if the consumer has enabled the service. + * Check if the API key is still valid. + + By performing multiple checks within a single method call, it provides + better performance, higher reliability, and reduced development cost to + service producers compared to checking with multiple backend systems. + + ## Report + + The + [`services.report`](/service-infrastructure/docs/service-control/reference/rest/v1/services/report) method + reports completed operations for a managed service to backend + systems, such as logging and monitoring. The reported data can be seen in + Google API Console and Google Cloud Console, and retrieved with + appropriate APIs, such as the Stackdriver Logging and Stackdriver + Monitoring APIs. + + ## Next steps + + * Read our [Getting Started + guide](/service-infrastructure/docs/service-control/getting-started) to + find out how to set up and use the Google Service Control API. 
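The overview above reduces to two RPC calls: Check before the operation runs (with its verdict cached client-side for roughly 60 seconds) and Report after it completes (with reports aggregated for up to about 5 seconds). The Go sketch below is a rough illustration rather than part of this patch; it wires the two calls together with the client generated from service_controller.proto earlier in the diff, and the endpoint, the operation fields, and the omission of the OAuth credentials required by the authentication rules below are all simplifying assumptions.

```go
package main

import (
	"context"
	"log"
	"time"

	scpb "google.golang.org/genproto/googleapis/api/servicecontrol/v1"
	"google.golang.org/grpc"
	"google.golang.org/grpc/credentials"
)

func main() {
	ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
	defer cancel()

	// TLS only; real calls also need per-RPC OAuth credentials with one of the
	// scopes listed in the authentication section of servicecontrol.yaml below.
	conn, err := grpc.Dial("servicecontrol.googleapis.com:443",
		grpc.WithTransportCredentials(credentials.NewClientTLSFromCert(nil, "")))
	if err != nil {
		log.Fatalf("dial: %v", err)
	}
	defer conn.Close()

	client := scpb.NewServiceControllerClient(conn)

	// Hypothetical operation; real operations also carry timestamps, labels,
	// metric value sets and log entries (see operation.proto in this patch).
	op := &scpb.Operation{
		OperationId:   "example-op-1",
		OperationName: "google.example.library.v1.LibraryService.CreateShelf",
		ConsumerId:    "project:my-project",
	}

	// 1. Check must be called before the operation runs; its verdict may be
	//    cached client-side for roughly 60 seconds.
	checkResp, err := client.Check(ctx, &scpb.CheckRequest{
		ServiceName: "example.googleapis.com",
		Operation:   op,
	})
	if err != nil || len(checkResp.GetCheckErrors()) > 0 {
		log.Fatalf("operation rejected: %v %v", err, checkResp.GetCheckErrors())
	}

	// ... execute the operation itself ...

	// 2. Report is called after completion; reports may be batched for up to
	//    about 5 seconds to reduce API traffic.
	if _, err := client.Report(ctx, &scpb.ReportRequest{
		ServiceName: "example.googleapis.com",
		Operations:  []*scpb.Operation{op},
	}); err != nil {
		log.Printf("report failed: %v", err)
	}
}
```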
+ +backend: + rules: + - selector: google.api.servicecontrol.v1.QuotaController.AllocateQuota + deadline: 10.0 + - selector: google.api.servicecontrol.v1.ServiceController.Check + deadline: 5.0 + - selector: google.api.servicecontrol.v1.ServiceController.Report + deadline: 16.0 + +authentication: + rules: + - selector: google.api.servicecontrol.v1.QuotaController.AllocateQuota + oauth: + canonical_scopes: |- + https://www.googleapis.com/auth/cloud-platform, + https://www.googleapis.com/auth/servicecontrol + - selector: google.api.servicecontrol.v1.ServiceController.Check + oauth: + canonical_scopes: |- + https://www.googleapis.com/auth/cloud-platform, + https://www.googleapis.com/auth/servicecontrol + - selector: google.api.servicecontrol.v1.ServiceController.Report + oauth: + canonical_scopes: |- + https://www.googleapis.com/auth/cloud-platform, + https://www.googleapis.com/auth/servicecontrol diff --git a/_13_sponge-dtm-cache/http/third_party/google/api/servicemanagement/BUILD.bazel b/_13_sponge-dtm-cache/http/third_party/google/api/servicemanagement/BUILD.bazel new file mode 100644 index 0000000..1e5b8c2 --- /dev/null +++ b/_13_sponge-dtm-cache/http/third_party/google/api/servicemanagement/BUILD.bazel @@ -0,0 +1 @@ +exports_files(glob(["*.yaml"])) \ No newline at end of file diff --git a/_13_sponge-dtm-cache/http/third_party/google/api/servicemanagement/README.md b/_13_sponge-dtm-cache/http/third_party/google/api/servicemanagement/README.md new file mode 100644 index 0000000..e3e36df --- /dev/null +++ b/_13_sponge-dtm-cache/http/third_party/google/api/servicemanagement/README.md @@ -0,0 +1,102 @@ +Google Service Management manages a set of *services*. Service +Management allows *service producers* to +publish their services on Google Cloud Platform so that they can be discovered +and used by *service consumers*. It also handles the tasks of tracking +service lifecycle and programming various backend systems -- such as +[Stackdriver Logging](https://cloud.google.com/stackdriver), +[Stackdriver Monitoring](https://cloud.google.com/stackdriver) -- to support +the managed services. + +If you are a service producer, you can use the Google Service Management API +and [Google Cloud SDK (gcloud)](/sdk) to publish and manage your services. +Each managed service has a service configuration which declares various aspects +of the service such as its API surface, along with parameters to configure the +supporting backend +systems, such as logging and monitoring. If you build your service using +[Google Cloud Endpoints](https://cloud.google.com/endpoints/), the service +configuration will be handled automatically. + +If you are a service consumer and want to use a managed service, you can use the +Google Service Management API or [Google Cloud Console](https://console.cloud.google.com) +to activate the +service for your [Google developer project](https://developers.google.com/console/help/new/), +then start using its APIs and functions. + +## Managed services + +REST URL: `https://servicemanagement.googleapis.com/v1/services/{service-name}`
+REST schema is defined [here](/service-management/reference/rest/v1/services). + +A managed service refers to a network service managed by +Service Management. Each managed service has a unique name, such as +`example.googleapis.com`, which must be a valid fully-qualified DNS name, as per +RFC 1035. + +A managed service typically provides some REST APIs and/or other +functions to their service consumers, such as mobile apps or cloud services. + +Service producers can use methods, such as +[services.create](/service-management/reference/rest/v1/services/create), +[services.delete](/service-management/reference/rest/v1/services/delete), +[services.undelete](/service-management/reference/rest/v1/services/undelete), +to manipulate their managed services. + +## Service producers + +A service producer is the Google developer project responsible for publishing +and maintaining a managed service. Each managed service is owned by exactly one +service producer. + +## Service consumers + +A service consumer is a Google developer project that has enabled and can +invoke APIs on a managed service. A managed service can have many service +consumers. + +## Service configuration + +REST URL: `https://servicemanagement.googleapis.com/v1/services/{service-name}/configs/{config_id}`
+REST schema is defined [here](/service-management/reference/rest/v1/services.configs). + +Each managed service is described by a service configuration which covers a wide +range of features, including its name, title, RPC API definitions, +REST API definitions, documentation, authentication, and more. + +To change the configuration of a managed service, the service producer needs to +publish an updated service configuration to Service Management. +Service Management keeps a history of published +service configurations, making it possible to easily retrace how a service's +configuration evolved over time. Service configurations can be published using +the +[services.configs.create](/service-management/reference/rest/v1/services.configs/create) +or [services.configs.submit](/service-management/reference/rest/v1/services.configs/submit) +methods. + +Alternatively, `services.configs.submit` allows publishing an +[OpenAPI](https://github.com/OAI/OpenAPI-Specification) specification, formerly +known as the Swagger Specification, which is automatically converted to a +corresponding service configuration. + +## Service rollout + +REST URL: `https://servicemanagement.googleapis.com/v1/services/{service-name}/rollouts/{rollout-id}`
+REST schema is defined [here](/service-management/reference/rest/v1/services.rollouts). + +A `Rollout` defines how Google Service Management should deploy service +configurations to backend systems and how the configurations take effect at +runtime. It lets service producers specify multiple service configuration +versions to be deployed together, and a strategy that indicates how they +should be used. + +Updating a managed service's configuration can be dangerous, as a configuration +error can lead to a service outage. To mitigate risks, Service Management +supports gradual rollout of service configuration changes. This feature gives +service producers time to identity potential issues and rollback service +configuration changes in case of errors, thus minimizing the customer +impact of bad configurations. For example, you could specify that 5% of traffic +uses configuration 1, while the remaining 95% uses configuration 2. + +Service Management keeps a history of rollouts so that service +producers can undo to previous configuration versions. You can rollback a configuration +by initiating a new `Rollout` that clones a previously submitted +rollout record. \ No newline at end of file diff --git a/_13_sponge-dtm-cache/http/third_party/google/api/servicemanagement/v1/BUILD.bazel b/_13_sponge-dtm-cache/http/third_party/google/api/servicemanagement/v1/BUILD.bazel new file mode 100644 index 0000000..28bebe9 --- /dev/null +++ b/_13_sponge-dtm-cache/http/third_party/google/api/servicemanagement/v1/BUILD.bazel @@ -0,0 +1,386 @@ +# This file was automatically generated by BuildFileGenerator + +# This is an API workspace, having public visibility by default makes perfect sense. +package(default_visibility = ["//visibility:public"]) + +############################################################################## +# Common +############################################################################## +load("@rules_proto//proto:defs.bzl", "proto_library") +load("@com_google_googleapis_imports//:imports.bzl", "proto_library_with_info") + +proto_library( + name = "servicemanagement_proto", + srcs = [ + "resources.proto", + "servicemanager.proto", + ], + deps = [ + "//google/api:annotations_proto", + "//google/api:client_proto", + "//google/api:config_change_proto", + "//google/api:field_behavior_proto", + "//google/api:metric_proto", + "//google/api:quota_proto", + "//google/api:service_proto", + "//google/longrunning:operations_proto", + "//google/rpc:status_proto", + "@com_google_protobuf//:any_proto", + "@com_google_protobuf//:field_mask_proto", + "@com_google_protobuf//:timestamp_proto", + ], +) + +proto_library_with_info( + name = "servicemanagement_proto_with_info", + deps = [ + ":servicemanagement_proto", + "//google/cloud:common_resources_proto", + ], +) + +############################################################################## +# Java +############################################################################## +load( + "@com_google_googleapis_imports//:imports.bzl", + "java_gapic_assembly_gradle_pkg", + "java_gapic_library", + "java_gapic_test", + "java_grpc_library", + "java_proto_library", +) + +java_proto_library( + name = "servicemanagement_java_proto", + deps = [":servicemanagement_proto"], +) + +java_grpc_library( + name = "servicemanagement_java_grpc", + srcs = [":servicemanagement_proto"], + deps = [":servicemanagement_java_proto"], +) + +java_gapic_library( + name = "servicemanagement_java_gapic", + src = ":servicemanagement_proto_with_info", + gapic_yaml = 
"servicemanagement_gapic.yaml", + grpc_service_config = "servicemanagement_grpc_service_config.json", + package = "google.api.servicemanagement.v1", + service_yaml = "servicemanagement_v1.yaml", + test_deps = [ + ":servicemanagement_java_grpc", + ], + deps = [ + ":servicemanagement_java_proto", + "//google/api:api_java_proto", + ], +) + +java_gapic_test( + name = "servicemanagement_java_gapic_test_suite", + test_classes = [ + "com.google.cloud.api.servicemanagement.v1.ServiceManagerClientTest", + ], + runtime_deps = [":servicemanagement_java_gapic_test"], +) + +# Open Source Packages +java_gapic_assembly_gradle_pkg( + name = "google-cloud-api-servicemanagement-v1-java", + deps = [ + ":servicemanagement_java_gapic", + ":servicemanagement_java_grpc", + ":servicemanagement_java_proto", + ":servicemanagement_proto", + ], +) + +############################################################################## +# Go +############################################################################## +load( + "@com_google_googleapis_imports//:imports.bzl", + "go_gapic_assembly_pkg", + "go_gapic_library", + "go_proto_library", + "go_test", +) + +go_proto_library( + name = "servicemanagement_go_proto", + compilers = ["@io_bazel_rules_go//proto:go_grpc"], + importpath = "google.golang.org/genproto/googleapis/api/servicemanagement/v1", + protos = [":servicemanagement_proto"], + deps = [ + "//google/api:annotations_go_proto", + "//google/api:configchange_go_proto", + "//google/api:metric_go_proto", + "//google/api:serviceconfig_go_proto", + "//google/longrunning:longrunning_go_proto", + "//google/rpc:status_go_proto", + ], +) + +go_gapic_library( + name = "servicemanagement_go_gapic", + srcs = [":servicemanagement_proto_with_info"], + grpc_service_config = "servicemanagement_grpc_service_config.json", + importpath = "cloud.google.com/go/api/servicemanagement/apiv1;servicemanagement", + service_yaml = "servicemanagement_v1.yaml", + deps = [ + ":servicemanagement_go_proto", + "//google/api:serviceconfig_go_proto", + "@com_google_cloud_go//longrunning/autogen:go_default_library", + "//google/longrunning:longrunning_go_proto", + "@com_google_cloud_go//longrunning:go_default_library", + "@io_bazel_rules_go//proto/wkt:any_go_proto", + ], +) + +go_test( + name = "servicemanagement_go_gapic_test", + srcs = [":servicemanagement_go_gapic_srcjar_test"], + embed = [":servicemanagement_go_gapic"], + importpath = "cloud.google.com/go/api/servicemanagement/apiv1", +) + +# Open Source Packages +go_gapic_assembly_pkg( + name = "gapi-cloud-api-servicemanagement-v1-go", + deps = [ + ":servicemanagement_go_gapic", + ":servicemanagement_go_gapic_srcjar-test.srcjar", + ":servicemanagement_go_proto", + ], +) + +############################################################################## +# Python +############################################################################## +load( + "@com_google_googleapis_imports//:imports.bzl", + "moved_proto_library", + "py_gapic_assembly_pkg", + "py_gapic_library", + "py_grpc_library", + "py_proto_library", +) + +moved_proto_library( + name = "servicemanagement_moved_proto", + srcs = [":servicemanagement_proto"], + deps = [ + "//google/api:annotations_proto", + "//google/api:client_proto", + "//google/api:config_change_proto", + "//google/api:field_behavior_proto", + "//google/api:metric_proto", + "//google/api:quota_proto", + "//google/api:service_proto", + "//google/longrunning:operations_proto", + "//google/rpc:status_proto", + "@com_google_protobuf//:any_proto", + 
"@com_google_protobuf//:field_mask_proto", + "@com_google_protobuf//:timestamp_proto", + ], +) + +py_proto_library( + name = "servicemanagement_py_proto", + plugin = "@protoc_docs_plugin//:docs_plugin", + deps = [":servicemanagement_moved_proto"], +) + +py_grpc_library( + name = "servicemanagement_py_grpc", + srcs = [":servicemanagement_moved_proto"], + deps = [":servicemanagement_py_proto"], +) + +py_gapic_library( + name = "servicemanagement_py_gapic", + src = ":servicemanagement_proto_with_info", + gapic_yaml = "servicemanagement_gapic.yaml", + grpc_service_config = "servicemanagement_grpc_service_config.json", + package = "google.api.servicemanagement.v1", + service_yaml = "servicemanagement_v1.yaml", + deps = [ + ":servicemanagement_py_grpc", + ":servicemanagement_py_proto", + ], +) + +# Open Source Packages +py_gapic_assembly_pkg( + name = "api-servicemanagement-v1-py", + deps = [ + ":servicemanagement_py_gapic", + ":servicemanagement_py_grpc", + ":servicemanagement_py_proto", + ], +) + +############################################################################## +# PHP +############################################################################## +load( + "@com_google_googleapis_imports//:imports.bzl", + "php_gapic_assembly_pkg", + "php_gapic_library", + "php_grpc_library", + "php_proto_library", +) + +php_proto_library( + name = "servicemanagement_php_proto", + deps = [":servicemanagement_proto"], +) + +php_grpc_library( + name = "servicemanagement_php_grpc", + srcs = [":servicemanagement_proto"], + deps = [":servicemanagement_php_proto"], +) + +php_gapic_library( + name = "servicemanagement_php_gapic", + src = ":servicemanagement_proto_with_info", + gapic_yaml = "servicemanagement_gapic.yaml", + grpc_service_config = "servicemanagement_grpc_service_config.json", + package = "google.api.servicemanagement.v1", + service_yaml = "servicemanagement_v1.yaml", + deps = [ + ":servicemanagement_php_grpc", + ":servicemanagement_php_proto", + ], +) + +# Open Source Packages +php_gapic_assembly_pkg( + name = "google-cloud-api-servicemanagement-v1-php", + deps = [ + ":servicemanagement_php_gapic", + ":servicemanagement_php_grpc", + ":servicemanagement_php_proto", + ], +) + +############################################################################## +# Node.js +############################################################################## +load( + "@com_google_googleapis_imports//:imports.bzl", + "nodejs_gapic_assembly_pkg", + "nodejs_gapic_library", +) + +nodejs_gapic_library( + name = "servicemanagement_nodejs_gapic", + src = ":servicemanagement_proto_with_info", + grpc_service_config = "servicemanagement_grpc_service_config.json", + package = "google.api.servicemanagement.v1", + service_yaml = "servicemanagement_v1.yaml", + deps = [], +) + +nodejs_gapic_assembly_pkg( + name = "api-servicemanagement-v1-nodejs", + deps = [ + ":servicemanagement_nodejs_gapic", + ":servicemanagement_proto", + ], +) + +############################################################################## +# Ruby +############################################################################## +load( + "@com_google_googleapis_imports//:imports.bzl", + "ruby_gapic_assembly_pkg", + "ruby_gapic_library", + "ruby_grpc_library", + "ruby_proto_library", +) + +ruby_proto_library( + name = "servicemanagement_ruby_proto", + deps = [":servicemanagement_proto"], +) + +ruby_grpc_library( + name = "servicemanagement_ruby_grpc", + srcs = [":servicemanagement_proto"], + deps = [":servicemanagement_ruby_proto"], +) + 
+ruby_gapic_library( + name = "servicemanagement_ruby_gapic", + src = ":servicemanagement_proto_with_info", + gapic_yaml = "servicemanagement_gapic.yaml", + package = "google.api.servicemanagement.v1", + service_yaml = "servicemanagement_v1.yaml", + deps = [ + ":servicemanagement_ruby_grpc", + ":servicemanagement_ruby_proto", + ], +) + +# Open Source Packages +ruby_gapic_assembly_pkg( + name = "google-cloud-api-servicemanagement-v1-ruby", + deps = [ + ":servicemanagement_ruby_gapic", + ":servicemanagement_ruby_grpc", + ":servicemanagement_ruby_proto", + ], +) + +############################################################################## +# C# +############################################################################## +load( + "@com_google_googleapis_imports//:imports.bzl", + "csharp_gapic_assembly_pkg", + "csharp_gapic_library", + "csharp_grpc_library", + "csharp_proto_library", +) + +csharp_proto_library( + name = "servicemanagement_csharp_proto", + deps = [":servicemanagement_proto"], +) + +csharp_grpc_library( + name = "servicemanagement_csharp_grpc", + srcs = [":servicemanagement_proto"], + deps = [":servicemanagement_csharp_proto"], +) + +csharp_gapic_library( + name = "servicemanagement_csharp_gapic", + srcs = [":servicemanagement_proto_with_info"], + grpc_service_config = "servicemanagement_grpc_service_config.json", + common_resources_config = "@gax_dotnet//:Google.Api.Gax/ResourceNames/CommonResourcesConfig.json", + deps = [ + ":servicemanagement_csharp_grpc", + ":servicemanagement_csharp_proto", + ], +) + +# Open Source Packages +csharp_gapic_assembly_pkg( + name = "google-cloud-api-servicemanagement-v1-csharp", + deps = [ + ":servicemanagement_csharp_gapic", + ":servicemanagement_csharp_grpc", + ":servicemanagement_csharp_proto", + ], +) + +############################################################################## +# C++ +############################################################################## +# Put your C++ rules here diff --git a/_13_sponge-dtm-cache/http/third_party/google/api/servicemanagement/v1/resources.proto b/_13_sponge-dtm-cache/http/third_party/google/api/servicemanagement/v1/resources.proto new file mode 100644 index 0000000..37ac300 --- /dev/null +++ b/_13_sponge-dtm-cache/http/third_party/google/api/servicemanagement/v1/resources.proto @@ -0,0 +1,304 @@ +// Copyright 2019 Google LLC. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// + +syntax = "proto3"; + +package google.api.servicemanagement.v1; + +import "google/api/annotations.proto"; +import "google/api/config_change.proto"; +import "google/api/field_behavior.proto"; +import "google/api/metric.proto"; +import "google/api/quota.proto"; +import "google/api/service.proto"; +import "google/longrunning/operations.proto"; +import "google/protobuf/any.proto"; +import "google/protobuf/field_mask.proto"; +import "google/protobuf/timestamp.proto"; +import "google/rpc/status.proto"; + +option csharp_namespace = "Google.Cloud.ServiceManagement.V1"; +option go_package = "google.golang.org/genproto/googleapis/api/servicemanagement/v1;servicemanagement"; +option java_multiple_files = true; +option java_outer_classname = "ResourcesProto"; +option java_package = "com.google.api.servicemanagement.v1"; +option objc_class_prefix = "GASM"; +option php_namespace = "Google\\Cloud\\ServiceManagement\\V1"; +option ruby_package = "Google::Cloud::ServiceManagement::V1"; + +// The full representation of a Service that is managed by +// Google Service Management. +message ManagedService { + // The name of the service. See the [overview](/service-management/overview) + // for naming requirements. + string service_name = 2; + + // ID of the project that produces and owns this service. + string producer_project_id = 3; +} + +// The metadata associated with a long running operation resource. +message OperationMetadata { + // Represents the status of one operation step. + message Step { + // The short description of the step. + string description = 2; + + // The status code. + Status status = 4; + } + + // Code describes the status of the operation (or one of its steps). + enum Status { + // Unspecifed code. + STATUS_UNSPECIFIED = 0; + + // The operation or step has completed without errors. + DONE = 1; + + // The operation or step has not started yet. + NOT_STARTED = 2; + + // The operation or step is in progress. + IN_PROGRESS = 3; + + // The operation or step has completed with errors. If the operation is + // rollbackable, the rollback completed with errors too. + FAILED = 4; + + // The operation or step has completed with cancellation. + CANCELLED = 5; + } + + // The full name of the resources that this operation is directly + // associated with. + repeated string resource_names = 1; + + // Detailed status information for each step. The order is undetermined. + repeated Step steps = 2; + + // Percentage of completion of this operation, ranging from 0 to 100. + int32 progress_percentage = 3; + + // The start time of the operation. + google.protobuf.Timestamp start_time = 4; +} + +// Represents a diagnostic message (error or warning) +message Diagnostic { + // The kind of diagnostic information possible. + enum Kind { + // Warnings and errors + WARNING = 0; + + // Only errors + ERROR = 1; + } + + // File name and line number of the error or warning. + string location = 1; + + // The kind of diagnostic information provided. + Kind kind = 2; + + // Message describing the error or warning. + string message = 3; +} + +// Represents a source file which is used to generate the service configuration +// defined by `google.api.Service`. +message ConfigSource { + // A unique ID for a specific instance of this message, typically assigned + // by the client for tracking purpose. If empty, the server may choose to + // generate one instead. + string id = 5; + + // Set of source configuration files that are used to generate a service + // configuration (`google.api.Service`). 
+ repeated ConfigFile files = 2; +} + +// Generic specification of a source configuration file +message ConfigFile { + enum FileType { + // Unknown file type. + FILE_TYPE_UNSPECIFIED = 0; + + // YAML-specification of service. + SERVICE_CONFIG_YAML = 1; + + // OpenAPI specification, serialized in JSON. + OPEN_API_JSON = 2; + + // OpenAPI specification, serialized in YAML. + OPEN_API_YAML = 3; + + // FileDescriptorSet, generated by protoc. + // + // To generate, use protoc with imports and source info included. + // For an example test.proto file, the following command would put the value + // in a new file named out.pb. + // + // $protoc --include_imports --include_source_info test.proto -o out.pb + FILE_DESCRIPTOR_SET_PROTO = 4; + + // Uncompiled Proto file. Used for storage and display purposes only, + // currently server-side compilation is not supported. Should match the + // inputs to 'protoc' command used to generate FILE_DESCRIPTOR_SET_PROTO. A + // file of this type can only be included if at least one file of type + // FILE_DESCRIPTOR_SET_PROTO is included. + PROTO_FILE = 6; + } + + // The file name of the configuration file (full or relative path). + string file_path = 1; + + // The bytes that constitute the file. + bytes file_contents = 3; + + // The type of configuration file this represents. + FileType file_type = 4; +} + +// Represents a service configuration with its name and id. +message ConfigRef { + // Resource name of a service config. It must have the following + // format: "services/{service name}/configs/{config id}". + string name = 1; +} + +// Change report associated with a particular service configuration. +// +// It contains a list of ConfigChanges based on the comparison between +// two service configurations. +message ChangeReport { + // List of changes between two service configurations. + // The changes will be alphabetically sorted based on the identifier + // of each change. + // A ConfigChange identifier is a dot separated path to the configuration. + // Example: visibility.rules[selector='LibraryService.CreateBook'].restriction + repeated google.api.ConfigChange config_changes = 1; +} + +// A rollout resource that defines how service configuration versions are pushed +// to control plane systems. Typically, you create a new version of the +// service config, and then create a Rollout to push the service config. +message Rollout { + // Strategy that specifies how clients of Google Service Controller want to + // send traffic to use different config versions. This is generally + // used by API proxy to split traffic based on your configured percentage for + // each config version. + // + // One example of how to gradually roll out a new service configuration using + // this + // strategy: + // Day 1 + // + // Rollout { + // id: "example.googleapis.com/rollout_20160206" + // traffic_percent_strategy { + // percentages: { + // "example.googleapis.com/20160201": 70.00 + // "example.googleapis.com/20160206": 30.00 + // } + // } + // } + // + // Day 2 + // + // Rollout { + // id: "example.googleapis.com/rollout_20160207" + // traffic_percent_strategy: { + // percentages: { + // "example.googleapis.com/20160206": 100.00 + // } + // } + // } + message TrafficPercentStrategy { + // Maps service configuration IDs to their corresponding traffic percentage. + // Key is the service configuration ID, Value is the traffic percentage + // which must be greater than 0.0 and the sum must equal 100.0. + map<string, double> percentages = 1; + } + + // Strategy used to delete a service.
This strategy is a placeholder only + used by the system-generated rollout to delete a service. + message DeleteServiceStrategy { + + } + + // Status of a Rollout. + enum RolloutStatus { + // No status specified. + ROLLOUT_STATUS_UNSPECIFIED = 0; + + // The Rollout is in progress. + IN_PROGRESS = 1; + + // The Rollout has completed successfully. + SUCCESS = 2; + + // The Rollout has been cancelled. This can happen if you have overlapping + // Rollout pushes, and the previous ones will be cancelled. + CANCELLED = 3; + + // The Rollout has failed and the rollback attempt has failed too. + FAILED = 4; + + // The Rollout has not started yet and is pending execution. + PENDING = 5; + + // The Rollout has failed and rolled back to the previous successful + // Rollout. + FAILED_ROLLED_BACK = 6; + } + + // Optional. Unique identifier of this Rollout. Must be no longer than 63 characters + // and only lower case letters, digits, '.', '_' and '-' are allowed. + // + // If not specified by client, the server will generate one. The generated id + // will have the form of <date><revision number>, where "date" is the create + // date in ISO 8601 format. "revision number" is a monotonically increasing + // positive number that is reset every day for each service. + // An example of the generated rollout_id is '2016-02-16r1' + string rollout_id = 1 [(google.api.field_behavior) = OPTIONAL]; + + // Creation time of the rollout. Readonly. + google.protobuf.Timestamp create_time = 2; + + // The user who created the Rollout. Readonly. + string created_by = 3; + + // The status of this rollout. Readonly. In case of a failed rollout, + // the system will automatically roll back to the current Rollout + // version. Readonly. + RolloutStatus status = 4; + + // Strategy that defines which versions of service configurations should be + // pushed + // and how they should be used at runtime. + oneof strategy { + // Google Service Control selects service configurations based on + // traffic percentage. + TrafficPercentStrategy traffic_percent_strategy = 5; + + // The strategy associated with a rollout to delete a `ManagedService`. + // Readonly. + DeleteServiceStrategy delete_service_strategy = 200; + } + + // The name of the service associated with this Rollout. + string service_name = 8; +} diff --git a/_13_sponge-dtm-cache/http/third_party/google/api/servicemanagement/v1/servicemanagement_gapic.legacy.yaml b/_13_sponge-dtm-cache/http/third_party/google/api/servicemanagement/v1/servicemanagement_gapic.legacy.yaml new file mode 100644 index 0000000..7086218 --- /dev/null +++ b/_13_sponge-dtm-cache/http/third_party/google/api/servicemanagement/v1/servicemanagement_gapic.legacy.yaml @@ -0,0 +1,300 @@ +type: com.google.api.codegen.ConfigProto +config_schema_version: 1.0.0 +# The settings of generated code in a specific language. +language_settings: + java: + package_name: com.google.cloud.api.servicemanagement.v1 + python: + package_name: google.cloud.api.servicemanagement_v1.gapic + go: + package_name: cloud.google.com/go/api/servicemanagement/apiv1 + csharp: + package_name: Google.Api.Servicemanagement.V1 + ruby: + package_name: Google::Cloud::Api::Servicemanagement::V1 + php: + package_name: Google\Cloud\Api\Servicemanagement\V1 + nodejs: + package_name: servicemanagement.v1 +# A list of API interface configurations. +interfaces: +- name: google.api.servicemanagement.v1.ServiceManager + # A list of resource collection configurations. + # Consists of a name_pattern and an entity_name.
+ # The name_pattern is a pattern to describe the names of the resources of this + # collection, using the platform's conventions for URI patterns. A generator + # may use this to generate methods to compose and decompose such names. The + # pattern should use named placeholders as in `shelves/{shelf}/books/{book}`; + # those will be taken as hints for the parameter names of the generated + # methods. If empty, no name methods are generated. + # The entity_name is the name to be used as a basis for generated methods and + # classes. + smoke_test: + method: ListServices + init_fields: + - producer_project_id=$PROJECT_ID + collections: [] + # Definition for retryable codes. + retry_codes_def: + - name: idempotent + retry_codes: + - UNAVAILABLE + - DEADLINE_EXCEEDED + - name: non_idempotent + retry_codes: [] + # Definition for retry/backoff parameters. + retry_params_def: + - name: default + initial_retry_delay_millis: 100 + retry_delay_multiplier: 1.3 + max_retry_delay_millis: 60000 + initial_rpc_timeout_millis: 20000 + rpc_timeout_multiplier: 1 + max_rpc_timeout_millis: 20000 + total_timeout_millis: 600000 + # A list of method configurations. + # Common properties: + # + # name - The simple name of the method. + # + # flattening - Specifies the configuration for parameter flattening. + # Describes the parameter groups for which a generator should produce method + # overloads which allow a client to directly pass request message fields as + # method parameters. This information may or may not be used, depending on + # the target language. + # Consists of groups, which each represent a list of parameters to be + # flattened. Each parameter listed must be a field of the request message. + # + # required_fields - Fields that are always required for a request to be + # valid. + # + # resource_name_treatment - An enum that specifies how to treat the resource + # name formats defined in the field_name_patterns and + # response_field_name_patterns fields. + # UNSET: default value + # NONE: the collection configs will not be used by the generated code. + # VALIDATE: string fields will be validated by the client against the + # specified resource name formats. + # STATIC_TYPES: the client will use generated types for resource names. + # + # page_streaming - Specifies the configuration for paging. + # Describes information for generating a method which transforms a paging + # list RPC into a stream of resources. + # Consists of a request and a response. + # The request specifies request information of the list method. It defines + # which fields match the paging pattern in the request. The request consists + # of a page_size_field and a token_field. The page_size_field is the name of + # the optional field specifying the maximum number of elements to be + # returned in the response. The token_field is the name of the field in the + # request containing the page token. + # The response specifies response information of the list method. It defines + # which fields match the paging pattern in the response. The response + # consists of a token_field and a resources_field. The token_field is the + # name of the field in the response containing the next page token. The + # resources_field is the name of the field in the response containing the + # list of resources belonging to the page. + # + # retry_codes_name - Specifies the configuration for retryable codes. The + # name must be defined in interfaces.retry_codes_def. 
+ # + # retry_params_name - Specifies the configuration for retry/backoff + # parameters. The name must be defined in interfaces.retry_params_def. + # + # field_name_patterns - Maps the field name of the request type to + # entity_name of interfaces.collections. + # Specifies the string pattern that the field must follow. + # + # timeout_millis - Specifies the default timeout for a non-retrying call. If + # the call is retrying, refer to retry_params_name instead. + methods: + - name: ListServices + flattening: + groups: + - parameters: + - producer_project_id + - consumer_id + required_fields: + page_streaming: + request: + page_size_field: page_size + token_field: page_token + response: + token_field: next_page_token + resources_field: services + retry_codes_name: idempotent + retry_params_name: default + timeout_millis: 10000 + - name: GetService + flattening: + groups: + - parameters: + - service_name + required_fields: + - service_name + retry_codes_name: idempotent + retry_params_name: default + timeout_millis: 10000 + - name: CreateService + flattening: + groups: + - parameters: + - service + required_fields: + - service + retry_codes_name: non_idempotent + retry_params_name: default + timeout_millis: 20000 + - name: DeleteService + flattening: + groups: + - parameters: + - service_name + required_fields: + - service_name + retry_codes_name: idempotent + retry_params_name: default + timeout_millis: 60000 + - name: UndeleteService + flattening: + groups: + - parameters: + - service_name + required_fields: + - service_name + retry_codes_name: non_idempotent + retry_params_name: default + # REVIEW: Could this operation take a long time? + timeout_millis: 60000 + - name: ListServiceConfigs + flattening: + groups: + - parameters: + - service_name + required_fields: + - service_name + page_streaming: + request: + page_size_field: page_size + token_field: page_token + response: + token_field: next_page_token + resources_field: service_configs + retry_codes_name: idempotent + retry_params_name: default + timeout_millis: 10000 + - name: GetServiceConfig + flattening: + groups: + - parameters: + - service_name + - config_id + - view + required_fields: + - service_name + - config_id + retry_codes_name: idempotent + retry_params_name: default + timeout_millis: 10000 + - name: CreateServiceConfig + flattening: + groups: + - parameters: + - service_name + - service_config + required_fields: + - service_name + - service_config + retry_codes_name: non_idempotent + retry_params_name: default + timeout_millis: 20000 + - name: SubmitConfigSource + flattening: + groups: + - parameters: + - service_name + - config_source + - validate_only + required_fields: + - service_name + - config_source + retry_codes_name: non_idempotent + retry_params_name: default + timeout_millis: 10000 + - name: ListServiceRollouts + flattening: + groups: + - parameters: + - service_name + - filter + required_fields: + - service_name + page_streaming: + request: + page_size_field: page_size + token_field: page_token + response: + token_field: next_page_token + resources_field: rollouts + retry_codes_name: idempotent + retry_params_name: default + timeout_millis: 10000 + - name: GetServiceRollout + flattening: + groups: + - parameters: + - service_name + - rollout_id + required_fields: + - service_name + - rollout_id + retry_codes_name: idempotent + retry_params_name: default + timeout_millis: 10000 + - name: CreateServiceRollout + flattening: + groups: + - parameters: + - service_name + - rollout + required_fields: + - 
service_name + - rollout + retry_codes_name: non_idempotent + retry_params_name: default + timeout_millis: 10000 + - name: GenerateConfigReport + flattening: + groups: + - parameters: + - new_config + - old_config + required_fields: + - new_config + - old_config + retry_codes_name: non_idempotent + retry_params_name: default + timeout_millis: 10000 + - name: EnableService + flattening: + groups: + - parameters: + - service_name + - consumer_id + required_fields: + - service_name + - consumer_id + retry_codes_name: idempotent + retry_params_name: default + timeout_millis: 10000 + - name: DisableService + flattening: + groups: + - parameters: + - service_name + - consumer_id + required_fields: + - service_name + - consumer_id + retry_codes_name: idempotent + retry_params_name: default + timeout_millis: 10000 diff --git a/_13_sponge-dtm-cache/http/third_party/google/api/servicemanagement/v1/servicemanagement_gapic.yaml b/_13_sponge-dtm-cache/http/third_party/google/api/servicemanagement/v1/servicemanagement_gapic.yaml new file mode 100644 index 0000000..e698565 --- /dev/null +++ b/_13_sponge-dtm-cache/http/third_party/google/api/servicemanagement/v1/servicemanagement_gapic.yaml @@ -0,0 +1,18 @@ +type: com.google.api.codegen.ConfigProto +config_schema_version: 2.0.0 +# The settings of generated code in a specific language. +language_settings: + java: + package_name: com.google.cloud.api.servicemanagement.v1 + python: + package_name: google.cloud.api.servicemanagement_v1.gapic + go: + package_name: cloud.google.com/go/api/servicemanagement/apiv1 + csharp: + package_name: Google.Api.Servicemanagement.V1 + ruby: + package_name: Google::Cloud::Api::Servicemanagement::V1 + php: + package_name: Google\Cloud\Api\Servicemanagement\V1 + nodejs: + package_name: servicemanagement.v1 diff --git a/_13_sponge-dtm-cache/http/third_party/google/api/servicemanagement/v1/servicemanagement_grpc_service_config.json b/_13_sponge-dtm-cache/http/third_party/google/api/servicemanagement/v1/servicemanagement_grpc_service_config.json new file mode 100644 index 0000000..e2ec9d5 --- /dev/null +++ b/_13_sponge-dtm-cache/http/third_party/google/api/servicemanagement/v1/servicemanagement_grpc_service_config.json @@ -0,0 +1,12 @@ +{ + "methodConfig": [ + { + "name": [ + { + "service": "google.api.servicemanagement.v1.ServiceManager" + } + ], + "timeout": "10s" + } + ] +} diff --git a/_13_sponge-dtm-cache/http/third_party/google/api/servicemanagement/v1/servicemanagement_v1.yaml b/_13_sponge-dtm-cache/http/third_party/google/api/servicemanagement/v1/servicemanagement_v1.yaml new file mode 100644 index 0000000..43640dd --- /dev/null +++ b/_13_sponge-dtm-cache/http/third_party/google/api/servicemanagement/v1/servicemanagement_v1.yaml @@ -0,0 +1,275 @@ +type: google.api.Service +config_version: 2 +name: servicemanagement.googleapis.com +title: Service Management API + +apis: +- name: google.api.servicemanagement.v1.ServiceManager + +types: +- name: google.api.servicemanagement.v1.ConfigRef +- name: google.api.servicemanagement.v1.ConfigSource +- name: google.api.servicemanagement.v1.DisableServiceResponse +- name: google.api.servicemanagement.v1.EnableServiceResponse +- name: google.api.servicemanagement.v1.OperationMetadata +- name: google.api.servicemanagement.v1.Rollout +- name: google.api.servicemanagement.v1.SubmitConfigSourceResponse +- name: google.api.servicemanagement.v1.UndeleteServiceResponse + +documentation: + summary: |- + Google Service Management allows service producers to publish their + services on Google 
Cloud Platform so that they can be discovered and used + by service consumers. + overview: |- + Google Service Management manages a set of *services*. Service Management + allows *service producers* to + publish their services on Google Cloud Platform so that they can be + discovered and used by *service consumers*. It also handles the tasks of + tracking + service lifecycle and programming various backend systems -- such as + [Stackdriver Logging](https://cloud.google.com/stackdriver), + [Stackdriver Monitoring](https://cloud.google.com/stackdriver) -- to + support the managed services. + + If you are a service producer, you can use the Google Service Management + API and [Google Cloud SDK (gcloud)](/sdk) to publish and manage your + services. + Each managed service has a service configuration which declares various + aspects of the service such as its API surface, along with parameters to + configure the supporting backend systems, such as logging and monitoring. + If you build your service using + [Google Cloud Endpoints](https://cloud.google.com/endpoints/), the service + configuration will be handled automatically. + + If you are a service consumer and want to use a managed service, you can + use the Google Service Management API or [Google Cloud + Console](https://console.cloud.google.com) to activate the service for + your [Google developer + project](https://developers.google.com/console/help/new/), then start + using its APIs and functions. + + ## Managed services + + REST URL: + `https://servicemanagement.googleapis.com/v1/services/{service-name}`
REST schema is defined + [here](/service-management/reference/rest/v1/services). + + A managed service refers to a network service managed by + Service Management. Each managed service has a unique name, such as + `example.googleapis.com`, which must be a valid fully-qualified DNS name, + as per RFC 1035. + + A managed service typically provides some REST APIs and/or other + functions to their service consumers, such as mobile apps or cloud + services. + + Service producers can use methods, such as + [services.create](/service-management/reference/rest/v1/services/create), + [services.delete](/service-management/reference/rest/v1/services/delete), + [services.undelete](/service-management/reference/rest/v1/services/undelete), to + manipulate their managed services. + + ## Service producers + + A service producer is the Google developer project responsible for + publishing and maintaining a managed service. Each managed service is + owned by exactly one service producer. + + ## Service consumers + + A service consumer is a Google developer project that has enabled and can + invoke APIs on a managed service. A managed service can have many service + consumers. + + ## Service configuration + + REST URL: + `https://servicemanagement.googleapis.com/v1/services/{service-name}/configs/{config_id}` +
REST schema is defined + [here](/service-management/reference/rest/v1/services.configs). + + Each managed service is described by a service configuration which covers + a wide range of features, including its name, title, RPC API + definitions, + REST API definitions, documentation, authentication, and more. + + To change the configuration of a managed service, the service producer + needs to publish an updated service configuration to Service + Management. + Service Management keeps a history of published + service configurations, making it possible to easily retrace how a + service's configuration evolved over time. Service configurations can be + published + using the [services.configs.create](/service-management/reference/rest/v1/services.configs/create) or + [services.configs.submit](/service-management/reference/rest/v1/services.configs/submit) methods. Alternatively, + `services.configs.submit` allows publishing an + [OpenAPI](https://github.com/OAI/OpenAPI-Specification) specification, + formerly known as the Swagger Specification, which is automatically + converted to a corresponding service configuration. + + ## Service rollout + + REST URL: + `https://servicemanagement.googleapis.com/v1/services/{service-name}/rollouts/{rollout-id}` +
REST schema is defined
+ [here](/service-management/reference/rest/v1/services.rollouts).
+
+ A `Rollout` defines how Google Service Management should deploy service
+ configurations to backend systems and how the configurations take effect
+ at runtime. It lets service producers specify multiple service
+ configuration
+ versions to be deployed together, and a strategy that indicates how they
+ should be used.
+
+ Updating a managed service's configuration can be dangerous, as a
+ configuration error can lead to a service outage. To mitigate risks,
+ Service Management
+ supports gradual rollout of service configuration changes. This feature
+ gives service producers time to identify potential issues and roll back
+ service
+ configuration changes in case of errors, thus minimizing the customer
+ impact of bad configurations. For example, you could specify that 5% of
+ traffic uses configuration 1, while the remaining 95% uses configuration
+ 2.
+
+ Service Management keeps a history of rollouts so that service
+ producers can roll back to previous configuration versions. You can roll back a
+ configuration by initiating a new `Rollout` that clones a previously
+ submitted
+ rollout record.
+ rules:
+ - selector: google.iam.v1.IAMPolicy.GetIamPolicy
+ description: |-
+ Gets the access control policy for a resource. Returns an empty policy
+ if the resource exists and does not have a policy set.
+
+ - selector: google.iam.v1.IAMPolicy.SetIamPolicy
+ description: |-
+ Sets the access control policy on the specified resource. Replaces
+ any existing policy.
+
+ Can return Public Errors: NOT_FOUND, INVALID_ARGUMENT and
+ PERMISSION_DENIED
+
+ - selector: google.iam.v1.IAMPolicy.TestIamPermissions
+ description: |-
+ Returns permissions that a caller has on the specified resource. If the
+ resource does not exist, this will return an empty set of
+ permissions, not a NOT_FOUND error.
+
+ Note: This operation is designed to be used for building
+ permission-aware UIs and command-line tools, not for authorization
+ checking. This operation may "fail open" without warning.
+
+ - selector: google.longrunning.Operations.ListOperations
+ description: Lists service operations that match the specified filter in the request.
+ +backend: + rules: + - selector: 'google.api.servicemanagement.v1.ServiceManager.*' + deadline: 10.0 + - selector: google.api.servicemanagement.v1.ServiceManager.CreateServiceConfig + deadline: 20.0 + - selector: 'google.iam.v1.IAMPolicy.*' + deadline: 10.0 + - selector: 'google.longrunning.Operations.*' + deadline: 10.0 + +http: + rules: + - selector: google.iam.v1.IAMPolicy.GetIamPolicy + post: '/v1/{resource=services/*}:getIamPolicy' + body: '*' + additional_bindings: + - post: '/v1/{resource=services/*/consumers/*}:getIamPolicy' + body: '*' + - selector: google.iam.v1.IAMPolicy.SetIamPolicy + post: '/v1/{resource=services/*}:setIamPolicy' + body: '*' + additional_bindings: + - post: '/v1/{resource=services/*/consumers/*}:setIamPolicy' + body: '*' + - selector: google.iam.v1.IAMPolicy.TestIamPermissions + post: '/v1/{resource=services/*}:testIamPermissions' + body: '*' + additional_bindings: + - post: '/v1/{resource=services/*/consumers/*}:testIamPermissions' + body: '*' + - selector: google.longrunning.Operations.ListOperations + get: /v1/operations + +authentication: + rules: + - selector: 'google.api.servicemanagement.v1.ServiceManager.*' + oauth: + canonical_scopes: |- + https://www.googleapis.com/auth/cloud-platform, + https://www.googleapis.com/auth/service.management + - selector: google.api.servicemanagement.v1.ServiceManager.GetService + oauth: + canonical_scopes: |- + https://www.googleapis.com/auth/cloud-platform, + https://www.googleapis.com/auth/cloud-platform.read-only, + https://www.googleapis.com/auth/service.management, + https://www.googleapis.com/auth/service.management.readonly + - selector: google.api.servicemanagement.v1.ServiceManager.GetServiceConfig + oauth: + canonical_scopes: |- + https://www.googleapis.com/auth/cloud-platform, + https://www.googleapis.com/auth/cloud-platform.read-only, + https://www.googleapis.com/auth/service.management, + https://www.googleapis.com/auth/service.management.readonly + - selector: google.api.servicemanagement.v1.ServiceManager.GetServiceRollout + oauth: + canonical_scopes: |- + https://www.googleapis.com/auth/cloud-platform, + https://www.googleapis.com/auth/cloud-platform.read-only, + https://www.googleapis.com/auth/service.management, + https://www.googleapis.com/auth/service.management.readonly + - selector: google.api.servicemanagement.v1.ServiceManager.ListServiceConfigs + oauth: + canonical_scopes: |- + https://www.googleapis.com/auth/cloud-platform, + https://www.googleapis.com/auth/cloud-platform.read-only, + https://www.googleapis.com/auth/service.management, + https://www.googleapis.com/auth/service.management.readonly + - selector: google.api.servicemanagement.v1.ServiceManager.ListServiceRollouts + oauth: + canonical_scopes: |- + https://www.googleapis.com/auth/cloud-platform, + https://www.googleapis.com/auth/cloud-platform.read-only, + https://www.googleapis.com/auth/service.management, + https://www.googleapis.com/auth/service.management.readonly + - selector: google.api.servicemanagement.v1.ServiceManager.ListServices + oauth: + canonical_scopes: |- + https://www.googleapis.com/auth/cloud-platform, + https://www.googleapis.com/auth/cloud-platform.read-only, + https://www.googleapis.com/auth/service.management, + https://www.googleapis.com/auth/service.management.readonly + - selector: google.iam.v1.IAMPolicy.GetIamPolicy + oauth: + canonical_scopes: |- + https://www.googleapis.com/auth/cloud-platform, + https://www.googleapis.com/auth/cloud-platform.read-only, + 
https://www.googleapis.com/auth/service.management, + https://www.googleapis.com/auth/service.management.readonly + - selector: google.iam.v1.IAMPolicy.SetIamPolicy + oauth: + canonical_scopes: |- + https://www.googleapis.com/auth/cloud-platform, + https://www.googleapis.com/auth/service.management + - selector: google.iam.v1.IAMPolicy.TestIamPermissions + oauth: + canonical_scopes: |- + https://www.googleapis.com/auth/cloud-platform, + https://www.googleapis.com/auth/cloud-platform.read-only, + https://www.googleapis.com/auth/service.management, + https://www.googleapis.com/auth/service.management.readonly + - selector: 'google.longrunning.Operations.*' + oauth: + canonical_scopes: |- + https://www.googleapis.com/auth/cloud-platform, + https://www.googleapis.com/auth/service.management diff --git a/_13_sponge-dtm-cache/http/third_party/google/api/servicemanagement/v1/servicemanager.proto b/_13_sponge-dtm-cache/http/third_party/google/api/servicemanagement/v1/servicemanager.proto new file mode 100644 index 0000000..f94739f --- /dev/null +++ b/_13_sponge-dtm-cache/http/third_party/google/api/servicemanagement/v1/servicemanager.proto @@ -0,0 +1,560 @@ +// Copyright 2019 Google LLC. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.api.servicemanagement.v1; + +import "google/api/annotations.proto"; +import "google/api/client.proto"; +import "google/api/field_behavior.proto"; +import "google/api/service.proto"; +import "google/api/servicemanagement/v1/resources.proto"; +import "google/longrunning/operations.proto"; +import "google/protobuf/any.proto"; +import "google/protobuf/field_mask.proto"; +import "google/rpc/status.proto"; + +option csharp_namespace = "Google.Cloud.ServiceManagement.V1"; +option go_package = "google.golang.org/genproto/googleapis/api/servicemanagement/v1;servicemanagement"; +option java_multiple_files = true; +option java_outer_classname = "ServiceManagerProto"; +option java_package = "com.google.api.servicemanagement.v1"; +option objc_class_prefix = "GASM"; +option php_namespace = "Google\\Cloud\\ServiceManagement\\V1"; +option ruby_package = "Google::Cloud::ServiceManagement::V1"; + +// [Google Service Management API](/service-management/overview) +service ServiceManager { + option (google.api.default_host) = "servicemanagement.googleapis.com"; + option (google.api.oauth_scopes) = + "https://www.googleapis.com/auth/cloud-platform," + "https://www.googleapis.com/auth/cloud-platform.read-only," + "https://www.googleapis.com/auth/service.management," + "https://www.googleapis.com/auth/service.management.readonly"; + + // Lists managed services. + // + // Returns all public services. For authenticated users, also returns all + // services the calling user has "servicemanagement.services.get" permission + // for. + // + // **BETA:** If the caller specifies the `consumer_id`, it returns only the + // services enabled on the consumer. The `consumer_id` must have the format + // of "project:{PROJECT-ID}". 
+ rpc ListServices(ListServicesRequest) returns (ListServicesResponse) { + option (google.api.http) = { + get: "/v1/services" + }; + option (google.api.method_signature) = "producer_project_id,consumer_id"; + } + + // Gets a managed service. Authentication is required unless the service is + // public. + rpc GetService(GetServiceRequest) returns (ManagedService) { + option (google.api.http) = { + get: "/v1/services/{service_name}" + }; + option (google.api.method_signature) = "service_name"; + } + + // Creates a new managed service. + // Please note one producer project can own no more than 20 services. + // + // Operation + rpc CreateService(CreateServiceRequest) returns (google.longrunning.Operation) { + option (google.api.http) = { + post: "/v1/services" + body: "service" + }; + option (google.api.method_signature) = "service"; + option (google.longrunning.operation_info) = { + response_type: "google.api.servicemanagement.v1.ManagedService" + metadata_type: "google.api.servicemanagement.v1.OperationMetadata" + }; + } + + // Deletes a managed service. This method will change the service to the + // `Soft-Delete` state for 30 days. Within this period, service producers may + // call [UndeleteService][google.api.servicemanagement.v1.ServiceManager.UndeleteService] to restore the service. + // After 30 days, the service will be permanently deleted. + // + // Operation + rpc DeleteService(DeleteServiceRequest) returns (google.longrunning.Operation) { + option (google.api.http) = { + delete: "/v1/services/{service_name}" + }; + option (google.api.method_signature) = "service_name"; + option (google.longrunning.operation_info) = { + response_type: "google.protobuf.Empty" + metadata_type: "google.api.servicemanagement.v1.OperationMetadata" + }; + } + + // Revives a previously deleted managed service. The method restores the + // service using the configuration at the time the service was deleted. + // The target service must exist and must have been deleted within the + // last 30 days. + // + // Operation + rpc UndeleteService(UndeleteServiceRequest) returns (google.longrunning.Operation) { + option (google.api.http) = { + post: "/v1/services/{service_name}:undelete" + }; + option (google.api.method_signature) = "service_name"; + option (google.longrunning.operation_info) = { + response_type: "google.api.servicemanagement.v1.UndeleteServiceResponse" + metadata_type: "google.api.servicemanagement.v1.OperationMetadata" + }; + } + + // Lists the history of the service configuration for a managed service, + // from the newest to the oldest. + rpc ListServiceConfigs(ListServiceConfigsRequest) returns (ListServiceConfigsResponse) { + option (google.api.http) = { + get: "/v1/services/{service_name}/configs" + }; + option (google.api.method_signature) = "service_name"; + } + + // Gets a service configuration (version) for a managed service. + rpc GetServiceConfig(GetServiceConfigRequest) returns (google.api.Service) { + option (google.api.http) = { + get: "/v1/services/{service_name}/configs/{config_id}" + additional_bindings { + get: "/v1/services/{service_name}/config" + } + }; + option (google.api.method_signature) = "service_name,config_id,view"; + } + + // Creates a new service configuration (version) for a managed service. + // This method only stores the service configuration. To roll out the service + // configuration to backend systems please call + // [CreateServiceRollout][google.api.servicemanagement.v1.ServiceManager.CreateServiceRollout]. 
+ //
+ // Only the 100 most recent service configurations and ones referenced by
+ // existing rollouts are kept for each service. The rest will be deleted
+ // eventually.
+ rpc CreateServiceConfig(CreateServiceConfigRequest) returns (google.api.Service) {
+ option (google.api.http) = {
+ post: "/v1/services/{service_name}/configs"
+ body: "service_config"
+ };
+ option (google.api.method_signature) = "service_name,service_config";
+ }
+
+ // Creates a new service configuration (version) for a managed service based
+ // on
+ // user-supplied configuration source files (for example: OpenAPI
+ // Specification). This method stores the source configurations as well as the
+ // generated service configuration. To roll out the service configuration to
+ // other services,
+ // please call [CreateServiceRollout][google.api.servicemanagement.v1.ServiceManager.CreateServiceRollout].
+
+ // Only the 100 most recent configuration sources and ones referenced by
+ // existing service configurations are kept for each service. The rest will be
+ // deleted eventually.
+
+ // Operation
+ rpc SubmitConfigSource(SubmitConfigSourceRequest) returns (google.longrunning.Operation) {
+ option (google.api.http) = {
+ post: "/v1/services/{service_name}/configs:submit"
+ body: "*"
+ };
+ option (google.api.method_signature) = "service_name,config_source,validate_only";
+ option (google.longrunning.operation_info) = {
+ response_type: "google.api.servicemanagement.v1.SubmitConfigSourceResponse"
+ metadata_type: "google.api.servicemanagement.v1.OperationMetadata"
+ };
+ }
+
+ // Lists the history of the service configuration rollouts for a managed
+ // service, from the newest to the oldest.
+ rpc ListServiceRollouts(ListServiceRolloutsRequest) returns (ListServiceRolloutsResponse) {
+ option (google.api.http) = {
+ get: "/v1/services/{service_name}/rollouts"
+ };
+ option (google.api.method_signature) = "service_name,filter";
+ }
+
+ // Gets a service configuration [rollout][google.api.servicemanagement.v1.Rollout].
+ rpc GetServiceRollout(GetServiceRolloutRequest) returns (Rollout) {
+ option (google.api.http) = {
+ get: "/v1/services/{service_name}/rollouts/{rollout_id}"
+ };
+ option (google.api.method_signature) = "service_name,rollout_id";
+ }
+
+ // Creates a new service configuration rollout. Based on rollout, the
+ // Google Service Management will roll out the service configurations to
+ // different backend services. For example, the logging configuration will be
+ // pushed to Google Cloud Logging.
+
+ // Please note that any previous pending and running Rollouts and associated
+ // Operations will be automatically cancelled so that the latest Rollout will
+ // not be blocked by previous Rollouts.
+
+ // Only the 100 most recent (in any state) and the last 10 successful (if not
+ // already part of the set of 100 most recent) rollouts are kept for each
+ // service. The rest will be deleted eventually.
+ //
+ // Operation
+ rpc CreateServiceRollout(CreateServiceRolloutRequest) returns (google.longrunning.Operation) {
+ option (google.api.http) = {
+ post: "/v1/services/{service_name}/rollouts"
+ body: "rollout"
+ };
+ option (google.api.method_signature) = "service_name,rollout";
+ option (google.longrunning.operation_info) = {
+ response_type: "google.api.servicemanagement.v1.Rollout"
+ metadata_type: "google.api.servicemanagement.v1.OperationMetadata"
+ };
+ }
+
+ // Generates and returns a report (errors, warnings and changes from
+ // existing configurations) associated with
+ // GenerateConfigReportRequest.new_value
+ //
+ // If GenerateConfigReportRequest.old_value is specified,
+ // GenerateConfigReportRequest will contain a single ChangeReport based on the
+ // comparison between GenerateConfigReportRequest.new_value and
+ // GenerateConfigReportRequest.old_value.
+ // If GenerateConfigReportRequest.old_value is not specified, this method
+ // will compare GenerateConfigReportRequest.new_value with the last pushed
+ // service configuration.
+ rpc GenerateConfigReport(GenerateConfigReportRequest) returns (GenerateConfigReportResponse) {
+ option (google.api.http) = {
+ post: "/v1/services:generateConfigReport"
+ body: "*"
+ };
+ option (google.api.method_signature) = "new_config,old_config";
+ }
+
+ // Enables a [service][google.api.servicemanagement.v1.ManagedService] for a project, so it can be used
+ // for the project. See
+ // [Cloud Auth Guide](https://cloud.google.com/docs/authentication) for
+ // more information.
+ //
+ // Operation
+ rpc EnableService(EnableServiceRequest) returns (google.longrunning.Operation) {
+ option deprecated = true;
+ option (google.api.http) = {
+ post: "/v1/services/{service_name}:enable"
+ body: "*"
+ };
+ option (google.api.method_signature) = "service_name,consumer_id";
+ option (google.longrunning.operation_info) = {
+ response_type: "google.api.servicemanagement.v1.EnableServiceResponse"
+ metadata_type: "google.api.servicemanagement.v1.OperationMetadata"
+ };
+ }
+
+ // Disables a [service][google.api.servicemanagement.v1.ManagedService] for a project, so it can no longer
+ // be used for the project. It prevents accidental usage that may cause
+ // unexpected billing charges or security leaks.
+ //
+ // Operation
+ rpc DisableService(DisableServiceRequest) returns (google.longrunning.Operation) {
+ option deprecated = true;
+ option (google.api.http) = {
+ post: "/v1/services/{service_name}:disable"
+ body: "*"
+ };
+ option (google.api.method_signature) = "service_name,consumer_id";
+ option (google.longrunning.operation_info) = {
+ response_type: "google.api.servicemanagement.v1.DisableServiceResponse"
+ metadata_type: "google.api.servicemanagement.v1.OperationMetadata"
+ };
+ }
+}
+
+// Request message for `ListServices` method.
+message ListServicesRequest {
+ // Include services produced by the specified project.
+ string producer_project_id = 1;
+
+ // The max number of items to include in the response list. Page size is 50
+ // if not specified. Maximum value is 100.
+ int32 page_size = 5;
+
+ // Token identifying which result to start with; returned by a previous list
+ // call.
+ string page_token = 6;
+
+ // Include services consumed by the specified consumer.
+ //
+ // The Google Service Management implementation accepts the following
+ // forms:
+ // - project:
+ string consumer_id = 7 [deprecated = true];
+}
+
+// Response message for `ListServices` method.
+message ListServicesResponse { + // The returned services will only have the name field set. + repeated ManagedService services = 1; + + // Token that can be passed to `ListServices` to resume a paginated query. + string next_page_token = 2; +} + +// Request message for `GetService` method. +message GetServiceRequest { + // Required. The name of the service. See the `ServiceManager` overview for naming + // requirements. For example: `example.googleapis.com`. + string service_name = 1 [(google.api.field_behavior) = REQUIRED]; +} + +// Request message for CreateService method. +message CreateServiceRequest { + // Required. Initial values for the service resource. + ManagedService service = 1 [(google.api.field_behavior) = REQUIRED]; +} + +// Request message for DeleteService method. +message DeleteServiceRequest { + // Required. The name of the service. See the [overview](/service-management/overview) + // for naming requirements. For example: `example.googleapis.com`. + string service_name = 1 [(google.api.field_behavior) = REQUIRED]; +} + +// Request message for UndeleteService method. +message UndeleteServiceRequest { + // Required. The name of the service. See the [overview](/service-management/overview) + // for naming requirements. For example: `example.googleapis.com`. + string service_name = 1 [(google.api.field_behavior) = REQUIRED]; +} + +// Response message for UndeleteService method. +message UndeleteServiceResponse { + // Revived service resource. + ManagedService service = 1; +} + +// Request message for GetServiceConfig method. +message GetServiceConfigRequest { + enum ConfigView { + // Server response includes all fields except SourceInfo. + BASIC = 0; + + // Server response includes all fields including SourceInfo. + // SourceFiles are of type 'google.api.servicemanagement.v1.ConfigFile' + // and are only available for configs created using the + // SubmitConfigSource method. + FULL = 1; + } + + // Required. The name of the service. See the [overview](/service-management/overview) + // for naming requirements. For example: `example.googleapis.com`. + string service_name = 1 [(google.api.field_behavior) = REQUIRED]; + + // Required. The id of the service configuration resource. + // + // This field must be specified for the server to return all fields, including + // `SourceInfo`. + string config_id = 2 [(google.api.field_behavior) = REQUIRED]; + + // Specifies which parts of the Service Config should be returned in the + // response. + ConfigView view = 3; +} + +// Request message for ListServiceConfigs method. +message ListServiceConfigsRequest { + // Required. The name of the service. See the [overview](/service-management/overview) + // for naming requirements. For example: `example.googleapis.com`. + string service_name = 1 [(google.api.field_behavior) = REQUIRED]; + + // The token of the page to retrieve. + string page_token = 2; + + // The max number of items to include in the response list. Page size is 50 + // if not specified. Maximum value is 100. + int32 page_size = 3; +} + +// Response message for ListServiceConfigs method. +message ListServiceConfigsResponse { + // The list of service configuration resources. + repeated google.api.Service service_configs = 1; + + // The token of the next page of results. + string next_page_token = 2; +} + +// Request message for CreateServiceConfig method. +message CreateServiceConfigRequest { + // Required. The name of the service. See the [overview](/service-management/overview) + // for naming requirements. 
For example: `example.googleapis.com`. + string service_name = 1 [(google.api.field_behavior) = REQUIRED]; + + // Required. The service configuration resource. + google.api.Service service_config = 2 [(google.api.field_behavior) = REQUIRED]; +} + +// Request message for SubmitConfigSource method. +message SubmitConfigSourceRequest { + // Required. The name of the service. See the [overview](/service-management/overview) + // for naming requirements. For example: `example.googleapis.com`. + string service_name = 1 [(google.api.field_behavior) = REQUIRED]; + + // Required. The source configuration for the service. + ConfigSource config_source = 2 [(google.api.field_behavior) = REQUIRED]; + + // Optional. If set, this will result in the generation of a + // `google.api.Service` configuration based on the `ConfigSource` provided, + // but the generated config and the sources will NOT be persisted. + bool validate_only = 3 [(google.api.field_behavior) = OPTIONAL]; +} + +// Response message for SubmitConfigSource method. +message SubmitConfigSourceResponse { + // The generated service configuration. + google.api.Service service_config = 1; +} + +// Request message for 'CreateServiceRollout' +message CreateServiceRolloutRequest { + // Required. The name of the service. See the [overview](/service-management/overview) + // for naming requirements. For example: `example.googleapis.com`. + string service_name = 1 [(google.api.field_behavior) = REQUIRED]; + + // Required. The rollout resource. The `service_name` field is output only. + Rollout rollout = 2 [(google.api.field_behavior) = REQUIRED]; +} + +// Request message for 'ListServiceRollouts' +message ListServiceRolloutsRequest { + // Required. The name of the service. See the [overview](/service-management/overview) + // for naming requirements. For example: `example.googleapis.com`. + string service_name = 1 [(google.api.field_behavior) = REQUIRED]; + + // The token of the page to retrieve. + string page_token = 2; + + // The max number of items to include in the response list. Page size is 50 + // if not specified. Maximum value is 100. + int32 page_size = 3; + + // Required. Use `filter` to return subset of rollouts. + // The following filters are supported: + // -- To limit the results to only those in + // [status](google.api.servicemanagement.v1.RolloutStatus) 'SUCCESS', + // use filter='status=SUCCESS' + // -- To limit the results to those in + // [status](google.api.servicemanagement.v1.RolloutStatus) 'CANCELLED' + // or 'FAILED', use filter='status=CANCELLED OR status=FAILED' + string filter = 4 [(google.api.field_behavior) = REQUIRED]; +} + +// Response message for ListServiceRollouts method. +message ListServiceRolloutsResponse { + // The list of rollout resources. + repeated Rollout rollouts = 1; + + // The token of the next page of results. + string next_page_token = 2; +} + +// Request message for GetServiceRollout method. +message GetServiceRolloutRequest { + // Required. The name of the service. See the [overview](/service-management/overview) + // for naming requirements. For example: `example.googleapis.com`. + string service_name = 1 [(google.api.field_behavior) = REQUIRED]; + + // Required. The id of the rollout resource. + string rollout_id = 2 [(google.api.field_behavior) = REQUIRED]; +} + +// Request message for EnableService method. +message EnableServiceRequest { + // Required. Name of the service to enable. Specifying an unknown service name will + // cause the request to fail. 
+ string service_name = 1 [(google.api.field_behavior) = REQUIRED]; + + // Required. The identity of consumer resource which service enablement will be + // applied to. + // + // The Google Service Management implementation accepts the following + // forms: + // - "project:" + // + // Note: this is made compatible with + // google.api.servicecontrol.v1.Operation.consumer_id. + string consumer_id = 2 [(google.api.field_behavior) = REQUIRED]; +} + +// Operation payload for EnableService method. +message EnableServiceResponse { + +} + +// Request message for DisableService method. +message DisableServiceRequest { + // Required. Name of the service to disable. Specifying an unknown service name + // will cause the request to fail. + string service_name = 1 [(google.api.field_behavior) = REQUIRED]; + + // Required. The identity of consumer resource which service disablement will be + // applied to. + // + // The Google Service Management implementation accepts the following + // forms: + // - "project:" + // + // Note: this is made compatible with + // google.api.servicecontrol.v1.Operation.consumer_id. + string consumer_id = 2 [(google.api.field_behavior) = REQUIRED]; +} + +// Operation payload for DisableService method. +message DisableServiceResponse { + +} + +// Request message for GenerateConfigReport method. +message GenerateConfigReportRequest { + // Required. Service configuration for which we want to generate the report. + // For this version of API, the supported types are + // [google.api.servicemanagement.v1.ConfigRef][google.api.servicemanagement.v1.ConfigRef], + // [google.api.servicemanagement.v1.ConfigSource][google.api.servicemanagement.v1.ConfigSource], + // and [google.api.Service][google.api.Service] + google.protobuf.Any new_config = 1 [(google.api.field_behavior) = REQUIRED]; + + // Optional. Service configuration against which the comparison will be done. + // For this version of API, the supported types are + // [google.api.servicemanagement.v1.ConfigRef][google.api.servicemanagement.v1.ConfigRef], + // [google.api.servicemanagement.v1.ConfigSource][google.api.servicemanagement.v1.ConfigSource], + // and [google.api.Service][google.api.Service] + google.protobuf.Any old_config = 2 [(google.api.field_behavior) = OPTIONAL]; +} + +// Response message for GenerateConfigReport method. +message GenerateConfigReportResponse { + // Name of the service this report belongs to. + string service_name = 1; + + // ID of the service configuration this report belongs to. + string id = 2; + + // list of ChangeReport, each corresponding to comparison between two + // service configurations. + repeated ChangeReport change_reports = 3; + + // Errors / Linter warnings associated with the service definition this + // report + // belongs to. + repeated Diagnostic diagnostics = 4; +} diff --git a/_13_sponge-dtm-cache/http/third_party/google/api/source_info.proto b/_13_sponge-dtm-cache/http/third_party/google/api/source_info.proto new file mode 100644 index 0000000..3174209 --- /dev/null +++ b/_13_sponge-dtm-cache/http/third_party/google/api/source_info.proto @@ -0,0 +1,31 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.api; + +import "google/protobuf/any.proto"; + +option go_package = "google.golang.org/genproto/googleapis/api/serviceconfig;serviceconfig"; +option java_multiple_files = true; +option java_outer_classname = "SourceInfoProto"; +option java_package = "com.google.api"; +option objc_class_prefix = "GAPI"; + +// Source information used to create a Service Config +message SourceInfo { + // All files used during config generation. + repeated google.protobuf.Any source_files = 1; +} diff --git a/_13_sponge-dtm-cache/http/third_party/google/api/system_parameter.proto b/_13_sponge-dtm-cache/http/third_party/google/api/system_parameter.proto new file mode 100644 index 0000000..50d428d --- /dev/null +++ b/_13_sponge-dtm-cache/http/third_party/google/api/system_parameter.proto @@ -0,0 +1,95 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.api; + +option go_package = "google.golang.org/genproto/googleapis/api/serviceconfig;serviceconfig"; +option java_multiple_files = true; +option java_outer_classname = "SystemParameterProto"; +option java_package = "com.google.api"; +option objc_class_prefix = "GAPI"; + +// ### System parameter configuration +// +// A system parameter is a special kind of parameter defined by the API +// system, not by an individual API. It is typically mapped to an HTTP header +// and/or a URL query parameter. This configuration specifies which methods +// change the names of the system parameters. +message SystemParameters { + // Define system parameters. + // + // The parameters defined here will override the default parameters + // implemented by the system. If this field is missing from the service + // config, default system parameters will be used. Default system parameters + // and names is implementation-dependent. + // + // Example: define api key for all methods + // + // system_parameters + // rules: + // - selector: "*" + // parameters: + // - name: api_key + // url_query_parameter: api_key + // + // + // Example: define 2 api key names for a specific method. + // + // system_parameters + // rules: + // - selector: "/ListShelves" + // parameters: + // - name: api_key + // http_header: Api-Key1 + // - name: api_key + // http_header: Api-Key2 + // + // **NOTE:** All service configuration rules follow "last one wins" order. + repeated SystemParameterRule rules = 1; +} + +// Define a system parameter rule mapping system parameter definitions to +// methods. +message SystemParameterRule { + // Selects the methods to which this rule applies. 
Use '*' to indicate all + // methods in all APIs. + // + // Refer to [selector][google.api.DocumentationRule.selector] for syntax details. + string selector = 1; + + // Define parameters. Multiple names may be defined for a parameter. + // For a given method call, only one of them should be used. If multiple + // names are used the behavior is implementation-dependent. + // If none of the specified names are present the behavior is + // parameter-dependent. + repeated SystemParameter parameters = 2; +} + +// Define a parameter's name and location. The parameter may be passed as either +// an HTTP header or a URL query parameter, and if both are passed the behavior +// is implementation-dependent. +message SystemParameter { + // Define the name of the parameter, such as "api_key" . It is case sensitive. + string name = 1; + + // Define the HTTP header name to use for the parameter. It is case + // insensitive. + string http_header = 2; + + // Define the URL query parameter name to use for the parameter. It is case + // sensitive. + string url_query_parameter = 3; +} diff --git a/_13_sponge-dtm-cache/http/third_party/google/api/usage.proto b/_13_sponge-dtm-cache/http/third_party/google/api/usage.proto new file mode 100644 index 0000000..14d34c4 --- /dev/null +++ b/_13_sponge-dtm-cache/http/third_party/google/api/usage.proto @@ -0,0 +1,89 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.api; + +option go_package = "google.golang.org/genproto/googleapis/api/serviceconfig;serviceconfig"; +option java_multiple_files = true; +option java_outer_classname = "UsageProto"; +option java_package = "com.google.api"; +option objc_class_prefix = "GAPI"; + +// Configuration controlling usage of a service. +message Usage { + // Requirements that must be satisfied before a consumer project can use the + // service. Each requirement is of the form /; + // for example 'serviceusage.googleapis.com/billing-enabled'. + repeated string requirements = 1; + + // A list of usage rules that apply to individual API methods. + // + // **NOTE:** All service configuration rules follow "last one wins" order. + repeated UsageRule rules = 6; + + // The full resource name of a channel used for sending notifications to the + // service producer. + // + // Google Service Management currently only supports + // [Google Cloud Pub/Sub](https://cloud.google.com/pubsub) as a notification + // channel. To use Google Cloud Pub/Sub as the channel, this must be the name + // of a Cloud Pub/Sub topic that uses the Cloud Pub/Sub topic name format + // documented in https://cloud.google.com/pubsub/docs/overview. + string producer_notification_channel = 7; +} + +// Usage configuration rules for the service. +// +// NOTE: Under development. +// +// +// Use this rule to configure unregistered calls for the service. Unregistered +// calls are calls that do not contain consumer project identity. +// (Example: calls that do not contain an API key). 
+// By default, API methods do not allow unregistered calls, and each method call +// must be identified by a consumer project identity. Use this rule to +// allow/disallow unregistered calls. +// +// Example of an API that wants to allow unregistered calls for entire service. +// +// usage: +// rules: +// - selector: "*" +// allow_unregistered_calls: true +// +// Example of a method that wants to allow unregistered calls. +// +// usage: +// rules: +// - selector: "google.example.library.v1.LibraryService.CreateBook" +// allow_unregistered_calls: true +message UsageRule { + // Selects the methods to which this rule applies. Use '*' to indicate all + // methods in all APIs. + // + // Refer to [selector][google.api.DocumentationRule.selector] for syntax details. + string selector = 1; + + // If true, the selected method allows unregistered calls, e.g. calls + // that don't identify any user or application. + bool allow_unregistered_calls = 2; + + // If true, the selected method should skip service control and the control + // plane features, such as quota and billing, will not be available. + // This flag is used by Google Cloud Endpoints to bypass checks for internal + // methods, such as service health check methods. + bool skip_service_control = 3; +} diff --git a/_13_sponge-dtm-cache/http/third_party/google/protobuf/annotations.proto b/_13_sponge-dtm-cache/http/third_party/google/protobuf/annotations.proto new file mode 100644 index 0000000..85c361b --- /dev/null +++ b/_13_sponge-dtm-cache/http/third_party/google/protobuf/annotations.proto @@ -0,0 +1,31 @@ +// Copyright (c) 2015, Google Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.api; + +import "google/api/http.proto"; +import "google/protobuf/descriptor.proto"; + +option go_package = "google.golang.org/genproto/googleapis/api/annotations;annotations"; +option java_multiple_files = true; +option java_outer_classname = "AnnotationsProto"; +option java_package = "com.google.api"; +option objc_class_prefix = "GAPI"; + +extend google.protobuf.MethodOptions { + // See `HttpRule`. + HttpRule http = 72295728; +} diff --git a/_13_sponge-dtm-cache/http/third_party/google/protobuf/any.proto b/_13_sponge-dtm-cache/http/third_party/google/protobuf/any.proto new file mode 100644 index 0000000..6ed8a23 --- /dev/null +++ b/_13_sponge-dtm-cache/http/third_party/google/protobuf/any.proto @@ -0,0 +1,158 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. 
+// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +syntax = "proto3"; + +package google.protobuf; + +option csharp_namespace = "Google.Protobuf.WellKnownTypes"; +option go_package = "google.golang.org/protobuf/types/known/anypb"; +option java_package = "com.google.protobuf"; +option java_outer_classname = "AnyProto"; +option java_multiple_files = true; +option objc_class_prefix = "GPB"; + +// `Any` contains an arbitrary serialized protocol buffer message along with a +// URL that describes the type of the serialized message. +// +// Protobuf library provides support to pack/unpack Any values in the form +// of utility functions or additional generated methods of the Any type. +// +// Example 1: Pack and unpack a message in C++. +// +// Foo foo = ...; +// Any any; +// any.PackFrom(foo); +// ... +// if (any.UnpackTo(&foo)) { +// ... +// } +// +// Example 2: Pack and unpack a message in Java. +// +// Foo foo = ...; +// Any any = Any.pack(foo); +// ... +// if (any.is(Foo.class)) { +// foo = any.unpack(Foo.class); +// } +// +// Example 3: Pack and unpack a message in Python. +// +// foo = Foo(...) +// any = Any() +// any.Pack(foo) +// ... +// if any.Is(Foo.DESCRIPTOR): +// any.Unpack(foo) +// ... +// +// Example 4: Pack and unpack a message in Go +// +// foo := &pb.Foo{...} +// any, err := anypb.New(foo) +// if err != nil { +// ... +// } +// ... +// foo := &pb.Foo{} +// if err := any.UnmarshalTo(foo); err != nil { +// ... +// } +// +// The pack methods provided by protobuf library will by default use +// 'type.googleapis.com/full.type.name' as the type URL and the unpack +// methods only use the fully qualified type name after the last '/' +// in the type URL, for example "foo.bar.com/x/y.z" will yield type +// name "y.z". +// +// +// JSON +// ==== +// The JSON representation of an `Any` value uses the regular +// representation of the deserialized, embedded message, with an +// additional field `@type` which contains the type URL. 
Example: +// +// package google.profile; +// message Person { +// string first_name = 1; +// string last_name = 2; +// } +// +// { +// "@type": "type.googleapis.com/google.profile.Person", +// "firstName": , +// "lastName": +// } +// +// If the embedded message type is well-known and has a custom JSON +// representation, that representation will be embedded adding a field +// `value` which holds the custom JSON in addition to the `@type` +// field. Example (for message [google.protobuf.Duration][]): +// +// { +// "@type": "type.googleapis.com/google.protobuf.Duration", +// "value": "1.212s" +// } +// +message Any { + // A URL/resource name that uniquely identifies the type of the serialized + // protocol buffer message. This string must contain at least + // one "/" character. The last segment of the URL's path must represent + // the fully qualified name of the type (as in + // `path/google.protobuf.Duration`). The name should be in a canonical form + // (e.g., leading "." is not accepted). + // + // In practice, teams usually precompile into the binary all types that they + // expect it to use in the context of Any. However, for URLs which use the + // scheme `http`, `https`, or no scheme, one can optionally set up a type + // server that maps type URLs to message definitions as follows: + // + // * If no scheme is provided, `https` is assumed. + // * An HTTP GET on the URL must yield a [google.protobuf.Type][] + // value in binary format, or produce an error. + // * Applications are allowed to cache lookup results based on the + // URL, or have them precompiled into a binary to avoid any + // lookup. Therefore, binary compatibility needs to be preserved + // on changes to types. (Use versioned type names to manage + // breaking changes.) + // + // Note: this functionality is not currently available in the official + // protobuf release, and it is not used for type URLs beginning with + // type.googleapis.com. + // + // Schemes other than `http`, `https` (or the empty scheme) might be + // used with implementation specific semantics. + // + string type_url = 1; + + // Must be a valid serialized protocol buffer of the above specified type. + bytes value = 2; +} diff --git a/_13_sponge-dtm-cache/http/third_party/google/protobuf/api.proto b/_13_sponge-dtm-cache/http/third_party/google/protobuf/api.proto new file mode 100644 index 0000000..3d598fc --- /dev/null +++ b/_13_sponge-dtm-cache/http/third_party/google/protobuf/api.proto @@ -0,0 +1,208 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. 
+// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +syntax = "proto3"; + +package google.protobuf; + +import "google/protobuf/source_context.proto"; +import "google/protobuf/type.proto"; + +option csharp_namespace = "Google.Protobuf.WellKnownTypes"; +option java_package = "com.google.protobuf"; +option java_outer_classname = "ApiProto"; +option java_multiple_files = true; +option objc_class_prefix = "GPB"; +option go_package = "google.golang.org/protobuf/types/known/apipb"; + +// Api is a light-weight descriptor for an API Interface. +// +// Interfaces are also described as "protocol buffer services" in some contexts, +// such as by the "service" keyword in a .proto file, but they are different +// from API Services, which represent a concrete implementation of an interface +// as opposed to simply a description of methods and bindings. They are also +// sometimes simply referred to as "APIs" in other contexts, such as the name of +// this message itself. See https://cloud.google.com/apis/design/glossary for +// detailed terminology. +message Api { + // The fully qualified name of this interface, including package name + // followed by the interface's simple name. + string name = 1; + + // The methods of this interface, in unspecified order. + repeated Method methods = 2; + + // Any metadata attached to the interface. + repeated Option options = 3; + + // A version string for this interface. If specified, must have the form + // `major-version.minor-version`, as in `1.10`. If the minor version is + // omitted, it defaults to zero. If the entire version field is empty, the + // major version is derived from the package name, as outlined below. If the + // field is not empty, the version in the package name will be verified to be + // consistent with what is provided here. + // + // The versioning schema uses [semantic + // versioning](http://semver.org) where the major version number + // indicates a breaking change and the minor version an additive, + // non-breaking change. Both version numbers are signals to users + // what to expect from different versions, and should be carefully + // chosen based on the product plan. + // + // The major version is also reflected in the package name of the + // interface, which must end in `v`, as in + // `google.feature.v1`. For major versions 0 and 1, the suffix can + // be omitted. Zero major versions must only be used for + // experimental, non-GA interfaces. + // + // + string version = 4; + + // Source context for the protocol buffer service represented by this + // message. + SourceContext source_context = 5; + + // Included interfaces. See [Mixin][]. + repeated Mixin mixins = 6; + + // The source syntax of the service. + Syntax syntax = 7; +} + +// Method represents a method of an API interface. 
+message Method { + // The simple name of this method. + string name = 1; + + // A URL of the input message type. + string request_type_url = 2; + + // If true, the request is streamed. + bool request_streaming = 3; + + // The URL of the output message type. + string response_type_url = 4; + + // If true, the response is streamed. + bool response_streaming = 5; + + // Any metadata attached to the method. + repeated Option options = 6; + + // The source syntax of this method. + Syntax syntax = 7; +} + +// Declares an API Interface to be included in this interface. The including +// interface must redeclare all the methods from the included interface, but +// documentation and options are inherited as follows: +// +// - If after comment and whitespace stripping, the documentation +// string of the redeclared method is empty, it will be inherited +// from the original method. +// +// - Each annotation belonging to the service config (http, +// visibility) which is not set in the redeclared method will be +// inherited. +// +// - If an http annotation is inherited, the path pattern will be +// modified as follows. Any version prefix will be replaced by the +// version of the including interface plus the [root][] path if +// specified. +// +// Example of a simple mixin: +// +// package google.acl.v1; +// service AccessControl { +// // Get the underlying ACL object. +// rpc GetAcl(GetAclRequest) returns (Acl) { +// option (google.api.http).get = "/v1/{resource=**}:getAcl"; +// } +// } +// +// package google.storage.v2; +// service Storage { +// rpc GetAcl(GetAclRequest) returns (Acl); +// +// // Get a data record. +// rpc GetData(GetDataRequest) returns (Data) { +// option (google.api.http).get = "/v2/{resource=**}"; +// } +// } +// +// Example of a mixin configuration: +// +// apis: +// - name: google.storage.v2.Storage +// mixins: +// - name: google.acl.v1.AccessControl +// +// The mixin construct implies that all methods in `AccessControl` are +// also declared with same name and request/response types in +// `Storage`. A documentation generator or annotation processor will +// see the effective `Storage.GetAcl` method after inheriting +// documentation and annotations as follows: +// +// service Storage { +// // Get the underlying ACL object. +// rpc GetAcl(GetAclRequest) returns (Acl) { +// option (google.api.http).get = "/v2/{resource=**}:getAcl"; +// } +// ... +// } +// +// Note how the version in the path pattern changed from `v1` to `v2`. +// +// If the `root` field in the mixin is specified, it should be a +// relative path under which inherited HTTP paths are placed. Example: +// +// apis: +// - name: google.storage.v2.Storage +// mixins: +// - name: google.acl.v1.AccessControl +// root: acls +// +// This implies the following inherited HTTP annotation: +// +// service Storage { +// // Get the underlying ACL object. +// rpc GetAcl(GetAclRequest) returns (Acl) { +// option (google.api.http).get = "/v2/acls/{resource=**}:getAcl"; +// } +// ... +// } +message Mixin { + // The fully qualified name of the interface which is included. + string name = 1; + + // If non-empty specifies a path under which inherited HTTP paths + // are rooted. 
+ string root = 2; +} diff --git a/_13_sponge-dtm-cache/http/third_party/google/protobuf/compiler/plugin.proto b/_13_sponge-dtm-cache/http/third_party/google/protobuf/compiler/plugin.proto new file mode 100644 index 0000000..9242aac --- /dev/null +++ b/_13_sponge-dtm-cache/http/third_party/google/protobuf/compiler/plugin.proto @@ -0,0 +1,183 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// +// WARNING: The plugin interface is currently EXPERIMENTAL and is subject to +// change. +// +// protoc (aka the Protocol Compiler) can be extended via plugins. A plugin is +// just a program that reads a CodeGeneratorRequest from stdin and writes a +// CodeGeneratorResponse to stdout. +// +// Plugins written using C++ can use google/protobuf/compiler/plugin.h instead +// of dealing with the raw protocol defined here. +// +// A plugin executable needs only to be placed somewhere in the path. The +// plugin should be named "protoc-gen-$NAME", and will then be used when the +// flag "--${NAME}_out" is passed to protoc. + +syntax = "proto2"; + +package google.protobuf.compiler; +option java_package = "com.google.protobuf.compiler"; +option java_outer_classname = "PluginProtos"; + +option go_package = "google.golang.org/protobuf/types/pluginpb"; + +import "google/protobuf/descriptor.proto"; + +// The version number of protocol compiler. +message Version { + optional int32 major = 1; + optional int32 minor = 2; + optional int32 patch = 3; + // A suffix for alpha, beta or rc release, e.g., "alpha-1", "rc2". It should + // be empty for mainline stable releases. + optional string suffix = 4; +} + +// An encoded CodeGeneratorRequest is written to the plugin's stdin. +message CodeGeneratorRequest { + // The .proto files that were explicitly listed on the command-line. The + // code generator should generate code only for these files. 
Each file's + // descriptor will be included in proto_file, below. + repeated string file_to_generate = 1; + + // The generator parameter passed on the command-line. + optional string parameter = 2; + + // FileDescriptorProtos for all files in files_to_generate and everything + // they import. The files will appear in topological order, so each file + // appears before any file that imports it. + // + // protoc guarantees that all proto_files will be written after + // the fields above, even though this is not technically guaranteed by the + // protobuf wire format. This theoretically could allow a plugin to stream + // in the FileDescriptorProtos and handle them one by one rather than read + // the entire set into memory at once. However, as of this writing, this + // is not similarly optimized on protoc's end -- it will store all fields in + // memory at once before sending them to the plugin. + // + // Type names of fields and extensions in the FileDescriptorProto are always + // fully qualified. + repeated FileDescriptorProto proto_file = 15; + + // The version number of protocol compiler. + optional Version compiler_version = 3; + +} + +// The plugin writes an encoded CodeGeneratorResponse to stdout. +message CodeGeneratorResponse { + // Error message. If non-empty, code generation failed. The plugin process + // should exit with status code zero even if it reports an error in this way. + // + // This should be used to indicate errors in .proto files which prevent the + // code generator from generating correct code. Errors which indicate a + // problem in protoc itself -- such as the input CodeGeneratorRequest being + // unparseable -- should be reported by writing a message to stderr and + // exiting with a non-zero status code. + optional string error = 1; + + // A bitmask of supported features that the code generator supports. + // This is a bitwise "or" of values from the Feature enum. + optional uint64 supported_features = 2; + + // Sync with code_generator.h. + enum Feature { + FEATURE_NONE = 0; + FEATURE_PROTO3_OPTIONAL = 1; + } + + // Represents a single generated file. + message File { + // The file name, relative to the output directory. The name must not + // contain "." or ".." components and must be relative, not be absolute (so, + // the file cannot lie outside the output directory). "/" must be used as + // the path separator, not "\". + // + // If the name is omitted, the content will be appended to the previous + // file. This allows the generator to break large files into small chunks, + // and allows the generated text to be streamed back to protoc so that large + // files need not reside completely in memory at one time. Note that as of + // this writing protoc does not optimize for this -- it will read the entire + // CodeGeneratorResponse before writing files to disk. + optional string name = 1; + + // If non-empty, indicates that the named file should already exist, and the + // content here is to be inserted into that file at a defined insertion + // point. This feature allows a code generator to extend the output + // produced by another code generator. The original generator may provide + // insertion points by placing special annotations in the file that look + // like: + // @@protoc_insertion_point(NAME) + // The annotation can have arbitrary text before and after it on the line, + // which allows it to be placed in a comment. 
NAME should be replaced with + // an identifier naming the point -- this is what other generators will use + // as the insertion_point. Code inserted at this point will be placed + // immediately above the line containing the insertion point (thus multiple + // insertions to the same point will come out in the order they were added). + // The double-@ is intended to make it unlikely that the generated code + // could contain things that look like insertion points by accident. + // + // For example, the C++ code generator places the following line in the + // .pb.h files that it generates: + // // @@protoc_insertion_point(namespace_scope) + // This line appears within the scope of the file's package namespace, but + // outside of any particular class. Another plugin can then specify the + // insertion_point "namespace_scope" to generate additional classes or + // other declarations that should be placed in this scope. + // + // Note that if the line containing the insertion point begins with + // whitespace, the same whitespace will be added to every line of the + // inserted text. This is useful for languages like Python, where + // indentation matters. In these languages, the insertion point comment + // should be indented the same amount as any inserted code will need to be + // in order to work correctly in that context. + // + // The code generator that generates the initial file and the one which + // inserts into it must both run as part of a single invocation of protoc. + // Code generators are executed in the order in which they appear on the + // command line. + // + // If |insertion_point| is present, |name| must also be present. + optional string insertion_point = 2; + + // The file contents. + optional string content = 15; + + // Information describing the file content being inserted. If an insertion + // point is used, this information will be appropriately offset and inserted + // into the code generation metadata for the generated files. + optional GeneratedCodeInfo generated_code_info = 16; + } + repeated File file = 15; +} diff --git a/_13_sponge-dtm-cache/http/third_party/google/protobuf/descriptor.proto b/_13_sponge-dtm-cache/http/third_party/google/protobuf/descriptor.proto new file mode 100644 index 0000000..9f0ce6c --- /dev/null +++ b/_13_sponge-dtm-cache/http/third_party/google/protobuf/descriptor.proto @@ -0,0 +1,909 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
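Taken together, the two messages above are the whole plugin contract: protoc writes a serialized `CodeGeneratorRequest` to the plugin's stdin, and the plugin writes a serialized `CodeGeneratorResponse` back on stdout. A minimal Go sketch of such a plugin is shown below; the plugin name `protoc-gen-demo`, the output file suffix, and the generated content are illustrative assumptions, not part of this patch.

```go
package main

import (
	"io"
	"os"

	"google.golang.org/protobuf/proto"
	"google.golang.org/protobuf/types/pluginpb"
)

// A protoc plugin is a filter: CodeGeneratorRequest in on stdin,
// CodeGeneratorResponse out on stdout. If this binary is installed on the
// PATH as protoc-gen-demo, protoc invokes it via `protoc --demo_out=. x.proto`.
func main() {
	data, err := io.ReadAll(os.Stdin)
	if err != nil {
		panic(err)
	}

	req := &pluginpb.CodeGeneratorRequest{}
	if err := proto.Unmarshal(data, req); err != nil {
		panic(err)
	}

	// Emit one (illustrative) output file per file listed on the command line.
	resp := &pluginpb.CodeGeneratorResponse{}
	for _, name := range req.GetFileToGenerate() {
		resp.File = append(resp.File, &pluginpb.CodeGeneratorResponse_File{
			Name:    proto.String(name + ".demo.txt"),
			Content: proto.String("// generated for " + name + "\n"),
		})
	}

	out, err := proto.Marshal(resp)
	if err != nil {
		panic(err)
	}
	os.Stdout.Write(out)
}
```

Errors in the input `.proto` files should be reported through the `error` field of the response rather than a non-zero exit code, per the comments above.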
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. +// +// The messages in this file describe the definitions found in .proto files. +// A valid .proto file can be translated directly to a FileDescriptorProto +// without any other information (e.g. without reading its imports). + + +syntax = "proto2"; + +package google.protobuf; + +option go_package = "google.golang.org/protobuf/types/descriptorpb"; +option java_package = "com.google.protobuf"; +option java_outer_classname = "DescriptorProtos"; +option csharp_namespace = "Google.Protobuf.Reflection"; +option objc_class_prefix = "GPB"; +option cc_enable_arenas = true; + +// descriptor.proto must be optimized for speed because reflection-based +// algorithms don't work during bootstrapping. +option optimize_for = SPEED; + +// The protocol compiler can output a FileDescriptorSet containing the .proto +// files it parses. +message FileDescriptorSet { + repeated FileDescriptorProto file = 1; +} + +// Describes a complete .proto file. +message FileDescriptorProto { + optional string name = 1; // file name, relative to root of source tree + optional string package = 2; // e.g. "foo", "foo.bar", etc. + + // Names of files imported by this file. + repeated string dependency = 3; + // Indexes of the public imported files in the dependency list above. + repeated int32 public_dependency = 10; + // Indexes of the weak imported files in the dependency list. + // For Google-internal migration only. Do not use. + repeated int32 weak_dependency = 11; + + // All top-level definitions in this file. + repeated DescriptorProto message_type = 4; + repeated EnumDescriptorProto enum_type = 5; + repeated ServiceDescriptorProto service = 6; + repeated FieldDescriptorProto extension = 7; + + optional FileOptions options = 8; + + // This field contains optional information about the original source code. + // You may safely remove this entire field without harming runtime + // functionality of the descriptors -- the information is needed only by + // development tools. + optional SourceCodeInfo source_code_info = 9; + + // The syntax of the proto file. + // The supported values are "proto2" and "proto3". + optional string syntax = 12; +} + +// Describes a message type. +message DescriptorProto { + optional string name = 1; + + repeated FieldDescriptorProto field = 2; + repeated FieldDescriptorProto extension = 6; + + repeated DescriptorProto nested_type = 3; + repeated EnumDescriptorProto enum_type = 4; + + message ExtensionRange { + optional int32 start = 1; // Inclusive. + optional int32 end = 2; // Exclusive. + + optional ExtensionRangeOptions options = 3; + } + repeated ExtensionRange extension_range = 5; + + repeated OneofDescriptorProto oneof_decl = 8; + + optional MessageOptions options = 7; + + // Range of reserved tag numbers. Reserved tag numbers may not be used by + // fields or extension ranges in the same message. 
Reserved ranges may + // not overlap. + message ReservedRange { + optional int32 start = 1; // Inclusive. + optional int32 end = 2; // Exclusive. + } + repeated ReservedRange reserved_range = 9; + // Reserved field names, which may not be used by fields in the same message. + // A given name may only be reserved once. + repeated string reserved_name = 10; +} + +message ExtensionRangeOptions { + // The parser stores options it doesn't recognize here. See above. + repeated UninterpretedOption uninterpreted_option = 999; + + + // Clients can define custom options in extensions of this message. See above. + extensions 1000 to max; +} + +// Describes a field within a message. +message FieldDescriptorProto { + enum Type { + // 0 is reserved for errors. + // Order is weird for historical reasons. + TYPE_DOUBLE = 1; + TYPE_FLOAT = 2; + // Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT64 if + // negative values are likely. + TYPE_INT64 = 3; + TYPE_UINT64 = 4; + // Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT32 if + // negative values are likely. + TYPE_INT32 = 5; + TYPE_FIXED64 = 6; + TYPE_FIXED32 = 7; + TYPE_BOOL = 8; + TYPE_STRING = 9; + // Tag-delimited aggregate. + // Group type is deprecated and not supported in proto3. However, Proto3 + // implementations should still be able to parse the group wire format and + // treat group fields as unknown fields. + TYPE_GROUP = 10; + TYPE_MESSAGE = 11; // Length-delimited aggregate. + + // New in version 2. + TYPE_BYTES = 12; + TYPE_UINT32 = 13; + TYPE_ENUM = 14; + TYPE_SFIXED32 = 15; + TYPE_SFIXED64 = 16; + TYPE_SINT32 = 17; // Uses ZigZag encoding. + TYPE_SINT64 = 18; // Uses ZigZag encoding. + } + + enum Label { + // 0 is reserved for errors + LABEL_OPTIONAL = 1; + LABEL_REQUIRED = 2; + LABEL_REPEATED = 3; + } + + optional string name = 1; + optional int32 number = 3; + optional Label label = 4; + + // If type_name is set, this need not be set. If both this and type_name + // are set, this must be one of TYPE_ENUM, TYPE_MESSAGE or TYPE_GROUP. + optional Type type = 5; + + // For message and enum types, this is the name of the type. If the name + // starts with a '.', it is fully-qualified. Otherwise, C++-like scoping + // rules are used to find the type (i.e. first the nested types within this + // message are searched, then within the parent, on up to the root + // namespace). + optional string type_name = 6; + + // For extensions, this is the name of the type being extended. It is + // resolved in the same manner as type_name. + optional string extendee = 2; + + // For numeric types, contains the original text representation of the value. + // For booleans, "true" or "false". + // For strings, contains the default text contents (not escaped in any way). + // For bytes, contains the C escaped value. All bytes >= 128 are escaped. + // TODO(kenton): Base-64 encode? + optional string default_value = 7; + + // If set, gives the index of a oneof in the containing type's oneof_decl + // list. This field is a member of that oneof. + optional int32 oneof_index = 9; + + // JSON name of this field. The value is set by protocol compiler. If the + // user has set a "json_name" option on this field, that option's value + // will be used. Otherwise, it's deduced from the field's name by converting + // it to camelCase. + optional string json_name = 10; + + optional FieldOptions options = 8; + + // If true, this is a proto3 "optional". When a proto3 field is optional, it + // tracks presence regardless of field type. 
+ // + // When proto3_optional is true, this field must be belong to a oneof to + // signal to old proto3 clients that presence is tracked for this field. This + // oneof is known as a "synthetic" oneof, and this field must be its sole + // member (each proto3 optional field gets its own synthetic oneof). Synthetic + // oneofs exist in the descriptor only, and do not generate any API. Synthetic + // oneofs must be ordered after all "real" oneofs. + // + // For message fields, proto3_optional doesn't create any semantic change, + // since non-repeated message fields always track presence. However it still + // indicates the semantic detail of whether the user wrote "optional" or not. + // This can be useful for round-tripping the .proto file. For consistency we + // give message fields a synthetic oneof also, even though it is not required + // to track presence. This is especially important because the parser can't + // tell if a field is a message or an enum, so it must always create a + // synthetic oneof. + // + // Proto2 optional fields do not set this flag, because they already indicate + // optional with `LABEL_OPTIONAL`. + optional bool proto3_optional = 17; +} + +// Describes a oneof. +message OneofDescriptorProto { + optional string name = 1; + optional OneofOptions options = 2; +} + +// Describes an enum type. +message EnumDescriptorProto { + optional string name = 1; + + repeated EnumValueDescriptorProto value = 2; + + optional EnumOptions options = 3; + + // Range of reserved numeric values. Reserved values may not be used by + // entries in the same enum. Reserved ranges may not overlap. + // + // Note that this is distinct from DescriptorProto.ReservedRange in that it + // is inclusive such that it can appropriately represent the entire int32 + // domain. + message EnumReservedRange { + optional int32 start = 1; // Inclusive. + optional int32 end = 2; // Inclusive. + } + + // Range of reserved numeric values. Reserved numeric values may not be used + // by enum values in the same enum declaration. Reserved ranges may not + // overlap. + repeated EnumReservedRange reserved_range = 4; + + // Reserved enum value names, which may not be reused. A given name may only + // be reserved once. + repeated string reserved_name = 5; +} + +// Describes a value within an enum. +message EnumValueDescriptorProto { + optional string name = 1; + optional int32 number = 2; + + optional EnumValueOptions options = 3; +} + +// Describes a service. +message ServiceDescriptorProto { + optional string name = 1; + repeated MethodDescriptorProto method = 2; + + optional ServiceOptions options = 3; +} + +// Describes a method of a service. +message MethodDescriptorProto { + optional string name = 1; + + // Input and output type names. These are resolved in the same way as + // FieldDescriptorProto.type_name, but must refer to a message type. + optional string input_type = 2; + optional string output_type = 3; + + optional MethodOptions options = 4; + + // Identifies if client streams multiple client messages + optional bool client_streaming = 5 [default = false]; + // Identifies if server streams multiple server messages + optional bool server_streaming = 6 [default = false]; +} + + +// =================================================================== +// Options + +// Each of the definitions above may have "options" attached. These are +// just annotations which may cause code to be generated slightly differently +// or may contain hints for code that manipulates protocol messages. 
+// +// Clients may define custom options as extensions of the *Options messages. +// These extensions may not yet be known at parsing time, so the parser cannot +// store the values in them. Instead it stores them in a field in the *Options +// message called uninterpreted_option. This field must have the same name +// across all *Options messages. We then use this field to populate the +// extensions when we build a descriptor, at which point all protos have been +// parsed and so all extensions are known. +// +// Extension numbers for custom options may be chosen as follows: +// * For options which will only be used within a single application or +// organization, or for experimental options, use field numbers 50000 +// through 99999. It is up to you to ensure that you do not use the +// same number for multiple options. +// * For options which will be published and used publicly by multiple +// independent entities, e-mail protobuf-global-extension-registry@google.com +// to reserve extension numbers. Simply provide your project name (e.g. +// Objective-C plugin) and your project website (if available) -- there's no +// need to explain how you intend to use them. Usually you only need one +// extension number. You can declare multiple options with only one extension +// number by putting them in a sub-message. See the Custom Options section of +// the docs for examples: +// https://developers.google.com/protocol-buffers/docs/proto#options +// If this turns out to be popular, a web service will be set up +// to automatically assign option numbers. + +message FileOptions { + + // Sets the Java package where classes generated from this .proto will be + // placed. By default, the proto package is used, but this is often + // inappropriate because proto packages do not normally start with backwards + // domain names. + optional string java_package = 1; + + + // If set, all the classes from the .proto file are wrapped in a single + // outer class with the given name. This applies to both Proto1 + // (equivalent to the old "--one_java_file" option) and Proto2 (where + // a .proto always translates to a single class, but you may want to + // explicitly choose the class name). + optional string java_outer_classname = 8; + + // If set true, then the Java code generator will generate a separate .java + // file for each top-level message, enum, and service defined in the .proto + // file. Thus, these types will *not* be nested inside the outer class + // named by java_outer_classname. However, the outer class will still be + // generated to contain the file's getDescriptor() method as well as any + // top-level extensions defined in the file. + optional bool java_multiple_files = 10 [default = false]; + + // This option does nothing. + optional bool java_generate_equals_and_hash = 20 [deprecated=true]; + + // If set true, then the Java2 code generator will generate code that + // throws an exception whenever an attempt is made to assign a non-UTF-8 + // byte sequence to a string field. + // Message reflection will do the same. + // However, an extension field still accepts non-UTF-8 byte sequences. + // This option has no effect on when used with the lite runtime. + optional bool java_string_check_utf8 = 27 [default = false]; + + + // Generated classes can be optimized for speed or code size. + enum OptimizeMode { + SPEED = 1; // Generate complete code for parsing, serialization, + // etc. + CODE_SIZE = 2; // Use ReflectionOps to implement these methods. 
+ LITE_RUNTIME = 3; // Generate code using MessageLite and the lite runtime. + } + optional OptimizeMode optimize_for = 9 [default = SPEED]; + + // Sets the Go package where structs generated from this .proto will be + // placed. If omitted, the Go package will be derived from the following: + // - The basename of the package import path, if provided. + // - Otherwise, the package statement in the .proto file, if present. + // - Otherwise, the basename of the .proto file, without extension. + optional string go_package = 11; + + + + + // Should generic services be generated in each language? "Generic" services + // are not specific to any particular RPC system. They are generated by the + // main code generators in each language (without additional plugins). + // Generic services were the only kind of service generation supported by + // early versions of google.protobuf. + // + // Generic services are now considered deprecated in favor of using plugins + // that generate code specific to your particular RPC system. Therefore, + // these default to false. Old code which depends on generic services should + // explicitly set them to true. + optional bool cc_generic_services = 16 [default = false]; + optional bool java_generic_services = 17 [default = false]; + optional bool py_generic_services = 18 [default = false]; + optional bool php_generic_services = 42 [default = false]; + + // Is this file deprecated? + // Depending on the target platform, this can emit Deprecated annotations + // for everything in the file, or it will be completely ignored; in the very + // least, this is a formalization for deprecating files. + optional bool deprecated = 23 [default = false]; + + // Enables the use of arenas for the proto messages in this file. This applies + // only to generated classes for C++. + optional bool cc_enable_arenas = 31 [default = true]; + + + // Sets the objective c class prefix which is prepended to all objective c + // generated classes from this .proto. There is no default. + optional string objc_class_prefix = 36; + + // Namespace for generated classes; defaults to the package. + optional string csharp_namespace = 37; + + // By default Swift generators will take the proto package and CamelCase it + // replacing '.' with underscore and use that to prefix the types/symbols + // defined. When this options is provided, they will use this value instead + // to prefix the types/symbols defined. + optional string swift_prefix = 39; + + // Sets the php class prefix which is prepended to all php generated classes + // from this .proto. Default is empty. + optional string php_class_prefix = 40; + + // Use this option to change the namespace of php generated classes. Default + // is empty. When this option is empty, the package name will be used for + // determining the namespace. + optional string php_namespace = 41; + + // Use this option to change the namespace of php generated metadata classes. + // Default is empty. When this option is empty, the proto file name will be + // used for determining the namespace. + optional string php_metadata_namespace = 44; + + // Use this option to change the package of ruby generated classes. Default + // is empty. When this option is not set, the package name will be used for + // determining the ruby package. + optional string ruby_package = 45; + + + // The parser stores options it doesn't recognize here. + // See the documentation for the "Options" section above. 
+ repeated UninterpretedOption uninterpreted_option = 999; + + // Clients can define custom options in extensions of this message. + // See the documentation for the "Options" section above. + extensions 1000 to max; + + reserved 38; +} + +message MessageOptions { + // Set true to use the old proto1 MessageSet wire format for extensions. + // This is provided for backwards-compatibility with the MessageSet wire + // format. You should not use this for any other reason: It's less + // efficient, has fewer features, and is more complicated. + // + // The message must be defined exactly as follows: + // message Foo { + // option message_set_wire_format = true; + // extensions 4 to max; + // } + // Note that the message cannot have any defined fields; MessageSets only + // have extensions. + // + // All extensions of your type must be singular messages; e.g. they cannot + // be int32s, enums, or repeated messages. + // + // Because this is an option, the above two restrictions are not enforced by + // the protocol compiler. + optional bool message_set_wire_format = 1 [default = false]; + + // Disables the generation of the standard "descriptor()" accessor, which can + // conflict with a field of the same name. This is meant to make migration + // from proto1 easier; new code should avoid fields named "descriptor". + optional bool no_standard_descriptor_accessor = 2 [default = false]; + + // Is this message deprecated? + // Depending on the target platform, this can emit Deprecated annotations + // for the message, or it will be completely ignored; in the very least, + // this is a formalization for deprecating messages. + optional bool deprecated = 3 [default = false]; + + // Whether the message is an automatically generated map entry type for the + // maps field. + // + // For maps fields: + // map map_field = 1; + // The parsed descriptor looks like: + // message MapFieldEntry { + // option map_entry = true; + // optional KeyType key = 1; + // optional ValueType value = 2; + // } + // repeated MapFieldEntry map_field = 1; + // + // Implementations may choose not to generate the map_entry=true message, but + // use a native map in the target language to hold the keys and values. + // The reflection APIs in such implementations still need to work as + // if the field is a repeated message field. + // + // NOTE: Do not set the option in .proto files. Always use the maps syntax + // instead. The option should only be implicitly set by the proto compiler + // parser. + optional bool map_entry = 7; + + reserved 8; // javalite_serializable + reserved 9; // javanano_as_lite + + + // The parser stores options it doesn't recognize here. See above. + repeated UninterpretedOption uninterpreted_option = 999; + + // Clients can define custom options in extensions of this message. See above. + extensions 1000 to max; +} + +message FieldOptions { + // The ctype option instructs the C++ code generator to use a different + // representation of the field than it normally would. See the specific + // options below. This option is not yet implemented in the open source + // release -- sorry, we'll try to include it in a future version! + optional CType ctype = 1 [default = STRING]; + enum CType { + // Default mode. + STRING = 0; + + CORD = 1; + + STRING_PIECE = 2; + } + // The packed option can be enabled for repeated primitive fields to enable + // a more efficient representation on the wire. 
Rather than repeatedly + // writing the tag and type for each element, the entire array is encoded as + // a single length-delimited blob. In proto3, only explicit setting it to + // false will avoid using packed encoding. + optional bool packed = 2; + + // The jstype option determines the JavaScript type used for values of the + // field. The option is permitted only for 64 bit integral and fixed types + // (int64, uint64, sint64, fixed64, sfixed64). A field with jstype JS_STRING + // is represented as JavaScript string, which avoids loss of precision that + // can happen when a large value is converted to a floating point JavaScript. + // Specifying JS_NUMBER for the jstype causes the generated JavaScript code to + // use the JavaScript "number" type. The behavior of the default option + // JS_NORMAL is implementation dependent. + // + // This option is an enum to permit additional types to be added, e.g. + // goog.math.Integer. + optional JSType jstype = 6 [default = JS_NORMAL]; + enum JSType { + // Use the default type. + JS_NORMAL = 0; + + // Use JavaScript strings. + JS_STRING = 1; + + // Use JavaScript numbers. + JS_NUMBER = 2; + } + + // Should this field be parsed lazily? Lazy applies only to message-type + // fields. It means that when the outer message is initially parsed, the + // inner message's contents will not be parsed but instead stored in encoded + // form. The inner message will actually be parsed when it is first accessed. + // + // This is only a hint. Implementations are free to choose whether to use + // eager or lazy parsing regardless of the value of this option. However, + // setting this option true suggests that the protocol author believes that + // using lazy parsing on this field is worth the additional bookkeeping + // overhead typically needed to implement it. + // + // This option does not affect the public interface of any generated code; + // all method signatures remain the same. Furthermore, thread-safety of the + // interface is not affected by this option; const methods remain safe to + // call from multiple threads concurrently, while non-const methods continue + // to require exclusive access. + // + // + // Note that implementations may choose not to check required fields within + // a lazy sub-message. That is, calling IsInitialized() on the outer message + // may return true even if the inner message has missing required fields. + // This is necessary because otherwise the inner message would have to be + // parsed in order to perform the check, defeating the purpose of lazy + // parsing. An implementation which chooses not to check required fields + // must be consistent about it. That is, for any particular sub-message, the + // implementation must either *always* check its required fields, or *never* + // check its required fields, regardless of whether or not the message has + // been parsed. + optional bool lazy = 5 [default = false]; + + // Is this field deprecated? + // Depending on the target platform, this can emit Deprecated annotations + // for accessors, or it will be completely ignored; in the very least, this + // is a formalization for deprecating fields. + optional bool deprecated = 3 [default = false]; + + // For Google-internal migration only. Do not use. + optional bool weak = 10 [default = false]; + + + // The parser stores options it doesn't recognize here. See above. + repeated UninterpretedOption uninterpreted_option = 999; + + // Clients can define custom options in extensions of this message. See above. 
+ extensions 1000 to max; + + reserved 4; // removed jtype +} + +message OneofOptions { + // The parser stores options it doesn't recognize here. See above. + repeated UninterpretedOption uninterpreted_option = 999; + + // Clients can define custom options in extensions of this message. See above. + extensions 1000 to max; +} + +message EnumOptions { + + // Set this option to true to allow mapping different tag names to the same + // value. + optional bool allow_alias = 2; + + // Is this enum deprecated? + // Depending on the target platform, this can emit Deprecated annotations + // for the enum, or it will be completely ignored; in the very least, this + // is a formalization for deprecating enums. + optional bool deprecated = 3 [default = false]; + + reserved 5; // javanano_as_lite + + // The parser stores options it doesn't recognize here. See above. + repeated UninterpretedOption uninterpreted_option = 999; + + // Clients can define custom options in extensions of this message. See above. + extensions 1000 to max; +} + +message EnumValueOptions { + // Is this enum value deprecated? + // Depending on the target platform, this can emit Deprecated annotations + // for the enum value, or it will be completely ignored; in the very least, + // this is a formalization for deprecating enum values. + optional bool deprecated = 1 [default = false]; + + // The parser stores options it doesn't recognize here. See above. + repeated UninterpretedOption uninterpreted_option = 999; + + // Clients can define custom options in extensions of this message. See above. + extensions 1000 to max; +} + +message ServiceOptions { + + // Note: Field numbers 1 through 32 are reserved for Google's internal RPC + // framework. We apologize for hoarding these numbers to ourselves, but + // we were already using them long before we decided to release Protocol + // Buffers. + + // Is this service deprecated? + // Depending on the target platform, this can emit Deprecated annotations + // for the service, or it will be completely ignored; in the very least, + // this is a formalization for deprecating services. + optional bool deprecated = 33 [default = false]; + + // The parser stores options it doesn't recognize here. See above. + repeated UninterpretedOption uninterpreted_option = 999; + + // Clients can define custom options in extensions of this message. See above. + extensions 1000 to max; +} + +message MethodOptions { + + // Note: Field numbers 1 through 32 are reserved for Google's internal RPC + // framework. We apologize for hoarding these numbers to ourselves, but + // we were already using them long before we decided to release Protocol + // Buffers. + + // Is this method deprecated? + // Depending on the target platform, this can emit Deprecated annotations + // for the method, or it will be completely ignored; in the very least, + // this is a formalization for deprecating methods. + optional bool deprecated = 33 [default = false]; + + // Is this method side-effect-free (or safe in HTTP parlance), or idempotent, + // or neither? HTTP based RPC implementation may choose GET verb for safe + // methods, and PUT verb for idempotent methods instead of the default POST. + enum IdempotencyLevel { + IDEMPOTENCY_UNKNOWN = 0; + NO_SIDE_EFFECTS = 1; // implies idempotent + IDEMPOTENT = 2; // idempotent, but may have side effects + } + optional IdempotencyLevel idempotency_level = 34 + [default = IDEMPOTENCY_UNKNOWN]; + + // The parser stores options it doesn't recognize here. See above. 
+ repeated UninterpretedOption uninterpreted_option = 999; + + // Clients can define custom options in extensions of this message. See above. + extensions 1000 to max; +} + + +// A message representing a option the parser does not recognize. This only +// appears in options protos created by the compiler::Parser class. +// DescriptorPool resolves these when building Descriptor objects. Therefore, +// options protos in descriptor objects (e.g. returned by Descriptor::options(), +// or produced by Descriptor::CopyTo()) will never have UninterpretedOptions +// in them. +message UninterpretedOption { + // The name of the uninterpreted option. Each string represents a segment in + // a dot-separated name. is_extension is true iff a segment represents an + // extension (denoted with parentheses in options specs in .proto files). + // E.g.,{ ["foo", false], ["bar.baz", true], ["qux", false] } represents + // "foo.(bar.baz).qux". + message NamePart { + required string name_part = 1; + required bool is_extension = 2; + } + repeated NamePart name = 2; + + // The value of the uninterpreted option, in whatever type the tokenizer + // identified it as during parsing. Exactly one of these should be set. + optional string identifier_value = 3; + optional uint64 positive_int_value = 4; + optional int64 negative_int_value = 5; + optional double double_value = 6; + optional bytes string_value = 7; + optional string aggregate_value = 8; +} + +// =================================================================== +// Optional source code info + +// Encapsulates information about the original source file from which a +// FileDescriptorProto was generated. +message SourceCodeInfo { + // A Location identifies a piece of source code in a .proto file which + // corresponds to a particular definition. This information is intended + // to be useful to IDEs, code indexers, documentation generators, and similar + // tools. + // + // For example, say we have a file like: + // message Foo { + // optional string foo = 1; + // } + // Let's look at just the field definition: + // optional string foo = 1; + // ^ ^^ ^^ ^ ^^^ + // a bc de f ghi + // We have the following locations: + // span path represents + // [a,i) [ 4, 0, 2, 0 ] The whole field definition. + // [a,b) [ 4, 0, 2, 0, 4 ] The label (optional). + // [c,d) [ 4, 0, 2, 0, 5 ] The type (string). + // [e,f) [ 4, 0, 2, 0, 1 ] The name (foo). + // [g,h) [ 4, 0, 2, 0, 3 ] The number (1). + // + // Notes: + // - A location may refer to a repeated field itself (i.e. not to any + // particular index within it). This is used whenever a set of elements are + // logically enclosed in a single code segment. For example, an entire + // extend block (possibly containing multiple extension definitions) will + // have an outer location whose path refers to the "extensions" repeated + // field without an index. + // - Multiple locations may have the same path. This happens when a single + // logical declaration is spread out across multiple places. The most + // obvious example is the "extend" block again -- there may be multiple + // extend blocks in the same scope, each of which will have the same path. + // - A location's span is not always a subset of its parent's span. For + // example, the "extendee" of an extension declaration appears at the + // beginning of the "extend" block and is shared by all extensions within + // the block. + // - Just because a location's span is a subset of some other location's span + // does not mean that it is a descendant. 
For example, a "group" defines + // both a type and a field in a single declaration. Thus, the locations + // corresponding to the type and field and their components will overlap. + // - Code which tries to interpret locations should probably be designed to + // ignore those that it doesn't understand, as more types of locations could + // be recorded in the future. + repeated Location location = 1; + message Location { + // Identifies which part of the FileDescriptorProto was defined at this + // location. + // + // Each element is a field number or an index. They form a path from + // the root FileDescriptorProto to the place where the definition. For + // example, this path: + // [ 4, 3, 2, 7, 1 ] + // refers to: + // file.message_type(3) // 4, 3 + // .field(7) // 2, 7 + // .name() // 1 + // This is because FileDescriptorProto.message_type has field number 4: + // repeated DescriptorProto message_type = 4; + // and DescriptorProto.field has field number 2: + // repeated FieldDescriptorProto field = 2; + // and FieldDescriptorProto.name has field number 1: + // optional string name = 1; + // + // Thus, the above path gives the location of a field name. If we removed + // the last element: + // [ 4, 3, 2, 7 ] + // this path refers to the whole field declaration (from the beginning + // of the label to the terminating semicolon). + repeated int32 path = 1 [packed = true]; + + // Always has exactly three or four elements: start line, start column, + // end line (optional, otherwise assumed same as start line), end column. + // These are packed into a single field for efficiency. Note that line + // and column numbers are zero-based -- typically you will want to add + // 1 to each before displaying to a user. + repeated int32 span = 2 [packed = true]; + + // If this SourceCodeInfo represents a complete declaration, these are any + // comments appearing before and after the declaration which appear to be + // attached to the declaration. + // + // A series of line comments appearing on consecutive lines, with no other + // tokens appearing on those lines, will be treated as a single comment. + // + // leading_detached_comments will keep paragraphs of comments that appear + // before (but not connected to) the current element. Each paragraph, + // separated by empty lines, will be one comment element in the repeated + // field. + // + // Only the comment content is provided; comment markers (e.g. //) are + // stripped out. For block comments, leading whitespace and an asterisk + // will be stripped from the beginning of each line other than the first. + // Newlines are included in the output. + // + // Examples: + // + // optional int32 foo = 1; // Comment attached to foo. + // // Comment attached to bar. + // optional int32 bar = 2; + // + // optional string baz = 3; + // // Comment attached to baz. + // // Another line attached to baz. + // + // // Comment attached to qux. + // // + // // Another line attached to qux. + // optional double qux = 4; + // + // // Detached comment for corge. This is not leading or trailing comments + // // to qux or corge because there are blank lines separating it from + // // both. + // + // // Detached comment for corge paragraph 2. + // + // optional string corge = 5; + // /* Block comment attached + // * to corge. Leading asterisks + // * will be removed. */ + // /* Block comment attached to + // * grault. */ + // optional int32 grault = 6; + // + // // ignored detached comments. 
+ optional string leading_comments = 3; + optional string trailing_comments = 4; + repeated string leading_detached_comments = 6; + } +} + +// Describes the relationship between generated code and its original source +// file. A GeneratedCodeInfo message is associated with only one generated +// source file, but may contain references to different source .proto files. +message GeneratedCodeInfo { + // An Annotation connects some span of text in generated code to an element + // of its generating .proto file. + repeated Annotation annotation = 1; + message Annotation { + // Identifies the element in the original source .proto file. This field + // is formatted the same as SourceCodeInfo.Location.path. + repeated int32 path = 1 [packed = true]; + + // Identifies the filesystem path to the original source .proto. + optional string source_file = 2; + + // Identifies the starting offset in bytes in the generated code + // that relates to the identified object. + optional int32 begin = 3; + + // Identifies the ending offset in bytes in the generated code that + // relates to the identified offset. The end offset should be one past + // the last relevant byte (so the length of the text = end - begin). + optional int32 end = 4; + } +} diff --git a/_13_sponge-dtm-cache/http/third_party/google/protobuf/duration.proto b/_13_sponge-dtm-cache/http/third_party/google/protobuf/duration.proto new file mode 100644 index 0000000..81c3e36 --- /dev/null +++ b/_13_sponge-dtm-cache/http/third_party/google/protobuf/duration.proto @@ -0,0 +1,116 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ +syntax = "proto3"; + +package google.protobuf; + +option csharp_namespace = "Google.Protobuf.WellKnownTypes"; +option cc_enable_arenas = true; +option go_package = "google.golang.org/protobuf/types/known/durationpb"; +option java_package = "com.google.protobuf"; +option java_outer_classname = "DurationProto"; +option java_multiple_files = true; +option objc_class_prefix = "GPB"; + +// A Duration represents a signed, fixed-length span of time represented +// as a count of seconds and fractions of seconds at nanosecond +// resolution. It is independent of any calendar and concepts like "day" +// or "month". It is related to Timestamp in that the difference between +// two Timestamp values is a Duration and it can be added or subtracted +// from a Timestamp. Range is approximately +-10,000 years. +// +// # Examples +// +// Example 1: Compute Duration from two Timestamps in pseudo code. +// +// Timestamp start = ...; +// Timestamp end = ...; +// Duration duration = ...; +// +// duration.seconds = end.seconds - start.seconds; +// duration.nanos = end.nanos - start.nanos; +// +// if (duration.seconds < 0 && duration.nanos > 0) { +// duration.seconds += 1; +// duration.nanos -= 1000000000; +// } else if (duration.seconds > 0 && duration.nanos < 0) { +// duration.seconds -= 1; +// duration.nanos += 1000000000; +// } +// +// Example 2: Compute Timestamp from Timestamp + Duration in pseudo code. +// +// Timestamp start = ...; +// Duration duration = ...; +// Timestamp end = ...; +// +// end.seconds = start.seconds + duration.seconds; +// end.nanos = start.nanos + duration.nanos; +// +// if (end.nanos < 0) { +// end.seconds -= 1; +// end.nanos += 1000000000; +// } else if (end.nanos >= 1000000000) { +// end.seconds += 1; +// end.nanos -= 1000000000; +// } +// +// Example 3: Compute Duration from datetime.timedelta in Python. +// +// td = datetime.timedelta(days=3, minutes=10) +// duration = Duration() +// duration.FromTimedelta(td) +// +// # JSON Mapping +// +// In JSON format, the Duration type is encoded as a string rather than an +// object, where the string ends in the suffix "s" (indicating seconds) and +// is preceded by the number of seconds, with nanoseconds expressed as +// fractional seconds. For example, 3 seconds with 0 nanoseconds should be +// encoded in JSON format as "3s", while 3 seconds and 1 nanosecond should +// be expressed in JSON format as "3.000000001s", and 3 seconds and 1 +// microsecond should be expressed in JSON format as "3.000001s". +// +// +message Duration { + // Signed seconds of the span of time. Must be from -315,576,000,000 + // to +315,576,000,000 inclusive. Note: these bounds are computed from: + // 60 sec/min * 60 min/hr * 24 hr/day * 365.25 days/year * 10000 years + int64 seconds = 1; + + // Signed fractions of a second at nanosecond resolution of the span + // of time. Durations less than one second are represented with a 0 + // `seconds` field and a positive or negative `nanos` field. For durations + // of one second or more, a non-zero value for the `nanos` field must be + // of the same sign as the `seconds` field. Must be from -999,999,999 + // to +999,999,999 inclusive. 
+ int32 nanos = 2; +} diff --git a/_13_sponge-dtm-cache/http/third_party/google/protobuf/empty.proto b/_13_sponge-dtm-cache/http/third_party/google/protobuf/empty.proto new file mode 100644 index 0000000..5f992de --- /dev/null +++ b/_13_sponge-dtm-cache/http/third_party/google/protobuf/empty.proto @@ -0,0 +1,52 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +syntax = "proto3"; + +package google.protobuf; + +option csharp_namespace = "Google.Protobuf.WellKnownTypes"; +option go_package = "google.golang.org/protobuf/types/known/emptypb"; +option java_package = "com.google.protobuf"; +option java_outer_classname = "EmptyProto"; +option java_multiple_files = true; +option objc_class_prefix = "GPB"; +option cc_enable_arenas = true; + +// A generic empty message that you can re-use to avoid defining duplicated +// empty messages in your APIs. A typical example is to use it as the request +// or the response type of an API method. For instance: +// +// service Foo { +// rpc Bar(google.protobuf.Empty) returns (google.protobuf.Empty); +// } +// +// The JSON representation for `Empty` is empty JSON object `{}`. +message Empty {} diff --git a/_13_sponge-dtm-cache/http/third_party/google/protobuf/field_mask.proto b/_13_sponge-dtm-cache/http/third_party/google/protobuf/field_mask.proto new file mode 100644 index 0000000..6b5104f --- /dev/null +++ b/_13_sponge-dtm-cache/http/third_party/google/protobuf/field_mask.proto @@ -0,0 +1,245 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. 
+// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +syntax = "proto3"; + +package google.protobuf; + +option csharp_namespace = "Google.Protobuf.WellKnownTypes"; +option java_package = "com.google.protobuf"; +option java_outer_classname = "FieldMaskProto"; +option java_multiple_files = true; +option objc_class_prefix = "GPB"; +option go_package = "google.golang.org/protobuf/types/known/fieldmaskpb"; +option cc_enable_arenas = true; + +// `FieldMask` represents a set of symbolic field paths, for example: +// +// paths: "f.a" +// paths: "f.b.d" +// +// Here `f` represents a field in some root message, `a` and `b` +// fields in the message found in `f`, and `d` a field found in the +// message in `f.b`. +// +// Field masks are used to specify a subset of fields that should be +// returned by a get operation or modified by an update operation. +// Field masks also have a custom JSON encoding (see below). +// +// # Field Masks in Projections +// +// When used in the context of a projection, a response message or +// sub-message is filtered by the API to only contain those fields as +// specified in the mask. For example, if the mask in the previous +// example is applied to a response message as follows: +// +// f { +// a : 22 +// b { +// d : 1 +// x : 2 +// } +// y : 13 +// } +// z: 8 +// +// The result will not contain specific values for fields x,y and z +// (their value will be set to the default, and omitted in proto text +// output): +// +// +// f { +// a : 22 +// b { +// d : 1 +// } +// } +// +// A repeated field is not allowed except at the last position of a +// paths string. +// +// If a FieldMask object is not present in a get operation, the +// operation applies to all fields (as if a FieldMask of all fields +// had been specified). +// +// Note that a field mask does not necessarily apply to the +// top-level response message. In case of a REST get operation, the +// field mask applies directly to the response, but in case of a REST +// list operation, the mask instead applies to each individual message +// in the returned resource list. In case of a REST custom method, +// other definitions may be used. Where the mask applies will be +// clearly documented together with its declaration in the API. In +// any case, the effect on the returned resource/resources is required +// behavior for APIs. 
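As a concrete illustration of the projection semantics described above, a client written in Go can build and normalize such a mask with the `fieldmaskpb` package named in `go_package`. This is a minimal sketch; the paths are the `f.a` / `f.b.d` pair from the projection example.

```go
package main

import (
	"fmt"

	"google.golang.org/protobuf/types/known/fieldmaskpb"
)

func main() {
	// The mask from the projection example: keep f.a and f.b.d,
	// everything else in the response falls back to default values.
	mask := &fieldmaskpb.FieldMask{
		Paths: []string{"f.b.d", "f.a"},
	}

	// Normalize sorts the paths and removes entries already covered by a
	// broader path (e.g. "f.b.d" would be dropped if "f.b" were present).
	mask.Normalize()

	fmt.Println(mask.GetPaths()) // [f.a f.b.d]
}
```

Validating paths against a concrete message type can be done with `fieldmaskpb.New` or `(*FieldMask).IsValid`, both of which require the generated Go type of the target message.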
+// +// # Field Masks in Update Operations +// +// A field mask in update operations specifies which fields of the +// targeted resource are going to be updated. The API is required +// to only change the values of the fields as specified in the mask +// and leave the others untouched. If a resource is passed in to +// describe the updated values, the API ignores the values of all +// fields not covered by the mask. +// +// If a repeated field is specified for an update operation, new values will +// be appended to the existing repeated field in the target resource. Note that +// a repeated field is only allowed in the last position of a `paths` string. +// +// If a sub-message is specified in the last position of the field mask for an +// update operation, then new value will be merged into the existing sub-message +// in the target resource. +// +// For example, given the target message: +// +// f { +// b { +// d: 1 +// x: 2 +// } +// c: [1] +// } +// +// And an update message: +// +// f { +// b { +// d: 10 +// } +// c: [2] +// } +// +// then if the field mask is: +// +// paths: ["f.b", "f.c"] +// +// then the result will be: +// +// f { +// b { +// d: 10 +// x: 2 +// } +// c: [1, 2] +// } +// +// An implementation may provide options to override this default behavior for +// repeated and message fields. +// +// In order to reset a field's value to the default, the field must +// be in the mask and set to the default value in the provided resource. +// Hence, in order to reset all fields of a resource, provide a default +// instance of the resource and set all fields in the mask, or do +// not provide a mask as described below. +// +// If a field mask is not present on update, the operation applies to +// all fields (as if a field mask of all fields has been specified). +// Note that in the presence of schema evolution, this may mean that +// fields the client does not know and has therefore not filled into +// the request will be reset to their default. If this is unwanted +// behavior, a specific service may require a client to always specify +// a field mask, producing an error if not. +// +// As with get operations, the location of the resource which +// describes the updated values in the request message depends on the +// operation kind. In any case, the effect of the field mask is +// required to be honored by the API. +// +// ## Considerations for HTTP REST +// +// The HTTP kind of an update operation which uses a field mask must +// be set to PATCH instead of PUT in order to satisfy HTTP semantics +// (PUT must only be used for full updates). +// +// # JSON Encoding of Field Masks +// +// In JSON, a field mask is encoded as a single string where paths are +// separated by a comma. Fields name in each path are converted +// to/from lower-camel naming conventions. +// +// As an example, consider the following message declarations: +// +// message Profile { +// User user = 1; +// Photo photo = 2; +// } +// message User { +// string display_name = 1; +// string address = 2; +// } +// +// In proto a field mask for `Profile` may look as such: +// +// mask { +// paths: "user.display_name" +// paths: "photo" +// } +// +// In JSON, the same mask is represented as below: +// +// { +// mask: "user.displayName,photo" +// } +// +// # Field Masks and Oneof Fields +// +// Field masks treat fields in oneofs just as regular fields. 
Consider the +// following message: +// +// message SampleMessage { +// oneof test_oneof { +// string name = 4; +// SubMessage sub_message = 9; +// } +// } +// +// The field mask can be: +// +// mask { +// paths: "name" +// } +// +// Or: +// +// mask { +// paths: "sub_message" +// } +// +// Note that oneof type names ("test_oneof" in this case) cannot be used in +// paths. +// +// ## Field Mask Verification +// +// The implementation of any API method which has a FieldMask type field in the +// request should verify the included field paths, and return an +// `INVALID_ARGUMENT` error if any path is unmappable. +message FieldMask { + // The set of field mask paths. + repeated string paths = 1; +} diff --git a/_13_sponge-dtm-cache/http/third_party/google/protobuf/source_context.proto b/_13_sponge-dtm-cache/http/third_party/google/protobuf/source_context.proto new file mode 100644 index 0000000..06bfc43 --- /dev/null +++ b/_13_sponge-dtm-cache/http/third_party/google/protobuf/source_context.proto @@ -0,0 +1,48 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +syntax = "proto3"; + +package google.protobuf; + +option csharp_namespace = "Google.Protobuf.WellKnownTypes"; +option java_package = "com.google.protobuf"; +option java_outer_classname = "SourceContextProto"; +option java_multiple_files = true; +option objc_class_prefix = "GPB"; +option go_package = "google.golang.org/protobuf/types/known/sourcecontextpb"; + +// `SourceContext` represents information about the source of a +// protobuf element, like the file in which it is defined. +message SourceContext { + // The path-qualified name of the .proto file that contained the associated + // protobuf element. For example: `"google/protobuf/source_context.proto"`. 
+ string file_name = 1; +} diff --git a/_13_sponge-dtm-cache/http/third_party/google/protobuf/struct.proto b/_13_sponge-dtm-cache/http/third_party/google/protobuf/struct.proto new file mode 100644 index 0000000..545215c --- /dev/null +++ b/_13_sponge-dtm-cache/http/third_party/google/protobuf/struct.proto @@ -0,0 +1,95 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +syntax = "proto3"; + +package google.protobuf; + +option csharp_namespace = "Google.Protobuf.WellKnownTypes"; +option cc_enable_arenas = true; +option go_package = "google.golang.org/protobuf/types/known/structpb"; +option java_package = "com.google.protobuf"; +option java_outer_classname = "StructProto"; +option java_multiple_files = true; +option objc_class_prefix = "GPB"; + +// `Struct` represents a structured data value, consisting of fields +// which map to dynamically typed values. In some languages, `Struct` +// might be supported by a native representation. For example, in +// scripting languages like JS a struct is represented as an +// object. The details of that representation are described together +// with the proto support for the language. +// +// The JSON representation for `Struct` is JSON object. +message Struct { + // Unordered map of dynamically typed values. + map<string, Value> fields = 1; +} + +// `Value` represents a dynamically typed value which can be either +// null, a number, a string, a boolean, a recursive struct value, or a +// list of values. A producer of value is expected to set one of that +// variants, absence of any variant indicates an error. +// +// The JSON representation for `Value` is JSON value. +message Value { + // The kind of value. + oneof kind { + // Represents a null value. + NullValue null_value = 1; + // Represents a double value. + double number_value = 2; + // Represents a string value. + string string_value = 3; + // Represents a boolean value.
+ bool bool_value = 4; + // Represents a structured value. + Struct struct_value = 5; + // Represents a repeated `Value`. + ListValue list_value = 6; + } +} + +// `NullValue` is a singleton enumeration to represent the null value for the +// `Value` type union. +// +// The JSON representation for `NullValue` is JSON `null`. +enum NullValue { + // Null value. + NULL_VALUE = 0; +} + +// `ListValue` is a wrapper around a repeated field of values. +// +// The JSON representation for `ListValue` is JSON array. +message ListValue { + // Repeated field of dynamically typed values. + repeated Value values = 1; +} diff --git a/_13_sponge-dtm-cache/http/third_party/google/protobuf/timestamp.proto b/_13_sponge-dtm-cache/http/third_party/google/protobuf/timestamp.proto new file mode 100644 index 0000000..3b2df6d --- /dev/null +++ b/_13_sponge-dtm-cache/http/third_party/google/protobuf/timestamp.proto @@ -0,0 +1,147 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +syntax = "proto3"; + +package google.protobuf; + +option csharp_namespace = "Google.Protobuf.WellKnownTypes"; +option cc_enable_arenas = true; +option go_package = "google.golang.org/protobuf/types/known/timestamppb"; +option java_package = "com.google.protobuf"; +option java_outer_classname = "TimestampProto"; +option java_multiple_files = true; +option objc_class_prefix = "GPB"; + +// A Timestamp represents a point in time independent of any time zone or local +// calendar, encoded as a count of seconds and fractions of seconds at +// nanosecond resolution. The count is relative to an epoch at UTC midnight on +// January 1, 1970, in the proleptic Gregorian calendar which extends the +// Gregorian calendar backwards to year one. +// +// All minutes are 60 seconds long. Leap seconds are "smeared" so that no leap +// second table is needed for interpretation, using a [24-hour linear +// smear](https://developers.google.com/time/smear). 
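Returning briefly to the Struct, Value and ListValue messages defined just above: in Go they are normally built through the structpb helpers rather than assembled value by value. A minimal sketch, with made-up keys and values purely for illustration:

    package main

    import (
        "fmt"

        "google.golang.org/protobuf/encoding/protojson"
        "google.golang.org/protobuf/types/known/structpb"
    )

    func main() {
        // structpb.NewStruct converts an ordinary Go map into the Struct/Value
        // representation; unsupported Go value types return an error.
        s, err := structpb.NewStruct(map[string]interface{}{
            "name":     "comment-service",
            "replicas": 3,
            "enabled":  true,
            "tags":     []interface{}{"grpc", "http"},
        })
        if err != nil {
            panic(err)
        }

        b, _ := protojson.Marshal(s)
        fmt.Println(string(b)) // a plain JSON object, per the mapping above
    }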
+// +// The range is from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59.999999999Z. By +// restricting to that range, we ensure that we can convert to and from [RFC +// 3339](https://www.ietf.org/rfc/rfc3339.txt) date strings. +// +// # Examples +// +// Example 1: Compute Timestamp from POSIX `time()`. +// +// Timestamp timestamp; +// timestamp.set_seconds(time(NULL)); +// timestamp.set_nanos(0); +// +// Example 2: Compute Timestamp from POSIX `gettimeofday()`. +// +// struct timeval tv; +// gettimeofday(&tv, NULL); +// +// Timestamp timestamp; +// timestamp.set_seconds(tv.tv_sec); +// timestamp.set_nanos(tv.tv_usec * 1000); +// +// Example 3: Compute Timestamp from Win32 `GetSystemTimeAsFileTime()`. +// +// FILETIME ft; +// GetSystemTimeAsFileTime(&ft); +// UINT64 ticks = (((UINT64)ft.dwHighDateTime) << 32) | ft.dwLowDateTime; +// +// // A Windows tick is 100 nanoseconds. Windows epoch 1601-01-01T00:00:00Z +// // is 11644473600 seconds before Unix epoch 1970-01-01T00:00:00Z. +// Timestamp timestamp; +// timestamp.set_seconds((INT64) ((ticks / 10000000) - 11644473600LL)); +// timestamp.set_nanos((INT32) ((ticks % 10000000) * 100)); +// +// Example 4: Compute Timestamp from Java `System.currentTimeMillis()`. +// +// long millis = System.currentTimeMillis(); +// +// Timestamp timestamp = Timestamp.newBuilder().setSeconds(millis / 1000) +// .setNanos((int) ((millis % 1000) * 1000000)).build(); +// +// +// Example 5: Compute Timestamp from Java `Instant.now()`. +// +// Instant now = Instant.now(); +// +// Timestamp timestamp = +// Timestamp.newBuilder().setSeconds(now.getEpochSecond()) +// .setNanos(now.getNano()).build(); +// +// +// Example 6: Compute Timestamp from current time in Python. +// +// timestamp = Timestamp() +// timestamp.GetCurrentTime() +// +// # JSON Mapping +// +// In JSON format, the Timestamp type is encoded as a string in the +// [RFC 3339](https://www.ietf.org/rfc/rfc3339.txt) format. That is, the +// format is "{year}-{month}-{day}T{hour}:{min}:{sec}[.{frac_sec}]Z" +// where {year} is always expressed using four digits while {month}, {day}, +// {hour}, {min}, and {sec} are zero-padded to two digits each. The fractional +// seconds, which can go up to 9 digits (i.e. up to 1 nanosecond resolution), +// are optional. The "Z" suffix indicates the timezone ("UTC"); the timezone +// is required. A proto3 JSON serializer should always use UTC (as indicated by +// "Z") when printing the Timestamp type and a proto3 JSON parser should be +// able to accept both UTC and other timezones (as indicated by an offset). +// +// For example, "2017-01-15T01:30:15.01Z" encodes 15.01 seconds past +// 01:30 UTC on January 15, 2017. +// +// In JavaScript, one can convert a Date object to this format using the +// standard +// [toISOString()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Date/toISOString) +// method. In Python, a standard `datetime.datetime` object can be converted +// to this format using +// [`strftime`](https://docs.python.org/2/library/time.html#time.strftime) with +// the time format spec '%Y-%m-%dT%H:%M:%S.%fZ'. Likewise, in Java, one can use +// the Joda Time's [`ISODateTimeFormat.dateTime()`]( +// http://www.joda.org/joda-time/apidocs/org/joda/time/format/ISODateTimeFormat.html#dateTime%2D%2D +// ) to obtain a formatter capable of generating timestamps in this format. +// +// +message Timestamp { + // Represents seconds of UTC time since Unix epoch + // 1970-01-01T00:00:00Z. 
Must be from 0001-01-01T00:00:00Z to + // 9999-12-31T23:59:59Z inclusive. + int64 seconds = 1; + + // Non-negative fractions of a second at nanosecond resolution. Negative + // second values with fractions must still have non-negative nanos values + // that count forward in time. Must be from 0 to 999,999,999 + // inclusive. + int32 nanos = 2; +} diff --git a/_13_sponge-dtm-cache/http/third_party/google/protobuf/type.proto b/_13_sponge-dtm-cache/http/third_party/google/protobuf/type.proto new file mode 100644 index 0000000..d3f6a68 --- /dev/null +++ b/_13_sponge-dtm-cache/http/third_party/google/protobuf/type.proto @@ -0,0 +1,187 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +syntax = "proto3"; + +package google.protobuf; + +import "google/protobuf/any.proto"; +import "google/protobuf/source_context.proto"; + +option csharp_namespace = "Google.Protobuf.WellKnownTypes"; +option cc_enable_arenas = true; +option java_package = "com.google.protobuf"; +option java_outer_classname = "TypeProto"; +option java_multiple_files = true; +option objc_class_prefix = "GPB"; +option go_package = "google.golang.org/protobuf/types/known/typepb"; + +// A protocol buffer message type. +message Type { + // The fully qualified message name. + string name = 1; + // The list of fields. + repeated Field fields = 2; + // The list of types appearing in `oneof` definitions in this type. + repeated string oneofs = 3; + // The protocol buffer options. + repeated Option options = 4; + // The source context. + SourceContext source_context = 5; + // The source syntax. + Syntax syntax = 6; +} + +// A single field of a message type. +message Field { + // Basic field types. + enum Kind { + // Field type unknown. + TYPE_UNKNOWN = 0; + // Field type double. + TYPE_DOUBLE = 1; + // Field type float. + TYPE_FLOAT = 2; + // Field type int64. + TYPE_INT64 = 3; + // Field type uint64. + TYPE_UINT64 = 4; + // Field type int32. 
+ TYPE_INT32 = 5; + // Field type fixed64. + TYPE_FIXED64 = 6; + // Field type fixed32. + TYPE_FIXED32 = 7; + // Field type bool. + TYPE_BOOL = 8; + // Field type string. + TYPE_STRING = 9; + // Field type group. Proto2 syntax only, and deprecated. + TYPE_GROUP = 10; + // Field type message. + TYPE_MESSAGE = 11; + // Field type bytes. + TYPE_BYTES = 12; + // Field type uint32. + TYPE_UINT32 = 13; + // Field type enum. + TYPE_ENUM = 14; + // Field type sfixed32. + TYPE_SFIXED32 = 15; + // Field type sfixed64. + TYPE_SFIXED64 = 16; + // Field type sint32. + TYPE_SINT32 = 17; + // Field type sint64. + TYPE_SINT64 = 18; + } + + // Whether a field is optional, required, or repeated. + enum Cardinality { + // For fields with unknown cardinality. + CARDINALITY_UNKNOWN = 0; + // For optional fields. + CARDINALITY_OPTIONAL = 1; + // For required fields. Proto2 syntax only. + CARDINALITY_REQUIRED = 2; + // For repeated fields. + CARDINALITY_REPEATED = 3; + } + + // The field type. + Kind kind = 1; + // The field cardinality. + Cardinality cardinality = 2; + // The field number. + int32 number = 3; + // The field name. + string name = 4; + // The field type URL, without the scheme, for message or enumeration + // types. Example: `"type.googleapis.com/google.protobuf.Timestamp"`. + string type_url = 6; + // The index of the field type in `Type.oneofs`, for message or enumeration + // types. The first type has index 1; zero means the type is not in the list. + int32 oneof_index = 7; + // Whether to use alternative packed wire representation. + bool packed = 8; + // The protocol buffer options. + repeated Option options = 9; + // The field JSON name. + string json_name = 10; + // The string value of the default value of this field. Proto2 syntax only. + string default_value = 11; +} + +// Enum type definition. +message Enum { + // Enum type name. + string name = 1; + // Enum value definitions. + repeated EnumValue enumvalue = 2; + // Protocol buffer options. + repeated Option options = 3; + // The source context. + SourceContext source_context = 4; + // The source syntax. + Syntax syntax = 5; +} + +// Enum value definition. +message EnumValue { + // Enum value name. + string name = 1; + // Enum value number. + int32 number = 2; + // Protocol buffer options. + repeated Option options = 3; +} + +// A protocol buffer option, which can be attached to a message, field, +// enumeration, etc. +message Option { + // The option's name. For protobuf built-in options (options defined in + // descriptor.proto), this is the short name. For example, `"map_entry"`. + // For custom options, it should be the fully-qualified name. For example, + // `"google.api.http"`. + string name = 1; + // The option's value packed in an Any message. If the value is a primitive, + // the corresponding wrapper type defined in google/protobuf/wrappers.proto + // should be used. If the value is an enum, it should be stored as an int32 + // value using the google.protobuf.Int32Value type. + Any value = 2; +} + +// The syntax in which a protocol buffer element is defined. +enum Syntax { + // Syntax `proto2`. + SYNTAX_PROTO2 = 0; + // Syntax `proto3`. 
+ SYNTAX_PROTO3 = 1; +} diff --git a/_13_sponge-dtm-cache/http/third_party/google/protobuf/wrappers.proto b/_13_sponge-dtm-cache/http/third_party/google/protobuf/wrappers.proto new file mode 100644 index 0000000..d49dd53 --- /dev/null +++ b/_13_sponge-dtm-cache/http/third_party/google/protobuf/wrappers.proto @@ -0,0 +1,123 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Wrappers for primitive (non-message) types. These types are useful +// for embedding primitives in the `google.protobuf.Any` type and for places +// where we need to distinguish between the absence of a primitive +// typed field and its default value. +// +// These wrappers have no meaningful use within repeated fields as they lack +// the ability to detect presence on individual elements. +// These wrappers have no meaningful use within a map or a oneof since +// individual entries of a map or fields of a oneof can already detect presence. + +syntax = "proto3"; + +package google.protobuf; + +option csharp_namespace = "Google.Protobuf.WellKnownTypes"; +option cc_enable_arenas = true; +option go_package = "google.golang.org/protobuf/types/known/wrapperspb"; +option java_package = "com.google.protobuf"; +option java_outer_classname = "WrappersProto"; +option java_multiple_files = true; +option objc_class_prefix = "GPB"; + +// Wrapper message for `double`. +// +// The JSON representation for `DoubleValue` is JSON number. +message DoubleValue { + // The double value. + double value = 1; +} + +// Wrapper message for `float`. +// +// The JSON representation for `FloatValue` is JSON number. +message FloatValue { + // The float value. + float value = 1; +} + +// Wrapper message for `int64`. +// +// The JSON representation for `Int64Value` is JSON string. +message Int64Value { + // The int64 value. + int64 value = 1; +} + +// Wrapper message for `uint64`. +// +// The JSON representation for `UInt64Value` is JSON string. 
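In generated Go code each wrapper field becomes a pointer, which is how the absence-versus-default distinction described above surfaces in practice. A minimal sketch using wrapperspb; the page-size parameter is a hypothetical example, not a field defined anywhere in this patch (the remaining wrapper messages continue below):

    package main

    import (
        "fmt"

        "google.golang.org/protobuf/types/known/wrapperspb"
    )

    // Suppose a request declares a page_size field of type google.protobuf.Int32Value
    // (hypothetical, for illustration). The generated Go field is then
    // *wrapperspb.Int32Value: nil means "not set", while a non-nil wrapper whose
    // Value is 0 means "explicitly zero".
    func limit(pageSize *wrapperspb.Int32Value) int32 {
        if pageSize == nil {
            return 20 // field absent; apply a default
        }
        return pageSize.GetValue() // may legitimately be 0
    }

    func main() {
        fmt.Println(limit(nil))                  // 20
        fmt.Println(limit(wrapperspb.Int32(0)))  // 0
        fmt.Println(limit(wrapperspb.Int32(50))) // 50
    }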
+message UInt64Value { + // The uint64 value. + uint64 value = 1; +} + +// Wrapper message for `int32`. +// +// The JSON representation for `Int32Value` is JSON number. +message Int32Value { + // The int32 value. + int32 value = 1; +} + +// Wrapper message for `uint32`. +// +// The JSON representation for `UInt32Value` is JSON number. +message UInt32Value { + // The uint32 value. + uint32 value = 1; +} + +// Wrapper message for `bool`. +// +// The JSON representation for `BoolValue` is JSON `true` and `false`. +message BoolValue { + // The bool value. + bool value = 1; +} + +// Wrapper message for `string`. +// +// The JSON representation for `StringValue` is JSON string. +message StringValue { + // The string value. + string value = 1; +} + +// Wrapper message for `bytes`. +// +// The JSON representation for `BytesValue` is JSON string. +message BytesValue { + // The bytes value. + bytes value = 1; +} diff --git a/_13_sponge-dtm-cache/http/third_party/protoc-gen-openapiv2/options/annotations.proto b/_13_sponge-dtm-cache/http/third_party/protoc-gen-openapiv2/options/annotations.proto new file mode 100644 index 0000000..d63d3c8 --- /dev/null +++ b/_13_sponge-dtm-cache/http/third_party/protoc-gen-openapiv2/options/annotations.proto @@ -0,0 +1,44 @@ +syntax = "proto3"; + +package grpc.gateway.protoc_gen_openapiv2.options; + +import "google/protobuf/descriptor.proto"; +import "protoc-gen-openapiv2/options/openapiv2.proto"; + +option go_package = "github.com/grpc-ecosystem/grpc-gateway/v2/protoc-gen-openapiv2/options"; + +extend google.protobuf.FileOptions { + // ID assigned by protobuf-global-extension-registry@google.com for gRPC-Gateway project. + // + // All IDs are the same, as assigned. It is okay that they are the same, as they extend + // different descriptor messages. + Swagger openapiv2_swagger = 1042; +} +extend google.protobuf.MethodOptions { + // ID assigned by protobuf-global-extension-registry@google.com for gRPC-Gateway project. + // + // All IDs are the same, as assigned. It is okay that they are the same, as they extend + // different descriptor messages. + Operation openapiv2_operation = 1042; +} +extend google.protobuf.MessageOptions { + // ID assigned by protobuf-global-extension-registry@google.com for gRPC-Gateway project. + // + // All IDs are the same, as assigned. It is okay that they are the same, as they extend + // different descriptor messages. + Schema openapiv2_schema = 1042; +} +extend google.protobuf.ServiceOptions { + // ID assigned by protobuf-global-extension-registry@google.com for gRPC-Gateway project. + // + // All IDs are the same, as assigned. It is okay that they are the same, as they extend + // different descriptor messages. + Tag openapiv2_tag = 1042; +} +extend google.protobuf.FieldOptions { + // ID assigned by protobuf-global-extension-registry@google.com for gRPC-Gateway project. + // + // All IDs are the same, as assigned. It is okay that they are the same, as they extend + // different descriptor messages. 
+ JSONSchema openapiv2_field = 1042; +} diff --git a/_13_sponge-dtm-cache/http/third_party/protoc-gen-openapiv2/options/openapiv2.proto b/_13_sponge-dtm-cache/http/third_party/protoc-gen-openapiv2/options/openapiv2.proto new file mode 100644 index 0000000..9a17f02 --- /dev/null +++ b/_13_sponge-dtm-cache/http/third_party/protoc-gen-openapiv2/options/openapiv2.proto @@ -0,0 +1,720 @@ +syntax = "proto3"; + +package grpc.gateway.protoc_gen_openapiv2.options; + +import "google/protobuf/struct.proto"; + +option go_package = "github.com/grpc-ecosystem/grpc-gateway/v2/protoc-gen-openapiv2/options"; + +// Scheme describes the schemes supported by the OpenAPI Swagger +// and Operation objects. +enum Scheme { + UNKNOWN = 0; + HTTP = 1; + HTTPS = 2; + WS = 3; + WSS = 4; +} + +// `Swagger` is a representation of OpenAPI v2 specification's Swagger object. +// +// See: https://github.com/OAI/OpenAPI-Specification/blob/3.0.0/versions/2.0.md#swaggerObject +// +// Example: +// +// option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_swagger) = { +// info: { +// title: "Echo API"; +// version: "1.0"; +// description: ""; +// contact: { +// name: "gRPC-Gateway project"; +// url: "https://github.com/grpc-ecosystem/grpc-gateway"; +// email: "none@example.com"; +// }; +// license: { +// name: "BSD 3-Clause License"; +// url: "https://github.com/grpc-ecosystem/grpc-gateway/blob/main/LICENSE"; +// }; +// }; +// schemes: HTTPS; +// consumes: "application/json"; +// produces: "application/json"; +// }; +// +message Swagger { + // Specifies the OpenAPI Specification version being used. It can be + // used by the OpenAPI UI and other clients to interpret the API listing. The + // value MUST be "2.0". + string swagger = 1; + // Provides metadata about the API. The metadata can be used by the + // clients if needed. + Info info = 2; + // The host (name or ip) serving the API. This MUST be the host only and does + // not include the scheme nor sub-paths. It MAY include a port. If the host is + // not included, the host serving the documentation is to be used (including + // the port). The host does not support path templating. + string host = 3; + // The base path on which the API is served, which is relative to the host. If + // it is not included, the API is served directly under the host. The value + // MUST start with a leading slash (/). The basePath does not support path + // templating. + // Note that using `base_path` does not change the endpoint paths that are + // generated in the resulting OpenAPI file. If you wish to use `base_path` + // with relatively generated OpenAPI paths, the `base_path` prefix must be + // manually removed from your `google.api.http` paths and your code changed to + // serve the API from the `base_path`. + string base_path = 4; + // The transfer protocol of the API. Values MUST be from the list: "http", + // "https", "ws", "wss". If the schemes is not included, the default scheme to + // be used is the one used to access the OpenAPI definition itself. + repeated Scheme schemes = 5; + // A list of MIME types the APIs can consume. This is global to all APIs but + // can be overridden on specific API calls. Value MUST be as described under + // Mime Types. + repeated string consumes = 6; + // A list of MIME types the APIs can produce. This is global to all APIs but + // can be overridden on specific API calls. Value MUST be as described under + // Mime Types. + repeated string produces = 7; + // field 8 is reserved for 'paths'. 
+ reserved 8; + // field 9 is reserved for 'definitions', which at this time are already + // exposed as and customizable as proto messages. + reserved 9; + // An object to hold responses that can be used across operations. This + // property does not define global responses for all operations. + map<string, Response> responses = 10; + // Security scheme definitions that can be used across the specification. + SecurityDefinitions security_definitions = 11; + // A declaration of which security schemes are applied for the API as a whole. + // The list of values describes alternative security schemes that can be used + // (that is, there is a logical OR between the security requirements). + // Individual operations can override this definition. + repeated SecurityRequirement security = 12; + // A list of tags for API documentation control. Tags can be used for logical + // grouping of operations by resources or any other qualifier. + repeated Tag tags = 13; + // Additional external documentation. + ExternalDocumentation external_docs = 14; + // Custom properties that start with "x-" such as "x-foo" used to describe + // extra functionality that is not covered by the standard OpenAPI Specification. + // See: https://swagger.io/docs/specification/2-0/swagger-extensions/ + map<string, google.protobuf.Value> extensions = 15; +} + +// `Operation` is a representation of OpenAPI v2 specification's Operation object. +// +// See: https://github.com/OAI/OpenAPI-Specification/blob/3.0.0/versions/2.0.md#operationObject +// +// Example: +// +// service EchoService { +// rpc Echo(SimpleMessage) returns (SimpleMessage) { +// option (google.api.http) = { +// get: "/v1/example/echo/{id}" +// }; +// +// option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { +// summary: "Get a message."; +// operation_id: "getMessage"; +// tags: "echo"; +// responses: { +// key: "200" +// value: { +// description: "OK"; +// } +// } +// }; +// } +// } +message Operation { + // A list of tags for API documentation control. Tags can be used for logical + // grouping of operations by resources or any other qualifier. + repeated string tags = 1; + // A short summary of what the operation does. For maximum readability in the + // swagger-ui, this field SHOULD be less than 120 characters. + string summary = 2; + // A verbose explanation of the operation behavior. GFM syntax can be used for + // rich text representation. + string description = 3; + // Additional external documentation for this operation. + ExternalDocumentation external_docs = 4; + // Unique string used to identify the operation. The id MUST be unique among + // all operations described in the API. Tools and libraries MAY use the + // operationId to uniquely identify an operation, therefore, it is recommended + // to follow common programming naming conventions. + string operation_id = 5; + // A list of MIME types the operation can consume. This overrides the consumes + // definition at the OpenAPI Object. An empty value MAY be used to clear the + // global definition. Value MUST be as described under Mime Types. + repeated string consumes = 6; + // A list of MIME types the operation can produce. This overrides the produces + // definition at the OpenAPI Object. An empty value MAY be used to clear the + // global definition. Value MUST be as described under Mime Types. + repeated string produces = 7; + // field 8 is reserved for 'parameters'. + reserved 8; + // The list of possible responses as they are returned from executing this + // operation.
+ map<string, Response> responses = 9; + // The transfer protocol for the operation. Values MUST be from the list: + // "http", "https", "ws", "wss". The value overrides the OpenAPI Object + // schemes definition. + repeated Scheme schemes = 10; + // Declares this operation to be deprecated. Usage of the declared operation + // should be refrained. Default value is false. + bool deprecated = 11; + // A declaration of which security schemes are applied for this operation. The + // list of values describes alternative security schemes that can be used + // (that is, there is a logical OR between the security requirements). This + // definition overrides any declared top-level security. To remove a top-level + // security declaration, an empty array can be used. + repeated SecurityRequirement security = 12; + // Custom properties that start with "x-" such as "x-foo" used to describe + // extra functionality that is not covered by the standard OpenAPI Specification. + // See: https://swagger.io/docs/specification/2-0/swagger-extensions/ + map<string, google.protobuf.Value> extensions = 13; + // Custom parameters such as HTTP request headers. + // See: https://swagger.io/docs/specification/2-0/describing-parameters/ + // and https://swagger.io/specification/v2/#parameter-object. + Parameters parameters = 14; +} + +// `Parameters` is a representation of OpenAPI v2 specification's parameters object. +// Note: This technically breaks compatibility with the OpenAPI 2 definition structure as we only +// allow header parameters to be set here since we do not want users specifying custom non-header +// parameters beyond those inferred from the Protobuf schema. +// See: https://swagger.io/specification/v2/#parameter-object +message Parameters { + // `Headers` is one or more HTTP header parameter. + // See: https://swagger.io/docs/specification/2-0/describing-parameters/#header-parameters + repeated HeaderParameter headers = 1; +} + +// `HeaderParameter` a HTTP header parameter. +// See: https://swagger.io/specification/v2/#parameter-object +message HeaderParameter { + // `Type` is a supported HTTP header type. + // See https://swagger.io/specification/v2/#parameterType. + enum Type { + UNKNOWN = 0; + STRING = 1; + NUMBER = 2; + INTEGER = 3; + BOOLEAN = 4; + } + + // `Name` is the header name. + string name = 1; + // `Description` is a short description of the header. + string description = 2; + // `Type` is the type of the object. The value MUST be one of "string", "number", "integer", or "boolean". The "array" type is not supported. + // See: https://swagger.io/specification/v2/#parameterType. + Type type = 3; + // `Format` The extending format for the previously mentioned type. + string format = 4; + // `Required` indicates if the header is optional + bool required = 5; + // field 6 is reserved for 'items', but in OpenAPI-specific way. + reserved 6; + // field 7 is reserved `Collection Format`. Determines the format of the array if type array is used. + reserved 7; +} + +// `Header` is a representation of OpenAPI v2 specification's Header object. +// +// See: https://github.com/OAI/OpenAPI-Specification/blob/3.0.0/versions/2.0.md#headerObject +// +message Header { + // `Description` is a short description of the header. + string description = 1; + // The type of the object. The value MUST be one of "string", "number", "integer", or "boolean". The "array" type is not supported. + string type = 2; + // `Format` The extending format for the previously mentioned type.
+ string format = 3; + // field 4 is reserved for 'items', but in OpenAPI-specific way. + reserved 4; + // field 5 is reserved `Collection Format` Determines the format of the array if type array is used. + reserved 5; + // `Default` Declares the value of the header that the server will use if none is provided. + // See: https://tools.ietf.org/html/draft-fge-json-schema-validation-00#section-6.2. + // Unlike JSON Schema this value MUST conform to the defined type for the header. + string default = 6; + // field 7 is reserved for 'maximum'. + reserved 7; + // field 8 is reserved for 'exclusiveMaximum'. + reserved 8; + // field 9 is reserved for 'minimum'. + reserved 9; + // field 10 is reserved for 'exclusiveMinimum'. + reserved 10; + // field 11 is reserved for 'maxLength'. + reserved 11; + // field 12 is reserved for 'minLength'. + reserved 12; + // 'Pattern' See https://tools.ietf.org/html/draft-fge-json-schema-validation-00#section-5.2.3. + string pattern = 13; + // field 14 is reserved for 'maxItems'. + reserved 14; + // field 15 is reserved for 'minItems'. + reserved 15; + // field 16 is reserved for 'uniqueItems'. + reserved 16; + // field 17 is reserved for 'enum'. + reserved 17; + // field 18 is reserved for 'multipleOf'. + reserved 18; +} + +// `Response` is a representation of OpenAPI v2 specification's Response object. +// +// See: https://github.com/OAI/OpenAPI-Specification/blob/3.0.0/versions/2.0.md#responseObject +// +message Response { + // `Description` is a short description of the response. + // GFM syntax can be used for rich text representation. + string description = 1; + // `Schema` optionally defines the structure of the response. + // If `Schema` is not provided, it means there is no content to the response. + Schema schema = 2; + // `Headers` A list of headers that are sent with the response. + // `Header` name is expected to be a string in the canonical format of the MIME header key + // See: https://golang.org/pkg/net/textproto/#CanonicalMIMEHeaderKey + map<string, Header> headers = 3; + // `Examples` gives per-mimetype response examples. + // See: https://github.com/OAI/OpenAPI-Specification/blob/3.0.0/versions/2.0.md#example-object + map<string, string> examples = 4; + // Custom properties that start with "x-" such as "x-foo" used to describe + // extra functionality that is not covered by the standard OpenAPI Specification. + // See: https://swagger.io/docs/specification/2-0/swagger-extensions/ + map<string, google.protobuf.Value> extensions = 5; +} + +// `Info` is a representation of OpenAPI v2 specification's Info object. +// +// See: https://github.com/OAI/OpenAPI-Specification/blob/3.0.0/versions/2.0.md#infoObject +// +// Example: +// +// option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_swagger) = { +// info: { +// title: "Echo API"; +// version: "1.0"; +// description: ""; +// contact: { +// name: "gRPC-Gateway project"; +// url: "https://github.com/grpc-ecosystem/grpc-gateway"; +// email: "none@example.com"; +// }; +// license: { +// name: "BSD 3-Clause License"; +// url: "https://github.com/grpc-ecosystem/grpc-gateway/blob/main/LICENSE"; +// }; +// }; +// ... +// }; +// +message Info { + // The title of the application. + string title = 1; + // A short description of the application. GFM syntax can be used for rich + // text representation. + string description = 2; + // The Terms of Service for the API. + string terms_of_service = 3; + // The contact information for the exposed API. + Contact contact = 4; + // The license information for the exposed API.
+ License license = 5; + // Provides the version of the application API (not to be confused + // with the specification version). + string version = 6; + // Custom properties that start with "x-" such as "x-foo" used to describe + // extra functionality that is not covered by the standard OpenAPI Specification. + // See: https://swagger.io/docs/specification/2-0/swagger-extensions/ + map<string, google.protobuf.Value> extensions = 7; +} + +// `Contact` is a representation of OpenAPI v2 specification's Contact object. +// +// See: https://github.com/OAI/OpenAPI-Specification/blob/3.0.0/versions/2.0.md#contactObject +// +// Example: +// +// option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_swagger) = { +// info: { +// ... +// contact: { +// name: "gRPC-Gateway project"; +// url: "https://github.com/grpc-ecosystem/grpc-gateway"; +// email: "none@example.com"; +// }; +// ... +// }; +// ... +// }; +// +message Contact { + // The identifying name of the contact person/organization. + string name = 1; + // The URL pointing to the contact information. MUST be in the format of a + // URL. + string url = 2; + // The email address of the contact person/organization. MUST be in the format + // of an email address. + string email = 3; +} + +// `License` is a representation of OpenAPI v2 specification's License object. +// +// See: https://github.com/OAI/OpenAPI-Specification/blob/3.0.0/versions/2.0.md#licenseObject +// +// Example: +// +// option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_swagger) = { +// info: { +// ... +// license: { +// name: "BSD 3-Clause License"; +// url: "https://github.com/grpc-ecosystem/grpc-gateway/blob/main/LICENSE"; +// }; +// ... +// }; +// ... +// }; +// +message License { + // The license name used for the API. + string name = 1; + // A URL to the license used for the API. MUST be in the format of a URL. + string url = 2; +} + +// `ExternalDocumentation` is a representation of OpenAPI v2 specification's +// ExternalDocumentation object. +// +// See: https://github.com/OAI/OpenAPI-Specification/blob/3.0.0/versions/2.0.md#externalDocumentationObject +// +// Example: +// +// option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_swagger) = { +// ... +// external_docs: { +// description: "More about gRPC-Gateway"; +// url: "https://github.com/grpc-ecosystem/grpc-gateway"; +// } +// ... +// }; +// +message ExternalDocumentation { + // A short description of the target documentation. GFM syntax can be used for + // rich text representation. + string description = 1; + // The URL for the target documentation. Value MUST be in the format + // of a URL. + string url = 2; +} + +// `Schema` is a representation of OpenAPI v2 specification's Schema object. +// +// See: https://github.com/OAI/OpenAPI-Specification/blob/3.0.0/versions/2.0.md#schemaObject +// +message Schema { + JSONSchema json_schema = 1; + // Adds support for polymorphism. The discriminator is the schema property + // name that is used to differentiate between other schema that inherit this + // schema. The property name used MUST be defined at this schema and it MUST + // be in the required property list. When used, the value MUST be the name of + // this schema or any schema that inherits it. + string discriminator = 2; + // Relevant only for Schema "properties" definitions. Declares the property as + // "read only". This means that it MAY be sent as part of a response but MUST + // NOT be sent as part of the request. Properties marked as readOnly being + // true SHOULD NOT be in the required list of the defined schema.
Default + // value is false. + bool read_only = 3; + // field 4 is reserved for 'xml'. + reserved 4; + // Additional external documentation for this schema. + ExternalDocumentation external_docs = 5; + // A free-form property to include an example of an instance for this schema in JSON. + // This is copied verbatim to the output. + string example = 6; +} + +// `JSONSchema` represents properties from JSON Schema taken, and as used, in +// the OpenAPI v2 spec. +// +// This includes changes made by OpenAPI v2. +// +// See: https://github.com/OAI/OpenAPI-Specification/blob/3.0.0/versions/2.0.md#schemaObject +// +// See also: https://cswr.github.io/JsonSchema/spec/basic_types/, +// https://github.com/json-schema-org/json-schema-spec/blob/master/schema.json +// +// Example: +// +// message SimpleMessage { +// option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_schema) = { +// json_schema: { +// title: "SimpleMessage" +// description: "A simple message." +// required: ["id"] +// } +// }; +// +// // Id represents the message identifier. +// string id = 1; [ +// (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_field) = { +// description: "The unique identifier of the simple message." +// }]; +// } +// +message JSONSchema { + // field 1 is reserved for '$id', omitted from OpenAPI v2. + reserved 1; + // field 2 is reserved for '$schema', omitted from OpenAPI v2. + reserved 2; + // Ref is used to define an external reference to include in the message. + // This could be a fully qualified proto message reference, and that type must + // be imported into the protofile. If no message is identified, the Ref will + // be used verbatim in the output. + // For example: + // `ref: ".google.protobuf.Timestamp"`. + string ref = 3; + // field 4 is reserved for '$comment', omitted from OpenAPI v2. + reserved 4; + // The title of the schema. + string title = 5; + // A short description of the schema. + string description = 6; + string default = 7; + bool read_only = 8; + // A free-form property to include a JSON example of this field. This is copied + // verbatim to the output swagger.json. Quotes must be escaped. + // This property is the same for 2.0 and 3.0.0 https://github.com/OAI/OpenAPI-Specification/blob/3.0.0/versions/3.0.0.md#schemaObject https://github.com/OAI/OpenAPI-Specification/blob/3.0.0/versions/2.0.md#schemaObject + string example = 9; + double multiple_of = 10; + // Maximum represents an inclusive upper limit for a numeric instance. The + // value of MUST be a number, + double maximum = 11; + bool exclusive_maximum = 12; + // minimum represents an inclusive lower limit for a numeric instance. The + // value of MUST be a number, + double minimum = 13; + bool exclusive_minimum = 14; + uint64 max_length = 15; + uint64 min_length = 16; + string pattern = 17; + // field 18 is reserved for 'additionalItems', omitted from OpenAPI v2. + reserved 18; + // field 19 is reserved for 'items', but in OpenAPI-specific way. + // TODO(ivucica): add 'items'? + reserved 19; + uint64 max_items = 20; + uint64 min_items = 21; + bool unique_items = 22; + // field 23 is reserved for 'contains', omitted from OpenAPI v2. + reserved 23; + uint64 max_properties = 24; + uint64 min_properties = 25; + repeated string required = 26; + // field 27 is reserved for 'additionalProperties', but in OpenAPI-specific + // way. TODO(ivucica): add 'additionalProperties'? + reserved 27; + // field 28 is reserved for 'definitions', omitted from OpenAPI v2. 
+ reserved 28; + // field 29 is reserved for 'properties', but in OpenAPI-specific way. + // TODO(ivucica): add 'additionalProperties'? + reserved 29; + // following fields are reserved, as the properties have been omitted from + // OpenAPI v2: + // patternProperties, dependencies, propertyNames, const + reserved 30 to 33; + // Items in 'array' must be unique. + repeated string array = 34; + + enum JSONSchemaSimpleTypes { + UNKNOWN = 0; + ARRAY = 1; + BOOLEAN = 2; + INTEGER = 3; + NULL = 4; + NUMBER = 5; + OBJECT = 6; + STRING = 7; + } + + repeated JSONSchemaSimpleTypes type = 35; + // `Format` + string format = 36; + // following fields are reserved, as the properties have been omitted from + // OpenAPI v2: contentMediaType, contentEncoding, if, then, else + reserved 37 to 41; + // field 42 is reserved for 'allOf', but in OpenAPI-specific way. + // TODO(ivucica): add 'allOf'? + reserved 42; + // following fields are reserved, as the properties have been omitted from + // OpenAPI v2: + // anyOf, oneOf, not + reserved 43 to 45; + // Items in `enum` must be unique https://tools.ietf.org/html/draft-fge-json-schema-validation-00#section-5.5.1 + repeated string enum = 46; + + // Additional field level properties used when generating the OpenAPI v2 file. + FieldConfiguration field_configuration = 1001; + + // 'FieldConfiguration' provides additional field level properties used when generating the OpenAPI v2 file. + // These properties are not defined by OpenAPIv2, but they are used to control the generation. + message FieldConfiguration { + // Alternative parameter name when used as path parameter. If set, this will + // be used as the complete parameter name when this field is used as a path + // parameter. Use this to avoid having auto generated path parameter names + // for overlapping paths. + string path_param_name = 47; + } + // Custom properties that start with "x-" such as "x-foo" used to describe + // extra functionality that is not covered by the standard OpenAPI Specification. + // See: https://swagger.io/docs/specification/2-0/swagger-extensions/ + map<string, google.protobuf.Value> extensions = 48; +} + +// `Tag` is a representation of OpenAPI v2 specification's Tag object. +// +// See: https://github.com/OAI/OpenAPI-Specification/blob/3.0.0/versions/2.0.md#tagObject +// +message Tag { + // The name of the tag. Use it to allow override of the name of a + // global Tag object, then use that name to reference the tag throughout the + // OpenAPI file. + string name = 1; + // A short description for the tag. GFM syntax can be used for rich text + // representation. + string description = 2; + // Additional external documentation for this tag. + ExternalDocumentation external_docs = 3; + // Custom properties that start with "x-" such as "x-foo" used to describe + // extra functionality that is not covered by the standard OpenAPI Specification. + // See: https://swagger.io/docs/specification/2-0/swagger-extensions/ + map<string, google.protobuf.Value> extensions = 4; +} + +// `SecurityDefinitions` is a representation of OpenAPI v2 specification's +// Security Definitions object. +// +// See: https://github.com/OAI/OpenAPI-Specification/blob/3.0.0/versions/2.0.md#securityDefinitionsObject +// +// A declaration of the security schemes available to be used in the +// specification. This does not enforce the security schemes on the operations +// and only serves to provide the relevant details for each scheme. +message SecurityDefinitions { + // A single security scheme definition, mapping a "name" to the scheme it + // defines.
+ map<string, SecurityScheme> security = 1; +} + +// `SecurityScheme` is a representation of OpenAPI v2 specification's +// Security Scheme object. +// +// See: https://github.com/OAI/OpenAPI-Specification/blob/3.0.0/versions/2.0.md#securitySchemeObject +// +// Allows the definition of a security scheme that can be used by the +// operations. Supported schemes are basic authentication, an API key (either as +// a header or as a query parameter) and OAuth2's common flows (implicit, +// password, application and access code). +message SecurityScheme { + // The type of the security scheme. Valid values are "basic", + // "apiKey" or "oauth2". + enum Type { + TYPE_INVALID = 0; + TYPE_BASIC = 1; + TYPE_API_KEY = 2; + TYPE_OAUTH2 = 3; + } + + // The location of the API key. Valid values are "query" or "header". + enum In { + IN_INVALID = 0; + IN_QUERY = 1; + IN_HEADER = 2; + } + + // The flow used by the OAuth2 security scheme. Valid values are + // "implicit", "password", "application" or "accessCode". + enum Flow { + FLOW_INVALID = 0; + FLOW_IMPLICIT = 1; + FLOW_PASSWORD = 2; + FLOW_APPLICATION = 3; + FLOW_ACCESS_CODE = 4; + } + + // The type of the security scheme. Valid values are "basic", + // "apiKey" or "oauth2". + Type type = 1; + // A short description for security scheme. + string description = 2; + // The name of the header or query parameter to be used. + // Valid for apiKey. + string name = 3; + // The location of the API key. Valid values are "query" or + // "header". + // Valid for apiKey. + In in = 4; + // The flow used by the OAuth2 security scheme. Valid values are + // "implicit", "password", "application" or "accessCode". + // Valid for oauth2. + Flow flow = 5; + // The authorization URL to be used for this flow. This SHOULD be in + // the form of a URL. + // Valid for oauth2/implicit and oauth2/accessCode. + string authorization_url = 6; + // The token URL to be used for this flow. This SHOULD be in the + // form of a URL. + // Valid for oauth2/password, oauth2/application and oauth2/accessCode. + string token_url = 7; + // The available scopes for the OAuth2 security scheme. + // Valid for oauth2. + Scopes scopes = 8; + // Custom properties that start with "x-" such as "x-foo" used to describe + // extra functionality that is not covered by the standard OpenAPI Specification. + // See: https://swagger.io/docs/specification/2-0/swagger-extensions/ + map<string, google.protobuf.Value> extensions = 9; +} + +// `SecurityRequirement` is a representation of OpenAPI v2 specification's +// Security Requirement object. +// +// See: https://github.com/OAI/OpenAPI-Specification/blob/3.0.0/versions/2.0.md#securityRequirementObject +// +// Lists the required security schemes to execute this operation. The object can +// have multiple security schemes declared in it which are all required (that +// is, there is a logical AND between the schemes). +// +// The name used for each property MUST correspond to a security scheme +// declared in the Security Definitions. +message SecurityRequirement { + // If the security scheme is of type "oauth2", then the value is a list of +// scope names required for the execution. For other security scheme types, +// the array MUST be empty. + message SecurityRequirementValue { + repeated string scope = 1; + } + // Each name must correspond to a security scheme which is declared in + // the Security Definitions. If the security scheme is of type "oauth2", + // then the value is a list of scope names required for the execution. + // For other security scheme types, the array MUST be empty.
+ map<string, SecurityRequirementValue> security_requirement = 1; +} + +// `Scopes` is a representation of OpenAPI v2 specification's Scopes object. +// +// See: https://github.com/OAI/OpenAPI-Specification/blob/3.0.0/versions/2.0.md#scopesObject +// +// Lists the available scopes for an OAuth2 security scheme. +message Scopes { + // Maps between a name of a scope to a short description of it (as the value + // of the property). + map<string, string> scope = 1; +} diff --git a/_13_sponge-dtm-cache/http/third_party/tagger/tagger.proto b/_13_sponge-dtm-cache/http/third_party/tagger/tagger.proto new file mode 100644 index 0000000..1939fc9 --- /dev/null +++ b/_13_sponge-dtm-cache/http/third_party/tagger/tagger.proto @@ -0,0 +1,18 @@ +syntax = "proto3"; + +package tagger; + +import "google/protobuf/descriptor.proto"; + +option go_package = "github.com/srikrsna/protoc-gen-gotag/tagger;tagger"; + +// Tags are applied at the field level +extend google.protobuf.FieldOptions { + // Multiple Tags can be specified. + string tags = 847939; +} + +extend google.protobuf.OneofOptions { + // Multiple Tags can be specified. + string oneof_tags = 847939; +} diff --git a/_13_sponge-dtm-cache/http/third_party/validate/validate.proto b/_13_sponge-dtm-cache/http/third_party/validate/validate.proto new file mode 100644 index 0000000..705d382 --- /dev/null +++ b/_13_sponge-dtm-cache/http/third_party/validate/validate.proto @@ -0,0 +1,862 @@ +syntax = "proto2"; +package validate; + +option go_package = "github.com/envoyproxy/protoc-gen-validate/validate"; +option java_package = "io.envoyproxy.pgv.validate"; + +import "google/protobuf/descriptor.proto"; +import "google/protobuf/duration.proto"; +import "google/protobuf/timestamp.proto"; + +// Validation rules applied at the message level +extend google.protobuf.MessageOptions { + // Disabled nullifies any validation rules for this message, including any + // message fields associated with it that do support validation. + optional bool disabled = 1071; + // Ignore skips generation of validation methods for this message. + optional bool ignored = 1072; +} + +// Validation rules applied at the oneof level +extend google.protobuf.OneofOptions { + // Required ensures that exactly one the field options in a oneof is set; + // validation fails if no fields in the oneof are set. + optional bool required = 1071; +} + +// Validation rules applied at the field level +extend google.protobuf.FieldOptions { + // Rules specify the validations to be performed on this field. By default, + // no validation is performed against a field. + optional FieldRules rules = 1071; +} + +// FieldRules encapsulates the rules for each type of field. Depending on the +// field, the correct set should be used to ensure proper validations.
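For the Go services in this repository, protoc-gen-validate compiles these rules into a Validate() error method on every generated message (the *.pb.validate.go files listed in this patch). A self-contained sketch of the usual call pattern; guard and fakeRequest are illustrative stand-ins, not generated code:

    package main

    import (
        "errors"
        "fmt"
    )

    // validator matches the method protoc-gen-validate adds to every generated
    // message (see the *.pb.validate.go files elsewhere in this patch).
    type validator interface {
        Validate() error
    }

    // guard is the check a handler typically runs before any business logic.
    func guard(req validator) error {
        if err := req.Validate(); err != nil {
            return fmt.Errorf("invalid argument: %w", err)
        }
        return nil
    }

    // fakeRequest stands in for a generated request type; its Validate mimics a
    // failing numeric-rule check (e.g. uint64 gt: 0) purely for illustration.
    type fakeRequest struct{ id uint64 }

    func (r *fakeRequest) Validate() error {
        if r.id == 0 {
            return errors.New("invalid fakeRequest.Id: value must be greater than 0")
        }
        return nil
    }

    func main() {
        fmt.Println(guard(&fakeRequest{id: 0})) // invalid argument: ...
        fmt.Println(guard(&fakeRequest{id: 1})) // <nil>
    }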
+message FieldRules { + optional MessageRules message = 17; + oneof type { + // Scalar Field Types + FloatRules float = 1; + DoubleRules double = 2; + Int32Rules int32 = 3; + Int64Rules int64 = 4; + UInt32Rules uint32 = 5; + UInt64Rules uint64 = 6; + SInt32Rules sint32 = 7; + SInt64Rules sint64 = 8; + Fixed32Rules fixed32 = 9; + Fixed64Rules fixed64 = 10; + SFixed32Rules sfixed32 = 11; + SFixed64Rules sfixed64 = 12; + BoolRules bool = 13; + StringRules string = 14; + BytesRules bytes = 15; + + // Complex Field Types + EnumRules enum = 16; + RepeatedRules repeated = 18; + MapRules map = 19; + + // Well-Known Field Types + AnyRules any = 20; + DurationRules duration = 21; + TimestampRules timestamp = 22; + } +} + +// FloatRules describes the constraints applied to `float` values +message FloatRules { + // Const specifies that this field must be exactly the specified value + optional float const = 1; + + // Lt specifies that this field must be less than the specified value, + // exclusive + optional float lt = 2; + + // Lte specifies that this field must be less than or equal to the + // specified value, inclusive + optional float lte = 3; + + // Gt specifies that this field must be greater than the specified value, + // exclusive. If the value of Gt is larger than a specified Lt or Lte, the + // range is reversed. + optional float gt = 4; + + // Gte specifies that this field must be greater than or equal to the + // specified value, inclusive. If the value of Gte is larger than a + // specified Lt or Lte, the range is reversed. + optional float gte = 5; + + // In specifies that this field must be equal to one of the specified + // values + repeated float in = 6; + + // NotIn specifies that this field cannot be equal to one of the specified + // values + repeated float not_in = 7; + + // IgnoreEmpty specifies that the validation rules of this field should be + // evaluated only if the field is not empty + optional bool ignore_empty = 8; +} + +// DoubleRules describes the constraints applied to `double` values +message DoubleRules { + // Const specifies that this field must be exactly the specified value + optional double const = 1; + + // Lt specifies that this field must be less than the specified value, + // exclusive + optional double lt = 2; + + // Lte specifies that this field must be less than or equal to the + // specified value, inclusive + optional double lte = 3; + + // Gt specifies that this field must be greater than the specified value, + // exclusive. If the value of Gt is larger than a specified Lt or Lte, the + // range is reversed. + optional double gt = 4; + + // Gte specifies that this field must be greater than or equal to the + // specified value, inclusive. If the value of Gte is larger than a + // specified Lt or Lte, the range is reversed. 
+ optional double gte = 5; + + // In specifies that this field must be equal to one of the specified + // values + repeated double in = 6; + + // NotIn specifies that this field cannot be equal to one of the specified + // values + repeated double not_in = 7; + + // IgnoreEmpty specifies that the validation rules of this field should be + // evaluated only if the field is not empty + optional bool ignore_empty = 8; +} + +// Int32Rules describes the constraints applied to `int32` values +message Int32Rules { + // Const specifies that this field must be exactly the specified value + optional int32 const = 1; + + // Lt specifies that this field must be less than the specified value, + // exclusive + optional int32 lt = 2; + + // Lte specifies that this field must be less than or equal to the + // specified value, inclusive + optional int32 lte = 3; + + // Gt specifies that this field must be greater than the specified value, + // exclusive. If the value of Gt is larger than a specified Lt or Lte, the + // range is reversed. + optional int32 gt = 4; + + // Gte specifies that this field must be greater than or equal to the + // specified value, inclusive. If the value of Gte is larger than a + // specified Lt or Lte, the range is reversed. + optional int32 gte = 5; + + // In specifies that this field must be equal to one of the specified + // values + repeated int32 in = 6; + + // NotIn specifies that this field cannot be equal to one of the specified + // values + repeated int32 not_in = 7; + + // IgnoreEmpty specifies that the validation rules of this field should be + // evaluated only if the field is not empty + optional bool ignore_empty = 8; +} + +// Int64Rules describes the constraints applied to `int64` values +message Int64Rules { + // Const specifies that this field must be exactly the specified value + optional int64 const = 1; + + // Lt specifies that this field must be less than the specified value, + // exclusive + optional int64 lt = 2; + + // Lte specifies that this field must be less than or equal to the + // specified value, inclusive + optional int64 lte = 3; + + // Gt specifies that this field must be greater than the specified value, + // exclusive. If the value of Gt is larger than a specified Lt or Lte, the + // range is reversed. + optional int64 gt = 4; + + // Gte specifies that this field must be greater than or equal to the + // specified value, inclusive. If the value of Gte is larger than a + // specified Lt or Lte, the range is reversed. + optional int64 gte = 5; + + // In specifies that this field must be equal to one of the specified + // values + repeated int64 in = 6; + + // NotIn specifies that this field cannot be equal to one of the specified + // values + repeated int64 not_in = 7; + + // IgnoreEmpty specifies that the validation rules of this field should be + // evaluated only if the field is not empty + optional bool ignore_empty = 8; +} + +// UInt32Rules describes the constraints applied to `uint32` values +message UInt32Rules { + // Const specifies that this field must be exactly the specified value + optional uint32 const = 1; + + // Lt specifies that this field must be less than the specified value, + // exclusive + optional uint32 lt = 2; + + // Lte specifies that this field must be less than or equal to the + // specified value, inclusive + optional uint32 lte = 3; + + // Gt specifies that this field must be greater than the specified value, + // exclusive. If the value of Gt is larger than a specified Lt or Lte, the + // range is reversed. 
+ optional uint32 gt = 4; + + // Gte specifies that this field must be greater than or equal to the + // specified value, inclusive. If the value of Gte is larger than a + // specified Lt or Lte, the range is reversed. + optional uint32 gte = 5; + + // In specifies that this field must be equal to one of the specified + // values + repeated uint32 in = 6; + + // NotIn specifies that this field cannot be equal to one of the specified + // values + repeated uint32 not_in = 7; + + // IgnoreEmpty specifies that the validation rules of this field should be + // evaluated only if the field is not empty + optional bool ignore_empty = 8; +} + +// UInt64Rules describes the constraints applied to `uint64` values +message UInt64Rules { + // Const specifies that this field must be exactly the specified value + optional uint64 const = 1; + + // Lt specifies that this field must be less than the specified value, + // exclusive + optional uint64 lt = 2; + + // Lte specifies that this field must be less than or equal to the + // specified value, inclusive + optional uint64 lte = 3; + + // Gt specifies that this field must be greater than the specified value, + // exclusive. If the value of Gt is larger than a specified Lt or Lte, the + // range is reversed. + optional uint64 gt = 4; + + // Gte specifies that this field must be greater than or equal to the + // specified value, inclusive. If the value of Gte is larger than a + // specified Lt or Lte, the range is reversed. + optional uint64 gte = 5; + + // In specifies that this field must be equal to one of the specified + // values + repeated uint64 in = 6; + + // NotIn specifies that this field cannot be equal to one of the specified + // values + repeated uint64 not_in = 7; + + // IgnoreEmpty specifies that the validation rules of this field should be + // evaluated only if the field is not empty + optional bool ignore_empty = 8; +} + +// SInt32Rules describes the constraints applied to `sint32` values +message SInt32Rules { + // Const specifies that this field must be exactly the specified value + optional sint32 const = 1; + + // Lt specifies that this field must be less than the specified value, + // exclusive + optional sint32 lt = 2; + + // Lte specifies that this field must be less than or equal to the + // specified value, inclusive + optional sint32 lte = 3; + + // Gt specifies that this field must be greater than the specified value, + // exclusive. If the value of Gt is larger than a specified Lt or Lte, the + // range is reversed. + optional sint32 gt = 4; + + // Gte specifies that this field must be greater than or equal to the + // specified value, inclusive. If the value of Gte is larger than a + // specified Lt or Lte, the range is reversed. 
+ optional sint32 gte = 5; + + // In specifies that this field must be equal to one of the specified + // values + repeated sint32 in = 6; + + // NotIn specifies that this field cannot be equal to one of the specified + // values + repeated sint32 not_in = 7; + + // IgnoreEmpty specifies that the validation rules of this field should be + // evaluated only if the field is not empty + optional bool ignore_empty = 8; +} + +// SInt64Rules describes the constraints applied to `sint64` values +message SInt64Rules { + // Const specifies that this field must be exactly the specified value + optional sint64 const = 1; + + // Lt specifies that this field must be less than the specified value, + // exclusive + optional sint64 lt = 2; + + // Lte specifies that this field must be less than or equal to the + // specified value, inclusive + optional sint64 lte = 3; + + // Gt specifies that this field must be greater than the specified value, + // exclusive. If the value of Gt is larger than a specified Lt or Lte, the + // range is reversed. + optional sint64 gt = 4; + + // Gte specifies that this field must be greater than or equal to the + // specified value, inclusive. If the value of Gte is larger than a + // specified Lt or Lte, the range is reversed. + optional sint64 gte = 5; + + // In specifies that this field must be equal to one of the specified + // values + repeated sint64 in = 6; + + // NotIn specifies that this field cannot be equal to one of the specified + // values + repeated sint64 not_in = 7; + + // IgnoreEmpty specifies that the validation rules of this field should be + // evaluated only if the field is not empty + optional bool ignore_empty = 8; +} + +// Fixed32Rules describes the constraints applied to `fixed32` values +message Fixed32Rules { + // Const specifies that this field must be exactly the specified value + optional fixed32 const = 1; + + // Lt specifies that this field must be less than the specified value, + // exclusive + optional fixed32 lt = 2; + + // Lte specifies that this field must be less than or equal to the + // specified value, inclusive + optional fixed32 lte = 3; + + // Gt specifies that this field must be greater than the specified value, + // exclusive. If the value of Gt is larger than a specified Lt or Lte, the + // range is reversed. + optional fixed32 gt = 4; + + // Gte specifies that this field must be greater than or equal to the + // specified value, inclusive. If the value of Gte is larger than a + // specified Lt or Lte, the range is reversed. + optional fixed32 gte = 5; + + // In specifies that this field must be equal to one of the specified + // values + repeated fixed32 in = 6; + + // NotIn specifies that this field cannot be equal to one of the specified + // values + repeated fixed32 not_in = 7; + + // IgnoreEmpty specifies that the validation rules of this field should be + // evaluated only if the field is not empty + optional bool ignore_empty = 8; +} + +// Fixed64Rules describes the constraints applied to `fixed64` values +message Fixed64Rules { + // Const specifies that this field must be exactly the specified value + optional fixed64 const = 1; + + // Lt specifies that this field must be less than the specified value, + // exclusive + optional fixed64 lt = 2; + + // Lte specifies that this field must be less than or equal to the + // specified value, inclusive + optional fixed64 lte = 3; + + // Gt specifies that this field must be greater than the specified value, + // exclusive. 
If the value of Gt is larger than a specified Lt or Lte, the + // range is reversed. + optional fixed64 gt = 4; + + // Gte specifies that this field must be greater than or equal to the + // specified value, inclusive. If the value of Gte is larger than a + // specified Lt or Lte, the range is reversed. + optional fixed64 gte = 5; + + // In specifies that this field must be equal to one of the specified + // values + repeated fixed64 in = 6; + + // NotIn specifies that this field cannot be equal to one of the specified + // values + repeated fixed64 not_in = 7; + + // IgnoreEmpty specifies that the validation rules of this field should be + // evaluated only if the field is not empty + optional bool ignore_empty = 8; +} + +// SFixed32Rules describes the constraints applied to `sfixed32` values +message SFixed32Rules { + // Const specifies that this field must be exactly the specified value + optional sfixed32 const = 1; + + // Lt specifies that this field must be less than the specified value, + // exclusive + optional sfixed32 lt = 2; + + // Lte specifies that this field must be less than or equal to the + // specified value, inclusive + optional sfixed32 lte = 3; + + // Gt specifies that this field must be greater than the specified value, + // exclusive. If the value of Gt is larger than a specified Lt or Lte, the + // range is reversed. + optional sfixed32 gt = 4; + + // Gte specifies that this field must be greater than or equal to the + // specified value, inclusive. If the value of Gte is larger than a + // specified Lt or Lte, the range is reversed. + optional sfixed32 gte = 5; + + // In specifies that this field must be equal to one of the specified + // values + repeated sfixed32 in = 6; + + // NotIn specifies that this field cannot be equal to one of the specified + // values + repeated sfixed32 not_in = 7; + + // IgnoreEmpty specifies that the validation rules of this field should be + // evaluated only if the field is not empty + optional bool ignore_empty = 8; +} + +// SFixed64Rules describes the constraints applied to `sfixed64` values +message SFixed64Rules { + // Const specifies that this field must be exactly the specified value + optional sfixed64 const = 1; + + // Lt specifies that this field must be less than the specified value, + // exclusive + optional sfixed64 lt = 2; + + // Lte specifies that this field must be less than or equal to the + // specified value, inclusive + optional sfixed64 lte = 3; + + // Gt specifies that this field must be greater than the specified value, + // exclusive. If the value of Gt is larger than a specified Lt or Lte, the + // range is reversed. + optional sfixed64 gt = 4; + + // Gte specifies that this field must be greater than or equal to the + // specified value, inclusive. If the value of Gte is larger than a + // specified Lt or Lte, the range is reversed. 
+ optional sfixed64 gte = 5; + + // In specifies that this field must be equal to one of the specified + // values + repeated sfixed64 in = 6; + + // NotIn specifies that this field cannot be equal to one of the specified + // values + repeated sfixed64 not_in = 7; + + // IgnoreEmpty specifies that the validation rules of this field should be + // evaluated only if the field is not empty + optional bool ignore_empty = 8; +} + +// BoolRules describes the constraints applied to `bool` values +message BoolRules { + // Const specifies that this field must be exactly the specified value + optional bool const = 1; +} + +// StringRules describe the constraints applied to `string` values +message StringRules { + // Const specifies that this field must be exactly the specified value + optional string const = 1; + + // Len specifies that this field must be the specified number of + // characters (Unicode code points). Note that the number of + // characters may differ from the number of bytes in the string. + optional uint64 len = 19; + + // MinLen specifies that this field must be the specified number of + // characters (Unicode code points) at a minimum. Note that the number of + // characters may differ from the number of bytes in the string. + optional uint64 min_len = 2; + + // MaxLen specifies that this field must be the specified number of + // characters (Unicode code points) at a maximum. Note that the number of + // characters may differ from the number of bytes in the string. + optional uint64 max_len = 3; + + // LenBytes specifies that this field must be the specified number of bytes + optional uint64 len_bytes = 20; + + // MinBytes specifies that this field must be the specified number of bytes + // at a minimum + optional uint64 min_bytes = 4; + + // MaxBytes specifies that this field must be the specified number of bytes + // at a maximum + optional uint64 max_bytes = 5; + + // Pattern specifes that this field must match against the specified + // regular expression (RE2 syntax). The included expression should elide + // any delimiters. + optional string pattern = 6; + + // Prefix specifies that this field must have the specified substring at + // the beginning of the string. + optional string prefix = 7; + + // Suffix specifies that this field must have the specified substring at + // the end of the string. + optional string suffix = 8; + + // Contains specifies that this field must have the specified substring + // anywhere in the string. + optional string contains = 9; + + // NotContains specifies that this field cannot have the specified substring + // anywhere in the string. + optional string not_contains = 23; + + // In specifies that this field must be equal to one of the specified + // values + repeated string in = 10; + + // NotIn specifies that this field cannot be equal to one of the specified + // values + repeated string not_in = 11; + + // WellKnown rules provide advanced constraints against common string + // patterns + oneof well_known { + // Email specifies that the field must be a valid email address as + // defined by RFC 5322 + bool email = 12; + + // Hostname specifies that the field must be a valid hostname as + // defined by RFC 1034. This constraint does not support + // internationalized domain names (IDNs). + bool hostname = 13; + + // Ip specifies that the field must be a valid IP (v4 or v6) address. + // Valid IPv6 addresses should not include surrounding square brackets. + bool ip = 14; + + // Ipv4 specifies that the field must be a valid IPv4 address. 
+ bool ipv4 = 15; + + // Ipv6 specifies that the field must be a valid IPv6 address. Valid + // IPv6 addresses should not include surrounding square brackets. + bool ipv6 = 16; + + // Uri specifies that the field must be a valid, absolute URI as defined + // by RFC 3986 + bool uri = 17; + + // UriRef specifies that the field must be a valid URI as defined by RFC + // 3986 and may be relative or absolute. + bool uri_ref = 18; + + // Address specifies that the field must be either a valid hostname as + // defined by RFC 1034 (which does not support internationalized domain + // names or IDNs), or it can be a valid IP (v4 or v6). + bool address = 21; + + // Uuid specifies that the field must be a valid UUID as defined by + // RFC 4122 + bool uuid = 22; + + // WellKnownRegex specifies a common well known pattern defined as a regex. + KnownRegex well_known_regex = 24; + } + + // This applies to regexes HTTP_HEADER_NAME and HTTP_HEADER_VALUE to enable + // strict header validation. + // By default, this is true, and HTTP header validations are RFC-compliant. + // Setting to false will enable a looser validations that only disallows + // \r\n\0 characters, which can be used to bypass header matching rules. + optional bool strict = 25 [default = true]; + + // IgnoreEmpty specifies that the validation rules of this field should be + // evaluated only if the field is not empty + optional bool ignore_empty = 26; +} + +// WellKnownRegex contain some well-known patterns. +enum KnownRegex { + UNKNOWN = 0; + + // HTTP header name as defined by RFC 7230. + HTTP_HEADER_NAME = 1; + + // HTTP header value as defined by RFC 7230. + HTTP_HEADER_VALUE = 2; +} + +// BytesRules describe the constraints applied to `bytes` values +message BytesRules { + // Const specifies that this field must be exactly the specified value + optional bytes const = 1; + + // Len specifies that this field must be the specified number of bytes + optional uint64 len = 13; + + // MinLen specifies that this field must be the specified number of bytes + // at a minimum + optional uint64 min_len = 2; + + // MaxLen specifies that this field must be the specified number of bytes + // at a maximum + optional uint64 max_len = 3; + + // Pattern specifes that this field must match against the specified + // regular expression (RE2 syntax). The included expression should elide + // any delimiters. + optional string pattern = 4; + + // Prefix specifies that this field must have the specified bytes at the + // beginning of the string. + optional bytes prefix = 5; + + // Suffix specifies that this field must have the specified bytes at the + // end of the string. + optional bytes suffix = 6; + + // Contains specifies that this field must have the specified bytes + // anywhere in the string. 
+ optional bytes contains = 7; + + // In specifies that this field must be equal to one of the specified + // values + repeated bytes in = 8; + + // NotIn specifies that this field cannot be equal to one of the specified + // values + repeated bytes not_in = 9; + + // WellKnown rules provide advanced constraints against common byte + // patterns + oneof well_known { + // Ip specifies that the field must be a valid IP (v4 or v6) address in + // byte format + bool ip = 10; + + // Ipv4 specifies that the field must be a valid IPv4 address in byte + // format + bool ipv4 = 11; + + // Ipv6 specifies that the field must be a valid IPv6 address in byte + // format + bool ipv6 = 12; + } + + // IgnoreEmpty specifies that the validation rules of this field should be + // evaluated only if the field is not empty + optional bool ignore_empty = 14; +} + +// EnumRules describe the constraints applied to enum values +message EnumRules { + // Const specifies that this field must be exactly the specified value + optional int32 const = 1; + + // DefinedOnly specifies that this field must be only one of the defined + // values for this enum, failing on any undefined value. + optional bool defined_only = 2; + + // In specifies that this field must be equal to one of the specified + // values + repeated int32 in = 3; + + // NotIn specifies that this field cannot be equal to one of the specified + // values + repeated int32 not_in = 4; +} + +// MessageRules describe the constraints applied to embedded message values. +// For message-type fields, validation is performed recursively. +message MessageRules { + // Skip specifies that the validation rules of this field should not be + // evaluated + optional bool skip = 1; + + // Required specifies that this field must be set + optional bool required = 2; +} + +// RepeatedRules describe the constraints applied to `repeated` values +message RepeatedRules { + // MinItems specifies that this field must have the specified number of + // items at a minimum + optional uint64 min_items = 1; + + // MaxItems specifies that this field must have the specified number of + // items at a maximum + optional uint64 max_items = 2; + + // Unique specifies that all elements in this field must be unique. This + // contraint is only applicable to scalar and enum types (messages are not + // supported). + optional bool unique = 3; + + // Items specifies the contraints to be applied to each item in the field. + // Repeated message fields will still execute validation against each item + // unless skip is specified here. + optional FieldRules items = 4; + + // IgnoreEmpty specifies that the validation rules of this field should be + // evaluated only if the field is not empty + optional bool ignore_empty = 5; +} + +// MapRules describe the constraints applied to `map` values +message MapRules { + // MinPairs specifies that this field must have the specified number of + // KVs at a minimum + optional uint64 min_pairs = 1; + + // MaxPairs specifies that this field must have the specified number of + // KVs at a maximum + optional uint64 max_pairs = 2; + + // NoSparse specifies values in this field cannot be unset. This only + // applies to map's with message value types. + optional bool no_sparse = 3; + + // Keys specifies the constraints to be applied to each key in the field. + optional FieldRules keys = 4; + + // Values specifies the constraints to be applied to the value of each key + // in the field. 
Message values will still have their validations evaluated + // unless skip is specified here. + optional FieldRules values = 5; + + // IgnoreEmpty specifies that the validation rules of this field should be + // evaluated only if the field is not empty + optional bool ignore_empty = 6; +} + +// AnyRules describe constraints applied exclusively to the +// `google.protobuf.Any` well-known type +message AnyRules { + // Required specifies that this field must be set + optional bool required = 1; + + // In specifies that this field's `type_url` must be equal to one of the + // specified values. + repeated string in = 2; + + // NotIn specifies that this field's `type_url` must not be equal to any of + // the specified values. + repeated string not_in = 3; +} + +// DurationRules describe the constraints applied exclusively to the +// `google.protobuf.Duration` well-known type +message DurationRules { + // Required specifies that this field must be set + optional bool required = 1; + + // Const specifies that this field must be exactly the specified value + optional google.protobuf.Duration const = 2; + + // Lt specifies that this field must be less than the specified value, + // exclusive + optional google.protobuf.Duration lt = 3; + + // Lt specifies that this field must be less than the specified value, + // inclusive + optional google.protobuf.Duration lte = 4; + + // Gt specifies that this field must be greater than the specified value, + // exclusive + optional google.protobuf.Duration gt = 5; + + // Gte specifies that this field must be greater than the specified value, + // inclusive + optional google.protobuf.Duration gte = 6; + + // In specifies that this field must be equal to one of the specified + // values + repeated google.protobuf.Duration in = 7; + + // NotIn specifies that this field cannot be equal to one of the specified + // values + repeated google.protobuf.Duration not_in = 8; +} + +// TimestampRules describe the constraints applied exclusively to the +// `google.protobuf.Timestamp` well-known type +message TimestampRules { + // Required specifies that this field must be set + optional bool required = 1; + + // Const specifies that this field must be exactly the specified value + optional google.protobuf.Timestamp const = 2; + + // Lt specifies that this field must be less than the specified value, + // exclusive + optional google.protobuf.Timestamp lt = 3; + + // Lte specifies that this field must be less than the specified value, + // inclusive + optional google.protobuf.Timestamp lte = 4; + + // Gt specifies that this field must be greater than the specified value, + // exclusive + optional google.protobuf.Timestamp gt = 5; + + // Gte specifies that this field must be greater than the specified value, + // inclusive + optional google.protobuf.Timestamp gte = 6; + + // LtNow specifies that this must be less than the current time. LtNow + // can only be used with the Within rule. + optional bool lt_now = 7; + + // GtNow specifies that this must be greater than the current time. GtNow + // can only be used with the Within rule. + optional bool gt_now = 8; + + // Within specifies that this field must be within this duration of the + // current time. This constraint can be used alone or with the LtNow and + // GtNow rules. 
+ optional google.protobuf.Duration within = 9; +} diff --git a/a_micro-grpc-http-protobuf/api/user/v1/user.pb.go b/a_micro-grpc-http-protobuf/api/user/v1/user.pb.go deleted file mode 100644 index c6c2ea8..0000000 --- a/a_micro-grpc-http-protobuf/api/user/v1/user.pb.go +++ /dev/null @@ -1,670 +0,0 @@ -// Code generated by protoc-gen-go. DO NOT EDIT. -// versions: -// protoc-gen-go v1.28.0 -// protoc v4.25.2 -// source: api/user/v1/user.proto - -package v1 - -import ( - protoreflect "google.golang.org/protobuf/reflect/protoreflect" - protoimpl "google.golang.org/protobuf/runtime/protoimpl" - reflect "reflect" - sync "sync" -) - -const ( - // Verify that this generated code is sufficiently up-to-date. - _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) - // Verify that runtime/protoimpl is sufficiently up-to-date. - _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) -) - -type RegisterRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - Email string `protobuf:"bytes,1,opt,name=email,proto3" json:"email"` - Password string `protobuf:"bytes,2,opt,name=password,proto3" json:"password"` -} - -func (x *RegisterRequest) Reset() { - *x = RegisterRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_api_user_v1_user_proto_msgTypes[0] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *RegisterRequest) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*RegisterRequest) ProtoMessage() {} - -func (x *RegisterRequest) ProtoReflect() protoreflect.Message { - mi := &file_api_user_v1_user_proto_msgTypes[0] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use RegisterRequest.ProtoReflect.Descriptor instead. -func (*RegisterRequest) Descriptor() ([]byte, []int) { - return file_api_user_v1_user_proto_rawDescGZIP(), []int{0} -} - -func (x *RegisterRequest) GetEmail() string { - if x != nil { - return x.Email - } - return "" -} - -func (x *RegisterRequest) GetPassword() string { - if x != nil { - return x.Password - } - return "" -} - -type RegisterReply struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - Id uint64 `protobuf:"varint,1,opt,name=id,proto3" json:"id"` -} - -func (x *RegisterReply) Reset() { - *x = RegisterReply{} - if protoimpl.UnsafeEnabled { - mi := &file_api_user_v1_user_proto_msgTypes[1] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *RegisterReply) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*RegisterReply) ProtoMessage() {} - -func (x *RegisterReply) ProtoReflect() protoreflect.Message { - mi := &file_api_user_v1_user_proto_msgTypes[1] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use RegisterReply.ProtoReflect.Descriptor instead. 
-func (*RegisterReply) Descriptor() ([]byte, []int) { - return file_api_user_v1_user_proto_rawDescGZIP(), []int{1} -} - -func (x *RegisterReply) GetId() uint64 { - if x != nil { - return x.Id - } - return 0 -} - -type LoginRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - Email string `protobuf:"bytes,1,opt,name=email,proto3" json:"email"` - Password string `protobuf:"bytes,2,opt,name=password,proto3" json:"password"` -} - -func (x *LoginRequest) Reset() { - *x = LoginRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_api_user_v1_user_proto_msgTypes[2] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *LoginRequest) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*LoginRequest) ProtoMessage() {} - -func (x *LoginRequest) ProtoReflect() protoreflect.Message { - mi := &file_api_user_v1_user_proto_msgTypes[2] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use LoginRequest.ProtoReflect.Descriptor instead. -func (*LoginRequest) Descriptor() ([]byte, []int) { - return file_api_user_v1_user_proto_rawDescGZIP(), []int{2} -} - -func (x *LoginRequest) GetEmail() string { - if x != nil { - return x.Email - } - return "" -} - -func (x *LoginRequest) GetPassword() string { - if x != nil { - return x.Password - } - return "" -} - -type LoginReply struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - Id uint64 `protobuf:"varint,1,opt,name=id,proto3" json:"id"` - Token string `protobuf:"bytes,2,opt,name=token,proto3" json:"token"` -} - -func (x *LoginReply) Reset() { - *x = LoginReply{} - if protoimpl.UnsafeEnabled { - mi := &file_api_user_v1_user_proto_msgTypes[3] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *LoginReply) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*LoginReply) ProtoMessage() {} - -func (x *LoginReply) ProtoReflect() protoreflect.Message { - mi := &file_api_user_v1_user_proto_msgTypes[3] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use LoginReply.ProtoReflect.Descriptor instead. 
-func (*LoginReply) Descriptor() ([]byte, []int) { - return file_api_user_v1_user_proto_rawDescGZIP(), []int{3} -} - -func (x *LoginReply) GetId() uint64 { - if x != nil { - return x.Id - } - return 0 -} - -func (x *LoginReply) GetToken() string { - if x != nil { - return x.Token - } - return "" -} - -type LogoutRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - Id uint64 `protobuf:"varint,1,opt,name=id,proto3" json:"id"` - Token string `protobuf:"bytes,2,opt,name=token,proto3" json:"token"` -} - -func (x *LogoutRequest) Reset() { - *x = LogoutRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_api_user_v1_user_proto_msgTypes[4] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *LogoutRequest) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*LogoutRequest) ProtoMessage() {} - -func (x *LogoutRequest) ProtoReflect() protoreflect.Message { - mi := &file_api_user_v1_user_proto_msgTypes[4] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use LogoutRequest.ProtoReflect.Descriptor instead. -func (*LogoutRequest) Descriptor() ([]byte, []int) { - return file_api_user_v1_user_proto_rawDescGZIP(), []int{4} -} - -func (x *LogoutRequest) GetId() uint64 { - if x != nil { - return x.Id - } - return 0 -} - -func (x *LogoutRequest) GetToken() string { - if x != nil { - return x.Token - } - return "" -} - -type LogoutReply struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields -} - -func (x *LogoutReply) Reset() { - *x = LogoutReply{} - if protoimpl.UnsafeEnabled { - mi := &file_api_user_v1_user_proto_msgTypes[5] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *LogoutReply) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*LogoutReply) ProtoMessage() {} - -func (x *LogoutReply) ProtoReflect() protoreflect.Message { - mi := &file_api_user_v1_user_proto_msgTypes[5] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use LogoutReply.ProtoReflect.Descriptor instead. 
-func (*LogoutReply) Descriptor() ([]byte, []int) { - return file_api_user_v1_user_proto_rawDescGZIP(), []int{5} -} - -type ChangePasswordRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - Id uint64 `protobuf:"varint,1,opt,name=id,proto3" json:"id"` - Password string `protobuf:"bytes,2,opt,name=password,proto3" json:"password"` -} - -func (x *ChangePasswordRequest) Reset() { - *x = ChangePasswordRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_api_user_v1_user_proto_msgTypes[6] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *ChangePasswordRequest) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*ChangePasswordRequest) ProtoMessage() {} - -func (x *ChangePasswordRequest) ProtoReflect() protoreflect.Message { - mi := &file_api_user_v1_user_proto_msgTypes[6] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use ChangePasswordRequest.ProtoReflect.Descriptor instead. -func (*ChangePasswordRequest) Descriptor() ([]byte, []int) { - return file_api_user_v1_user_proto_rawDescGZIP(), []int{6} -} - -func (x *ChangePasswordRequest) GetId() uint64 { - if x != nil { - return x.Id - } - return 0 -} - -func (x *ChangePasswordRequest) GetPassword() string { - if x != nil { - return x.Password - } - return "" -} - -type ChangeRegisterReply struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields -} - -func (x *ChangeRegisterReply) Reset() { - *x = ChangeRegisterReply{} - if protoimpl.UnsafeEnabled { - mi := &file_api_user_v1_user_proto_msgTypes[7] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *ChangeRegisterReply) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*ChangeRegisterReply) ProtoMessage() {} - -func (x *ChangeRegisterReply) ProtoReflect() protoreflect.Message { - mi := &file_api_user_v1_user_proto_msgTypes[7] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use ChangeRegisterReply.ProtoReflect.Descriptor instead. 
-func (*ChangeRegisterReply) Descriptor() ([]byte, []int) { - return file_api_user_v1_user_proto_rawDescGZIP(), []int{7} -} - -var File_api_user_v1_user_proto protoreflect.FileDescriptor - -var file_api_user_v1_user_proto_rawDesc = []byte{ - 0x0a, 0x16, 0x61, 0x70, 0x69, 0x2f, 0x75, 0x73, 0x65, 0x72, 0x2f, 0x76, 0x31, 0x2f, 0x75, 0x73, - 0x65, 0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x0b, 0x61, 0x70, 0x69, 0x2e, 0x75, 0x73, - 0x65, 0x72, 0x2e, 0x76, 0x31, 0x1a, 0x1c, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x70, - 0x69, 0x2f, 0x61, 0x6e, 0x6e, 0x6f, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2e, 0x70, 0x72, - 0x6f, 0x74, 0x6f, 0x1a, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x2d, 0x67, 0x65, 0x6e, 0x2d, - 0x6f, 0x70, 0x65, 0x6e, 0x61, 0x70, 0x69, 0x76, 0x32, 0x2f, 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, - 0x73, 0x2f, 0x61, 0x6e, 0x6e, 0x6f, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2e, 0x70, 0x72, - 0x6f, 0x74, 0x6f, 0x1a, 0x17, 0x76, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x65, 0x2f, 0x76, 0x61, - 0x6c, 0x69, 0x64, 0x61, 0x74, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, 0x55, 0x0a, 0x0f, - 0x52, 0x65, 0x67, 0x69, 0x73, 0x74, 0x65, 0x72, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, - 0x1d, 0x0a, 0x05, 0x65, 0x6d, 0x61, 0x69, 0x6c, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x42, 0x07, - 0xfa, 0x42, 0x04, 0x72, 0x02, 0x60, 0x01, 0x52, 0x05, 0x65, 0x6d, 0x61, 0x69, 0x6c, 0x12, 0x23, - 0x0a, 0x08, 0x70, 0x61, 0x73, 0x73, 0x77, 0x6f, 0x72, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, - 0x42, 0x07, 0xfa, 0x42, 0x04, 0x72, 0x02, 0x10, 0x06, 0x52, 0x08, 0x70, 0x61, 0x73, 0x73, 0x77, - 0x6f, 0x72, 0x64, 0x22, 0x1f, 0x0a, 0x0d, 0x52, 0x65, 0x67, 0x69, 0x73, 0x74, 0x65, 0x72, 0x52, - 0x65, 0x70, 0x6c, 0x79, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, - 0x52, 0x02, 0x69, 0x64, 0x22, 0x52, 0x0a, 0x0c, 0x4c, 0x6f, 0x67, 0x69, 0x6e, 0x52, 0x65, 0x71, - 0x75, 0x65, 0x73, 0x74, 0x12, 0x1d, 0x0a, 0x05, 0x65, 0x6d, 0x61, 0x69, 0x6c, 0x18, 0x01, 0x20, - 0x01, 0x28, 0x09, 0x42, 0x07, 0xfa, 0x42, 0x04, 0x72, 0x02, 0x60, 0x01, 0x52, 0x05, 0x65, 0x6d, - 0x61, 0x69, 0x6c, 0x12, 0x23, 0x0a, 0x08, 0x70, 0x61, 0x73, 0x73, 0x77, 0x6f, 0x72, 0x64, 0x18, - 0x02, 0x20, 0x01, 0x28, 0x09, 0x42, 0x07, 0xfa, 0x42, 0x04, 0x72, 0x02, 0x10, 0x06, 0x52, 0x08, - 0x70, 0x61, 0x73, 0x73, 0x77, 0x6f, 0x72, 0x64, 0x22, 0x32, 0x0a, 0x0a, 0x4c, 0x6f, 0x67, 0x69, - 0x6e, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, - 0x28, 0x04, 0x52, 0x02, 0x69, 0x64, 0x12, 0x14, 0x0a, 0x05, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x18, - 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x22, 0x47, 0x0a, 0x0d, - 0x4c, 0x6f, 0x67, 0x6f, 0x75, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x17, 0x0a, - 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x42, 0x07, 0xfa, 0x42, 0x04, 0x32, 0x02, - 0x28, 0x01, 0x52, 0x02, 0x69, 0x64, 0x12, 0x1d, 0x0a, 0x05, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x18, - 0x02, 0x20, 0x01, 0x28, 0x09, 0x42, 0x07, 0xfa, 0x42, 0x04, 0x72, 0x02, 0x10, 0x14, 0x52, 0x05, - 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x22, 0x0d, 0x0a, 0x0b, 0x4c, 0x6f, 0x67, 0x6f, 0x75, 0x74, 0x52, - 0x65, 0x70, 0x6c, 0x79, 0x22, 0x55, 0x0a, 0x15, 0x43, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x50, 0x61, - 0x73, 0x73, 0x77, 0x6f, 0x72, 0x64, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x17, 0x0a, - 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x42, 0x07, 0xfa, 0x42, 0x04, 0x32, 0x02, - 0x28, 0x01, 0x52, 0x02, 0x69, 0x64, 0x12, 0x23, 0x0a, 0x08, 0x70, 0x61, 0x73, 
0x73, 0x77, 0x6f, - 0x72, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x42, 0x07, 0xfa, 0x42, 0x04, 0x72, 0x02, 0x10, - 0x06, 0x52, 0x08, 0x70, 0x61, 0x73, 0x73, 0x77, 0x6f, 0x72, 0x64, 0x22, 0x15, 0x0a, 0x13, 0x43, - 0x68, 0x61, 0x6e, 0x67, 0x65, 0x52, 0x65, 0x67, 0x69, 0x73, 0x74, 0x65, 0x72, 0x52, 0x65, 0x70, - 0x6c, 0x79, 0x32, 0xa3, 0x04, 0x0a, 0x04, 0x75, 0x73, 0x65, 0x72, 0x12, 0x79, 0x0a, 0x08, 0x52, - 0x65, 0x67, 0x69, 0x73, 0x74, 0x65, 0x72, 0x12, 0x1c, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x75, 0x73, - 0x65, 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x52, 0x65, 0x67, 0x69, 0x73, 0x74, 0x65, 0x72, 0x52, 0x65, - 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1a, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x75, 0x73, 0x65, 0x72, - 0x2e, 0x76, 0x31, 0x2e, 0x52, 0x65, 0x67, 0x69, 0x73, 0x74, 0x65, 0x72, 0x52, 0x65, 0x70, 0x6c, - 0x79, 0x22, 0x33, 0x92, 0x41, 0x10, 0x12, 0x06, 0xe6, 0xb3, 0xa8, 0xe5, 0x86, 0x8c, 0x1a, 0x06, - 0xe6, 0xb3, 0xa8, 0xe5, 0x86, 0x8c, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x1a, 0x22, 0x15, 0x2f, 0x61, - 0x70, 0x69, 0x2f, 0x76, 0x31, 0x2f, 0x61, 0x75, 0x74, 0x68, 0x2f, 0x72, 0x65, 0x67, 0x69, 0x73, - 0x74, 0x65, 0x72, 0x3a, 0x01, 0x2a, 0x12, 0x6d, 0x0a, 0x05, 0x4c, 0x6f, 0x67, 0x69, 0x6e, 0x12, - 0x19, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x75, 0x73, 0x65, 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x4c, 0x6f, - 0x67, 0x69, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x17, 0x2e, 0x61, 0x70, 0x69, - 0x2e, 0x75, 0x73, 0x65, 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x4c, 0x6f, 0x67, 0x69, 0x6e, 0x52, 0x65, - 0x70, 0x6c, 0x79, 0x22, 0x30, 0x92, 0x41, 0x10, 0x12, 0x06, 0xe7, 0x99, 0xbb, 0xe5, 0xbd, 0x95, - 0x1a, 0x06, 0xe7, 0x99, 0xbb, 0xe5, 0xbd, 0x95, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x17, 0x22, 0x12, - 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x76, 0x31, 0x2f, 0x61, 0x75, 0x74, 0x68, 0x2f, 0x6c, 0x6f, 0x67, - 0x69, 0x6e, 0x3a, 0x01, 0x2a, 0x12, 0x83, 0x01, 0x0a, 0x06, 0x4c, 0x6f, 0x67, 0x6f, 0x75, 0x74, - 0x12, 0x1a, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x75, 0x73, 0x65, 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x4c, - 0x6f, 0x67, 0x6f, 0x75, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x18, 0x2e, 0x61, - 0x70, 0x69, 0x2e, 0x75, 0x73, 0x65, 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x4c, 0x6f, 0x67, 0x6f, 0x75, - 0x74, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0x43, 0x92, 0x41, 0x22, 0x12, 0x06, 0xe7, 0x99, 0xbb, - 0xe5, 0x87, 0xba, 0x1a, 0x06, 0xe7, 0x99, 0xbb, 0xe5, 0x87, 0xba, 0x62, 0x10, 0x0a, 0x0e, 0x0a, - 0x0a, 0x42, 0x65, 0x61, 0x72, 0x65, 0x72, 0x41, 0x75, 0x74, 0x68, 0x12, 0x00, 0x82, 0xd3, 0xe4, - 0x93, 0x02, 0x18, 0x22, 0x13, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x76, 0x31, 0x2f, 0x61, 0x75, 0x74, - 0x68, 0x2f, 0x6c, 0x6f, 0x67, 0x6f, 0x75, 0x74, 0x3a, 0x01, 0x2a, 0x12, 0xaa, 0x01, 0x0a, 0x0e, - 0x43, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x50, 0x61, 0x73, 0x73, 0x77, 0x6f, 0x72, 0x64, 0x12, 0x22, - 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x75, 0x73, 0x65, 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x43, 0x68, 0x61, - 0x6e, 0x67, 0x65, 0x50, 0x61, 0x73, 0x73, 0x77, 0x6f, 0x72, 0x64, 0x52, 0x65, 0x71, 0x75, 0x65, - 0x73, 0x74, 0x1a, 0x20, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x75, 0x73, 0x65, 0x72, 0x2e, 0x76, 0x31, - 0x2e, 0x43, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x52, 0x65, 0x67, 0x69, 0x73, 0x74, 0x65, 0x72, 0x52, - 0x65, 0x70, 0x6c, 0x79, 0x22, 0x52, 0x92, 0x41, 0x2e, 0x12, 0x0c, 0xe4, 0xbf, 0xae, 0xe6, 0x94, - 0xb9, 0xe5, 0xaf, 0x86, 0xe7, 0xa0, 0x81, 0x1a, 0x0c, 0xe4, 0xbf, 0xae, 0xe6, 0x94, 0xb9, 0xe5, - 0xaf, 0x86, 0xe7, 0xa0, 0x81, 0x62, 0x10, 0x0a, 0x0e, 0x0a, 0x0a, 0x42, 0x65, 0x61, 0x72, 0x65, - 0x72, 0x41, 0x75, 0x74, 0x68, 0x12, 0x00, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x1b, 0x22, 0x16, 0x2f, - 0x61, 
0x70, 0x69, 0x2f, 0x76, 0x31, 0x2f, 0x63, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x50, 0x61, 0x73, - 0x73, 0x77, 0x6f, 0x72, 0x64, 0x3a, 0x01, 0x2a, 0x42, 0xb6, 0x01, 0x5a, 0x13, 0x75, 0x73, 0x65, - 0x72, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x75, 0x73, 0x65, 0x72, 0x2f, 0x76, 0x31, 0x3b, 0x76, 0x31, - 0x92, 0x41, 0x9d, 0x01, 0x12, 0x14, 0x0a, 0x0d, 0x75, 0x73, 0x65, 0x72, 0x20, 0x61, 0x70, 0x69, - 0x20, 0x64, 0x6f, 0x63, 0x73, 0x32, 0x03, 0x32, 0x2e, 0x30, 0x1a, 0x0e, 0x6c, 0x6f, 0x63, 0x61, - 0x6c, 0x68, 0x6f, 0x73, 0x74, 0x3a, 0x38, 0x30, 0x38, 0x30, 0x2a, 0x02, 0x01, 0x02, 0x32, 0x10, - 0x61, 0x70, 0x70, 0x6c, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2f, 0x6a, 0x73, 0x6f, 0x6e, - 0x3a, 0x10, 0x61, 0x70, 0x70, 0x6c, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2f, 0x6a, 0x73, - 0x6f, 0x6e, 0x5a, 0x4d, 0x0a, 0x4b, 0x0a, 0x0a, 0x42, 0x65, 0x61, 0x72, 0x65, 0x72, 0x41, 0x75, - 0x74, 0x68, 0x12, 0x3d, 0x08, 0x02, 0x12, 0x28, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x20, 0x61, 0x20, - 0x22, 0x42, 0x65, 0x61, 0x72, 0x65, 0x72, 0x20, 0x79, 0x6f, 0x75, 0x72, 0x2d, 0x6a, 0x77, 0x74, - 0x2d, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x22, 0x20, 0x74, 0x6f, 0x20, 0x56, 0x61, 0x6c, 0x75, 0x65, - 0x1a, 0x0d, 0x41, 0x75, 0x74, 0x68, 0x6f, 0x72, 0x69, 0x7a, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x20, - 0x02, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, -} - -var ( - file_api_user_v1_user_proto_rawDescOnce sync.Once - file_api_user_v1_user_proto_rawDescData = file_api_user_v1_user_proto_rawDesc -) - -func file_api_user_v1_user_proto_rawDescGZIP() []byte { - file_api_user_v1_user_proto_rawDescOnce.Do(func() { - file_api_user_v1_user_proto_rawDescData = protoimpl.X.CompressGZIP(file_api_user_v1_user_proto_rawDescData) - }) - return file_api_user_v1_user_proto_rawDescData -} - -var file_api_user_v1_user_proto_msgTypes = make([]protoimpl.MessageInfo, 8) -var file_api_user_v1_user_proto_goTypes = []interface{}{ - (*RegisterRequest)(nil), // 0: api.user.v1.RegisterRequest - (*RegisterReply)(nil), // 1: api.user.v1.RegisterReply - (*LoginRequest)(nil), // 2: api.user.v1.LoginRequest - (*LoginReply)(nil), // 3: api.user.v1.LoginReply - (*LogoutRequest)(nil), // 4: api.user.v1.LogoutRequest - (*LogoutReply)(nil), // 5: api.user.v1.LogoutReply - (*ChangePasswordRequest)(nil), // 6: api.user.v1.ChangePasswordRequest - (*ChangeRegisterReply)(nil), // 7: api.user.v1.ChangeRegisterReply -} -var file_api_user_v1_user_proto_depIdxs = []int32{ - 0, // 0: api.user.v1.user.Register:input_type -> api.user.v1.RegisterRequest - 2, // 1: api.user.v1.user.Login:input_type -> api.user.v1.LoginRequest - 4, // 2: api.user.v1.user.Logout:input_type -> api.user.v1.LogoutRequest - 6, // 3: api.user.v1.user.ChangePassword:input_type -> api.user.v1.ChangePasswordRequest - 1, // 4: api.user.v1.user.Register:output_type -> api.user.v1.RegisterReply - 3, // 5: api.user.v1.user.Login:output_type -> api.user.v1.LoginReply - 5, // 6: api.user.v1.user.Logout:output_type -> api.user.v1.LogoutReply - 7, // 7: api.user.v1.user.ChangePassword:output_type -> api.user.v1.ChangeRegisterReply - 4, // [4:8] is the sub-list for method output_type - 0, // [0:4] is the sub-list for method input_type - 0, // [0:0] is the sub-list for extension type_name - 0, // [0:0] is the sub-list for extension extendee - 0, // [0:0] is the sub-list for field type_name -} - -func init() { file_api_user_v1_user_proto_init() } -func file_api_user_v1_user_proto_init() { - if File_api_user_v1_user_proto != nil { - return - } - if !protoimpl.UnsafeEnabled { - file_api_user_v1_user_proto_msgTypes[0].Exporter = 
func(v interface{}, i int) interface{} { - switch v := v.(*RegisterRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_api_user_v1_user_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*RegisterReply); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_api_user_v1_user_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*LoginRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_api_user_v1_user_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*LoginReply); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_api_user_v1_user_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*LogoutRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_api_user_v1_user_proto_msgTypes[5].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*LogoutReply); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_api_user_v1_user_proto_msgTypes[6].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*ChangePasswordRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_api_user_v1_user_proto_msgTypes[7].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*ChangeRegisterReply); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - } - type x struct{} - out := protoimpl.TypeBuilder{ - File: protoimpl.DescBuilder{ - GoPackagePath: reflect.TypeOf(x{}).PkgPath(), - RawDescriptor: file_api_user_v1_user_proto_rawDesc, - NumEnums: 0, - NumMessages: 8, - NumExtensions: 0, - NumServices: 1, - }, - GoTypes: file_api_user_v1_user_proto_goTypes, - DependencyIndexes: file_api_user_v1_user_proto_depIdxs, - MessageInfos: file_api_user_v1_user_proto_msgTypes, - }.Build() - File_api_user_v1_user_proto = out.File - file_api_user_v1_user_proto_rawDesc = nil - file_api_user_v1_user_proto_goTypes = nil - file_api_user_v1_user_proto_depIdxs = nil -} diff --git a/a_micro-grpc-http-protobuf/api/user/v1/user.pb.validate.go b/a_micro-grpc-http-protobuf/api/user/v1/user.pb.validate.go deleted file mode 100644 index 3ad8a62..0000000 --- a/a_micro-grpc-http-protobuf/api/user/v1/user.pb.validate.go +++ /dev/null @@ -1,1033 +0,0 @@ -// Code generated by protoc-gen-validate. DO NOT EDIT. 
-// source: api/user/v1/user.proto - -package v1 - -import ( - "bytes" - "errors" - "fmt" - "net" - "net/mail" - "net/url" - "regexp" - "sort" - "strings" - "time" - "unicode/utf8" - - "google.golang.org/protobuf/types/known/anypb" -) - -// ensure the imports are used -var ( - _ = bytes.MinRead - _ = errors.New("") - _ = fmt.Print - _ = utf8.UTFMax - _ = (*regexp.Regexp)(nil) - _ = (*strings.Reader)(nil) - _ = net.IPv4len - _ = time.Duration(0) - _ = (*url.URL)(nil) - _ = (*mail.Address)(nil) - _ = anypb.Any{} - _ = sort.Sort -) - -// Validate checks the field values on RegisterRequest with the rules defined -// in the proto definition for this message. If any rules are violated, the -// first error encountered is returned, or nil if there are no violations. -func (m *RegisterRequest) Validate() error { - return m.validate(false) -} - -// ValidateAll checks the field values on RegisterRequest with the rules -// defined in the proto definition for this message. If any rules are -// violated, the result is a list of violation errors wrapped in -// RegisterRequestMultiError, or nil if none found. -func (m *RegisterRequest) ValidateAll() error { - return m.validate(true) -} - -func (m *RegisterRequest) validate(all bool) error { - if m == nil { - return nil - } - - var errors []error - - if err := m._validateEmail(m.GetEmail()); err != nil { - err = RegisterRequestValidationError{ - field: "Email", - reason: "value must be a valid email address", - cause: err, - } - if !all { - return err - } - errors = append(errors, err) - } - - if utf8.RuneCountInString(m.GetPassword()) < 6 { - err := RegisterRequestValidationError{ - field: "Password", - reason: "value length must be at least 6 runes", - } - if !all { - return err - } - errors = append(errors, err) - } - - if len(errors) > 0 { - return RegisterRequestMultiError(errors) - } - - return nil -} - -func (m *RegisterRequest) _validateHostname(host string) error { - s := strings.ToLower(strings.TrimSuffix(host, ".")) - - if len(host) > 253 { - return errors.New("hostname cannot exceed 253 characters") - } - - for _, part := range strings.Split(s, ".") { - if l := len(part); l == 0 || l > 63 { - return errors.New("hostname part must be non-empty and cannot exceed 63 characters") - } - - if part[0] == '-' { - return errors.New("hostname parts cannot begin with hyphens") - } - - if part[len(part)-1] == '-' { - return errors.New("hostname parts cannot end with hyphens") - } - - for _, r := range part { - if (r < 'a' || r > 'z') && (r < '0' || r > '9') && r != '-' { - return fmt.Errorf("hostname parts can only contain alphanumeric characters or hyphens, got %q", string(r)) - } - } - } - - return nil -} - -func (m *RegisterRequest) _validateEmail(addr string) error { - a, err := mail.ParseAddress(addr) - if err != nil { - return err - } - addr = a.Address - - if len(addr) > 254 { - return errors.New("email addresses cannot exceed 254 characters") - } - - parts := strings.SplitN(addr, "@", 2) - - if len(parts[0]) > 64 { - return errors.New("email address local phrase cannot exceed 64 characters") - } - - return m._validateHostname(parts[1]) -} - -// RegisterRequestMultiError is an error wrapping multiple validation errors -// returned by RegisterRequest.ValidateAll() if the designated constraints -// aren't met. -type RegisterRequestMultiError []error - -// Error returns a concatenation of all the error messages it wraps. 
-func (m RegisterRequestMultiError) Error() string { - var msgs []string - for _, err := range m { - msgs = append(msgs, err.Error()) - } - return strings.Join(msgs, "; ") -} - -// AllErrors returns a list of validation violation errors. -func (m RegisterRequestMultiError) AllErrors() []error { return m } - -// RegisterRequestValidationError is the validation error returned by -// RegisterRequest.Validate if the designated constraints aren't met. -type RegisterRequestValidationError struct { - field string - reason string - cause error - key bool -} - -// Field function returns field value. -func (e RegisterRequestValidationError) Field() string { return e.field } - -// Reason function returns reason value. -func (e RegisterRequestValidationError) Reason() string { return e.reason } - -// Cause function returns cause value. -func (e RegisterRequestValidationError) Cause() error { return e.cause } - -// Key function returns key value. -func (e RegisterRequestValidationError) Key() bool { return e.key } - -// ErrorName returns error name. -func (e RegisterRequestValidationError) ErrorName() string { return "RegisterRequestValidationError" } - -// Error satisfies the builtin error interface -func (e RegisterRequestValidationError) Error() string { - cause := "" - if e.cause != nil { - cause = fmt.Sprintf(" | caused by: %v", e.cause) - } - - key := "" - if e.key { - key = "key for " - } - - return fmt.Sprintf( - "invalid %sRegisterRequest.%s: %s%s", - key, - e.field, - e.reason, - cause) -} - -var _ error = RegisterRequestValidationError{} - -var _ interface { - Field() string - Reason() string - Key() bool - Cause() error - ErrorName() string -} = RegisterRequestValidationError{} - -// Validate checks the field values on RegisterReply with the rules defined in -// the proto definition for this message. If any rules are violated, the first -// error encountered is returned, or nil if there are no violations. -func (m *RegisterReply) Validate() error { - return m.validate(false) -} - -// ValidateAll checks the field values on RegisterReply with the rules defined -// in the proto definition for this message. If any rules are violated, the -// result is a list of violation errors wrapped in RegisterReplyMultiError, or -// nil if none found. -func (m *RegisterReply) ValidateAll() error { - return m.validate(true) -} - -func (m *RegisterReply) validate(all bool) error { - if m == nil { - return nil - } - - var errors []error - - // no validation rules for Id - - if len(errors) > 0 { - return RegisterReplyMultiError(errors) - } - - return nil -} - -// RegisterReplyMultiError is an error wrapping multiple validation errors -// returned by RegisterReply.ValidateAll() if the designated constraints -// aren't met. -type RegisterReplyMultiError []error - -// Error returns a concatenation of all the error messages it wraps. -func (m RegisterReplyMultiError) Error() string { - var msgs []string - for _, err := range m { - msgs = append(msgs, err.Error()) - } - return strings.Join(msgs, "; ") -} - -// AllErrors returns a list of validation violation errors. -func (m RegisterReplyMultiError) AllErrors() []error { return m } - -// RegisterReplyValidationError is the validation error returned by -// RegisterReply.Validate if the designated constraints aren't met. -type RegisterReplyValidationError struct { - field string - reason string - cause error - key bool -} - -// Field function returns field value. 
-func (e RegisterReplyValidationError) Field() string { return e.field } - -// Reason function returns reason value. -func (e RegisterReplyValidationError) Reason() string { return e.reason } - -// Cause function returns cause value. -func (e RegisterReplyValidationError) Cause() error { return e.cause } - -// Key function returns key value. -func (e RegisterReplyValidationError) Key() bool { return e.key } - -// ErrorName returns error name. -func (e RegisterReplyValidationError) ErrorName() string { return "RegisterReplyValidationError" } - -// Error satisfies the builtin error interface -func (e RegisterReplyValidationError) Error() string { - cause := "" - if e.cause != nil { - cause = fmt.Sprintf(" | caused by: %v", e.cause) - } - - key := "" - if e.key { - key = "key for " - } - - return fmt.Sprintf( - "invalid %sRegisterReply.%s: %s%s", - key, - e.field, - e.reason, - cause) -} - -var _ error = RegisterReplyValidationError{} - -var _ interface { - Field() string - Reason() string - Key() bool - Cause() error - ErrorName() string -} = RegisterReplyValidationError{} - -// Validate checks the field values on LoginRequest with the rules defined in -// the proto definition for this message. If any rules are violated, the first -// error encountered is returned, or nil if there are no violations. -func (m *LoginRequest) Validate() error { - return m.validate(false) -} - -// ValidateAll checks the field values on LoginRequest with the rules defined -// in the proto definition for this message. If any rules are violated, the -// result is a list of violation errors wrapped in LoginRequestMultiError, or -// nil if none found. -func (m *LoginRequest) ValidateAll() error { - return m.validate(true) -} - -func (m *LoginRequest) validate(all bool) error { - if m == nil { - return nil - } - - var errors []error - - if err := m._validateEmail(m.GetEmail()); err != nil { - err = LoginRequestValidationError{ - field: "Email", - reason: "value must be a valid email address", - cause: err, - } - if !all { - return err - } - errors = append(errors, err) - } - - if utf8.RuneCountInString(m.GetPassword()) < 6 { - err := LoginRequestValidationError{ - field: "Password", - reason: "value length must be at least 6 runes", - } - if !all { - return err - } - errors = append(errors, err) - } - - if len(errors) > 0 { - return LoginRequestMultiError(errors) - } - - return nil -} - -func (m *LoginRequest) _validateHostname(host string) error { - s := strings.ToLower(strings.TrimSuffix(host, ".")) - - if len(host) > 253 { - return errors.New("hostname cannot exceed 253 characters") - } - - for _, part := range strings.Split(s, ".") { - if l := len(part); l == 0 || l > 63 { - return errors.New("hostname part must be non-empty and cannot exceed 63 characters") - } - - if part[0] == '-' { - return errors.New("hostname parts cannot begin with hyphens") - } - - if part[len(part)-1] == '-' { - return errors.New("hostname parts cannot end with hyphens") - } - - for _, r := range part { - if (r < 'a' || r > 'z') && (r < '0' || r > '9') && r != '-' { - return fmt.Errorf("hostname parts can only contain alphanumeric characters or hyphens, got %q", string(r)) - } - } - } - - return nil -} - -func (m *LoginRequest) _validateEmail(addr string) error { - a, err := mail.ParseAddress(addr) - if err != nil { - return err - } - addr = a.Address - - if len(addr) > 254 { - return errors.New("email addresses cannot exceed 254 characters") - } - - parts := strings.SplitN(addr, "@", 2) - - if len(parts[0]) > 64 { - return 
errors.New("email address local phrase cannot exceed 64 characters") - } - - return m._validateHostname(parts[1]) -} - -// LoginRequestMultiError is an error wrapping multiple validation errors -// returned by LoginRequest.ValidateAll() if the designated constraints aren't met. -type LoginRequestMultiError []error - -// Error returns a concatenation of all the error messages it wraps. -func (m LoginRequestMultiError) Error() string { - var msgs []string - for _, err := range m { - msgs = append(msgs, err.Error()) - } - return strings.Join(msgs, "; ") -} - -// AllErrors returns a list of validation violation errors. -func (m LoginRequestMultiError) AllErrors() []error { return m } - -// LoginRequestValidationError is the validation error returned by -// LoginRequest.Validate if the designated constraints aren't met. -type LoginRequestValidationError struct { - field string - reason string - cause error - key bool -} - -// Field function returns field value. -func (e LoginRequestValidationError) Field() string { return e.field } - -// Reason function returns reason value. -func (e LoginRequestValidationError) Reason() string { return e.reason } - -// Cause function returns cause value. -func (e LoginRequestValidationError) Cause() error { return e.cause } - -// Key function returns key value. -func (e LoginRequestValidationError) Key() bool { return e.key } - -// ErrorName returns error name. -func (e LoginRequestValidationError) ErrorName() string { return "LoginRequestValidationError" } - -// Error satisfies the builtin error interface -func (e LoginRequestValidationError) Error() string { - cause := "" - if e.cause != nil { - cause = fmt.Sprintf(" | caused by: %v", e.cause) - } - - key := "" - if e.key { - key = "key for " - } - - return fmt.Sprintf( - "invalid %sLoginRequest.%s: %s%s", - key, - e.field, - e.reason, - cause) -} - -var _ error = LoginRequestValidationError{} - -var _ interface { - Field() string - Reason() string - Key() bool - Cause() error - ErrorName() string -} = LoginRequestValidationError{} - -// Validate checks the field values on LoginReply with the rules defined in the -// proto definition for this message. If any rules are violated, the first -// error encountered is returned, or nil if there are no violations. -func (m *LoginReply) Validate() error { - return m.validate(false) -} - -// ValidateAll checks the field values on LoginReply with the rules defined in -// the proto definition for this message. If any rules are violated, the -// result is a list of violation errors wrapped in LoginReplyMultiError, or -// nil if none found. -func (m *LoginReply) ValidateAll() error { - return m.validate(true) -} - -func (m *LoginReply) validate(all bool) error { - if m == nil { - return nil - } - - var errors []error - - // no validation rules for Id - - // no validation rules for Token - - if len(errors) > 0 { - return LoginReplyMultiError(errors) - } - - return nil -} - -// LoginReplyMultiError is an error wrapping multiple validation errors -// returned by LoginReply.ValidateAll() if the designated constraints aren't met. -type LoginReplyMultiError []error - -// Error returns a concatenation of all the error messages it wraps. -func (m LoginReplyMultiError) Error() string { - var msgs []string - for _, err := range m { - msgs = append(msgs, err.Error()) - } - return strings.Join(msgs, "; ") -} - -// AllErrors returns a list of validation violation errors. 
-func (m LoginReplyMultiError) AllErrors() []error { return m } - -// LoginReplyValidationError is the validation error returned by -// LoginReply.Validate if the designated constraints aren't met. -type LoginReplyValidationError struct { - field string - reason string - cause error - key bool -} - -// Field function returns field value. -func (e LoginReplyValidationError) Field() string { return e.field } - -// Reason function returns reason value. -func (e LoginReplyValidationError) Reason() string { return e.reason } - -// Cause function returns cause value. -func (e LoginReplyValidationError) Cause() error { return e.cause } - -// Key function returns key value. -func (e LoginReplyValidationError) Key() bool { return e.key } - -// ErrorName returns error name. -func (e LoginReplyValidationError) ErrorName() string { return "LoginReplyValidationError" } - -// Error satisfies the builtin error interface -func (e LoginReplyValidationError) Error() string { - cause := "" - if e.cause != nil { - cause = fmt.Sprintf(" | caused by: %v", e.cause) - } - - key := "" - if e.key { - key = "key for " - } - - return fmt.Sprintf( - "invalid %sLoginReply.%s: %s%s", - key, - e.field, - e.reason, - cause) -} - -var _ error = LoginReplyValidationError{} - -var _ interface { - Field() string - Reason() string - Key() bool - Cause() error - ErrorName() string -} = LoginReplyValidationError{} - -// Validate checks the field values on LogoutRequest with the rules defined in -// the proto definition for this message. If any rules are violated, the first -// error encountered is returned, or nil if there are no violations. -func (m *LogoutRequest) Validate() error { - return m.validate(false) -} - -// ValidateAll checks the field values on LogoutRequest with the rules defined -// in the proto definition for this message. If any rules are violated, the -// result is a list of violation errors wrapped in LogoutRequestMultiError, or -// nil if none found. -func (m *LogoutRequest) ValidateAll() error { - return m.validate(true) -} - -func (m *LogoutRequest) validate(all bool) error { - if m == nil { - return nil - } - - var errors []error - - if m.GetId() < 1 { - err := LogoutRequestValidationError{ - field: "Id", - reason: "value must be greater than or equal to 1", - } - if !all { - return err - } - errors = append(errors, err) - } - - if utf8.RuneCountInString(m.GetToken()) < 20 { - err := LogoutRequestValidationError{ - field: "Token", - reason: "value length must be at least 20 runes", - } - if !all { - return err - } - errors = append(errors, err) - } - - if len(errors) > 0 { - return LogoutRequestMultiError(errors) - } - - return nil -} - -// LogoutRequestMultiError is an error wrapping multiple validation errors -// returned by LogoutRequest.ValidateAll() if the designated constraints -// aren't met. -type LogoutRequestMultiError []error - -// Error returns a concatenation of all the error messages it wraps. -func (m LogoutRequestMultiError) Error() string { - var msgs []string - for _, err := range m { - msgs = append(msgs, err.Error()) - } - return strings.Join(msgs, "; ") -} - -// AllErrors returns a list of validation violation errors. -func (m LogoutRequestMultiError) AllErrors() []error { return m } - -// LogoutRequestValidationError is the validation error returned by -// LogoutRequest.Validate if the designated constraints aren't met. -type LogoutRequestValidationError struct { - field string - reason string - cause error - key bool -} - -// Field function returns field value. 
-func (e LogoutRequestValidationError) Field() string { return e.field } - -// Reason function returns reason value. -func (e LogoutRequestValidationError) Reason() string { return e.reason } - -// Cause function returns cause value. -func (e LogoutRequestValidationError) Cause() error { return e.cause } - -// Key function returns key value. -func (e LogoutRequestValidationError) Key() bool { return e.key } - -// ErrorName returns error name. -func (e LogoutRequestValidationError) ErrorName() string { return "LogoutRequestValidationError" } - -// Error satisfies the builtin error interface -func (e LogoutRequestValidationError) Error() string { - cause := "" - if e.cause != nil { - cause = fmt.Sprintf(" | caused by: %v", e.cause) - } - - key := "" - if e.key { - key = "key for " - } - - return fmt.Sprintf( - "invalid %sLogoutRequest.%s: %s%s", - key, - e.field, - e.reason, - cause) -} - -var _ error = LogoutRequestValidationError{} - -var _ interface { - Field() string - Reason() string - Key() bool - Cause() error - ErrorName() string -} = LogoutRequestValidationError{} - -// Validate checks the field values on LogoutReply with the rules defined in -// the proto definition for this message. If any rules are violated, the first -// error encountered is returned, or nil if there are no violations. -func (m *LogoutReply) Validate() error { - return m.validate(false) -} - -// ValidateAll checks the field values on LogoutReply with the rules defined in -// the proto definition for this message. If any rules are violated, the -// result is a list of violation errors wrapped in LogoutReplyMultiError, or -// nil if none found. -func (m *LogoutReply) ValidateAll() error { - return m.validate(true) -} - -func (m *LogoutReply) validate(all bool) error { - if m == nil { - return nil - } - - var errors []error - - if len(errors) > 0 { - return LogoutReplyMultiError(errors) - } - - return nil -} - -// LogoutReplyMultiError is an error wrapping multiple validation errors -// returned by LogoutReply.ValidateAll() if the designated constraints aren't met. -type LogoutReplyMultiError []error - -// Error returns a concatenation of all the error messages it wraps. -func (m LogoutReplyMultiError) Error() string { - var msgs []string - for _, err := range m { - msgs = append(msgs, err.Error()) - } - return strings.Join(msgs, "; ") -} - -// AllErrors returns a list of validation violation errors. -func (m LogoutReplyMultiError) AllErrors() []error { return m } - -// LogoutReplyValidationError is the validation error returned by -// LogoutReply.Validate if the designated constraints aren't met. -type LogoutReplyValidationError struct { - field string - reason string - cause error - key bool -} - -// Field function returns field value. -func (e LogoutReplyValidationError) Field() string { return e.field } - -// Reason function returns reason value. -func (e LogoutReplyValidationError) Reason() string { return e.reason } - -// Cause function returns cause value. -func (e LogoutReplyValidationError) Cause() error { return e.cause } - -// Key function returns key value. -func (e LogoutReplyValidationError) Key() bool { return e.key } - -// ErrorName returns error name. 
-func (e LogoutReplyValidationError) ErrorName() string { return "LogoutReplyValidationError" } - -// Error satisfies the builtin error interface -func (e LogoutReplyValidationError) Error() string { - cause := "" - if e.cause != nil { - cause = fmt.Sprintf(" | caused by: %v", e.cause) - } - - key := "" - if e.key { - key = "key for " - } - - return fmt.Sprintf( - "invalid %sLogoutReply.%s: %s%s", - key, - e.field, - e.reason, - cause) -} - -var _ error = LogoutReplyValidationError{} - -var _ interface { - Field() string - Reason() string - Key() bool - Cause() error - ErrorName() string -} = LogoutReplyValidationError{} - -// Validate checks the field values on ChangePasswordRequest with the rules -// defined in the proto definition for this message. If any rules are -// violated, the first error encountered is returned, or nil if there are no violations. -func (m *ChangePasswordRequest) Validate() error { - return m.validate(false) -} - -// ValidateAll checks the field values on ChangePasswordRequest with the rules -// defined in the proto definition for this message. If any rules are -// violated, the result is a list of violation errors wrapped in -// ChangePasswordRequestMultiError, or nil if none found. -func (m *ChangePasswordRequest) ValidateAll() error { - return m.validate(true) -} - -func (m *ChangePasswordRequest) validate(all bool) error { - if m == nil { - return nil - } - - var errors []error - - if m.GetId() < 1 { - err := ChangePasswordRequestValidationError{ - field: "Id", - reason: "value must be greater than or equal to 1", - } - if !all { - return err - } - errors = append(errors, err) - } - - if utf8.RuneCountInString(m.GetPassword()) < 6 { - err := ChangePasswordRequestValidationError{ - field: "Password", - reason: "value length must be at least 6 runes", - } - if !all { - return err - } - errors = append(errors, err) - } - - if len(errors) > 0 { - return ChangePasswordRequestMultiError(errors) - } - - return nil -} - -// ChangePasswordRequestMultiError is an error wrapping multiple validation -// errors returned by ChangePasswordRequest.ValidateAll() if the designated -// constraints aren't met. -type ChangePasswordRequestMultiError []error - -// Error returns a concatenation of all the error messages it wraps. -func (m ChangePasswordRequestMultiError) Error() string { - var msgs []string - for _, err := range m { - msgs = append(msgs, err.Error()) - } - return strings.Join(msgs, "; ") -} - -// AllErrors returns a list of validation violation errors. -func (m ChangePasswordRequestMultiError) AllErrors() []error { return m } - -// ChangePasswordRequestValidationError is the validation error returned by -// ChangePasswordRequest.Validate if the designated constraints aren't met. -type ChangePasswordRequestValidationError struct { - field string - reason string - cause error - key bool -} - -// Field function returns field value. -func (e ChangePasswordRequestValidationError) Field() string { return e.field } - -// Reason function returns reason value. -func (e ChangePasswordRequestValidationError) Reason() string { return e.reason } - -// Cause function returns cause value. -func (e ChangePasswordRequestValidationError) Cause() error { return e.cause } - -// Key function returns key value. -func (e ChangePasswordRequestValidationError) Key() bool { return e.key } - -// ErrorName returns error name. 
-func (e ChangePasswordRequestValidationError) ErrorName() string { - return "ChangePasswordRequestValidationError" -} - -// Error satisfies the builtin error interface -func (e ChangePasswordRequestValidationError) Error() string { - cause := "" - if e.cause != nil { - cause = fmt.Sprintf(" | caused by: %v", e.cause) - } - - key := "" - if e.key { - key = "key for " - } - - return fmt.Sprintf( - "invalid %sChangePasswordRequest.%s: %s%s", - key, - e.field, - e.reason, - cause) -} - -var _ error = ChangePasswordRequestValidationError{} - -var _ interface { - Field() string - Reason() string - Key() bool - Cause() error - ErrorName() string -} = ChangePasswordRequestValidationError{} - -// Validate checks the field values on ChangeRegisterReply with the rules -// defined in the proto definition for this message. If any rules are -// violated, the first error encountered is returned, or nil if there are no violations. -func (m *ChangeRegisterReply) Validate() error { - return m.validate(false) -} - -// ValidateAll checks the field values on ChangeRegisterReply with the rules -// defined in the proto definition for this message. If any rules are -// violated, the result is a list of violation errors wrapped in -// ChangeRegisterReplyMultiError, or nil if none found. -func (m *ChangeRegisterReply) ValidateAll() error { - return m.validate(true) -} - -func (m *ChangeRegisterReply) validate(all bool) error { - if m == nil { - return nil - } - - var errors []error - - if len(errors) > 0 { - return ChangeRegisterReplyMultiError(errors) - } - - return nil -} - -// ChangeRegisterReplyMultiError is an error wrapping multiple validation -// errors returned by ChangeRegisterReply.ValidateAll() if the designated -// constraints aren't met. -type ChangeRegisterReplyMultiError []error - -// Error returns a concatenation of all the error messages it wraps. -func (m ChangeRegisterReplyMultiError) Error() string { - var msgs []string - for _, err := range m { - msgs = append(msgs, err.Error()) - } - return strings.Join(msgs, "; ") -} - -// AllErrors returns a list of validation violation errors. -func (m ChangeRegisterReplyMultiError) AllErrors() []error { return m } - -// ChangeRegisterReplyValidationError is the validation error returned by -// ChangeRegisterReply.Validate if the designated constraints aren't met. -type ChangeRegisterReplyValidationError struct { - field string - reason string - cause error - key bool -} - -// Field function returns field value. -func (e ChangeRegisterReplyValidationError) Field() string { return e.field } - -// Reason function returns reason value. -func (e ChangeRegisterReplyValidationError) Reason() string { return e.reason } - -// Cause function returns cause value. -func (e ChangeRegisterReplyValidationError) Cause() error { return e.cause } - -// Key function returns key value. -func (e ChangeRegisterReplyValidationError) Key() bool { return e.key } - -// ErrorName returns error name. 
-func (e ChangeRegisterReplyValidationError) ErrorName() string { - return "ChangeRegisterReplyValidationError" -} - -// Error satisfies the builtin error interface -func (e ChangeRegisterReplyValidationError) Error() string { - cause := "" - if e.cause != nil { - cause = fmt.Sprintf(" | caused by: %v", e.cause) - } - - key := "" - if e.key { - key = "key for " - } - - return fmt.Sprintf( - "invalid %sChangeRegisterReply.%s: %s%s", - key, - e.field, - e.reason, - cause) -} - -var _ error = ChangeRegisterReplyValidationError{} - -var _ interface { - Field() string - Reason() string - Key() bool - Cause() error - ErrorName() string -} = ChangeRegisterReplyValidationError{} diff --git a/a_micro-grpc-http-protobuf/api/user/v1/user.proto b/a_micro-grpc-http-protobuf/api/user/v1/user.proto deleted file mode 100644 index dacdb29..0000000 --- a/a_micro-grpc-http-protobuf/api/user/v1/user.proto +++ /dev/null @@ -1,141 +0,0 @@ -syntax = "proto3"; - -package api.user.v1; - -import "google/api/annotations.proto"; -import "protoc-gen-openapiv2/options/annotations.proto"; -import "validate/validate.proto"; - -option go_package = "user/api/user/v1;v1"; - -// Default settings for generating swagger documents -option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_swagger) = { - host: "localhost:8080" - base_path: "" - info: { - title: "user api docs"; - version: "2.0"; - } - schemes: HTTP; - schemes: HTTPS; - consumes: "application/json"; - produces: "application/json"; - security_definitions: { - security: { - key: "BearerAuth"; - value: { - type: TYPE_API_KEY; - in: IN_HEADER; - name: "Authorization"; - description: "Input a \"Bearer your-jwt-token\" to Value"; - } - } - } -}; - -service user { - // 注册 - rpc Register(RegisterRequest) returns (RegisterReply) { - option (google.api.http) = { - post: "/api/v1/auth/register" - body: "*" - }; - option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { - summary: "注册", - description: "注册", - }; - } - // 登录 - rpc Login(LoginRequest) returns (LoginReply) { - option (google.api.http) = { - post: "/api/v1/auth/login" - body: "*" - }; - option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { - summary: "登录", - description: "登录", - }; - } - // 登出 - rpc Logout(LogoutRequest) returns (LogoutReply) { - option (google.api.http) = { - post: "/api/v1/auth/logout" - body: "*" - }; - option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { - summary: "登出", - description: "登出", - security: { - security_requirement: { - key: "BearerAuth"; - value: {} - } - } - }; - } - // 修改密码 - rpc ChangePassword(ChangePasswordRequest) returns (ChangeRegisterReply) { - option (google.api.http) = { - post: "/api/v1/changePassword" - body: "*" - }; - option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { - summary: "修改密码", - description: "修改密码", - security: { - security_requirement: { - key: "BearerAuth"; - value: {} - } - } - }; - } -} - -// Some notes on defining fields under message: -// (1) Fill in the validate rules https://github.com/envoyproxy/protoc-gen-validate#constraint-rules -// (2) When using the protoc-gen-openapiv2 plugin, if the defined fields are snake case, -// you must add annotations for snake case names, such as string foo_bar = 1 [json_name = "foo_bar"], -// to ensure that the front end and back end JSON naming is consistent. 
-// (3) If the route contains the path parameter, such as /api/v1/userExample/{id}, the defined -// message must contain the name of the path parameter and the name should be -// added with a new tag, such as int64 id = 1 [(tagger.tags) = "uri:\"id\""]; -// (4) If the request url is followed by a query parameter, such as /api/v1/getUserExample?name=Tom, -// a form tag must be added when defining the query parameter in the message, -// such as string name = 1 [(tagger.tags) = "form:\"name\""]. - - -message RegisterRequest { - string email = 1 [(validate.rules).string.email = true]; - string password = 2 [(validate.rules).string.min_len = 6]; -} - -message RegisterReply { - uint64 id = 1; -} - -message LoginRequest { - string email = 1 [(validate.rules).string.email = true]; - string password = 2 [(validate.rules).string.min_len = 6]; -} - -message LoginReply { - uint64 id = 1; - string token = 2; -} - -message LogoutRequest { - uint64 id = 1 [(validate.rules).uint64.gte = 1]; - string token = 2 [(validate.rules).string.min_len = 20]; -} - -message LogoutReply { -} - -message ChangePasswordRequest { - uint64 id = 1 [(validate.rules).uint64.gte = 1]; - string password = 2 [(validate.rules).string.min_len = 6]; -} - -message ChangeRegisterReply { -} diff --git a/a_micro-grpc-http-protobuf/api/user/v1/user_grpc.pb.go b/a_micro-grpc-http-protobuf/api/user/v1/user_grpc.pb.go deleted file mode 100644 index 3768c12..0000000 --- a/a_micro-grpc-http-protobuf/api/user/v1/user_grpc.pb.go +++ /dev/null @@ -1,228 +0,0 @@ -// Code generated by protoc-gen-go-grpc. DO NOT EDIT. -// versions: -// - protoc-gen-go-grpc v1.3.0 -// - protoc v4.25.2 -// source: api/user/v1/user.proto - -package v1 - -import ( - context "context" - grpc "google.golang.org/grpc" - codes "google.golang.org/grpc/codes" - status "google.golang.org/grpc/status" -) - -// This is a compile-time assertion to ensure that this generated file -// is compatible with the grpc package it is being compiled against. -// Requires gRPC-Go v1.32.0 or later. -const _ = grpc.SupportPackageIsVersion7 - -const ( - User_Register_FullMethodName = "/api.user.v1.user/Register" - User_Login_FullMethodName = "/api.user.v1.user/Login" - User_Logout_FullMethodName = "/api.user.v1.user/Logout" - User_ChangePassword_FullMethodName = "/api.user.v1.user/ChangePassword" -) - -// UserClient is the client API for User service. -// -// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://pkg.go.dev/google.golang.org/grpc/?tab=doc#ClientConn.NewStream. -type UserClient interface { - // 注册 - Register(ctx context.Context, in *RegisterRequest, opts ...grpc.CallOption) (*RegisterReply, error) - // 登录 - Login(ctx context.Context, in *LoginRequest, opts ...grpc.CallOption) (*LoginReply, error) - // 登出 - Logout(ctx context.Context, in *LogoutRequest, opts ...grpc.CallOption) (*LogoutReply, error) - // 修改密码 - ChangePassword(ctx context.Context, in *ChangePasswordRequest, opts ...grpc.CallOption) (*ChangeRegisterReply, error) -} - -type userClient struct { - cc grpc.ClientConnInterface -} - -func NewUserClient(cc grpc.ClientConnInterface) UserClient { - return &userClient{cc} -} - -func (c *userClient) Register(ctx context.Context, in *RegisterRequest, opts ...grpc.CallOption) (*RegisterReply, error) { - out := new(RegisterReply) - err := c.cc.Invoke(ctx, User_Register_FullMethodName, in, out, opts...) 
- if err != nil { - return nil, err - } - return out, nil -} - -func (c *userClient) Login(ctx context.Context, in *LoginRequest, opts ...grpc.CallOption) (*LoginReply, error) { - out := new(LoginReply) - err := c.cc.Invoke(ctx, User_Login_FullMethodName, in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *userClient) Logout(ctx context.Context, in *LogoutRequest, opts ...grpc.CallOption) (*LogoutReply, error) { - out := new(LogoutReply) - err := c.cc.Invoke(ctx, User_Logout_FullMethodName, in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *userClient) ChangePassword(ctx context.Context, in *ChangePasswordRequest, opts ...grpc.CallOption) (*ChangeRegisterReply, error) { - out := new(ChangeRegisterReply) - err := c.cc.Invoke(ctx, User_ChangePassword_FullMethodName, in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -// UserServer is the server API for User service. -// All implementations must embed UnimplementedUserServer -// for forward compatibility -type UserServer interface { - // 注册 - Register(context.Context, *RegisterRequest) (*RegisterReply, error) - // 登录 - Login(context.Context, *LoginRequest) (*LoginReply, error) - // 登出 - Logout(context.Context, *LogoutRequest) (*LogoutReply, error) - // 修改密码 - ChangePassword(context.Context, *ChangePasswordRequest) (*ChangeRegisterReply, error) - mustEmbedUnimplementedUserServer() -} - -// UnimplementedUserServer must be embedded to have forward compatible implementations. -type UnimplementedUserServer struct { -} - -func (UnimplementedUserServer) Register(context.Context, *RegisterRequest) (*RegisterReply, error) { - return nil, status.Errorf(codes.Unimplemented, "method Register not implemented") -} -func (UnimplementedUserServer) Login(context.Context, *LoginRequest) (*LoginReply, error) { - return nil, status.Errorf(codes.Unimplemented, "method Login not implemented") -} -func (UnimplementedUserServer) Logout(context.Context, *LogoutRequest) (*LogoutReply, error) { - return nil, status.Errorf(codes.Unimplemented, "method Logout not implemented") -} -func (UnimplementedUserServer) ChangePassword(context.Context, *ChangePasswordRequest) (*ChangeRegisterReply, error) { - return nil, status.Errorf(codes.Unimplemented, "method ChangePassword not implemented") -} -func (UnimplementedUserServer) mustEmbedUnimplementedUserServer() {} - -// UnsafeUserServer may be embedded to opt out of forward compatibility for this service. -// Use of this interface is not recommended, as added methods to UserServer will -// result in compilation errors. 
-type UnsafeUserServer interface { - mustEmbedUnimplementedUserServer() -} - -func RegisterUserServer(s grpc.ServiceRegistrar, srv UserServer) { - s.RegisterService(&User_ServiceDesc, srv) -} - -func _User_Register_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(RegisterRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(UserServer).Register(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: User_Register_FullMethodName, - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(UserServer).Register(ctx, req.(*RegisterRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _User_Login_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(LoginRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(UserServer).Login(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: User_Login_FullMethodName, - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(UserServer).Login(ctx, req.(*LoginRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _User_Logout_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(LogoutRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(UserServer).Logout(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: User_Logout_FullMethodName, - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(UserServer).Logout(ctx, req.(*LogoutRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _User_ChangePassword_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(ChangePasswordRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(UserServer).ChangePassword(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: User_ChangePassword_FullMethodName, - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(UserServer).ChangePassword(ctx, req.(*ChangePasswordRequest)) - } - return interceptor(ctx, in, info, handler) -} - -// User_ServiceDesc is the grpc.ServiceDesc for User service. 
-// It's only intended for direct use with grpc.RegisterService, -// and not to be introspected or modified (even as a copy) -var User_ServiceDesc = grpc.ServiceDesc{ - ServiceName: "api.user.v1.user", - HandlerType: (*UserServer)(nil), - Methods: []grpc.MethodDesc{ - { - MethodName: "Register", - Handler: _User_Register_Handler, - }, - { - MethodName: "Login", - Handler: _User_Login_Handler, - }, - { - MethodName: "Logout", - Handler: _User_Logout_Handler, - }, - { - MethodName: "ChangePassword", - Handler: _User_ChangePassword_Handler, - }, - }, - Streams: []grpc.StreamDesc{}, - Metadata: "api/user/v1/user.proto", -} diff --git a/a_micro-grpc-http-protobuf/api/user/v1/user_router.pb.go b/a_micro-grpc-http-protobuf/api/user/v1/user_router.pb.go deleted file mode 100644 index 530da38..0000000 --- a/a_micro-grpc-http-protobuf/api/user/v1/user_router.pb.go +++ /dev/null @@ -1,260 +0,0 @@ -// Code generated by https://github.com/zhufuyi/sponge, DO NOT EDIT. - -package v1 - -import ( - context "context" - gin "github.com/gin-gonic/gin" - errcode "github.com/zhufuyi/sponge/pkg/errcode" - middleware "github.com/zhufuyi/sponge/pkg/gin/middleware" - zap "go.uber.org/zap" - strings "strings" -) - -// import packages: strings. context. errcode. middleware. zap. gin. - -type UserLogicer interface { - Register(ctx context.Context, req *RegisterRequest) (*RegisterReply, error) - Login(ctx context.Context, req *LoginRequest) (*LoginReply, error) - Logout(ctx context.Context, req *LogoutRequest) (*LogoutReply, error) - ChangePassword(ctx context.Context, req *ChangePasswordRequest) (*ChangeRegisterReply, error) -} - -type UserOption func(*userOptions) - -type userOptions struct { - isFromRPC bool - responser errcode.Responser - zapLog *zap.Logger - httpErrors []*errcode.Error - rpcStatus []*errcode.RPCStatus - wrapCtxFn func(c *gin.Context) context.Context -} - -func (o *userOptions) apply(opts ...UserOption) { - for _, opt := range opts { - opt(o) - } -} - -func WithUserHTTPResponse() UserOption { - return func(o *userOptions) { - o.isFromRPC = false - } -} - -func WithUserRPCResponse() UserOption { - return func(o *userOptions) { - o.isFromRPC = true - } -} - -func WithUserResponser(responser errcode.Responser) UserOption { - return func(o *userOptions) { - o.responser = responser - } -} - -func WithUserLogger(zapLog *zap.Logger) UserOption { - return func(o *userOptions) { - o.zapLog = zapLog - } -} - -func WithUserErrorToHTTPCode(e ...*errcode.Error) UserOption { - return func(o *userOptions) { - o.httpErrors = e - } -} - -func WithUserRPCStatusToHTTPCode(s ...*errcode.RPCStatus) UserOption { - return func(o *userOptions) { - o.rpcStatus = s - } -} - -func WithUserWrapCtx(wrapCtxFn func(c *gin.Context) context.Context) UserOption { - return func(o *userOptions) { - o.wrapCtxFn = wrapCtxFn - } -} - -func RegisterUserRouter( - iRouter gin.IRouter, - groupPathMiddlewares map[string][]gin.HandlerFunc, - singlePathMiddlewares map[string][]gin.HandlerFunc, - iLogic UserLogicer, - opts ...UserOption) { - - o := &userOptions{} - o.apply(opts...) 
- - if o.responser == nil { - o.responser = errcode.NewResponser(o.isFromRPC, o.httpErrors, o.rpcStatus) - } - if o.zapLog == nil { - o.zapLog, _ = zap.NewProduction() - } - - r := &userRouter{ - iRouter: iRouter, - groupPathMiddlewares: groupPathMiddlewares, - singlePathMiddlewares: singlePathMiddlewares, - iLogic: iLogic, - iResponse: o.responser, - zapLog: o.zapLog, - wrapCtxFn: o.wrapCtxFn, - } - r.register() -} - -type userRouter struct { - iRouter gin.IRouter - groupPathMiddlewares map[string][]gin.HandlerFunc - singlePathMiddlewares map[string][]gin.HandlerFunc - iLogic UserLogicer - iResponse errcode.Responser - zapLog *zap.Logger - wrapCtxFn func(c *gin.Context) context.Context -} - -func (r *userRouter) register() { - r.iRouter.Handle("POST", "/api/v1/auth/register", r.withMiddleware("POST", "/api/v1/auth/register", r.Register_0)...) - r.iRouter.Handle("POST", "/api/v1/auth/login", r.withMiddleware("POST", "/api/v1/auth/login", r.Login_0)...) - r.iRouter.Handle("POST", "/api/v1/auth/logout", r.withMiddleware("POST", "/api/v1/auth/logout", r.Logout_0)...) - r.iRouter.Handle("POST", "/api/v1/changePassword", r.withMiddleware("POST", "/api/v1/changePassword", r.ChangePassword_0)...) - -} - -func (r *userRouter) withMiddleware(method string, path string, fn gin.HandlerFunc) []gin.HandlerFunc { - handlerFns := []gin.HandlerFunc{} - - // determine if a route group is hit or miss, left prefix rule - for groupPath, fns := range r.groupPathMiddlewares { - if groupPath == "" || groupPath == "/" { - handlerFns = append(handlerFns, fns...) - continue - } - size := len(groupPath) - if len(path) < size { - continue - } - if groupPath == path[:size] { - handlerFns = append(handlerFns, fns...) - } - } - - // determine if a single route has been hit - key := strings.ToUpper(method) + "->" + path - if fns, ok := r.singlePathMiddlewares[key]; ok { - handlerFns = append(handlerFns, fns...) 
- } - - return append(handlerFns, fn) -} - -var _ middleware.CtxKeyString - -func (r *userRouter) Register_0(c *gin.Context) { - req := &RegisterRequest{} - var err error - - if err = c.ShouldBindJSON(req); err != nil { - r.zapLog.Warn("ShouldBindJSON error", zap.Error(err), middleware.GCtxRequestIDField(c)) - r.iResponse.ParamError(c, err) - return - } - - var ctx context.Context - if r.wrapCtxFn != nil { - ctx = r.wrapCtxFn(c) - } else { - ctx = middleware.WrapCtx(c) - } - - out, err := r.iLogic.Register(ctx, req) - if err != nil { - r.iResponse.Error(c, err) - return - } - - r.iResponse.Success(c, out) -} - -func (r *userRouter) Login_0(c *gin.Context) { - req := &LoginRequest{} - var err error - - if err = c.ShouldBindJSON(req); err != nil { - r.zapLog.Warn("ShouldBindJSON error", zap.Error(err), middleware.GCtxRequestIDField(c)) - r.iResponse.ParamError(c, err) - return - } - - var ctx context.Context - if r.wrapCtxFn != nil { - ctx = r.wrapCtxFn(c) - } else { - ctx = middleware.WrapCtx(c) - } - - out, err := r.iLogic.Login(ctx, req) - if err != nil { - r.iResponse.Error(c, err) - return - } - - r.iResponse.Success(c, out) -} - -func (r *userRouter) Logout_0(c *gin.Context) { - req := &LogoutRequest{} - var err error - - if err = c.ShouldBindJSON(req); err != nil { - r.zapLog.Warn("ShouldBindJSON error", zap.Error(err), middleware.GCtxRequestIDField(c)) - r.iResponse.ParamError(c, err) - return - } - - var ctx context.Context - if r.wrapCtxFn != nil { - ctx = r.wrapCtxFn(c) - } else { - ctx = middleware.WrapCtx(c) - } - - out, err := r.iLogic.Logout(ctx, req) - if err != nil { - r.iResponse.Error(c, err) - return - } - - r.iResponse.Success(c, out) -} - -func (r *userRouter) ChangePassword_0(c *gin.Context) { - req := &ChangePasswordRequest{} - var err error - - if err = c.ShouldBindJSON(req); err != nil { - r.zapLog.Warn("ShouldBindJSON error", zap.Error(err), middleware.GCtxRequestIDField(c)) - r.iResponse.ParamError(c, err) - return - } - - var ctx context.Context - if r.wrapCtxFn != nil { - ctx = r.wrapCtxFn(c) - } else { - ctx = middleware.WrapCtx(c) - } - - out, err := r.iLogic.ChangePassword(ctx, req) - if err != nil { - r.iResponse.Error(c, err) - return - } - - r.iResponse.Success(c, out) -} diff --git a/a_micro-grpc-http-protobuf/docs/apis.swagger.json b/a_micro-grpc-http-protobuf/docs/apis.swagger.json deleted file mode 100644 index 4dc50c4..0000000 --- a/a_micro-grpc-http-protobuf/docs/apis.swagger.json +++ /dev/null @@ -1,281 +0,0 @@ -{ - "swagger": "2.0", - "info": { - "title": "user api docs", - "version": "2.0" - }, - "tags": [ - { - "name": "user" - } - ], - "host": "localhost:8080", - "schemes": [ - "http", - "https" - ], - "consumes": [ - "application/json" - ], - "produces": [ - "application/json" - ], - "paths": { - "/api/v1/auth/login": { - "post": { - "summary": "登录", - "description": "登录", - "operationId": "user_Login", - "responses": { - "200": { - "description": "A successful response.", - "schema": { - "$ref": "#/definitions/v1LoginReply" - } - }, - "default": { - "description": "An unexpected error response.", - "schema": { - "$ref": "#/definitions/rpcStatus" - } - } - }, - "parameters": [ - { - "name": "body", - "in": "body", - "required": true, - "schema": { - "$ref": "#/definitions/v1LoginRequest" - } - } - ], - "tags": [ - "user" - ] - } - }, - "/api/v1/auth/logout": { - "post": { - "summary": "登出", - "description": "登出", - "operationId": "user_Logout", - "responses": { - "200": { - "description": "A successful response.", - "schema": { - "$ref": 
"#/definitions/v1LogoutReply" - } - }, - "default": { - "description": "An unexpected error response.", - "schema": { - "$ref": "#/definitions/rpcStatus" - } - } - }, - "parameters": [ - { - "name": "body", - "in": "body", - "required": true, - "schema": { - "$ref": "#/definitions/v1LogoutRequest" - } - } - ], - "tags": [ - "user" - ], - "security": [ - { - "BearerAuth": [] - } - ] - } - }, - "/api/v1/auth/register": { - "post": { - "summary": "注册", - "description": "注册", - "operationId": "user_Register", - "responses": { - "200": { - "description": "A successful response.", - "schema": { - "$ref": "#/definitions/v1RegisterReply" - } - }, - "default": { - "description": "An unexpected error response.", - "schema": { - "$ref": "#/definitions/rpcStatus" - } - } - }, - "parameters": [ - { - "name": "body", - "in": "body", - "required": true, - "schema": { - "$ref": "#/definitions/v1RegisterRequest" - } - } - ], - "tags": [ - "user" - ] - } - }, - "/api/v1/changePassword": { - "post": { - "summary": "修改密码", - "description": "修改密码", - "operationId": "user_ChangePassword", - "responses": { - "200": { - "description": "A successful response.", - "schema": { - "$ref": "#/definitions/v1ChangeRegisterReply" - } - }, - "default": { - "description": "An unexpected error response.", - "schema": { - "$ref": "#/definitions/rpcStatus" - } - } - }, - "parameters": [ - { - "name": "body", - "in": "body", - "required": true, - "schema": { - "$ref": "#/definitions/v1ChangePasswordRequest" - } - } - ], - "tags": [ - "user" - ], - "security": [ - { - "BearerAuth": [] - } - ] - } - } - }, - "definitions": { - "protobufAny": { - "type": "object", - "properties": { - "@type": { - "type": "string" - } - }, - "additionalProperties": {} - }, - "rpcStatus": { - "type": "object", - "properties": { - "code": { - "type": "integer", - "format": "int32" - }, - "message": { - "type": "string" - }, - "details": { - "type": "array", - "items": { - "$ref": "#/definitions/protobufAny" - } - } - } - }, - "v1ChangePasswordRequest": { - "type": "object", - "properties": { - "id": { - "type": "integer", - "format": "uint64" - }, - "password": { - "type": "string" - } - } - }, - "v1ChangeRegisterReply": { - "type": "object" - }, - "v1LoginReply": { - "type": "object", - "properties": { - "id": { - "type": "integer", - "format": "uint64" - }, - "token": { - "type": "string" - } - } - }, - "v1LoginRequest": { - "type": "object", - "properties": { - "email": { - "type": "string" - }, - "password": { - "type": "string" - } - } - }, - "v1LogoutReply": { - "type": "object" - }, - "v1LogoutRequest": { - "type": "object", - "properties": { - "id": { - "type": "integer", - "format": "uint64" - }, - "token": { - "type": "string" - } - } - }, - "v1RegisterReply": { - "type": "object", - "properties": { - "id": { - "type": "integer", - "format": "uint64" - } - } - }, - "v1RegisterRequest": { - "type": "object", - "properties": { - "email": { - "type": "string" - }, - "password": { - "type": "string" - } - } - } - }, - "securityDefinitions": { - "BearerAuth": { - "type": "apiKey", - "description": "Input a \"Bearer your-jwt-token\" to Value", - "name": "Authorization", - "in": "header" - } - } -} diff --git a/a_micro-grpc-http-protobuf/docs/gen.info b/a_micro-grpc-http-protobuf/docs/gen.info deleted file mode 100644 index d61d9c7..0000000 --- a/a_micro-grpc-http-protobuf/docs/gen.info +++ /dev/null @@ -1 +0,0 @@ -user,user,false \ No newline at end of file diff --git a/a_micro-grpc-http-protobuf/internal/ecode/user_rpc.go 
b/a_micro-grpc-http-protobuf/internal/ecode/user_rpc.go deleted file mode 100644 index c0e0505..0000000 --- a/a_micro-grpc-http-protobuf/internal/ecode/user_rpc.go +++ /dev/null @@ -1,21 +0,0 @@ -// Code generated by https://github.com/zhufuyi/sponge - -package ecode - -import ( - "github.com/zhufuyi/sponge/pkg/errcode" -) - -// user business-level rpc error codes. -// the _userNO value range is 1~100, if the same number appears, it will cause a failure to start the service. -var ( - _userNO = 1 - _userName = "user" - _userBaseCode = errcode.RCode(_userNO) - - StatusRegisterUser = errcode.NewRPCStatus(_userBaseCode+1, "failed to Register "+_userName) - StatusLoginUser = errcode.NewRPCStatus(_userBaseCode+2, "failed to Login "+_userName) - StatusLogoutUser = errcode.NewRPCStatus(_userBaseCode+3, "failed to Logout "+_userName) - StatusChangePasswordUser = errcode.NewRPCStatus(_userBaseCode+4, "failed to ChangePassword "+_userName) - // error codes are globally unique, adding 1 to the previous error code -) diff --git a/a_micro-grpc-http-protobuf/internal/handler/user.go b/a_micro-grpc-http-protobuf/internal/handler/user.go deleted file mode 100644 index 0ee923a..0000000 --- a/a_micro-grpc-http-protobuf/internal/handler/user.go +++ /dev/null @@ -1,43 +0,0 @@ -// Code generated by https://github.com/zhufuyi/sponge - -package handler - -import ( - "context" - - userV1 "user/api/user/v1" - "user/internal/service" -) - -var _ userV1.UserLogicer = (*userHandler)(nil) - -type userHandler struct { - server userV1.UserServer -} - -// NewUserHandler create a handler -func NewUserHandler() userV1.UserLogicer { - return &userHandler{ - server: service.NewUserServer(), - } -} - -// Register 注册 -func (h *userHandler) Register(ctx context.Context, req *userV1.RegisterRequest) (*userV1.RegisterReply, error) { - return h.server.Register(ctx, req) -} - -// Login 登录 -func (h *userHandler) Login(ctx context.Context, req *userV1.LoginRequest) (*userV1.LoginReply, error) { - return h.server.Login(ctx, req) -} - -// Logout 登出 -func (h *userHandler) Logout(ctx context.Context, req *userV1.LogoutRequest) (*userV1.LogoutReply, error) { - return h.server.Logout(ctx, req) -} - -// ChangePassword 修改密码 -func (h *userHandler) ChangePassword(ctx context.Context, req *userV1.ChangePasswordRequest) (*userV1.ChangeRegisterReply, error) { - return h.server.ChangePassword(ctx, req) -} diff --git a/a_micro-grpc-http-protobuf/internal/service/user.go b/a_micro-grpc-http-protobuf/internal/service/user.go deleted file mode 100644 index e9758d3..0000000 --- a/a_micro-grpc-http-protobuf/internal/service/user.go +++ /dev/null @@ -1,102 +0,0 @@ -// Code generated by https://github.com/zhufuyi/sponge - -package service - -import ( - "context" - - userV1 "user/api/user/v1" - "user/internal/ecode" - - "github.com/zhufuyi/sponge/pkg/grpc/interceptor" - "github.com/zhufuyi/sponge/pkg/logger" - - "google.golang.org/grpc" -) - -func init() { - registerFns = append(registerFns, func(server *grpc.Server) { - userV1.RegisterUserServer(server, NewUserServer()) - }) -} - -var _ userV1.UserServer = (*user)(nil) - -type user struct { - userV1.UnimplementedUserServer - - // example: - // iDao dao.UserDao -} - -// NewUserServer create a server -func NewUserServer() userV1.UserServer { - return &user{ - // example: - // iDao: dao.NewUserDao( - // model.GetDB(), - // cache.NewUserCache(model.GetCacheType()), - // ), - } -} - -// Register 注册 -func (s *user) Register(ctx context.Context, req *userV1.RegisterRequest) (*userV1.RegisterReply, error) { - err 
:= req.Validate() - if err != nil { - logger.Warn("req.Validate error", logger.Err(err), logger.Any("req", req), interceptor.ServerCtxRequestIDField(ctx)) - return nil, ecode.StatusInvalidParams.Err() - } - - // fill in the business logic code here - logger.Info("register successfully", interceptor.ServerCtxRequestIDField(ctx)) - - return &userV1.RegisterReply{ - Id: 111, - }, nil -} - -// Login 登录 -func (s *user) Login(ctx context.Context, req *userV1.LoginRequest) (*userV1.LoginReply, error) { - err := req.Validate() - if err != nil { - logger.Warn("req.Validate error", logger.Err(err), logger.Any("req", req), interceptor.ServerCtxRequestIDField(ctx)) - return nil, ecode.StatusInvalidParams.Err() - } - - // fill in the business logic code here - logger.Info("login successfully", interceptor.ServerCtxRequestIDField(ctx)) - - return &userV1.LoginReply{ - Id: 100, - Token: "eydiewnafiaekdfaf......", - }, nil -} - -// Logout 登出 -func (s *user) Logout(ctx context.Context, req *userV1.LogoutRequest) (*userV1.LogoutReply, error) { - err := req.Validate() - if err != nil { - logger.Warn("req.Validate error", logger.Err(err), logger.Any("req", req), interceptor.ServerCtxRequestIDField(ctx)) - return nil, ecode.StatusInvalidParams.Err() - } - - // fill in the business logic code here - logger.Info("logout successfully", interceptor.ServerCtxRequestIDField(ctx)) - - return &userV1.LogoutReply{}, nil -} - -// ChangePassword 修改密码 -func (s *user) ChangePassword(ctx context.Context, req *userV1.ChangePasswordRequest) (*userV1.ChangeRegisterReply, error) { - err := req.Validate() - if err != nil { - logger.Warn("req.Validate error", logger.Err(err), logger.Any("req", req), interceptor.ServerCtxRequestIDField(ctx)) - return nil, ecode.StatusInvalidParams.Err() - } - - // fill in the business logic code here - logger.Info("change password successfully", interceptor.ServerCtxRequestIDField(ctx)) - - return &userV1.ChangeRegisterReply{}, nil -} diff --git a/assets/cache-grpc-http-pb-test.png b/assets/cache-grpc-http-pb-test.png new file mode 100644 index 0000000000000000000000000000000000000000..78a9fc552d98341651ce792ee048ae37a7a785fb GIT binary patch literal 71307 zcmbrlXIN8T*C&jkqM#xoAkzMbfOG+sPE-&?q)C$+dgw^+2MdTulP)z10@8agp(8cY z2`x%52`wZLLSPQw_kBGx^UnL>nfU-$vUB!2YpuQ3Z?Cd5QQHMOY9(z@JRFUIxjT%D>_9c zhDAJU#^%vpDGAbwy+N06%U4^ojGh~p6}jW`?dUa+nV7iHK5jHviPQhiKe27@ZoHi4 z&co>AcmER?u~Rzl4GozdZp`{?wNIYt-6wXuoZ_aD2X|f_9PE1E z;}m`ol=2zv@fm1n!YLXUcQ%$-mJBu!b`r~nbAww}2h(_jRE2ms1EPCX&4Qs%xP|UZ zcz?86x?^9>ZNaM;?jXwPWT&uLr$$(94)%$RFcyn1Xu9uNVG@hCo+!5z=H$6=m&xtiW(<^)XJL0NsPxO8-aO>;p>I%uLeylvUL2X(36gg<s7?|^2S<6_tJ}(q7AJf~7qeaEw(lrLYjR;h&1h-YrrT&wYzCR- zdpLT6hB(ob(T->^(-1c6|MR^$C&uoHxT?;xsGsdcUkV$KWFEg<9;-RHW0w{AhDTG~ zy6?MJT)dTQxRS{2vL+x7)FaW1=@< z!CluZKFiG?T`t%9Nay|~Fs(=B<9nF`hwDREHsf%Vv4u?+`-<K%J0Ra`uzG0{9L}#*aNwdSvV4MPuHZTz*%=z15IiYF|5XTc=Ki~o@cT; zLqQQgRLVg?VFtlaQBX|MlE4u{J3vA4hWvjs3<`fr=M9Jz6#qB#|9_bH?~{GRJtea5 zb`=!oQ4wm3lEbzQlu6T_Kx%`*<+3iQF<(7d%MJSP;K@kR91k>*BhBW!$IOl4{ zO^P=u1X>{3e9kWYCcZ26Q$uV zpRNw?t^kL!2;mdN>}HwZA!h`^b2g}OTO&|w4Ta;g%CbY%$~(?zYYy{2o1N>)q|3VMQ12V zp&9AoLPu}~0q`HZ@dP*kJ_s44z9-=TtOF=<;nz30} zj-BvpYOaQ|X;6dOoEDifh9x*4a%ghU4<4q=OCw^a&Qp9H$JU6?izz3fgB@fdOi63n zjjJ<6nM41x6r+vg+aj(;D1t+VScf(!>INaatl25MI=}*4OAM2@C*pYxhY%dxnOqP# zlF-K`OyClmk+A)pho7i9cDAXMWGN^{%gJJ*h<8Jqpd#`J{!&c)0$O1Kf6`x_C2#J_|w>7U=X<<&{l?rm#Dxo3mboc{iAE^=$-r=Uw!5U`gdd3D(}{h>@`>sk zelelvPX{v*-6j-|aUos4h^! 
zGNWcA#^ky|1#*Tiynwdz%>}N%D_IMDGi0>zaR@b>A9$$(u&i2o9UQ9{kVEb5N9Hpp zXI_zuEt%wqU-!zkY<$ZMPauGWc0SKM!yYKi?$(`xH_hC$*dr`|Xh>S<1HTtYzUu6h z;;OlKdDwC2$)FzJDr{18kpY`zXTp;Q>BD%ua8!=DHppAv4uAi$TDSAuX;tVMi0cPz zD}g=HJ430jKk__SMGE&fy@FK>VnQ@)P%A;!D|g6da9$8WOgHs;mZKqVx& zRd%%*moArauQ+r_rd@%qd1$D;GX5wU?wGBu_H%jtXPtM^{i`xlWcO#i}iTS_tp7NLm$zCgc`Z%r*?Z`%`OHt-c_J9}E8I`$*3)aMi#!rQpN|8KULYnLIgOFvA zjDS2opd&e!5Wj23Ob4l$RPE58hWC~b_eCcY;-4Yb5hA(6=>`jcuivEx{0?2z0pB-4 zWzLdmKV(*0wc_IH^1Rc2Y}RVY>78>}QRgr`YNqseQp_UHD`3Q)uLq^tKQZ?*ZWtgC z5i;y#n=1_-vowAQD-eO*eC6tOiF|5!Q^}gf9?{+ddb`BV36GkVoX&MBNyw~GnBjQY z!s5?vJ2n}PE%_1M#D0`yJ>Ml4A@Iza@bh){P_ssC%IDxNL|O)50eb6lqL+Ol)t9t2 z(a+_j$6@02N;A(zj7-B z?@of9B~`qSdV2n*Jg;asbl_9Ow2SDtbXCxmS)wJ`#J$;pRK0(X0)J+Keshx`*rQB3 zr^W7*a~D{YrBh5Y1A~puvqDLyt^iv$A>jix%jpfj1G<)vr#EierH^$7R$`yV=Ynlz zid^1ywd}_85AoCz3JnOOlD^cUh4Zttw z>3WrmFqM8xW_XMU_jW2{`2F?nB>lNgs$Eb!b^FJcjL9X#GrVF;9YgkNMCKKR%+#=^ zH)Qw7qb`chq5?8&zB>$@qoQB zZMRgIB0?e0D2^<+F~3X~w;A4NF~EXZd9| z(IofxyCBYyFGZX%(k2 zrQ!LbsUu6iw{40!t2~NWc>U{Q8hcD?S)2_WVdfT?rxX(Kn|Bc*&@Ge<}Ed=8xDi3Wt!_4_^ z3PvY&OM_^CmW^AkSFKe(4pgyb&t5lk+0=cM1HN@{m(tlXYR}(5*ACmjyPwp0BNV8g zHP0C`eBm13^8)Jox?39G2~`VbJi2amhxC;cx>^56$xYuv!U%>dt2oXpVm5)9Yvqt# zy1LhC6Qa%3-gQnh7BdiEmE#fdxy(SuDR=&fa=;yW>~IA&?)!7Mq;94`MtzQV8r~QY zYB&dgcqgU_s3B}|e1|wYxeeJdq?)Ez=YVNmflPZvTr$&~T_R3c-Q6CPm&%*dn?~5_ zzrd+4FJ`mVh1o;sUE{yYO)Hi^b>Iz%fR+fWZcO>kN*>PpyhA_ojqrO^I^XTS1Vb?w zwT2FF-!&Q`L1s~BC`m3sIPF`-B*W)L(>%uP9(B!hpL{zS#*Ja3Rf(g$=Gp0S=lDYg zu94E`-OCmVAN#Dln6mK1y(oixJUrF+f!e}o=C07N?NU+1wQ715zVp0OT``|ho!jFI zvi=I0aW`;F8jsi@*-=TKqBzD!VK~KxN$0X2I~D0zIWytdCG0wTpWD^x66#fbrOq24 zu$FV!^`-GF42#03UB!B0yfEpHA%xxSn&NguTc4!2zk9ATF3EkbX9jX>zYmv^?4@&0 zI7zat7#3|0vpo8UM!@>q{M3HORvFaa{J5BYr2H{w?~N5MSBAmgN8~moNx#%(EK#M{ zKHA@9cfK~*H?T)|$<=bs6tZ3qv-cD5$r+~Cln-$2Uo|ss-5eaX(sTG7++Bm5dPU!b z3L+Xhx619~mdVmmOx46D4c>l(gIJ^ZU9D6Tq@&*pJ;V9~vJjaKzB@l{A}hOAa`4cy zJi+;4Km+6W=s{Z7>~o~kT9Rv|cdf(s4W$QZirZ8Bra8ogBkwwE6PnYxgbPIM=qM+b z9IuT;kp8mP%W)Zqv~Cw7|AbijR)wVBSP6Y3LjOp#WNyA zM?Ua694A#hx;&6yD)RBwhkVY5acxzD?{4);E;+)+@wln|+tY}4(~+0~JKu2Ak2r9O zSF_Uu^1w}@u3;phfvynhWHcK~yX_6kd~S8w{dhY;V%#9gGhj-= ze&ZqX$Ja;iqM8{okRjEk!@7%1N zNNd8)?0V`(3y9RB$Z+-<_D<{f)PnEFX7V(&x|%vBGIvT`XDAwb{qvMYeQX)I&6&b5}CQ#dvUfMFjO2+sD7Y7SZv`F{D3SZAAfc$}Cf z$REb7?Z!-rF)I;;4G^xt9+pR9ZzsyD#@7b0=`2U_%)!4QMjek0?jbHX(WB6rwbYyY zD^R5SGy5FY!XUU$ZukdkdAOw7zRZX&?Gn;@pWLuh_9-?OvmdRRn#CrTtJ#Ej?vV9XA&kFQ?~l=f_3khA2mLIXDIv!3+b zC@BKF2fNAcOjVVo-z+)EBz=0B-X=R|cC@sqZmxxio3Em};nhXMW<+p59}+l6_#dkB zj13$)00d0ClKY5mP@~$cXOAC+V4OTg9&uvAZ6O25?{Bq_Z2LhZHW%b#z~SD@udE9? 
z`OYD+?mxeGRQr;+3~%Y^6~}b&`mFVSiojEsQ)C-=pB%14$OV!#pg3&>qHzBl)DifN4sTRsny|@ZcVI-&+iAk?W?yg z1GekOU-_k#NHEV8WmlEBNt8oV*1rnr>KimnBBX%Yrt`!pNi!OzG&^+NACIc0F2MhM zF}~WlyPSxxFCgUNN!@M#%GA?0Si&K670eLl3Ni}_>7gN245oR>DH*~~Q)AfyFunfR zYW}!pWuQIt8DR-kSV97EU0Yu5KP5B+TKOD+bZF;i8b=y%nEX#{;{U^|#jz>9@!zKO z!vAFZ53eJ90RP)pL>9n~26XG+o7ewS)MzQSav9?zwUvHsmM>|8PFmR9l=G!0(OA9+ zhIC{|KzekcpoXv-FlBFQVfLNUmI|qYdS+Sun&5}9x1ap?wL+-FKpOs06a)% zTQ|lH0j5O2;s=_z{_VEmC~8l&JU4U75`735aMOA>dv6ll@XZ$Fh_S0{MOb_60oMAo zYZ#3xg+^e_06b&3p5Sf{AQ!E210#Z9=xVkAaizTT;lbYR zWmN%=(?6F@6%W_|o;OpLG&uaZ@~<*qQhcm@u~oha38ZuIGk=d9 zAhcjjXkW}8h__bk-%nd{;Cxnz9 z&IHW%r^tPU-Hz?>OU&BiRpzs_-%F_%tasBoxfp43-`>iLX?SGVGqlAJ(^MIt3?**| z(VQXoHM3UFDgKj4xx-Cv zXj32Nn#3NmkO4rZ8TM&X3Q!gG z*S&{@+7RvEQ=3gX8IPN#yc}B?x}2x6gYb2>w4Q}ZJA463c>%IM*|NzDI$m4_Er^rX znqhcya}Z|0l_R1|l8>;v&C-ZE7D*<{_f)AET73Y)Uq&!h9`o|_vCu4n^Jf_CZ?po7 zpZ09|h?z39Om{4@!rL-6%woHe$I0}H)~!Op#}YZlGrWvhm4n^{bjooSUVji$QIu9R z@C6V=8pCUp$N$Z*_&--x|M>mi!aZ&XzcgzuCJ|32o8tIk4A&Ml(dsK326~pr%t8O)A~$i zM0BTvmnTiN;;I|fWv@t|+biPlKG`Ikv5CQ6Oq3!zC)M;GoYjLgy_vO1t$Tmo_bhE; z-yq~xt;2s;1rc#kQuk94ew%ORC=nocQ*(r>DCfFhA*?+Bv9)Xnq4wbqx47yaQ25k( zV#B(Z{`8gC9v0tu185b8m+K!X*X)ohfbpEoAqQLu{Q1Ea+2r=K1+||vx(EFfs&6jo zLA=-S0lB$jZm)pVdaG-oPAnv=pK zWlQ@3q%%E!@+ozd3E8}cvj|%jVpJ5K5EZpi^7UO@e+*FPSfG<@kkt-)Sn$@yNeu}N z4{Njd?+L~0-MOB+W|!5dJENB=#e12BW~S0tTmoVn!*IVQ6`||CU<;R*0hvO*Il=hQ z>Mg{5(R!9c7oLSR)U9kf!uXNuWs1+D*UspX)D_!+tX`*A5AJwWyz7tmVYq^MQq`x|W_znCq9W4*Jrmzj za!}_o_{d-T?*e@(RghWRYEKc64$`T+5ndf~RNxT(^%(}dQwPb$vxykfIN7+*R;UgBAnH?> z_Rkgrjgz8hxgYlk(o{1eSwmsg_P)C;gOtm(neWSW`NGd*5u*v3s&O}45J}%)GkYm$ zX+GE~3Uy~7!oob`445;(yh$A%kTr#IdMWr!`rL%*sJLHK{Tsh44Z$y5d>4mGAp_kV zMe!1NuM*J`(yviK6lqma$r8qrq;0lCg#fAQJyio?CyzjC#{(`A;D@V^QHOGP$FW1K z1MZs6G;c^P+&pwtd>Of=o!9&(fBmDKelYGnAJNuWKoreG&3hJBflYiNK?$^?dAXyi zfg#T_-}1wtt+Ak)mCHX@=qZ5{&_MG9igYkUE@EKd=``=E+dMOQYXT+c@!Pzu0NEfH zKBQ!47ADfyA3wQM(k#hC9m3Pm=9G@Y=~=e-O?mYeBZUqpnoXjH87cDi6LoOBAro-( zMluGTFE6Wu+Gh%ELg%xoNnZ`fWve-S>)X`$H>j_7ASK&B%A& zzAl#F!Xe3=ds4q1L2oA~S7!EzdHsSW$P^?;mO{`&bCo`!x>MSN3t<4msA}2g0Ynrh zQwsO+n#IK76EwHguFnP*TH0>1gXwIBeQ);6wz$#nDcn*g*H?Dg>uH*B{8XFg_h%_t zMpd;1@2e}s7-`De76)yK`NnM=UY`+0O6Ip-Yc4D4tEpQQ>swVcNuDX4o#PJTf{0-> z0TV9Xc(gl|f{Zjzv!rBxj+$iotyDevc%}9PgAGsYFPTqE1!8J zjfc12Q*+>59Qu$e_BY5BziFEU`tS#az=~(jiKQ=zsE15&c%cu^jx%44QxVofx;K{^ zM`b)X8bR)X?Twex?)ldI4FANcq%4tZe9a}*IOc}SL&nF)+YTpL!Xvi%iLtIvSD~21 zApnzZ9O2S&NdVuIZG+5%nLAJPn0bVHS?eXnE7z$eoyNm}uvfB&w?QkC528zv0x#M< zD!qb;QIAl7m7h6a<>yJUtx6b|UMs8PS^StR7T+?vnb!l&{@fowL*U>G52CrNbdDql<6b6hqL=ku`TvZA;Yg=XtH?KwuLV~GbTDTndy8G~a z*7o4k-O;{>WPBCd4>_ILOUAn_)m5&4tQtT!3AY4Los8#XP3#==tBnkd-!{u6T{n98 z5vvw)tbW-jNuv)kPfh%3K0W}#LL*%B?Nm#38peirJ2*hIHAc^LWLpqvHCIPFAu^!& z_f!E6u0=0K9PgW_`;E1P+$gbICC&Gswm&ig^|?|njKL~7HEI}t0`62o-z7(@GcsLl zMUFh_0@ANY4U&6tqNZ3g(uaySGwHNW$4#IBgUUCVxyYSuhhN~$B;T3b8usx0jO#au z4;7k1TI?Rn0c@yeE|N_3yOYjMkmx^6SLYF;e$~fT=lPjOS2$lvzg4eA5HpH&;jH4og8#O}W|sQ7Tfn{f)CVN7{00 zHUMj7zR2vew4594SieC1GeQLWaF3T{m>L#g@`zBwf0DBEL9S-IS1+Zk0^^pYzvDgu zmjB{kYCB{4iF#%>;YEr`k+N66aCaFjp#NaawjGN9V25UkjebqFUa_J7mn}dkgmn@u zDm2hI`Xe=Ek<}?dlwf~`B)?@9Rkns8F5IlJvo8jqR#v7wv#SsuD*R&G%6krd#OS|fg+JQ3P zoA?%rJ#BKTs4!S&prlc~#=$~iztFNAQm4|8FtN;^QZ*$pVdW^2 z=Q0$FbVSFx|M@7&rBE|$^t6h5zoy-oF!9oc;LdtZiBG?Lm7!w-_?WRvzmQsfn{U|O z>&KP%R9WRWP3OWgr~O)65X$SR`kNsP!~}+m*CX`}qXAc)Sx-N8gg+nuOSn$x<-MIgu9QibGJb!^fSjSKF{02OKmXe>W zvz{bsi+*WR4qqDb089w2b>4eRB@Pn}(^(_KM{$s6(|3oHo_IW^WMh{u8o4rjpwLGW zMDFwqnLI;s3S(~MXZ?L9Q6bv*VvQ23ovRc=G+t9Z(vqk)rwH~z)b0E-9Q29c(1z_9 z!fTR12aZ)pkbn$hWrR#%p&wBfs`npw64fun2ku=4l1Mq0u_rlsDRy0f&0r1vf8G 
z6aHd5nnu>BGV0-R)H0Szr*7Wv-_)lv2;RxhBHtV0q@B^G0bbzv(lXTU*&f@;k#q3V z9N12u@z&AscujX7kTrC6H&9<)VQTmFByKa*TCq^T7L<8o`68isu+VSZGmmK4vN9!} zRd1Bq(k(mtww%*%1z8tb6fg>KgJZOf^+DtJZ6BVNIJM-P1V2Tg@$ke!(X7_Rtoy0p zH?BEWI{kzkpY$FP>Dq*H;k(h>U+;FB&pE&VT6YH>qNG5b;#!%_4X1__5%-IbduL@P zESpNHlaH=pu5MrV)>8+3DId?=52Uk&?}yoe;8od^?y#}t?QifN`SkdrT$}{VUfTiW z0v9ofI;3g0&>D!jDx9ThNW{H%HlxMnmI}u}Q{yL@^l7n(s-bYl5JITPtV8q2jA)4` zifHI?cSk!YEu6Zqm1G32AdiCp%J?9nXlOl2epNbQYFQbjI*Ox;6N@w4L#ila7cWFo zY$Zft%V4LHh64MdhqVtI<6TNxZPo0}#@2ioXj=#!Zh#{T<}pvY7Wmm~;WTlxkteN_y1iWC)P&g;eDcPK~&W0I%YsuU-F+dpI>k|qSP79h! z)o&=J20ARCM#dnv)Lu2rsqK7!ZXf25wfAxY3?`q-jFcHqmN7|yggpspu6}NhVu!y0 zJv2F1OmVg#iJaWRHR7T>-d!un6SGTLaT9H@f4?zohE9`<eb%O4HBfp6QG4Eksc| z=(LhxYysa6GPU6Wa|Mfh;^B&Xz|}$HKt?0=)(|hSd#ZQ-3!AOt$ zMZ#VmqA%0V;|`R%9P?L$i>6*AvSDdS$WB%H7T<9A+Ucg@X%^zs6pypuU(x340fTwy<`x$n7l2ofgOaO zg_DFyME7SK#se!RHPTb!MpstUwF0~g>nM}(2)?a*Fp)zFbct#A@0*H zhir0)7cN_|*E!Au+Y4}6@?+XQlwrxftp0gGBZ}b8;2x5;y^ZM|pLx%1c$T?npboO4 znKB;KVYpt|tmEecw-!;&6aW+?uvuJIAg~WEw{wgdqUwzZV57dI+(i)Ujf+@VuV|wB>LpkJ|UkDzRLrBn;2Cx3;0upjijK(yK1FA0XV#M*0gq3bQ z>2f-dR=o__>O+r5#rrmGBVRkrAajvP$zS#PhCCsfeXGbI{2w2xu{QSmvp zO~R=Y;l31lt%TG=z*^p2WOL4>LQLwJt!~5c-rlq@L&B3_-u0OS0cKI)O{0(> z@Hu!8uMKItp-RgN4Bqhg?*Gqh$8Ac|a%+dlv}184#=8ZNaq-TJ8%j~zmYxw?mj49FbM@@?c#|nz{fwK@&!rmigbsDg!e9tDfDXirsZxgJ-Z7ud=I^+@is3Hxl0eCE?+8 z@2m%%_~mO?#QyTLxbkg6xcY=nnka#V6@u3lW5&dc&=BP-gpzArveVs5n4E z{wp=~Z=n7EysO8$`v2D(>wia_ek>ZtnEZe882Qhl`bVw)@8$+*{D1fT^sn|(x-J{H z{ojXLttcKpLK$lO%7HA)wx0{{j3MW&DQ=XwiZgNx7r#K;_yDW>*3F$&6XI1w&eLXD zrGV)g-47>e-xCU{Z*B4}>uaA9ilkyo*d);saa+@#%~^O<^!j<)e)NRmdDsIyK%jUb z#>yoG0{gBywrjB9R^a~2Tgjdourypt3JZ`BkSlQid*i00Uo#v&Pg_28OsPByINros z5uq=JG4+D+9bGX`bJSq_I#|Wy%7dv{re5;?T$Y@Z;L2q$g@MAq3zC@>E%5{R7JHE4 zI;wa009Y1Fgl(?V6bDez))&K1S5?^s!29e?#0n=ClJ@bOSb}^;p`KDoYaf;6cGi}Q0oU|3H_YW z#zMxGFO!p`Uk8-4uKjNNUs%ZlxJy^J8HTEqeUk_sZ4b88!)6$ZCT^+_MpPR)Jw`!> zf_|9g+;I&us!orm=UzZUp|hwoOhGE({Z>{?vDsVIe*7!bj!4f~sc&8rd`j1H9Jo@O z3+-P#S`7JqQV!E8xD>|B1aCn&kTCO!Ng3yGNk)Zzl&8k_5jBbYIjj0Ymm=6}mt18I zItl$`JqY6ICh=y;5N50sN1GR+0;=&Ww?mg^0K1XOW#@?$c8ZKh%%pcDRXtaf!JA1M zGQI9wI3Q9=?MSZ_>di+B)O0ut?%Y~03zz@`2KLAFb8o>WOQfXD6l7&(P1BzMFwz}x zHBKwxkFBD|Ne%!WfpJw#S3rH*-0S2XH?iW4&mf|-d@&0@+l@O8XovLn>RFj1hXkYj z?BDBuB`j)_`rc7f6E1OqXXhG+S9kp}e!m^}Oe$ZJDZ#hh-i5GNtG;X|OE4Q?Q9hx= ziH>7y8D)Ykiy^&;j=asEL+-`rJd#SE!rptE2so+UeQDflZ4unnxVeRpi`hvQrkc~v z%vM-Z;G2s{*-i)*1!q!xe0*|9#|)E@TQw3dzEsy1cvawPN7IZbdEB{i1q|QzHEZ4D zh798<3@#wu(_C(O89Tp>H;yWbJH8jA#ghe>C2txtqf&2_0SE*BVio!3MQ$Fkc0_46 zB5htTDi!XypBSOFWj2l<*}133m>~F+!r7xV(O9W}&}8%C{bg$#qM=h78~(~@cP6^o zRSS&gRTL`-oX*K(=Spa@P%&%Gh!lnExuQp|B-7Ry8X%<>me9Jd;yRH+<%zh1Q+6xM zeuM+BX->nFzJGdUgziROs*{zKz-i}7PWw0hBBumoT>sN$67?9D>!YW4%_Sy8U|B7= zkJrjcK+9C1zp6)+4qMgoeo2?5Z3or8&Qto=*e_05@J&yJR-*{0@@+BK;!s8hLC;xF z(Y~BUwYx2`#osv-R{Le8Hqwk&Yci$b+Q?S?Kk_8yZ|kK+_H)9x)L2&Ud2o$B)k$#Q-XNs5dT)EagmkvX1@?djE1U z>OPV=tp4ZGh!IyC>!F~@o}ZXY;wY@6pT!zE_{_~aVyAOxnNIi=QS)*Mt*(C5frywbUkuBUHkN9(LWj@1t8l6$9;HS_|};y)AQ$9MIb*o1JOm( zgw`9;wZ7UqE#X|LU4PF@GL;r{(I8D|e@uR`Ahw+*>d~3WMByUyHm;qd zs-#sy{2-a7;Yby}J`xt0_r;>DxH-=-qLJlc@BoXR{=P%QPjbG>*I)6{H{v-Kiu!*| zl`o`A^|V*nY3bj5<*iXl-DXy7y+hk2ARB~b|4V7sOfSC=ldTJ-?ffYb%=KQlzh)h6 zw(H+~aV&a5_8YQm45%1A4cAWoJ^kUj5E=-#wV8NSrzhyrR}j5#wPi7R4LP^f_W16l z#B0r^W2}W?R-k30oeU0)s8^2Z<5M+HV)E1z702yiyrg4I96-pWqWav#v(bbEY+EQnZApr$SBa^&Cs@-I?wgq=P;CuMbEPF^eZ(q@}M=jDxNFMGq- zYlH-j)Yu9Dpbsf{zxwJsQOG-hc1@*PgEE%^=cffm7tqn|$2;9oFQh)+m>e)*cRH+W zC|KJ2y}7sc?bnREv=GtW|HVX1I)g!$d>OpeNrDf3_oPJ+u zzjb)L10vsbfd%4Dw}id`mDz6QnsGl;;N8`Jx8%3pb%z3jHn%VsiH&@qg$S%z!~u={%5)*cP+A+q1U^R0T; 
zk72pfFQQ;-T$Dg>gu1qrujvb&#cXm8Ad>2eGc{eg9YBkBh3AN>$B)qCNHU8svP2Qd zZ;9^O3!`WqR@MY$dmo(*e=_@Ga^OKbXEwF%^#@hNNajSb!(MDw7lM-BfV?KSo{;p5 zf4D^jVZWpC*bb$H+xDf=1*nfBNO1tXIkAkvSzRWyt40QBHbcVvCdHyw>S# zA=OHXu=f3SqRMDZMSM~}ZO=Nagj5_oQZgS^yQ;xx1rif-H(%PjA5<)d6)rCJRHq2n zkam%)ck}dm?|~-XPD!SvL{cQiR1aAY(J8Wv;q!DMw0w_F8G~%?(c) zOnvpR?Ea_G=LKn73w?B@$V9ElNj;^MUlI546d!_p@5J8sKrbf_#zDm|=B3b*OgFpx zG3GDQE%#zO<(tf%Keg8(y~4eu$&kfb1?%^p^#ZU{6;95>zYI!8d~U2gEvX(RL?cwL zK3*B9Ya0@`KAGj<2^xa%Pp)74D?{ryX+uid_^skWC19W~0EEf~gmHfy5%Q`-y>g*X z^r_rY;G3n~aQ)vR8^1TVm6O;_@eQY?+1n5<;|~(zF&BePannv9YfG|ff5kJ5kZ;-q zPCR0V+I!N^Jz#MPdVX7kFXl2OU@^6K(YUjP)F)`x(4IOL1#jvHHadc{liKgc7=Qd9 zLuhxs-}0AURsOuJY5_%O)+ZPdSJHXGyV<`mS>s?s;S_7EMD{mq>nIiM!1%`O zwr#d_?Z|O^N=uU4l{``hlCxN7WxArZ8}F!vtR=Zwd$C*o6Wn1TM8X; zm0q+va7n>q6U#BsZzev2+C{9122&^)TB>?(uo1pHQe%P?H@0sG8pv}>+a1{_XH6lR z-oa%fPtjKA1xmItNl)7&K7vuUMw@HpH({0OG_Z&=L;B=W3@%V?w-?g&dnHu9g;=k8 zj&m-_aj7TNfr(SoGRorA<;~lQdhZxg)J|C!$ag*Otk&|_^{7n0ngMc3<3qN)!HEg7 zje253N&J&{RgX-48Jdi--r+SgiyS~1YH+!J#v!=u>Glg^h)lgHEFoxg}E56^kq8TToBB8vIfJR=`4Q&9BwnTz6;_ONBMlxS0u{~ zal!y8s;Q}&M1(+3(IsUAxA?! zl*^H`H50Py&B||?c8Q);fpLN6Qiq|h+vzA%t5MJJD5y77;|)uE?ZM%{UGG?A`Y(LZ z;wtkxj6J^7Z4H==uZh%fLim#n`o}@yHVL~gLUQ)>h8y}*1i5nu8$s|Nd|LzNg1qe? z9gO9a3B2N{_OzVNVS6noj3+OO{$rG@&Z3%CTPq&s!Cj1A%{(o6eN*W@wHkCuPI`B` zLMv-NXZd34E5Tps``i&b?D7*|IPPccrfh^S1DMnV>NolH=$+Tp;JgFYmrqj9Q&=09 zflG-m&Ve0>I7hpd2UBl;=$;*vpp!%Mkhgj->Rq5zDG*n}Xs<`KtG1MgT#0HYgCt93 zVu`Q!x8)=<7JQ*aaE)d$0Av?b4F^L_jq@s)MY>I`%_$+DLgw+FRGtDwh zx5oWtz+k#5``6xDHH5T()0(!c(`xol7j9b6aPtPpv3sy#!v`zX$md>PUJ~nQ3gY1; z@N`u^WBvzq)e_CGoM?x&MfWE#-14A-IiGJ35{%NS|0o|!c${qX{%Rry;bK@LCtz~= zpBp;8)mU%I9DFeao4YZrW2oKiQ}JB zP}fQOLOzt$N~Cdt?hhdMAcB*$Xck~=iMA65EmB~4Fhb4mMdT{#V2>_R;LJr9T7{0Ngww$B5LkBl)*uymKnj2`eY}~|G_ZYi zG72~_0e|cqM|C;=0&Z#<8l&@-9?XEp0-!s=(2=Z%a&5S4c3ab{Y@7CIQYcSh@)J)= zMU*)YTxnXX+1C!``}?!-6C5>qh_Y;a3+U4X3W#!^GOzrex^jWxgw7Dz$>A$P z`|(6n|K}4YSZ&euktYC1J(-6rC!5@I9>CS_3?fcnA+uPbiC`U$lPB(UAzU{z8p^9? z*CuC(HwZN$<;SUp5YRD!m}&S7(RTu@c=;5q4|O<_O?{{|My}|g-|Z&&1DXMKDTE)3 zIu@ixSOX6^bAXZ7Pyo?ebE0@^#7i3&Nq=z7xJ3if87lr3(8`0Ww)%~>M9mF)T6_8s zt!<|+%0^_~98ypq)Yx3)zeF3=EGiZ(9Jg5AyxFR`-$)zOZXgOnEHRq(IJtAF<=O)32^;9As8M@8{`gzEUveu~Hn^Qh3DN{LU}yom2-^N;i4KoQ2srp9w!BAM zs34FzaDTvvPB!sxyQoD^RFmdh_L;^4pg-R!tlu@>DM#dNH%|+C0|HNpE1*7BvKV)8 zYV9K1Y$ma74NyCUjWiKe(?b$lE5C3zlN}Bv+~v&{^4KWaP*ne)lq5=(4+LACS-25D zSrD-#>2J2C9<(v}vc`7UKgy-iqI~0Re)Ww4$;tjv?{6;hXrOoM7)0_6op@c>LBrNn zsx36)+=F~ldVUvcyz@2UP}?CCt1Mo@FG^1F+Ioe*wc2rwz`j=9i%x0I=SdrqIjXfD zkIt6=N-Jm>k#Fm3yO@+rK$@^WID6)%2d9?;O6NJ21u7#s^kB7igs*NA+IEVfI~?Hy zP!3fW#b`hO%#$+V@7X|+@AQ64>v9(L{kp+y5_L;_1xH} zTqv?Aw$W(_q$VeKhZ$+(dPL7-3Fs5x7GiyX6C&5AFUqw>C{e_3{GM}XHoo-Gy4*p? 
zCQ&SmQxem5n3>5Mm-h@XW$@&m=IBQZYXsE_Z*9$YzLC_`yg~d;P+UMXB-m*<#~>^y z0{@hY9xP@1wi*AwjUIxOX`s=!eYkTiJH=U@84c z$z*#DV13`lLhD>kq#m}UE5hD5ifx=qKocuimEEydPzLV}``#>W5Me#fx*pbOM!*`r zk+pWruJd%&LC2S0UNPiCxL4E?>N%Ow-6RBDXO+U{G)-*IA2~s)6sM`^XM|ll z7lT({=G-H_ow!8Zrr$+GGW1m@z<3gvVOuY3|Js`nCAe^&epK%|32)*5+GgP+EYsI#C9z$92wX~wt?ydg@%+8C(EV*DI^)cu75Z0=ZTK#R?s))4)iAA0 zd#I?Km&FDM2`8jQalG*hma99;@=&;`I~T9>{)|fuA~J8sry&7F{W2AAO=IAYPbb3e z+;|J9Qy}`s1o-~)Dtt$R33(eBuEybF+rbp^_y@jgt)FTj*fN1U+X_trJF)sAgqSDj zWuLQo?O5l;B~c&Gwz>BO4a81v=aWiv3J8){ceND|0$J1Dtq5JHM#`_3@CJJUJ?pz| z*La=-!fk5}x>SM1SV{ROYa#bRV~UYY zG3)DPv@xtC=0=UABg!U!Io`^kn!-I&Br8ouT(2$EElfbQ^Cw03U5u`oBYU+^l{CfL zSg`}GTeJmD`#suspl(;w4wmYg4 zs4^o9A1r+Yq@=2koZ3K5-|i23x=v=%d#lY(W-$x?TO3u$EGL^gwp09TIe+@yPI4Y^ zkm|n=S|E;Ja%pS*`E6bKk4c67OETSa$MI|laSwrg`*OF4y`w*X8ohFzA@TTo^wUXSgzqgX-?5}0R zS^`WA#bp!HdnA~SC9-CUqp-<0(l4w@?UzkM;+)Ku;TCdKB?aI9Tm{xOa=8MMg}Yk) z86SUoto-f1vCb0sthN5sPhq{j8%~ft1>5Z+&t9(MMKX?1sYrwS$YwHUTElsiK12*O zDOS%hGm<7RdFDu1OA&2$*CiE-x0<(GH{wg*8a%jDM3cSWnHzq5M|N7^){dTtOUz0Y zRZYFuKC1>aQL4(Af|rO5s!*Bg5}V#L_4$GJR{`0~pBQ)^G+lPFZfJYeAQjtY407@WY_Eq<6}WZK}0~hNCyD{=|WTlq=|?W zX^}293eq81DAJ{aln6+X7CKU+^d35d0FfF>LMN0EICps7=j{ES?>pDE&))wfcXF>a zYt~w`W@i0n#x^y(va?iwZ~(k6wdbIFAHhr3PC#hUJH4Wif^|6~|DicP3%2x|n;VC0 zhnpO1{6Ey61)v9W*t|TA8=!Q#iWP3}OWoJ&R51-%$AD2cKEmGwMZ^Atkk@+<;nUC0 zB^(kNr$@J5AUnae_ZmXT zWf{b7{Kfj144Q{Q;32&YIu)pWGV2i(i z!MVg^T8>uXuLwhSpvo6nfMWX!kvi`I)mEC`HF!qkiH*oI;uio~7UergK8XWxz>{{S zZdSHFM*dj>r_ZGi7Qp_2xZOcnP!VEHnqQFoXW*f!|9Sj2N?s-z=bfH;+EpC5ao1p+ zmWD63b6)edKly(K2Y?k7Scyrg?6mUWuqLBe76cJTEASEmOUbQd6NQwr(^nI~fCc4A z)Z);7Nj3&*xseHlz~vSIUPAemyoEP9^Erp+5%)&Z#3gaSb#&yM7=b4|o7nRXzp z!H>R!4WyXyFl&_3GHzqtFFM475d03 zk^Z_5)c7n`vOT9>4)bMUZC#`ws&$l5N_3KkZ%}|ZSXCKL9Ri1#8 zhM&I_Sr!7FZdl;``$V^Yc?$ra22n5Uf8@mYzi!e0F8=S|3@$*nyeE@K?&bd@YRLYk z|L$Y*JOEq=aCBr*_aB1ge@Zpb?mzPCe?lIawo~WRKaBwGk_GMmh#4dZA|19v9&(-( z(bUuYEI{e|{Blg-NdUlOz#Zflxlng$j3Z=Vk_`R0wLwd3LsDv5Ov?Y@r+s98cOZ?r;@Gt9kDl~ z5I7o2Ntqr{0e_>uG;M(FfUT%dHIUli=D)Oe`k_D zZ}5tEwI45?4xD(;lnKOKUlV2X8sAR%uhTnQa-rI*2Q}ftu3kWqSAYiJ2~o%s%|sA6 zfd#owFVjTT1KM-9Ok#-1p3gEIEBi3#QP( zDzLpjAi$w3n`7QdtITBX4q;mAyWxKbLn0ch$oC)o$qOancuNHF5eUqcEP}M3E+t0N zf-y9(8Y~!ps=wj?o|&vHuJznI|w@SyPaVJb5Y4wnleM{GB|gB|`DQnLu=$fO=g#GDo=Z zGoBP{d~e-E+zD3nNJ!Mu)sO^Qmz`M0+7TBxH-G}OA2^m(TuGu_2*}Nm1}zQk;oW<< z+7&+~f1jKL8<#T_E)_6#0?WNTAXw}C>fm+Rv0czZEOyeZVRXu(dulJig8ZLK%w=xaUDT4MG=@Rp6jX`a$C{)=A^cA}&CRK1^YkrkB7kc(g1l#)MZ5=`YLI zt*j!diH{2_2sx`R@Sn%F`lNa`lZi&!tgYNiv)$u#AU`0$|G`?;a(!T$fKH|5EfJ|> zUVsQD-v#jIMrli)ymS&*n&UU_o|!j5$GS&SBYJ(_V1KirNE*}EDLEPsetOmJnJOGr z@}gx~M1S_Cu#-|7j}S1>YN%!LNw)S8m(K1sHUFEnrK5VETCw$nI6`bY{_KTSZjMnS zh(CDUw073DtUL7g)#PaW*8E=-^Ueq)U}L)efve-IPg2~QdmZ1RI-btG-uq2AiuO49 z#uIP&(g8gvIm8*%2Y~3X#%1?vNneMq*NpEXc9NCp*OMj_fHD9<)!fi<&6=nf#c&>ZPylbHF~_UKbJ{2{V<5C0<9z0&Cd(_Lks z6w7jlT<8NUVr4duc>6Bm#R{nJc`q05UmsDt?bisgJ&dlUtIgYu(NhxWi_+<<{6#7T zDRr0)HK6rALU$19!qqOf3p4o@us8j|l-i@egt1j`BjozNr)`}j z+-qY1yh@3zQ}ukaZu&ND&P*jR+OjaZNAlmhz^;gtxI%Hh65}`ot+{Xv9DX)3+VWnLI>s!7W-NOJ zvk099Wb8Hew`sEk{^LZMreuF*t6bXK!?kP_q)H8>M9irlM}Dq|&<_)nXf^ys3in2* z|3E!wBC8if2~|#Be(V%o1^vU|cqQ&jdYm7*t2kpWpLnwe`#76luRN%}$DFuaMHz;nc6IiUG=WaBOpjKO_Y3IJbZg|lfe%oWB?tZ93 z)=?j5w~hl4dEW_mn+<|~T(p*iEA!HUq)ZlfwG|y=4Uqb5M>a)H)!%-My4NmHIZCRg zhypIx^3pf8lQK?Hi1PrR*HVu1t}lj7>Rl1@47)=HtIrvVFWSV2YLsaEFAhS=Ds9r% z_Vg#lRZ49bemX{Bs%);9{aZ3gXvc$Bf`;Oluo|eR z4*~VyvNPi0r!9v4X6V`b#yJi3L$d78oDgRkjjw--b&1pKeL|qJKdARnzDcwzTX?HH z;v6^uO;$|T9yjR-x1o6Unz$>AOGJm=Al6im^9oZ{!A=rv;z_d+>l$h+;Lzk3w|BhN z`0>){c4(VU7G4lHcjm(Triu0I!6cUy+!bO?Rd}9Ww_rYr7xWUy!W7||Cy+`K=ekTR 
z3@C{^OL1cY3VS;)S`9GC)!DAAV}0odjp{ViuKnv7Di@$C@W%-xq=IOUzIXQw!8OW; z#tww$&Qej{zM2lE;4dNXD=;8iWvh0Wo#JzQ;=_U#91+XsQ?TN%bIP&JsCCzsJ_$ua z`32e6Ad46Y3jQF{p$VZ|01*DH#x1Ywi<52VeYmQ9sok~h(+ob^)F?7^L7_8B97OBd z_@}+2f344No`3%rdi{DL$>lOo>xal-nO-&{`QvCD?o{G>CgeU{Ns~^Mstp!9 zZfHfcLnT)z)9;uB$x}SY!0#~uFB&aeBzg%tM9wVBqO|GC1Dpj7-yZB;EI{|^T+$*& zTP~TsCNgnNuS2;uWqD=KzRw&-3L_rYXtI7J9i4TPMIm25VVf|>+uHJL#eMDjVIZfV zl`>re)Sg$$8W$ZJ*#_NQrlwFX1;g5*HScbiAD+`9?BlYwLv3=`OvKs)yby4{`x6Bz0kImpPj}%ZbP~u zfzlkXbWs3%qtJe+R8$usT$MlRFLMLsU1xc>F8w=)^-~Mp=fqBD>}$^&Qn=)FmZpo8 za*s>b(nX5T&IsY_MAMI?Ei1%=g?mC9){Fza5gfRO>v)?}ey>5BMYsLPd(Xj=g}N8* zq3u^U_nsAivshhR3jN=D0;xw1M<~9lLbX7jk`sx4={n{-_CjQoawitYoTAuiT8EfP zr&4hSW!2@`H!@vnYsq~Vedhg50whpGIX3F+MRw_O*gd{uuE~UVv$Tjb+U4iQBnAm_)nAq%H-<#S2 zr1T#%YLxFg2oBMmqQT^i=%jY1yUK$%iW>H{YYHk*)LCjnb}1^xc4d*C8x`o98icQH z`p!2&l6?H%sPNuX7{unWi}dmiBB5>1n)=P}JE;$rk19|+;3pP+nGl8kfluHVKQ1O# zk9d&rBb#(yM6+Ccovkq`vY2gGTEXLw=9!quH)Htqbv2*Y$d~y{1#|}4pf|1Qdx@hz zc)UlPN!&h!p8FfGIv%T}&TEpPC%->YQw-quUj87>QXT_0zqI#kZPUegnXiI6lP?mT zW*z-|PTrU1sRB+F#z^tm8iC};G0Fox7YyzB zMcCW@fH%N5TDWtO$Fe+VuAHKg`V2C z|H&QuU$qnf%i~{>Z-BS;^y9zTC;xH%4LFQaNMEBg^DzT%6~{SLhg*ItEkB72S4}Ej z4+Oz?=(9UC4FMe#9UK-`X*!jDY}r}*vwcsfweRPPQ&7@(3CF{IT!Q&>{nCWa$;^jz zabJ&?gDrY;jH%!-Gz38XF>R4r9I#g=Q)dn{>opVL+8E`r4GDtut39W;QS0q7U|jgn z*U{>_?)T21G=i>1<(7}v-Fv)tirnX(Y;kRv$({c?t`R8hf%jt zH)oBWIWE8|ExY61mQ5I2(W`FMkOar~}Rljv-{w59Ik0M)ZKKPW!x;AsAN$q>Fq|o%Y-keKcf8_B1 z;F481F+$we|MB70*=^wZSTBK;u#8Vf=Q7L=XI^k6;buvr{zm>NVK-;T!3e1x&H6cF zC$zOsM(5%%lRA-xDY5Xcc6r`z0@gk12BKKF`8=@O^WC)oQr_x?IDtjAGz(t&#`dtBytFn`Eedaw$n$>+gZ_x zZr-lKq`lbb>rX+{O0|L-{c#38LC8+Kb?v=U{QL8Pf?Vf;1B(2fU>Ec%TY<%T(K+JP zA))(dgB!Ex)f=Ll^sRz-GFPfp2Ne_h(dAVb7UF&;8s5QVd*kD>>k-}{=Pbn|=)ueu znQbvZi6GM{39j5iXV!K?+%B_L0Z;>^&ZNp;X};&^#x#^o@1Bx>BbjJj+b0F&Cj^A0J6VA zX`8?%yEWXds+cS-l{<82(qsNlw<^JHIPf_B`dT+~$nURL2=P*wWGYH(LfcXEr1}79 zulFe}V$K&sH+yxBxneQ?lY<5U*&^dwkIvkiY%uJrX_mU#H>jO59YEBK*x5Ug^nL(G zf<(W!tE$0PBk}D39MU%XU`JVqo3Y=1dW7_D!0}~U_Vuc{2-lxzPCeAD$EF2JJ4I?< zJvK_T@&K~swJZ#N;x94LMyo_^cCb~;Jfe~o!Bsd^=xX@F>q&3(-RUq#%y*KOYe&eN^b zNDA0gJcU@y^frBp6#I3)5~h#bTwSf zpALZ>x_9@kYsGOnDbMBS$LYfbuvat(hnQcPnP{7>6s|YHI_s-P^pWbvErTXDaOtkq zC7AFrqx3-g_Q5VOjn$tub=2yfyN&LvC%A`om5zo4R|w9;ZfsG7_DOvM--1W_1{gZ- z3%JbnULklidaIpM6fI=DJb;w@=5Q}N%O;I&r>Xn>Y!`?75X~}&treLDLaLIhSxk|K z)C;j>Ld1!c%T1_P1!l;P5uYw|C&-(yvgBXgU$>|;M!}K z@N-x;^U;er&t(mwTlzbmf!)L77yYZoP~VE3&Wh}`xyXT#x;N*kv=Ih#iEr7jgzTQT z*7((mciHB+@lE^OhNf03pW%AY7LJ+N`dDMhiQ2*MsMe2Uf@%wJuCvuPWC^$mfo#fh zHzjh&oyvPOq`tuHMwT*vF$u?!biZF}qYmG`!t^WWXMS$XlmzLz5J=JJ&}!m(3KTV~ zGo~ClG4FX2zI|)u9ZE1&&iF5pbY;kfG26HZtAkv<*_4ri?DJdoZi!Py(lYI_T~Z<- zB)iBk#AvCc5BudJ7`jBBgAr?x-`xk{$;Dgd2pb=bG*8#=0_fqo4+0gQSV>ud?~IB{ zP)x4cxmUMh)Fvib#U-MqTmhS-TXDSf({uEhHl=~r&ZS?$ZH7m$ir4tDTPr~wMt2+( zR=*nQRp~sYSubm#sUQ(@w3ode`5(}pO4E1}gXrhV4JSq1m{fn2b!tU-(B^Xv97xqx z_t*X2(7CK^?VKXl`dx7ygL01>ZedUDKt+(*y1fuPW2xY-g!f^#H@qqvb4(wU+dKl# zttX&wGs0=jgm!E3dx44}q*NA3AJ-p6m%ZW>p2PTn_0LIWt{w{V00hK9Rb`y@za8?@ z8UrATQWh73mPb7A1d*UP2ng@*lo0zvRHX#GDl_(=`IqCRTF+WvnE!#oA^%D8hwZ5U z0BiE8(?0QRJGA!69e!K}qa_$;v>_ z-<_@+W@aJ`Y&dVUT9Ie&gyT}^>>@Njy@j5)d8fK*lY?xCxbKWmwF&c zJm<0l=qhft=!Kq1W*rZxMS1Oo?d{&%7?XRRxrT=kmU2 zFuSuCM%W{&?xWx@%@LaP;=iCDz|aa`ioAQo*6}bB5L9J31KYF)P|Zi<5K9$kuJq)l zhmS3M$>D0ea2m-OvFJOoPIaBQdzwissup~y%GdqyyKFs4JpSj^^H+X5J#Hs1`-Z;nlxDhuSnRFEN4{iKRL8-D+4?G zDJn&J?sIXmlL3Z6EL+S^_W|EFV#vUX;vDHwgnN|Ov}gwuYDet^g14T&UtXEl6yfd> zsTKtRb$ZA*& zW9BQ8p8Z}TO*VHf+zA9Al?slq-Z~QyVTm+ygbv9bbRwr&Vj5m~W&UkpY~#>&aLU4j zkM9ej*IB3Dvwp@LT;rlDRp(45d(xVEU4$q2MzmlV`0AtkW6x=0ycG2amI)5Opn`~l 
zmd(`?n4?CIB)iG#ne4K1$>7Sc($A}+*^xuGv(r%c77wd^AlLdPEuQaE1)xIkFcQ6d z;4(Q$I<|nY(VB(7snCl%w>weVW+hs^H*b$2%Vo)pwU#7*zih~;<7{zgX@Kf$6uwv^PUnZ zKd1q}o8N?zha+_&SCcZR&CUh6B+Y%0O@rCQTWnj7j z^k0Oqd=OZ|06Yu%{W5h$~4q$Hj@t`KOXzp)DpE%D_Vl6D_B z58GBEq^Zi?8*6U+bqLSB+aLkf<~VztLMbHXMde#S56lkOydTK#&ph;dF1|M4~6BgjV-7 z{qo~IEno9!;gK1ft>%Qwq0n+&F=}h=wpXKCdF$S6WGEa{{lgzKI1#J(j61Cy8WEV> z*_UoA4KL;#@qRtMK zh#g|Npi&+tyO3x*Dl%Z79lC~tUtKHXS5V9+g&QRaSr_dIV411phfuP|2aBcpOzx^xF93VDe#4ZFV|0Qk55)rB1))<*;abWtTbyvYpSF>`oa_Z_q7 z@zZR9rA#Pw6|}iPx~`lX@vTa9c~b|USQJ=e%&Xwil_U3^RPmM|k?M|3(XQGzz3)N3 z#CT@g-$$E&J`(U<6@luGjsWCqcRTc15g|5N#2@Iyg_PF#TH{^aQbwUNfw4w*H+v~w z^dtg~d`CYSYwGU1vs3x?YHd~T#KE2{uF@O=7K|aK9!SZnSM8hJ2d+uJ7`^dOcbZVF zt2Ue7^VSxuB@IfNpzTIS0_C`IiBY6)MtH27Df@!K1VKnHxKzk@!x9GU$mY4@pJo96Lv8&r@IXfy@ufEq)8vjnkeuNIJ_mkQy8OHn;H9 zTp0bw-3fOYX(}HDwOl5{GD5B;56Z1?ajH@)?rufAAK+BY`KOTqZs$q=AML{os z7czmnlJ0`PU#QMVKYxBAebk?XVIuAe-g+ImAx%H@7{G8K+_Dd0{a|PwAiM4m#e_l z<2d}aW_l04mE*P!77(hmpBs^~{;s--t0b?pAT>%0_ zfY&@1l-Ga8usZwrNP^SC`@b$LG#d2>2UNqnk1E+c>fWyAbII#9d`PHFB*a`CX!OMZ zl)fP@AEqRE0OOk-$U&@62>ZD;-q5mLn=8Po)oH>BylKrc5$0)RtiDu#mX(IOr|*Md zQE)O1MYa=yz6n~xFkS)NpaRIU#|f)Wrlu%qGxW$I5$J z4h*5)%0&^Rme~mLU(hu)Ozh|u}=xl{Nf{?Sy5Z- z{RDklWYbv`@xY9uMz0nAcr~SKQ^&sUXw;zn{ym4$B(c44T*3Sup{3YQKzXuHE!lRx zQw;8|ACit<1DJL_+Nd{Jn9VvCY1~n?!UM*&A8JL0bZB^T11-!Mi&j^3 zKmdVP6Blyis|!(_|ALYC0D4zPeuba*ehneC+b7J|*c$; z5XaCeOiK_-6gP+J%De986hd#s+N9bUx~*y%3*Kg2util`R7FEO+ zI=&MTqXTq=-fQ>{$KxafLXL4dpD}@GyYskK4I@q~?OM?cTcXT~&Ek;F>cc*3mmW#z z8^R!&w+F&MgX3gx5OSn56|L+uCw)a~^7;xrkp;-u^35${u+}<@>&e*4bJ7`tkTkIs z$e2mWt2A$us;H@+@cDs1>daTSJoze5GWDU8qBIdTU=?=|KwgW z)~jzHP(v#3_Ca07Sdo84%Hf(Ne(HM%dmw{lAitfs%9G%F5?X$!y4 zr4XFn-_m3ZgFflexW3t8CgJbSeoeHdYCVZA&39jAJY`2T^Ef%{Crz3i<{)`B6+H~H zaKtW$yVB@)FW>R_aM@eY*O^njvxA3mB$+&nN@55+Ly44erLbuT7r zluKW3`3ygGx5;bQQ`?gcqO6D;wemMGyGHQY6v{l4oK;IR7GPQav9@A-&A!<5EXIdxUg60d z+AU69%%h5!XeE(DEB&MJFe81ldX?y$TcTh85ydP-PP7`3Cqiu@ycq+m;+-U zR1z`o5i}?JQ7X5lixq;R_syq)FIEk&);!Ey;)}{W77f})L^%n<7D%RPuPx= zcT`@`JvYLl2+s=3cshUT%%syS2tU5VBs-s}Z4)Rksgs_%NcloE?sjBE3)I>M@$^7* zH0>dsiNi@$;rEcp?vQG0WPoM;Pzu5ze#CKHv_ob{lXEt2m^YnbV32;t+!+Io9f$ak&c-1*JoQ z(mP&b~);MGGok z^oZ@ca;1-T#P6tKWWl2CYf$O62vW%s_ex^+qvAg9T$>iG>EDx%o&dBMnzE&}ZLs3iJ#;6m_#4k%?oC2=R;M|MjvCHo zr@x8Jhr{GNOSiKmiYJP^FBkqAfabfls=2&U%mato5VP+*cD*wZjq)k8r;ZJuc{^HR zI0FHw<&4yw3_iH+{7!Z zH=(zk2mBBLTu;terE$TIsShvSB&yDr38=_z`8{&J<%|7DLq@^x#x8-dp@urRUYrCI}reMnWg*kBbZOJ3AMM5j0pXVlGMZ0pP5a?;PCLfQ*ou^D%n1$)X48ziPTK3^mTp0ZTh zznjUXGu=r1ciy(qqVbsf^ta*tbE0JioApylm>BFlKw^TGvZ5g(LSOi39*z*q*?Uvj zf$HP*!v@F=#)8@>d$R#nCXiV<)n1~FdSuGUThpn5DyvnsL9E?H2G26g#)T)<+>Cq4 zN#(H-o26#+&@??Ngsv23CXb2A|~5Ig%s_Ky6R#aNAtM%itS@Wk{a@y8=C z`ytP}^HjafB5h8xFG>q^|GL)xV=b$fJ$&-ucj5`}ulHU8#<2>3n)RQFdl)Fc6U)^d zdE(7q5+*??a3L-XW0fbIAu0}zGqTvTpkG6mhUf@Xifa2elj&joH22Q(%ID@QNk}P= ze*dd>R;X4!?-S!RML(tX<10k3r)#nXsoa!{O6o8Tb-5RyxU*EJvU$mw7r(t8j||(_7a8Q2Fg(Qk-^VGF#QPrphy% z4mg8dLiZ13*lw%?R;+W?Jns7gdCJE}PGN#y=zU{KI!milm$E{Rk2&2s2IMyVQ5<|H z#rs*aL8QX|vGc;wE;22Db7~M2D&oWcs=w4;xbCK^(caxIQZztO_piI{0%P;5yYAm7 z-KWMcnuAtVRhG&Yt`Y;i6z8k26hzD@x%O?+#ED zuH&}M4|1&Bk7m2+}tT9-<=o(vg<;vSs@cy6zHgCH+PEZ>Y3a+Xb z-TG&7{j9n9?tN$$-~-uWSqe_ z24FgRNW9C@vDEETY(QNPcWCZ7u9om{e>Yd@oapg!#!(B25_nXSsv%U>|SOYuhawcNiaF%+;= z5(%9#xSvuTOXKhfJfA+qp9^Q>cJGfxrxGWgk`y#I&|pB{aamDqd|t+1;rQU}^_l&^ zx&ZE?;Q*MB;^U23Jij?2jMNN;)ER}jr~^B&r}|6H9i`7=K7D%zaa?DC#q2Ep?ejEK z5@YDoFB28o$4vtvs$y#DW?c!=IL5_;XOoXoV`!xrNfJVNZ-V_@xKT4s!fnuTr&(Mf}Yda-C2PwT|@a7^>n6i%L=w4~M~G5v$U6Y61XS zf-DE7-K8JAWScnK1TBr+2_RkfXCm=tGu`w})gDLc=j;2me^q}zU3yp4T4U=r&8Baj 
zGpEYmODDd%1jYkZD+ny&x&oM^gxkrwFh)q!yLJtS-0CoooR>q!`cq^YE~T)J6+7Av zF=O70rSu)oI}O-Iba;%fH#1{i#b8Zjd0wW|EXubZ{dFPW3b9pwkdClDO#f)|`@OTD zqPD4svgtQmovjbJ1!A_3{N~GemJzhG2hXSt!^O_4_n2JiDE!vMcHGId$TrTtxZDKQ za{>+2ENG7Ru5eTla^wq`ErOO#hGl7+tspPmr16w6;7B)dQe8u3JTH4QHo7iX*Jdc8 z_A?=4WD%+}AkJ{xb8}JY!&|ym;q$?yS#DW=4&FMJ!pdr;NJ5y$_mRG3CcFn^JSEQd z)yCWW52>*KcH?+9*;;yl<3R#=}y@cI3;?l{lbrdS;VZIMZ1=(PBGw zFRK{)#i`~VjC^uel3Z4|XOmhX5oYt@vj_f;65C^vM>FzK-q71d-7J3J;Fqg5n7cJl zIix?a0a}p-b}eG9$=kwYEwC*|cUyUT&?dWQ!2HC4&CY<~g?9&}8e-0ZMSXSZM}dHe7%23#7E7Pr}A3 z*le(7TW z1mh=hV1KH5xm?I=XJ;p;j{Uea>2e!qgSbG89g^M4Ju|(c*Lb7wrt%-PM||yiQ}19( z2Vd6izbeNfeyHp^3%nZFeygE42XnMc$>f#2%TYmTBJX^KiM;&yaC~Wo{C{}$tZ)C3 z(d)rYe9mea*?tgp8C*5rT=glP26iS)M}xfVx(eTut+7%#Zf!mdh?XP7zImb;yPNTh z&ZI8hqQLVtCgDb_7@|_PUGE^}U4gU;E%``%i>Njkaxr0f1YMydA*q-Mx|7e7O2E`= z`iq~sqmY4Yc*fO^yXJ^%(mw?0jsfjeNw$1pZ{@kz_PrK`4qFFmZ^fevcU10>wN{z( zBPx#yzV`T-l;Y)Fz}F*0M_iElUHQG?_mMzO!%}kZiL9!%_@+UXss~op)=Dh&S8Tx3 z$Uc9C?S<2P@Du7It%4RC z9F_ePXTf*PL@rqCM+O?FO;COZeE=3+cfuEHFIZya7eBMEP>7IEjs~wqp?U{p-py%W z{x`li2nF`Ug^E0%WOL=4lb>Rgw3ZCdf1!C+_3n5%DcS)yhxcCrT{}OOMufWkWLCWVaMd^iAk0L;IN^SAl5guRVQz_R1o;VT7Hbv5;+7Yuz{ z8ywyu0^2pHw$5}RHu7_C0P%XrAYdz)Dwsq6hzt>{4bo+doJaz`+}-07q$jKTR{`k;R{Yx(-Tyj|w{~P|a?E zrSpxf6~Ga9P&sx8ACfzD`0Bm3_y<$>U-19_cLN!LsGk48?*0Gw1AoC(bt_r(DTlVo zgOD$OMj58O9$UoX??d_5vAA!shv^x9eGy>}S@oqA5nR+}cAJ~EwJ`@4i=j6Z ze2o1~d;rXmhJS|WJf+0_?34FN2DG4Ce@6@gf)$>L+Mnx{dI}z9Jnr`$9rNwuVI7-d z91ZKvtk8FhENN&sFACPX4PS48ay>MtY|+<^#@lY-YUPS{!u)<+UDNIcVq53nk`)_4 z|UIB zOSlcP^u;BT2I-f^G%!rZ(s_)T0La`^0br>?4fpZm4( zdjDOUAPs^!qf(G7DRIxO&~C|2;T{B|%kv#^>1A>%zcjIPr2z0tShoYoucZHO?DxeP z07rk#Db?(NXuTwD5Dlc4(p>j1Azebz>{yM}pg_h**x2Ug_c>Y%$zGLSB*RKd6|x^Q zetqB6LSF*{5pW|iaSSu>a9mSa#fChG9PKnb1Q2^na*5SSt!Q{L7Z9`DdEPNKt7@#U zZsU+X+Xit4pti!tHFkPK)-RHDF4|l0=7&mKaCtY+zzadQL^IOvQK^}%A3<{y&xpP~ zG4p9_al)kx#{MipvXWfHkp9!_{dy>bfRt*W?Aa~X=!ISy*pVHw=5`8gbo113B=#yG zxe$dwJkr2AIO0jW9?@D0mhFgmVPDga`2;}wJGFDd`pwK}=pl%pNWIcM3xrFq?)duk zA?V5T7$CknX@12gjU^HEL)tg;X=^1It8OQZ%%h#2#;}2uSJF+(_wTC>+1jDCSvZ}F zh53NgR)eE?;eqc*j$UtrK6^^X($mVwWfP2vtI{zaP zG(Bbx4fAe-4bohSUW+(f9^yFu-po3DOD*il;Z;c;mnSJX+A%Go&E4ypN$HheV^$n-?WL73y0V<_ zU;Gq6hZO9RRHc{UZwIJ2YuYzewClMYsIP)KC09H>yg0><>!Sf#hboAogNb;h5_6;$nVop~mjjT{V45n6|=R+$-t$xE026@QLYz6}XdZ!>@8JIHY1YBo*jUlB9jTd5rLqL=YZ<1d856X>9IW-~ht??QBA-jW}Si@iC19Nw#UY2?~|2|?oUg}Mj6+Rf++lw@<^Y2iOeN6XLC zhQS^T>9#(-QeLVzn9tH^kF%Wb;l9^tZyh7;Ve|c=swt4oC6JPbnqls6Ov`b}u2<8^ zyIFmYp&PN5amF>2LAzPY9nJsCJT$C}u zYKR;##ePNA%`jO~L;;D&dD%2xF2y*$!&#xQ&xK+4}jWqtwm3UWIjaRDxsRnCqpix0@&?6rKH$hyfbjDLMNa z6nw=eIp?XSjA#tWS6lJ-0oMcIWVkb%-D;l2DoMLq&J>@9m zh%81J@gl|5EHOMTn9y1UZko!>%d0=v2vr_wbOB*amjiEr_)8B=`iW*bfEq<7|`A4C6aU&gM0@(3EZgVyl8| znMleC1<`cT{x$hJ{3CvBUl{#~zs*v9G5nfHXA3<11TuJtf-Z+PJqRYI5>oEV{9q+= ztG&rJPPlzPu`+Hz+toATq~^NWfi;7-XAW=4j6@-6A}-CWj8JfC_mE!1)o^&D!-<;c z=LnYRzxlkh%}v2aH7TIMVq$)oThPmuCG!4E*$kc#Sr92;S?>p7rThtTLAd
Cyl-9so2~%N*{Bg8wY%isq}dX&BRpa2p^) zj;8em#IUuPWyfGf%PaM?dpCK5%yCy85H*#>jr^Gi^{POZg8Z66XmR+FiY%rwlUac@ zi-WC51opJrz@)PphAjG54S>MJhP%J@!@7oRSrgodzgH8?i05n@oTc+Eoh<3y6E!<# zmlJCy2Mk)>VkzT6^pm+#-h<<@t1u;%DZHj&4zNI`S%;wBRITHu#bV=J^+F2-#A z?QZ8-qFL`9ja!dp>Un4g-GKp(#RHjpZA{(r72_+=AQG>9ipZzOuE{q8a%)x)D5g)X z!XVu_9}y*J1hCBGyWF=a4?5k~x2V0Uz~H>Gb(8gePwnRA^0LjVqb!(J233VSGa=9N zEu09y!yYn##R-p6Rz7ad@Ji&^x$=4gnQ_T^TeTdv-ig-F=PTMSg-s*F z^2zuJT0GWwx!CQ6!MV(GmFgt}yRk8{;_51vjpQQNWk8`hDP&dAgaZ zwhuJAI%S%d7ZNiUT@GNpB;{!gzS`H95+(%fA7l>i))IF@9MZq)ldc zm(Q1{i!H^fzJ<5ulJDk>H%Tqm^WdR1tO)Az5%CxmrR}{BP_u)3`Vj&g#f&*=WS@G! zcnL*-GkXSVnUGh43*aQj>b#NE*6WQ z8NKISI4_WK5e`umF4N*3wDjHHACSj|KYN;wh$fGxL++~2$80>D zZ~-+a|MJw}2hfu_tOAZuP8^rkxVYN9I%KhA%J$&}wL;-5UvEA_eJ0)V`@Isr!VI6Y zD-7p;5byR=o$^-kZXUPt+o#^*YH&41r^n-X(Oe~qC=sDI^7{wLLwWCBQhcYXC_>>$ zgm`nT)w#1uR;C|Aj~j0@db!rNW7oiLsb5wOT>VyH+i&z&GGEEI*{dGcE@Ao31S4HnK7@^!7htE-lPW|TMMARRXaqIFE4EZc(Qvj$BZfpg8!>&8(HtY1)%Q`VK%)Vd z*NyjJpK4PdtjG!>59ei&1H63{(DwD1#V*Qour za-a!M4W=k}H={~8K_?8Jq{p$-$i5`v?Ad$fnPdko^;(%Sk^;Jwd|Z$!7S@M4`|f>q zeqob$DhEH`hp-W6Hc~DLbtbg?f1n-na7@V)8r)cet(QZZPcSlwQhPTr>DBhjs17LK zmVE!$!&8d*w2-FJQ%!1`h2I&XH;Im> z&6qY-tz;OhzU1pJviK#aRl=FN47L+tY`)D~?*`^;YQ&9xJ zl!?t)fX!_g^~DrTxQ8gi2t+(FBh6tdhRY}q3mKx=Ow4h@D0;cRNNB{^c!uVjClKx0 zI^DM?o3;Cd5=LB*sg2lX2&)q-V@(B?B5*S^hS85+Bu(6T*ecXopz7D3jt~1?H$C+J z2|_20eHRZ`4vc*=Zy@=4+<^`gS($_+&#j#kb_cRB3G|k-znJH(;U#H_tL|Ip1x%qc zp`prf=w2a8D&92{Gp27oLL8S=vMBj{p20cUTVlt>_f7P-r!_od=akLu+?e@2L{XF` z0v(mB=fn(@r{XA!sEckm!-+sm25?YAYeLVy zRa{LP?$v{*OrW*rDC~);Yf}ooRYdQAU|*$7Ytu$@SSxp4)ZEL~jXQPGj_mwOtH z*iZ&ziqt|aW{%lxjm&DiZ&P}@fnA_X(w_9Hvyl^~))wJk^eM^i>|7L_URzQw9}h~s z6eXsh*c}O&+KeiqW8b&ZF+Eb>Sb8uOA5`W=7jX?&eWhUl{gLN#pjlDn>5cq}YvV^5 zkyEV9;M37f6IhJz#5u*st8-LkJYhlFrbafJV|^%CoQQYM8D|?Ji^wfv_zF2}$G6I! zOF}9n?+1MsKirFLZ=>Ka%XiKhJFG@AyfRFoOH@<62MP@l&?s3kfL%~(Q7=o{o=$dK zHbtkC#V8^|9Tp6az~J8uavY{F5uJP*4I(ec6jQjhx_2?Ob!?a+>)^ySI7Qzam@ajtIGa!mJMunT{tc)l#Gmf!0P|*VUU? zpD>2jRTvpJCn|`&hcC;B9Z13Z%SSmqPGYxK;^~uzt?X}aRLf>1rLx*kMzvn$?8yaobn?wX-mhw$&?0l2ga_e6v(~wcI!x>o* zCkl;-)+~?09g%vIJ+=MMu}Tv1EwaXLzm`>k@kF!ys|6ghXlUci3S@rvq)boocg=En zEG*hKIZ1Zb^nTSov=4QPrg&4lDyP(O3x#xwAlLi}uRxforw(i{bIPoE)0o`J-1D2! 
z(&6$A+nn!=z#Z#hYG{r}5-Ok`YTyt;2&cR)gv)|SA)kiB(~aE@Hw~vry`v8S(Ci5W;?wN}@Dffz0MsT}7$k96P!Rc1XlG!w`Z~KLUbr-;_GE_;{>8%Ea@u=#g*KnD};J{S$*tY?~7XK&2vji*z@kBA=Gk(y)Z`9Yc?xG zt7t=KeS@YvSFyc{MJ{(|EY7TwLSnLz<1fMLRWbPCrCuua6!CVOab%A(>nP{`R1Z;g zZyyyaW}>35VS*>bavY`<&a^ctqx_sbo!vGBX}*Q@@gh|uElLQBQBhYhF`h^bVuziI zn~;1EVu|d3p!eCN&eMOooqR+dlExW%Brf=*mj2<(vY%aVslfN%@h9V5Rs!bHJx;AY z&ypFE+JYQ=*3X8kUnJ$R7}~Ix<`i$-BsB`jFt?*>iF(fDFkPP`a&g4|y?Qy+p&j)3 zB*WgQ#Ho(>l@X=xo>^zJrTXn!6kDB zEq>3!9t#z8<$J$4g}6LFSy3=5+BQ1bZtR!F9hs_l+32VVzq$pjx4FyVUu2Uf7S!rp zM9w#Tw48vlE&gEZ1mlm3LzG(`Yg9@2oSN|?7nofJzU=G%3#z}ahV_wUPJUHqDUYuw z`-=-1xkTla*JM(ZRbpd|=0-DIQB4_=apX4Rd-ojkwg;Q^VNcuS4E9;07gL4&VvA9n z#^Jx9E9<4VpgGk3vDEh)+hoQi>{7=~$}i34aOIbmcnxja`eEH)XT<&!zAgh}F7ABf>PxPY)(`w##vIkd2lOmQV;LhK1%zg}hK6=0=S(D)PHlr}64;`Mw(w$giB>=gRB7AxWfh zs^mtzK(>tJH+;>3)Hu!@H5E1Fu(N+{RaqCsWX~$^QG~-z2E8Ot#4PFx0rV@P4Q#>R znqPDaKIPjnYIVbmbk}>VZRxouymG+bsI7sjx|UHucU^}Q!o*Q?M#PTqM5pucH<~WW z*ICa=cY1M2%hfk_&&KMkzrsd#qn}_BTl~=(4Grxqd{6<^di`e%q(VdH9yO8kR1bj0uy@>!)4j^dO+_&JkT;~r_xsYQl!52V&70j+AaXx<X-81*A5@VkB9(4{Vo;+n#Rh2EYw(ykgn|ZTMv@p& zvW||fQ|93};p%mF*!DF$4MRSp?p||%cIA0eQTzBsq9=)_Ww8*Og?T99e+GI3bcczA z%jO&56oDt~G)X>CHQ%SWS6e>;?OhV2#Id*h5B}P1<_+J>yCoike}B-L=J3ViD}^?R z0>Tsl@CeWczwOzW+1~iM9J25Q@Tx8_hH?_cAO-4rEbpOqJidKy_gf|9GO{`Uq^YuF zRBOoHRr1D62zTrebPv$RwD-_j^BUKJ%m?T2J`GLEL=$9`B%J_uG}{6QXc$7SY39%y z$#X-h)`US??M4coS4yv!=%$^H*Aa4TN5MUx9hIn&xa(v@G~>D9UcC0 z(CLiszbkTvm)d3pvo(~ABn9Zw({FLc;(~vX-5jM4+(GfiA(aw=)1~#Gg0>&-E&C)1 zMvKH`G^?Z*x!V-_G5#|pw%f<`sOku7lnxe3sB#o5A*vnn2YQnRjT)gcUxZ8_$`;Ka zNSJZ%{#72h*-+R5YIy_+lTU7V-Cur@g1fbuolGTp4q_J*oH5R=FQ)!Akv6>8ZqQy> zs%_IFqz{;_gsm98Dq4PAEu&eiZ`EM4v;eO(tz^m%Nnc})zts=Ye<{6ltkpOLY0WgHgE@Nm;rGi?0G^QjUC^`$! zPeMLQv8z-QxLQT?IX`Bl3^1LYoIadr746$H{z4;V$aUUL@@%> zNkm{qko5nhu{-G-~X@q)s3 z{JieWvo)pWNrx7|h)32_UqV%~ z=KgF59X(WH7a}JTkENh9 zEQ*%V@0Rh&op5SMP}p)&TllUH1Vj5Av6&^)Wr!G8Yhp!t52fttkYs2dAeOOYdKBZN zjkZXdNw+R&Xdu;Kp#Fr#fEx51HkPc}O+*jarR7nNv2Qu&(y+ZMl6jHv+a1)rV zI)E&77Ryh9v6}hJE|4{2)x#9!K(=Gv{MHpuc(tg%!ZZyp4vvQ?!IOA|EAV-hhOmpn z-M!nSP&xBkKVjZbls&b7*%kq+=^AS>WxQNh~av70a__|aB$J5xK6-!sDnwe1$}QguYES;b6k+4axqvC+@h?7M&bJ_xT*C6eY6#wC zPz;p%pk}*1G}eP1o9XaxNOdgaqbM5}7YuLthjagjEdN6g{=1<3H!}Yu{9EDq|CIi> zDd&GMPDyrP>e#7dUgj3t!Cw3+C`+Z}Tx@G)R6~>A^a9iI6Hk-EV0*B>R9H<3$(l$~ z^Dv{&&W?B?nHolTq_DlbY#39zjodTskjBePR_`OW9tzjhSXq{)O2GY9u>qR-(>Snu z)x~k*OP1-4_MJUMTZZTcLQjLy6UXSkSZ~~mWtK>bs(Wkd&25CW6~w%%RGBDK+s&>` z_VHCu{ASp5%#|#= z3P~XUOh_O?OBdUud?#2-{OSbOM$?AC70)x&+GOOhTPTL<3cL5VkqI~S^aZq2TBU!4 z-(+HHILT6?yv38mRbJww1Mzg z9-a_AtglUw99b+1)B3FHFd7jdjVm~diImME_dobzIp3CSz@)9QLZNccbGQ%%{OMle zg6S>%3_>1D4bak4c+_3+TrE*8%Ug+s*7KEOt3?eV6O4V}8$&3Awt=tNgVvuJ{tSUL zE?za?)G}l3|F#jhc-2FC+~ZziNX0)nypk@cSka z#C7h+knHGDVQ*%um7wIBsr4<0Q!Vc&gB76-f4Yk9cOY%9!CC9c_U1UV9{rLK3+g4f3w44$rnR92ha%aJnj3s5qAbZA z@jhCa4srgq?oK0vo~y72)jN(PHJx=gg^8<;BweFlGZgZ^T5TnLxwXXzj1(7uqOc z3)ZMR$QLkcU8y1np*J&K3Y~GZ%FoYRf5pSx`$x?`(`}2ES`ggUs@YD=2699$I{gJZ zd#%9&Pi-1y?(v&?y3W>da+?tb;t`vMv1+-|S@pF6lTjH$mmdma-HUQ4!XDLKw_%GftrJYxcVu#(Xx*m9+QIxz&L9_-Y@8-skLp}+YQqT&Xxe0w6-FfqWx04CH*gO5{yZZ{ z99^y~j-sxewpn^l^<-Pl;F@2kEY=yL=uJWa9?gBS#EsFGW^wp-9|gxvF3Ofaj$24^ zx*3tF^yC=(^8M}53GBu5vEwbOac{sR#JCCrRvVM5bh|z-e|I)CfY_3NPm?q#X}1p` zn(dnxk;j%&nRf7H4cd6=Jlk>Y3fI$rMgd7d`!E7c!b?3*l;RcK2XnqvPE377fh#nwWekO>8hVo3T28K4SQXgcP4xG)9J(nai+>j7ca=A1>|${8^mLR` zvPb8J76K`15A(7yH6e{G)3Q*lyLpL1Ln0Bl1bpK?LBVi$eTZf)6n~ZD(a4`lwp%+R z$nkFOvO-jwC(T{ym`Bk~(yKc?s&*;YOYg>243ApxC6HETODb#>8f0llKFz0H6?)tn z8%Y%zp=)NWCxFU@8QO%a=C>uk>le^y6tq=aC7#r_lE*afaXAv{T$IpwE3Yn%Trwv= zksB_cD{Vi`N+*=d`$;%6UzMvMhX`Yeo%^0veNYoP($?#K8(P5$`rmgbJpn&RQh2I2 
zjjXj6l8vUiQa%b@=S>3H_~eb{G~ck8TvjHTT#$}AcPPH`fbcMKX`s&wsw$s+92cE{ z53^U7N!_Y<&Ws=&?B6UlHupV18h_KR5-aQXdy^PNx%k(wI7{t?%MPnws+a#l`1bn$ z#+&TW&{|V-Ion)~%&^yZVOxypaY+12vxM>x*Ugs0-h%Ed9`PS0BHs>FQDy?9N9z4g zNp)*kKYsDB4L3ZTiSHcF75Y5Dzf12IiT<(6`+()durI=k25-}8c;)`Mwx)eZf}WFL z3|D?j$J760u1W7!LjLs$KLp-+sNUHlPa-6w&-k-7g%TgZKL~|{JKaX{dQHTSSC~M( z<{9Lcq+#iyG|af|suDHg{ezY4iN!A|3{#B^5T)}}IUSWSsX}&lZ)Rs1q4c5Sx1f5e zg3X$YpAmG}MwK(YN?`^>+p9(mtI(;BvsGt~S=<_n6OFZ+`Z_&FK}QaihBju|WMaCF zO$Lc^QgJ*c2LU3fI6;!{>J?bYZN3~6Xq?S-Ki$lDc~GQ1{}ZOXLBvR5aM#UNcBD^l z^UHx~X2#96uFmSKS(Ww~0=G&O!yM5SJpW1rgY$^iOY|Z67Tz75DQvm(FzwCWwD=!k zRxla}L;redf_dUzA=G8JN5(}`t#v!P+VcKv(ec}$@+jThTibHgse<`F7@u4rX z&LK*N%1ra(xwxEym>Db6-8$$Zx#LTWtQ3aKankddcsxB!5sDd!7{Ah58^{kiRiMg zkw4{O^#KG4%~^WclF;hban``7$QZhzq&y{t7qls9n>$kAJ*M@neDAYPTYqzr^o)fp zigS8iIf*wQvkC3RLCrRqamLv|E20jt1-ZYeudwp0Gg_viMOx%__RGEPnP&(2Q+J-b zng7iK9y5*#jjQWyBPS95;#xc>C5zZdw!D6QH~eQ4Up~gzY%mp&@DVmY=nFoMQ}+U5?W; zk7PT`RxrbwD#ju^x~8`$B?nLhw6!DFhYgnXA=|{s6-}{Z#ztif1I}e+$Tr0`VZ@+` zhbIYfd+z*hQi<4`k0ZpkVrB~iBiipkp0z$B!B&TY=JpsZOH0gaD~icv$tK7i_(=4Yzz&!1J*u_x#jeJty$k(oi zonc|^m7Q^BD?Q~sShC!#;q;BvL9-zZQ>c+MHcSkpvQ!G_jrHMv z%BuOSR=m&71d*rR+XjJl{C6c#gW^Waqt*b4Mgp0la_6(hSP+7uqhtQ zQyJoLNUeBcsM~;|n+=eiCd(XZJv+1_Z}s$;UyA~|uH9yln{?3$PSo6LI_LHb7$(2= zfuM;R{eC`AtDf|Esb{efgD`tnjCQkFOl!cX8CT9q(?9sp6WT+v%RN}UsNZDb%Eam6 z49ujgkuz5rYp$VQ3Nx&1G9+_iNC=4`eW#&KYeSu#QJ{9p)$IhrB|uLI>0L(|hUQ8E1EN^8#4tCK z_5xETS4!JeP+UjQZE+XZ8yK*w8d?wa=rnmcSqqPaNrEFNMpl&<#E)cI@uou?%a~9nfadi>S0bq1b4wc{eXw*s3+wGg&o&s2Smt5*%Bf z%B4`n3hv4lJ%mr_iQAv;G}g#9wBi27H2tG2S&GK_lcxr0QPJ|>G=msG1EL_3sp!(* zyfzT)ZYxlW9()1>(y&isfXU~=EV)@M@ASST+2vHu{f45vi$kz}-f;<&K15~w37bsI zbLr|fZkI!)Er#SWQ*6ntkw01NP!2XkYzeXlJ8EiD**XiAR1(d^w|q|f{V7t&&4y+-vFF0f%{0Z8f>@^p*I6~YdNnX}7p*bu z8S=sSB^k`0>fA8W5pIb^DOU}%R;@W{%3U#F$XpmJ1HrD;Y1UK0kwr90LRIy#+YAV1 zS+<3~oHO;=2*!j*9T5MrZZz^oM;U3Me+}5<@_5hESsC~4LH{PE*HVO)w%xq2S@Y0ZCvd}x#Kz29KkgP~A|*9m4PjLA3G^)^ zYHn^1W|jn)=MJCSYoQ!^xP3EVUa!W%QOP zA`+r8&aJz6hBZm8-c**QECd}RX+*|M$QCRYQL4+~#l?J#wa5DxpU9QikJ*}lNrADs z?zl=bZ68pPEW@|1o+GM>#)?M~{rBmhE38N2Yzy(7M}2Kz;Pe4X|2{px)#U919^;*O z9RtxIJ;@TASiC2Q18kW((%gqQ5_vv^lMlwYK{oNPL;aN=4G|IT)9v0Vj5z@JtkJ#M z+TD9>)S=fvubo4UOd|(0Y_yFI!>1?uFpxPfb2m4yuow`OoZ&z9GzYEz8 z1hCX%c^zwU$r$D=Z%@bMQ%OrdN7;U5_m@v>I)u00R2bHW=;-M)V3njah$#&*L`6R0 z>D!ecN0rm3En+;7_xH*NGoAR&lWn$l5YI z#?CL*wyz~XQJS+_aI+T?Odi$+PF2tVo~Qd?!coA7$o`kO)n*Lr|B?XK`hOy2|1}q8 zU@_rxH*hE?P|(5Tgyzy1C^nl+iMekHcd=5V%hEF>l8Z4Zt zwnfFT_)p$}k9d=M*BM&yi1^NDX`zKGPe7p3pv-BCh@uoZ*!&{ae?N*JL^@qeNOkKB zJy^-y9^BsqpMH}nc*Tq|Btbt(A6?CxdG2LbhP_FN)24g5fL&l~7O_OqVhmY>B3EFO zgW100-0GemzU3*tpKn93oCnPaz^6ok3;e5)gDanA8aWedewzV;Ogo=xJGuhb=ts6r ztmv+1A`wF~z?plEvCF?Kq#T>XuMhp&3sj%Hi#bidYHfOS1z*~)&$FXj_W+;n9jK8m zK3M_1X~t@tckJdrc@5s9c+wI+AwWX@a^)s_EAYUkVZ4~Mm@N$cllrjr;|$(=^us`J zrQ!g!jtq<7GRk&9$p8g*4&`}~6$ZsOoyA}m&rcXS#U|VyxT($H4@BpcmbS&S9fs15 zA$*jg9tz3VeW+qGrrNT)ivSWi7yI)>6<%h6(JuDtIZcGEbj|oAjUJ@gOpvk+faxk`#I}dU%%}XSkv@ zd?!fG#am6&8hr|C!X%Z-roecHTfirR_xO4G4o-ncn&!WiqW_aL{y&K*2Gj4y#_Sf4 zIj`WxnUuCEr2-KIuc6O=2Tunqs^Wy~IG|vovl#=F5A1qtWnr)2x!@4bixQHn!?KS_ zfoOaM|I^LTDG0y>92nIr=A=vNgg9-qFgN3nGMC`hmkk=2;>YqrqQE&v%gB z_!^)+y^^y9SV-&){P^h8_BYI4UIwj#QqMnWPTz>lCni6gH z27n)rJHXgU{Np_gd+MWJZaL%WRFXnHaPbYhF*Wc{w=k%UKnoGX8CjTFz~c&P2K-xh z)Ev?+|MuIxN!|6!s=`tmV-8MQQ$|pMS|~w=8EBq`nR?U<&9Y-!8%``#dS%i%0Wao*O$n|wonN%%=+De&Kr$*{X+q;JoHREIn z39wU-LdG{_DjTl5@K+Y5Mr7Iuii|aykBVKp$~deWvakV5g#;aU5`|O-hv7O%<-7)) zN=C8X~_aqn{DHbiPY5HG7r;Y6lFAv;*3cT zr0sI&*@{!An1S=X2&PscJ>QNQ!hNhCchWlN+Zx4@z8q7UT3Y8MxG~m$52^<5@;-b* z^5>|>k8(W%F2{A|Tk0UI1Tca$HoWpe4o5cF)mUkZnkBd1g$^pjfMYT@x=hg#Cro{L 
z13mu8kR}O_e{U(BSS?|!HaX%nMM6(l>vNonaqMagTYN_kQH07I&I=IYELFDFcHD)p zd@rfjt9CPhT2T(5``Am^*EH^`;LR}9E=jP`*wu4SK`Z@T3Fg; zPSL~NF(r7pSfg`y@ZifHOg%W$1^;_#{MQ7%3$S-C{_$vbn%^RH-dlUV!zQ0-Fyvz| z{?@_BNz~T7L5WJbOa6xN_zcSTy_5>jw(PF9sulEU#0lnmGe8*d3{^xtK2t1ymyoZ; zk@>t@e`J!YagYz<`4D++4aFs(B#x*a$&AKxD@%W0{5KbUHAaIYbQy{WnTxcohxm23?hbRk_hZ3RKR7s|por<4FW_e!t zTAPhV+V1@R{ndI!zwU{i9Ux+!Ne+-{_Ym7DBIZ$^kLJHjKL8`{dD$<$Ntj+$ zv5YC}upVY7;bV|lU2LtF^=B0#>#dFzvR-WOA!5NAl*i`A4nA!kOlV(3FRf36os>H7J&?gZVf;Za1rWj}2%_Ro-4c`NoMMkBA_ zHwuZ>ZMSA~`Qa6e8(%@`ksW~gML5wgSc&}h(-*(V6R+1Zp4K|FyAsK7<{VA}^o;F| z4xL+YqPD*nds`1N1oZp6=k z{M+J;#?3K*PMr;G2P#_`OD`B5lIyec696$0&rZmlAuNxqy{)g^szHCI!RRD}QYERj z`9A7eQ==9c7%e?}29t~SKH#6nnB_|(IXkqz=;Wk3R~E0_hPpw!Z@>$I4R(_K zeQRM8$EsYdI(N5+q2{>}{4uSJeTXC2k`&YCcCbA~{APuV zv$q?^rV{6HfpM7N2Q(q)Kbq(Uzj(X;?r0TMC08Ui#|i- zVr6v~vyJQYoALh4wHPqvEZJNe zZQER$Rn`gBKwZvZA)!wgL@&bPPbIHOt9e-qv^RMZD$vlLSR)Um z_MUyqWwb-~m0xZFg-FwWcq@f;8KBI5_4}UZGLttd+xvYasbEsD@7wu-N5K4soihQJ zsj=I8ti-DEOVE1zuGuVid%LWU04tj8;q%C93L^}(pRWvi(gGG`12L#|h0@r-{P~yKpXF&e{9ZU0X z68J8_e_#60i?hBn9r^4n`k&Zgud7E z?i%xUa(OS}oCi1dtsu|>rw&4c1olV##nUdm5~e>tVYkZRhug`%G5>1k?B`=dS*@4G zLacJiLtiZwf&q5u4w%=2=~NHy6yrm?u*@^dv4>H-;);0lHu8G#e*kBCux)gz6L+cz zPusJts^|I;FOv%1yp_D(haG4;dOoMFkk_~njPkPb24l!A%MmoI^cP5e3X7)}nE)&i z(Vh+XY|4_n6qr?k_^ zcw#3oH4LFD=OD;DF0TYs^b~-jE0fBZ$}X%dfmFw{d=&6C%^t;>B(~0;Y`>#_zFz^P ze>Q-Y7RYM+JiAt$#DmQMBelquA)~ScbaYMNc}tHN&-UJ;4^{WHgCm$3G8XV_4*_u7 z+kVpqG@zUvIeCO<$1Vj6i0<-6T_uMYwIid+F(5i{(8^-4YcK4%5MLggyhS;&3c zqOu#y9$FPsao+=#q<{tE%@{JyCKpDFdJuO~ah@Tqpd;bn>!TFU09{VWq5T)&!I`1O z#oKAX4b)y2&qs-{noKynVh`UKeFV^-4p%^Xw^55Suk&mlgu1=dR(&YaT?)6$0^&E# z#f0c`#0v$t0B}#{4?xse4g+{XxYu@qaVJx8+2w&nDr%VtD|RfZ{cap!_vE|KCYwyo zk!rf}KrTS=!5E@DJ3?vElR*nV3;l_76b6+XH-frmxZMp+bkWA4x*;{|S9+j3b_`Q- zx$nOmDLFMgX8~}I3yy_HLGoLMs2V|UBx8T}dd|S6rL;m({@c122BUi8KSdAZ?qLI) z>+cK5>%XFG|Eup{)9L|;+5fycs0>l0o9!TV&p-Zz385#y@^EX^X%#{SZbwyZXM#|O zy)2)`Eod~yw`MsC%E2dmUS_^4ui}&S1~9cK^12~aWSbOvp1y}a=PM(_5F5+np3qL> z=t+voOk-nkho01BVG_M+mI<@XYr!tul#$hg{5kIdMh&}V>4#M~C%40LFO}3$w*4!) z1Rz}0mIc0y@mvM0_P?QNKD)o%??t4#FULX;)t-O9eE7JwCfH0Cbv{lk83lvjv?U@qarDXq$hd$`>4d z$;|z_m+wV7*|NBvoZapfwFQ{|CmL|?nB1%K5aWA((UB*>1(C30mp*{w7YJ?J8(PVy z|DGL&+ZZ{lkK#L+h0iNC1TPnAivv+8zWrJFupf?sCqD%06&jT^yyc`B+~T(5!Vez- zfS1XLZP71h8xJ3RGRCv`j`q9px^d};2cW!xZ+|vE#dAx%2t`>v6N~_Zouz(`pEom? 
zK8yqdb(dj(j!@E^Mt&;yqqjJLXO@(YfW)u8G@d7i=^4W7h;HW2X8{ovb$t85@;N$) zHwahW8h?_p={d?2gWkB0+~fQC7UcXM<1lf`2If0y1oP00OFQ`WquDxQ;_St>{34Mj zyanRN^$)`G->eF}g@pg#U0})i@6gNt0I-3v{CC&Cu3Jxzk8TKLKGobge_6R799hnH NT~qf;_9e?_{|oeqZrK0; literal 0 HcmV?d00001 diff --git a/assets/cache-http-pb-swagger.png b/assets/cache-http-pb-swagger.png new file mode 100644 index 0000000000000000000000000000000000000000..f03b3a5b9ce8990dcd54e19c81186acfd1968a92 GIT binary patch literal 227244 zcmb@tWmH^U(;#}fY1|1KJcQs7f@=sK+}#4f-92eEf+n~_g1fsk9^BpCA-KbBp6C76 z{btRNxpQY&i$1cgcKNQV^G!)X3KQ))8UO&8@1@060N|h7CKLqt8wSs+Apk%Cl;qST z5D*aLvL3&mGFy*l-cVB;r@!63y&pQe6EJ?=n;UU6k@@>~=H6&U{rExN=q)L?_;tzq z+vCml**cy-!FL(#0fx%=G59Q9(9nfCjriAB0f9lgEWf&A_xJbDPSw!%B_j=q2A z)%m6zzp{CLw!Xg3CoJdvyX$MnPiW&kDGjev)s0of{m=HZxs&bNwLujfYp5pvh_!AYCpsb?1xwFg0{eC|`?___&&gqj}XXJg{^SH*js=}O=uheU^ zL$|lL87m#$zQK~Rx?U58H%DvN^|mwPz0paTIjj4(y%8Tu9=hT^uHU_*<~Nk+3(GF7 z$mw1&v4!eF^9k??RNjg4a#xtYy2X1RktLs|KwWNo~#>W>ou1pMX zlW1HYrN^lMqi#l^*mKG+MUcS*^=KE>7UAD(E-zSkIxyRIy6>>M1}Ter!UZEEef$r4;Y zy1lJ3upDr>=+E^l=<8e>i*x(1JK4N{d&cor$3##2cXW7m-*M~W?R5%MesZjqu4zV9 z?=8$j80POemcMy^sQ1B6fbs3Z+UEKC%FX^h`GSf-Ym@$vw@#T+ z)3q!d;&=N8G1Ys+{r#||Bbt}2(;Zb|v6-nAYq$NU=ShOUyzC5;1U28uMlO#R1Uejb zMl?qI9?9rG-W-S-e9@CtVdmGX9l!f(W+3?ItF5imb>+{qv$LIz!8WN6UXc$O=J6z1b+LGO>lGfc7E;>?V+lh4#>`qImCxl(Isl?}I}SuKxI z4s(^QEy!b8`&Ha!7v6S7P1+X!+?OEX54|YgHqbnP?UlP*)L^X6Hyxb{ZjtRI|EAG) zN*q@ZP8;kPz0PtD$KQ)OI`wH;g7L#a1?H6@GEY|rDL()VzfwV;o}mBS17MV=2NbHO z-~Yc2|DQ%Xc%v;V6V2j~HdbLgL0~5Z!q2X`)LHUF zK)#s%+U<+rUBF8pbw-sHSQJm&*6QlGwqLc&bNyQMDjk{t`E7v5PUuH zalFo3tXJ2bAkZJ4s;Rr&X2FO3&T`h>PRoNLsAJ%GtDp;^5h1J}K{`E+zn^vuxptNZ~j*-pm$#11JVs7OEcJBltM6 zA))9a14PKNX&dgO0N?^eNf$t=oe?W!aX?raLH0U+%Y_jN5{=^kZV7pgHZ!3Oy$zacg*)G(9rgY@@{OT6y2!aC#4z@rGos$Q4gmJ}dU;$6F0T2f| zIBtB4WNLDpLEEX>AS7_rorR!q;!sYfi5kii3?e{_H;8O{jiAx53EbkiB3x&@tm&2tL*t_t6aqvS2n41L6KN;a>qlSS-e}knb(4pui+Nt}|u@MV?*vgGf3r^3*^vsG)EmQI_2q zp+z8={Lbft7f@`$^vS~mR!HubfRlSpZsR2B>&$T!M@(6k@iPb@-4Ga zdwT=bg3G1_b%uE8^GAKd@(O7Cxwr*BNc=zjRRCWV=m3gglK8Ta_k~^*J1o}Gq($7w z_dWL`Bhs0fq%=bq`hP*f1TB(>W&s9sV;P61!jz!-u`L>?U$h5@7}r#fDmu=8P-X(M zLEX6T9Xe_Rdt|u!8=dDap_FD2*sEQ;2;rbqQi!u+UQNYL&J`)&LhclE4;22<3t$gj zIA{A<=e2N?z=|<-5FH|hh#jbyAg?E!B&#Z~hU%#Fomqyho&0D&XU%TQG&R?&IrStI z@BJ=t&hrw~lV!5n*~~wa@hxuVPb}89whH6AzR|NPPIae{u~AdqYb)3_1>g`u1@r-; z+icp^tphk_O`3dl#R2Tb0|CkXDQwnKR>_KaJ<*G58{ zM-5SWJp&VQd-=TJ;Rr zc9{08^!+gio|y0}z-q^6w}fIOn`=K53%<(i#j{)(+R` nhP)H$!?wCX0R|WUujM zL@EeIB8oERjJlQ6mdjI}i>&*W^BjssaZyu@`W}Q4DoGwv?s`P)Dw z(*F9290e|DiypwxJNvT6&H>Y?IWDkA<+#e8=@3i<>;cmR?q7jqnnpt#4+k(~Rbt(8 z(5#iW41&icg={SiN^x9xQ(d)Khz}yprHcLd*fT@iHL{?29#w+J zQ^SlOZD$o!y2v~J9Zy{X19Uft_*!}NwDsVA6(xI@?~^YWE|$I_2ozo%bvu5i338j- z2aBDZb80|A5g&zFOdQsOyU`KiThLhoBzH?peVCv_{*VC~3;0 zi?i0)e^V#R0BcMq#6LWIT&=LVGXL&qV=iHSsnvvF5o$2jx@|-J92yxUq@!;aQTAG$8LJ8)=SB z8FIY96+(0)TW?%g`dy{?^*t)!kb}N_ws~T5i14L7Z(NjpS8QC0G>D@4u1-2aIHGl zA5e=%ORBbibCHM-qLN|~Qf`pSbuc(z`|~^NKbnRF)~ELah`l{Gx~#lZHBr}%Aif}n z-B#vHn2d3=_QFlNN`Q6O9PFtVFP>I zq8VA2cN@$d_CF@5OqYtCzg0(ZI4aTUZNF;rPpuwv68k|bke<``p}sa|gqnirtbS7N zIFhEZfF&-&?4gG$mCtHUk4-8RlOszkF0}^VLE?FqO#bm&@~M$iu@M|~e0Td7|C$S5 z#J@G-fB@PuY|)o!A23>4Y)XoKcDP{u3Oz)^*9zkbl)%tO~@5WuiSZnBGDKAD$Axej4an;&1KooO-Cw+E!r6 z3Q@ItysKiR>i)u`cdLszkEQ`}m>>tKnL^}0jPpFlPSU1wo0S$^Hd5nIAHjzS)=@q= zO7ow<9u7uhh2V<1X>$!X12vf%3j!~@JN#WUyO|udK_+M$)&T++BigLD!eviq?4zw0 z_iP#X>LPXWc*)l(BVng(DF3$c$e&Z9%A&-i-OzRhbDEz{D7oCsBv`FZnB)RIKN&-) zVAYqA-Q#`DGrs2=SAn+^C%n zQ@`oYg}l~jA9C{t2)Viag~7!Awl-J&`_>iH?<`Dcwz%Jm?W|aAkL~(ty)A9@dAPgY@B=Z}+kw==b2QQn4XO|997hD(&S2Tv>^eJX 
zo{<~&??G@!E&s#f0P;nGUkgGtz(`ja{6#&z)EX51LbD+0oX=n8^rN(bnRRZo!3Srr z`AK2Rz-+W$>z<(^`@lqB>T#UX#IvdRm)rYHza-swtTv-`Di)1qYbpY9r>w)MsC~M@ z#EIx}V81h&aTchg&3g@}!mz=*kBY2Ve>!oebd&UT5-aXsm%j`V%Z~d#FuW>bKqiA- z!oB&@qKvlglMpL6OIYz~5)rUm9Reuu5R&`Il?hUjIXGYEdpSA4d>mXoZ_;m4*;92W z{)z)cL^(V>Lrb?`J%b~JY6R1NeO;w}d5OjRmbQ5@bxtoD>zXZsllz7np#Ffm0@`xjKBQw`XLXZEjk}9q~}bPh{E$-*%t4dhyQR1WRP$L8S0&_v)!R z?usTG06TaN!f!MCgs!jN-ZYf(t6I>A)%W9dzAxGk1sP877QTpMr8oK3)&4s^CmZ|Y z=@a{nh^>yP`PJ6V%XykDo*efm-~w~MiVj5K;p9BFi#+N+;Ptn_&)N!^1AltUlIUbT zHYZl2$#>6B$EXu+4zXNgR#F7XDzH0G$*669@kSVH18?R*i_zE=gsvUk-jgwIQ6Fu% z8<($S6G|+bHqNbaQsvUjx+a)!kUqoTVL@G7Q^1E%qXkrqww#=g=SN<8sRALBt6m1e z79A(XC)zs9gpX(r1YX}E;}Fo!eH*5xYF=F_3zT$}#JV7_j+QUWrSpx*v5vEGPVG4w z>ZeOEFmHw{5;D-d;};M%HLb^e+s=w@llX8EPh-hT!|i~wY-}xa>}++Io);dWUqvMU zc58lf<`kiU0rle$#RTf1zsv)U;&WNjJ#5Zi7}<11jufbJT>h2AxYH(?Ajwq-Hy-y3 zQB0FW^)j~f7s8}vv@yuP3MdTCSj>IZuKjS!k-mh~H#CQ65n$x2QxiPrq=8;1#$8At zuS}vD6~A1Uo6#&~mFUNV1)epEbNq_4m^GdyD;bz*0`ulmFr(UYPT9lWjOiGt|#A7iN6#BAw!@aLL3E-$I3L3|gLzGi8OAQ#H2R7-+^BmYo zXg-);xJR9y{W2pE5sN)R@7NGxiG9BD`?OV(1ABo_pGnPqA$ejmKK;i4vdl#p*AjON z%5pdkj9|s5x@Uk3GK_*AR*~}%6bK4W)G&M&aW@ud3u68%KhJTa()RfLSSZ6+V_m6! z*@EM_){AElxP0QlvdepNQ@K!l7QGx?y~-G%30s3{Z?B|HOuhiz{fFt(ZSyxX8z5bP zHbU^%f`Adl_DsO4X=+7;)0ZH@)Re4jk`_ z*~^`pH5-!10(F8+PdwvQvx3|+2PIqQ8jU*=uKU5wKq{CXtI=ZUu7STO`^?u+OHT|) z6JkUD9w(2`IF$e?w`B3Lnl0;ePQpwXFJ@&^U`wO4&`KkJga4_nwgB>X7Lf_JmgQk! z^+In>2T*^;I}@lG$5DX?nqjOpGWF@tlaA!<_%zWp;2@h+S3m$_?j6x@iY?v&U8X3H z-M`3EsejdcoRnq!(aez<3LGkd@E2pT_yb(C0W^US9-g1Z@#UW;s|LYZ^q$o_?y@Ee z;kQMzCS8ezwClr408qOCQKCIeROWAYs_K-`1=2_(6JBVP9+ho$1_QHk zm$+Nkyvw(Xx#SqF*PSTjgE~8bGb%eY+WQk2sOzm`&@S<V70PS<*|UwScoO7 zb(Hg&6G+izND5G&IGRYdHvA&n8>giFfdM+8NkNNE3@p15i11O&#m|^y0c`m$Mydcu zxs7ZZ`NZwRiJ2I1%e>LxuG)FZ9oJIgkJpz3H%Aeb$blvK#(rA^7Rx6vEJBz{$j-sc zd^X_5h%n8|cOBGRyergKGD~~C(eTkwSM3#oILm>mum!J6CEI>FLWA{T}G&Cz# z-E8uY6#$z50jA%&;p^`!Mxbx^K&@HmsXeY|u}^_G{C+9h(Lvh4#wP_Gpk56l1nE5; zO6@p6KE(-;UsgQ!+`DyuuJlv^N@Wt#5KQuUmwu2>{)fmDl#mE*y84+&i5Z2#RQjs} zgZ_bMB5BMxssJ)S%5qdQxHf0Vykm+(g!E5AEE!04qS9;94#k-EDNihLGxK<4EdK>F}1{{xGiIOh2#fk>Pu%m^}N@}TJ z^j-4!dmDg4pZu(vCV;;emV51S5h%>zFa%)3rj|oq+kTD97k6hjcE4~Qj zkEl;IGw*=U8*I=bJ}NKX>X$jI+TTd)WrjhhHYMIvT2Le*NbsX)OfzI$G-Tk9-iknE4jF+vab?S@M{##5IC^28a8O+h&^Mv z_8KDma9PWNSUQ8IaIIFc+ozu8V+X2C6WYUp$wvTQG*HcnlLb0x)~H`9;$PcGOr0E{ zmu)Rf+J}?P+bL%BxX|p=RwC2Xz$)LsWYbNsj>F%FNXY0T;-W?0JI6p$z=-t5#IO+qB^zP?%~%nb-azrPpE^*N1XQ^N z5W2W)*!5O6JlF+82}ablPqob=WI-;kg2X#d$l&of2V`*|XF+s=c_&8{dPu#z<7Zm6 zTO*7s;*H|L|B>MrIf%31ex`?dh;O>*JylUw`hakY8SOUr*|h@R83RI5l3`b?5_Ns^ zkBXnKP2A*$?uuD>xq0FO`@en8-w(;&_?}mHQ*X69w}b;r%rOyj5VHAlu}Z;1FJ7pI zySGd_V{A-A$Afl*mM*N595RJ)u8i?_lGd;GoOx9m?1+{?YhBKNCaDRg7Z!3XfXOZB zuWL(x6#fbiC(gga6`2?tAjyfUx_eIju00udI|*@K8oZ-xlle_>6-R^sG|ySn;tp#y zTUu3shiA&>Yv$HHYDEKCz_nX?)@WaE`=-^ZBqz*#d(bXDcxtR4cU-qb(v;&)Z{-V-z~%Zd?s&yYVZAZUJQmk;ED+Sj4*GXi2(-P`rs}&*}Scn)^Ci^wP!Dp zzngAr#-7Q#GC&zyK6Y{nn~&A1J2N~Xc@BQyO5!$~n=>(^#L><)IR&#k6>rHlL7gwsmlr8*?WatONd%u z$TYsx-#%>3__B8O&nZ`2Ki4b7O;SH$8ut7?K7U3N@wC3YT4Mzj6r?0aF`*`kXqHtp ztC$Hag%B-goVZDHIuo05QnB{p&{Yw}CVZwrYG{oT5R{5|b3nHTGN<0|GM)iy&Eo9x z2NQpfq z&A2Q{3Y+myjuJqp*(G-k-dW09k9N~^`x7$QZ%>cnx+Z_ug`NLVc~3O7YwTR6#osC^ zRKg^)eaT9!TGi6|hF;Ul z+4qP%smISCo0ECy0?InOx0 z@1BF6uE-$50jV7_1Y#Z!yvqxKbdCzQxy|LB^H+37Uj*+zXKfOk?9K_SHh-!rBo;Yz zs|I9npP}(G79eHe4RkqZpDX(R{h7c0#%k-5*!5bCQa!}ktSMeH32q&;UUX@0tn8n# zt)5a7v4-V;mYSaFbT#)Reu9u27uGzmp(hI&rs&nTft8v|*b{`SzB5!GJ8b)9+(KhQBH> z!COx9!EQVbwRpA#N zb9EmuI7p$Xe)4zo%sJe?w4e`xk#bs1oC!={v+HbkzTY~K!U`4s218EWpPS!5?L+W% zQZ3vTD0PMFvmn+l-g~LJ>7l^pXjbxq`LrX?Zjti4X+LnT?YPwGN>+X3m0lBtWv@ZC 
z@N=v$YtDzCdxKsu8SiXcwamU|=eOE)$=r`^sf*TW!E<~7PsfLZ1j)Ke;bz!0Ei!Xy z6;3_7J3$0$$zMQ2#*ovToj=Xoc{+G0t2>v)VokdOaYw%5T5~2tEZDhDY)P9)$fmSJ zD{csV$R)j)ok&|e2aSf#Ap>2qtlxhd#1#3b`L5>$^ey>6F4%yWtGFS;rBcg=kS=%m z^GUR2y>u={Vs%wO%m5Ub1vI+W;MiN4judjeGSc_jr z$U$s>#=Gxs2xF(H0d8c6(=bkZD33+@CR?9_YndQ_yR>Z$U%=^gn2BPvR2kQ7CShTz z;a1md##Af66X15-EU`7YyR({I<=2#3GGkJ_Vr|Tn72~@kiVM@l1qsgP9d}PYOu6sf zm1KS~t*PZ(7@2Ta(?DYw?^k$1>M0PRg$=PXL&Ui|!on4q5qQ~Gq6p76lz!>)#{{{c z8G2{!6Hi4|AlEEfA)^}YY{M=v9fsy=)LxDgm&R&g_EEHE&06T6<72VzY^sT{sD)0+ zl>0e+qW!fQRRyB|Xd{?qy?4y#Q;{VC;y{?Pc?0%SaswAulmoJx15Tyv%Vfqk{SPB# zUmLr|k4a4lrDcZkyFMO5K%vDKbjIOecxE07k8tUe7(Ku)FdU9XbPw_TT!eI9Ck2sY zgxLr!?C)i;5jZn4S`fF2R`Tln2KSRwL%Sqdqvz=HMej}^@a4SoiEYu1mdF{QZNSoE zg}|L67hzslapAaTZ0rfhJq}k$skd8GLvu}5*69`1s;iVK2woQeo;HCf`SRRf42A)A z&iGdX$8QCCcyRvOvghm_O#YKhL`mlW=Q2uCHU`i@=l(!uPCq`-n;|?!zV=l>l}wUs z7YnUmdW^q%_UDf>SCmw62eQ-}(cyTGWxhrhvDD>kd$Cyc-33{#8~!80Q=}d)$PQA@ zeGn!3ph--oldJvhr3(ag40@xeJlv<*6Ncd@3eJT zjS-0kgiTtdF%kYr0)ZM+AWDozpF}JcDL&r{IG~@Gup95WXZ#M1O(~GN*;=PX2Vqz` z9@CSLV%U!3!{Q0$Ey|!Q&`x8y>5SJRzNSrYNymTP&-`-f#vgW9RC zq+p14tM}PRVGn!z;w4OnI-a0d^I01qixw_SM@{#+I!Kfj8eE7$KIXI<@ASe@R2Mbc zMHPta@Y~fNaQ&(8^vsRWbeiR`^Q`&vygEIavKr!|+f zPUk(FRbDa!boBbflO?_;)uKv;<@Ic2O<=WE7g7*VjG|0F;cqDmq_EL&GZCoSD5Ea_ zEOyh%>U{FD;n_S}LQo~S(jNn^7R{d}IvUe`fSn+*c4EkQKee7d;vyJyT6SGA?oo{F zBBZaj(gt^CTJd4qdQ>>2QvL_%7=>1?bq+nd2zpEwzm-{--Pk3IP>iVFpvmL_z`wNs z#T%K044&{~h?)%g7mf%VSCr+S9_P^{O!#18bWYXpW1>`@zxciYu`+u)%FQ-0#K;MJ%}&R z8ZF%{`b@>B2SuBqNdXl43Qw5^zZz0n3P0A!b&hFP=CZ1Be82{wdt;0{i38r7dP~Mj z_SX|gPsj_W06%b}bci{Uq>nkbWse4VP!Y&vmLpR82R%rCE-l`Z4SLX`{po*!+7gVvaJhr{v?NL4Qp6kyU110 z;(5V+r-J`5mp5oXYx_h2O@7C@wL?V&C0(h=MFGXA6d&c~cfXNk87ifr7;v4Mh#)HR z_Y4;i><;JEhM++*8sg=0^xU%%X`HyC)fV3xrB7EKi(pg zuYSikw6!@ipQ>`;RdasnCl0O>sV2^)178Qj68s5?x%^cwGXbQVsyZVDLjNDKV5US| zJmP%3bbOd-aeB}Ak_IRgT^2L2l0b^E)bpzJbyp-O>?;ejF8Ko%qnILCi-2-{AmGmK zAEE5y?hF~*@^UODKW{2QUhPtWQA6KwXrUMx5GUZk^b8WUWO`F*p;ciIH}iT;mH5n_ zkkWX}O!VK*-$YKQMaRIN&MIkmu=?1s?`^p9PrO#;Qa@QD>3rUpjk>2dWn5nZd>7+F zd~G!4_d~*Tbd{K)0uQboYm0RQdWT7;vU!M*2MzCqTNVKTmS$dM3MnOd(ACr1&pJ2^ ze-4gyy$QVE*!2_Pt;LrPauFJCp&%xRK=_?enW~Elk;%vsX<>l!mGR@E{=sTlQR2nX zdQ(Wf?CO|qp}#*zX&c(SyW_!|Hb_*J%Z2Q~5Q$~kwmYgp=pY(4j>e>i9nbZ_Dhu|L zZ*Y$VL3Jo1*d*~oj9*VUE9%K4zKCZiEz8RQ$gR`I3l6pR9<*xy7Z8B&o<*?_t9UDK)z9qscNTT=L4U4oMulx1z2pqZnOv z!kM$-M;5{Y+be`9p1y*^DnOX=Zt1LQ)Q$i)Ve?{^O&0d?_ul!m1yzhR?JuVK=4f5{ zm+OW6zFdj)*|Imuqs&dsWd`USzemJncr}^pn{_zHJXH%;zq!2g{oTR`CSx_@aU#{O zD0XH^!kNpC`}iZlM)WQ(Lh@F=rSDZV_A5LUAP!GkmYQayR$a7|SYG`1UKwqHUuhvK z%>$cjX?5!B8U=|MvkR#h*;l`0KY}jpptr^6@^Fr<{4#79urttw0}qb-+c8LrUzzs56Xkyg-~S6h{!gb3)R()? 
zwtcEcL!sJN!f5xPx#}W>EL{bCa43Pl#1WakKXGT~*Gi5Eb$R!(nU|P_%J1LKW}*RQ zKUP-0{a{TXrl9zS8C+}sZ-WY&YTJXPm-zkFOstMfj~fM_C|kl}&=f1^$u@QjA1<1f_=)xQ(HdhElBn5Sx|TUr)|GdmTu)cWpd z9rHHrHH;Vu?p8s8Hr9;fd%`a^G{EB2u+|oEx|9fni~HXR!JQ5ffJ>}TgNfTD zCPY9;yz(_3WkIMu31#CkdYY4ghfBA?)pF_QD4zFohm^LHn2N`j#PHQX_#c@#-nYx@k7lW*E@dbD~KPlLBiRLv{cAKONPx~QJ2 zC#j$fqCiOY0|O#i9S6iAe`1Xl`_swyx;O64S;A)>ca#qXMYj$@5-3kX4w~Np$B&0A z@()%yT?7xh#-S{Kn;co1T5>9UY&1PU4|9F{X@(9$ylVdj-Wy!&4H+K(ZW69KfW5Rl z;^qfD*ZD;Y@ZR}ZM@1}<_BZU8R(+oTtBvOeM~_=7uz13t(_n$(k-5t|(L0NoRe2bW zlCErn3;VX!ml%txU;hyc;ppw!NrS4@hm~5L1kXn&YboA4`03K<3+l4_@kS$jJz~m+ z3P@>T(2FAW#;vL^pEbxRD5{aXkK7nn_tJVLrs#qVHJ^bDtuaB_;kaP9YMRyt=UR6E zqEn=T+J3F2f%3TIH@pL)X4^_5fX&4i2?LdO!{`mkGy!gsPkOA^e>Z-7y}_V z4Bw-G1~726uIK1c&i@T?%d)xtQ{!1l&JWq2LbO|CeJ~EbHE_0Zx_(x-Uaxm@qra-W zIn;g@0O!x?GBCZ^=N}VH6ILE(D4;XCJjAKQt>>xQwHenx8^nML@>xK!5^XsI`~9A` zXz=|erB!XZcM0?7m?vZf1B4&9OH_VH95SL9c_Yl%8mHNq?juQpyeclfYEb z*%n4=#e-s$8SuF9o7-{nhQq|4GO^%sSQ3zW(iPAYDrh31m2p2@w(48mN9aA6O8`aG%Wtc2MK=HKg|d9egkbW@!(uj| z>_b}Pas=6XDn(@^xU?-dJcDI^2#NVX%tH?yG&!ddl$>d}9e4)ol6Wo@J`r1V5SFx@ zG0A>FsgG{7HhAvDF1f`se~VN-XW>xW^k`bF=yBtNq1eN*@aR;5(eH}(4o+4dDe&~k zMk*&+Dpkv^KA5;X_J;pmUt6(U&vt2g0Px(d+OtYhK+G!gY;JVgIA;;RY-3m-Exl}F z!qpx9r+yQI>Csk42|@D2k^J1FxV6iW&Ep6&B&DuF9r#`84_}~`08HOJ>-?>CzsI=D zH+{@393cT(#dNJ?ncB-6ZW`$1UpolS~l9Cwig z+o~y2+r`1Pk&26)7iBmn4K#7H+`#o;{kS>#T=Tf4sDgeC2S(!QS#*ze;-|BxLj|Ie z&kPdsmnpM+d9%r`Ey}9qz?>JSr$7EQR&>sMv@&4E7ylkSRQAfOgg2avkK)IdH~gtk zM|Lh@zR7t@o0GGAj;Hr)scxhe=?vP}t>8F?a+ z11+0!71#W({!kAC5}uYX=3_C9usUJESWUHjF;+@xD;?HVAr0O%Y|23-f$KkIQDQMx z!uyNXg(~wpfU4!58pPy!ce05m)WSwxFXa}$lcXfYmL#BwQ@cp(OL69x4LlkFHOFK# zQ=3<=J*51CDRzgIlSMtmI;Gq7_A$G1GV|qwVES+hMy#NPqp-&K_m;m@a_U+6RTT3dwg6)w!*)z`tt{Q z)2MU_UjzfL)|a!<(n}LpKTe!;*|T$7f5ME=G@HdUaKljgHxg9PlGCA+fMW!#%Js;1@3wH33=`HvZNy1{2+EHp4f4>Po0 z88)kRO|wS>?mn@{QMs2^m_RSC5nM0SCV=S+&A2HqVj5(Kz*>$d>C;|T<+E;mG9gCV zdS^Ug2?;({=%8`Yf~Sad zy99Tjb*Az2v4iKK@e{iVAyI)^ui(=GJfi~1XSV;u?)xyR9 z%8F3;p3O1q{`LR|wo+}Ml?GG?0)|6%+LROt9``Krwr3tSuX*rRR65+2(-WTOgT#-)D?P_#|pFNJ3;Lp$|Ix{6VZZUxl&_6tK6 z8h}@f*z>P=}12mX0<@l9n;^*FFm8_bVk!n?@HkGvE@R_gOA~t-3)eaV_pM%{Pw1vyh z&$$sxRY)p|CsG#n%SmgDf=Rcg@I3m4mc9an!Hg_5FrMnm+|v}&!hV}N&FK_&TZ_7$ z$)*mw4~mdslSz}w9Ai1G$y>B^LUM|k#FY=4%CHYhzQIfDCG1b?AROQ)&sWjzlXR|| z7j=2KS;x~qzN|IF`;~Ne>Pt$DvO^qzn?vOZL6T=CstMBtH zY$+z_AQuKD)4Q-k#8RDv)Xlq0zO7{#QR|R>Lq|BEf^%8_^>@~>9cF0E zZp_NNZoRiO0&BVZTgiyiFQ6-BRa$2%r4H=Xo5c}4+%5WWwUOI!_raCLtqh0xaiS(L{&xT5%Cl=g9OV<@{3($BwooQ*&rPllXkVJAchsZ%?*yabR34)eEr9VyOnfgvCF=`mz ztl^4e{0ZatR4qnHJ{vkfO)q7cN=agv{n7vIbn$FkD#<=sz$KB=ezA7G)_b0RYToig zmu*o6suX6L6dhw!cYo6QjkzD^A17_=@t998(=Ano4iCDoVJp|q?r7oOM{ONgYZ58x zL|Gft$~2DL>npD}DD7|j$JQOOm4tT_eW0bpK6 zI@8XnAEEHo=8cb6l$qd6Ch+ay!g`eytY1=Dqes~m=hK0rH8#(xm{9j0aCr3sUL)1k|Ilpn@B>cgJ%2HjlPl5>gnnufpF*n7yTuE*iD$R+2%r8d>b=BjJ$ z^VO5#8z2;+)-Wk<9df#5i8@3{*97!?$W|D1nfqgkRX98wbeu=Yfx^O|=w@DhnrQ_< zsB0%td-{h(WtBuC6G8Ty5e1;V7~f10GqJ48!nFh!CQTXqgcv5sBwV2D$4bta4LFH?#+xwF*Thgs`at-mRhEWCiK7E?_* z+TP|NSaq%0blSVjv8YfaI9nI}=9OUs)0=p%R1>X|P}-rTGbK%ta8me$p0jH!8uU*vu+s zCQoUO?N$L-WJ}4^nt8~9^qZ+fSD~sMzxgq|-l+JiahdsJ#b|Os20SIl-oVYdV~94Z z|1xLzP5(agvS!b0fj23b!mY~)?i;+Wd??cZon5(Po}Z*7bZStPHq;t^%3RxOJ0A?} z)(GhiEhZS|73O+7DodthBku zQ_pJxugi)F`eEg}LR?AV{I8{wD5H7X>|T+Ehmgs{eqDDW%Nm<`zM%L4)A zN6Fstb$k%Px0{TG(>E`Eo%Q$728+g3gYhiKYJ(o`3Vp$plWi{h;dmp zft-`mUOB!RjvMsu>ue&yJ&d;rJ6-QJ>1v90uj*)^&(ymys=_Fcp<#JdGMz0`@GlZW ztI*Op1gFT<4&DKsDb5$~-ou^s0#AE*iIK_IN!_0#^8?zmI?0~ejicY%2cdg}mo}NZ zr+LHz$wC>HDDbcGVDQsgLeMS|a)H_;Qq$5oImDrzq(5oOC-miJ>dSbW$_?9(ze+ 
z(_dWf(QZB=ib0A*A1C1ujwIF|`85S8i95NUYxIC;hp%%dgMjC=-zEmxez;(l3>9bZA_I-B+@yR1Lxi}m)|H__n0 z3n&3q&we;89`y+yDGW6Ep`@=2GBvS-Tn0e%Z$8%+<)U3a>o3F{BFe$DjyZrQoKau4 zG`);0dI23w>=zOT*s3J~Z#*1!{TV#xK7tyfgTi@7f~^XBcfP*aM25^eF5#9 z;ild7@E zaGET^O0O7nHl@$B&B#_2m0vgzVzh4A-las!*tgls`L#!i^@J>4pp(e!HGi4TaX*ra z#1ucizuT0{#Se6IN}URx1Ob^U3A)bbj)icR<2Q&CHRgTB_%VKT*b5JK z9J{9f+$lSobC>;64S9HGymq9j=6zQQQE7v!ly!u$A`|bPzux`AotxEVp0x0D(vyB9 zz3r+CZulwOglEe}b51i8#(zraxXvu7&~i0`XT9 zsTXzio5E%+yRYbis|iJpXh{#hxeerbG*$Tv=pCaU&*&v$+@6EoCOs)+tIReCeRSBY z8Ma*7k5h#STK6;eJ}I&z!5PM(0VVy~lrrT^evsb!hfjlsu^naKEyfm>rq1nfy_ibH z!XStfv2_2dqpJXuZS(Gu>l8t#4IOmFLH1T}N`i~Lk`Cn3wW|}Hu;6@6_GvqRhi82D z9q$}}#qiwuk?|##`B#<*zEkr%ye7CIF{(jJzbSXusOjU`Wu{BP6(J#Uah(5wM0Vk3 z9)&ri6Vy4ZP)}BD^8BkNuwar?#I_^Lc=UF9x4zO1%&SUrsi)@BMUFog4!o-JG-pqy z);zFvZz9S8>gVgYgf@t*2a`#3^-_+0Jm)@ow~FCT^8@Ac7#y)~RY0{4bc{6-d{x+B z6Jm}t9D_}*-m}4Ox;1OoJ%{Pu<@hV$N2NH>6af3u@Hb&oTP+z^?h>Q38ob?X*8U9U zvOf_5)OY{TmDv`bS3OFk>F8v?v?eV!G4t)t)<62eh-hJR{NAA`v=&m@zXUHQg6cCt z(YtjCY4wq!ELhzvJ3VF&JnYCrI9-1wkL&TFJKdy2SoR{>2W(oHL`mh$qNQ(A_@(5z zv11OGiCx*OK`<4$?Y%+C^QA*2Hnln9b>;Ei&%qbXlGTn|7(?m}kYPR~K~-&m43eJg zF52WOgl+{LRMJKE?evj*?46iXJR??xP;6$?46eDz1a7{=9 z!3GP2&f(pj_?7-{D9h;pQHeNx^aF?I>8Jmy1rU>LQNU=>BF z_r&za^%2?KkN$P7#|bjdmD69Nj7oE&)S-Xz_4Ypk{|ayVz%(O>&WCS71sx17T4{|{^L9oN+NC5oO< z1O!AyK{|*Or3wnt5kyp~(mN_e>AeL=Y#>OHE*+Jk^xjJp5a~^%6M83fLP&cjeCK`U ze&)@a`Q5qqpCtR7UDn=f?X_3iD-sgT{6`G^i*hts%ilxIWdgr^g#_eHawyiFr{iQ} zPxgWu3P3JNXO~OelyILgV;xW6nR-q!aA#|RiF0&?58(~6AW+xGmVNHegtsK1Y zd4`&-NP5>fK1$!44Y;1gMVlN}L*5e5wCQg!lY^pp-$Y~mMB{CfSPa^qz63a9W}}U| zy{wqn@G$)IQt?}Sah}5&dZAwGth!tYmKvqYP0Z6E&wz6X!4vMgnlmPG>!+vB(seT_ z5ZuP1fO>K#f_})^naw$e(B7K&K!BBp#!Zk#D38@oJqq;bmW4|uxJ?$A(mPuLl#G~a zU+yVwpu2dcdF$xt&tjsq@YLG0JB@XN4rTf*ICU3h$id~;tvp64t9&i+u?eT|1nDk2^Ko=LxC4 zX)@kLELUinDzZ;$*)0QrGUiNp1I#YH;TpCnxU(bqGmc zgD3286R1EgK6%#q(5*P9wau^jE8r032Ti42=@6^a>ilzscgXPJuj-4+s&X5}&Nc0~ z5CbXwfQFd^7-3#0U!Uq zG&dY0M04t8F`~8pWx&#oSL4#g9Q>s&GLQq2N~@)hJN)t_eN%ekTkEXuj1i`S;xvZ*MyIfwh@I!880+Q3^#h}n^1<7`ku2Bof@J-bT~Z@&9eHx3HjF3;UZ zBgl;XgczT}fuDXk7Kp)O^0B_(LJ7MII6jiYWSZ>LrQpX!Z_NsGpU_7utxvLTw$-ZN zxuIGt%bUW)!C=&sd}baV7xR3pf8m}}xh+@S@IY43nA2@1JsQxeCb%+ST>H>=`I$Yr z{LRhSUbn;WGy(fU16FmnTZW!ASx)IbPu`a~ZYMt|;E$H>ubQy^aY>c-zMZ)b3ui$@ zXis6}GI$XvwdV2@@RA4APJW`eCe>K>j@iq{E_Hg>Ud#-q6`oz#502y-Hu}^7 z;ja5re~z<-DJATdpi!M<*FtqfUpOy4b+#*Xa2BqKtfc;-;`D&C%pbASv-zhiiZlOR zt{n&V_kMDngyNW2_jQxUVY!Ra;nZgq42L9g<6MuEvq4VL6a| z30gjZ!t2Gizxv7>H!r$q;TllJG9^S8^-V{udH#-ryvN{9ojEv-F=sMCf8Hbx7S2qL z*q$hcyxb@j(@?R1=TC$Ej!MUlb~cZH>;G0Sv&oBy*&V;_kEq(6`<<0HnO2fytD^UP zZa8#Av0{Po=E(amWpQ(Do({#{zu3%#Mt?TePqZ#Dj(@cCee3uILp%H4U$G;+HP0wC z#L&KjH!N%0mW4nyX$pDshIhenS)rQ=ch2yr#k+IB+Qu)ix89p~T%`luUtjcN75BC- zx;B39JtkMK=DKFso4#VHs1(UtKTkCy@P3?A7d1zN*L9mamp&JLdp{x&Wg+YH^F;~s zp1Lll`0AJ=EwKz;cj;EOY#7^*M)*L^b2Fm~#|z38Yp3NikS89fniEHo;mpwL6Xas6ZfnG5m_3Zo)-x8iyDmsi`TiY*9s;Eu& z?NmH5EE&21k4lfdkFiTP>Q8dLgS_~4)X1?yFFKQ?5sX=oQ7inDW`>M((}d)g{ogNg z!%w3_!7$a+w*Q|ZH8;sO1Nz)gzrY|*mn)`%86{)-0=2y0bMV2@+rRvw^ZcqtSEoR? 
zDxeIwLL)xD;YNe|@85zt#@>;=5bT<&OMP^Idg+=p2~_YlHR%fh#!N9N-4v5)Pj3jj zR<&`#J&u>_{0lHf1P&%ekX*u}Ei5xrQcl_$em%J1vZx=C{j~W#F4>(!r*QpbU{W^J zrtWYsQYLNVPH}}VlCNd(Ap6MF$HBo3wb39Er~)1bvvN$zMU602Mql``bLMPu8dZ{4 zdi}{usO9*jmn#)Mfx#!Didi5irHm9i@phG>#NE`o$kp}_(UucOVK$0Zztx!Ep2x4+ z(+<{f66AI_=i8;NbT>E~KTSSp9O!*~b}%xtw`F8OL#eTBca>2lxtE7-Yr*I=AIlgI*_euiXOJk)yV^L7tsXZZrL$Pj zU^JAcBq&ftJVA9W>0vXC>)d3_0&9X#w4SPzq_f};nKQlIeI{1g2?;Iu!j958fV+U;8=7w0b*$i&B?VhmIO{aH{(U~$Fscf(x z=)kyBoNrn~E};(k`x~!uH6Ha8Os3T&{y_z=tmM*6<3Ao_q>&F2aI4L(s0+)LL}VuF zM45<9?XS#O*|iPc&kxwjJ{qh`s-M9t1Qm(n0^1id z3KgypU)}LrMzubA(sI)K8C_XI*xCrKlsF};25o@2o6LOMq!#7cEK}9=63W5@NB_3y zW>DHS`87aT9-Y@weZMrkq>1?oV}|*7Us1(2`a*iHwUkqc>k-6OP}?48o8_MCnOg5U zwP$Q+J|LB@i7tkI73RADkUzITzk-l- zP4Bxa%YnZWHLkTyq>%2~sHfqk1ppX>F$;YJz(lKsVTVaG6yggPl}9yf z(^g`=(zDco?wJa$6U*O9nuMusCWSy`OBcC)B`o7oAKL0qmfw>pFg3-5y454CL<~n9 zgKZ^Oh}9)C+sciRq`+M81_g=IX~FnrXhzw=Mr&re)~#VFPR+ES{e+08GNaLq_F^wB zXuz8sXJHN(5TX5=iY-+@xI)IGw4l#C?hLSCsaHTM4?2>(pRaWE*vIrP8+U~cn7*Dby>KEbH@nYrUzAM_P z734|*m=8|Gsr@ApF!2QhSFodzVg%TBb&?O!%F$ZiZ`6O(_-DhACb54-+YZU0l=Cx~ ztBIXaVC)WU9w6Fu&{`)JUZA6teQtMds!{&lxHr$xq$L!#-jgueez)g~*68Zay1Q%W{1dE!<`_dHS-Cns20Gujp7G5pPv) zw8TBFYdJ}0Ee$?aEU1sr0NzE~F4iIW&g~}UjaZ|UVBd$lx1m$@=VfjQhx);;X4GW7 zxkGqj>vZ4YnF)1PY|kD6i7S#1@j)kHy0Yn1Q^<_LBtc_&=jl$FhMXlZa(?Fxy6NQm zPHsc53#};;J*R{kB66Vkj`ZZ~oULF~) zUB!Ks2%MD&i2r!fhtnO!`y4XrxeX=iP1l>$4aV8kFDXi$e71uo-!CCp%^CY?G zL~LupS8@1=*sk|kHV%4~$z+dFIhoR&iA-?#*0~mYu8o!Dca0`4%VbcBQui}T`fqwm z(y@~(O{$2A=@yty;mI+JiAW5OHAs=Q^c1=wRVcZGkA{8bnQWZ$!Sm9giUy8KQK2y6 z39UmzG}1rF?GX;$CK=k5&I@lLXdVyADLp<{BLb1%$A(Hgk}ZkO7*Y&xap3%+`++m~ zOEBgQb8w)QIz-bh*zx|f98X}Gct%AkGRP#-9zXHK{-TMZ&Vax3<592|zpy2-*snKK^V@&@XR4EZp zpmq|)Ji9m2*f|t!X=b4;p47%?1{FWj&pClePz?UOPa_+vyr?fPImPe!n1CHcb_*s= z9^Z>o7?Vgz9|2uzRu}3WWM4kPO4qeI=JjMFTVxItil*KJguSE;y^rm-HVTOHM#HCV zc1K~FF9sC}hfq7Woqe(G3#JWT4{p<7xR8POPa|V}z5{0!PCD$HgZcK^z%Y1DpXGr( z2EyY8MV^APD-%{)Gkk$-(pE_ZY%|e1bvf#GTfx@+)Gg4YQq>V!W9*BExvvBv@qfSJ zlm4hWY*6(R29dq! 
z)=z99u$1fFvN!W%dtx=|34v3MAGw+x&dxHl#FN*LB5#Q;&TZJOFjmE_hwtq!LOn_z zm$}M#W?t%w))!$_U0oXWHMyztwGr0o8^}hA@r83-g9isW632zf_XmR{y3q?rys|uj zaG0YyG40yZ!v>{Xic+X_JJ?a>YTzgQS5+fupSNlKe&no0)m$2GMjJ>C3)p~ zc+WhgnBj>GW9+jVSk!Ds+$GY>)7P| zMH25dBBC=uF%;A5aUVM;X$9N)FgnA5uhL{+v+|$ym;5@f2!r$lW~86=zU$W0vlB=C zka1_VFAVWYMsIxI#4_gNQG}$?+}!dqrAF8te$Ab)0=Xwi&B3z+CCnA{;>wi|@BXGj!F3-*p!GDG_^F=Ou@TV*B zZz^?ycF-wDc*coPCB1_9(3ca@jBjXDtOhi&Zf-kTRe{6~`>*SBt!!ZxzD-iccKVz_ z;=8%G_po!RnJ4L%%T+&qRP1Rg-n4q8SR+86SjB~RD!u7}d%pXq49vg0PWSTLg zGLAAVRNyO-Sc6rRWS-_toRtHeWzxG2*ADmExF%eAh_@nhZ|bMHXR z^@F4FH4fTa>QzjwXUvZQW}7hCI(5Io?8-Th{o0 ztPT~=l;sqdw0wpw=s!k=b1`&H;%G8;z>dctr%Vj+O#$r8FG$rstX=pHJ_e9Er^+6v zW0FdzMxU||79HIszB-u~J_(kKmPK=;JHdhp5j6NI2?N3gcjEi)H3&X=^y`4ZM14R!jUH!#ht@s-oOU>QAL@ zyE?of0h4{^JaisGnySKFlz`=>?xpaCwm8i{6Pv&BuGcx_Jr@6@F27D{?wCC7M~`CX z?&lYIGqOivy0UtJZCFxf@X=3N+W8fjUbO&jI|pic=jd-HXN>?MOicx-B4Xxau-kQB zoA3098q`?_ElGrjERg)1?DQOPBdueg4%A3K8$^%s;YE zU!!6CZO(TKb>GypsCv?78GA;XeiRmp3tUqw=>3wrLj#fbcqDqfqsBsZ4NPYQiVXtP zS%0K>93Rd(68F_ELoQVx4VOoZ@-5X9`E9moFAoR_b$imzt~>$a9wu*@{Uv03J$B*G z+2grQfg5gUdm6iku|K69g!Xxa2mhd3MT88MuG(SG?nKXq#DT!Aop^g-lu4Q80NQQJ zFF)wOXU_CY@N2jVK;)`>Zi1DQj6bDbIc2~=Dm3gmT*>juQyB~{CW$jv$~Rgd(4Ho1 z3i}&xSiU1><)nQe_xXMdQI+srC5-m;7AxTM5lGp`vXUJOB)6KmjLq@Ae7lXI&C0bU zWO)QGUHi$f^tPou6G%MOfZ!Z_aIq@tEM77{qH5Gf>jJ=Gf+%x1e<25|0!-cm{;32S zPe57YHr38{6gw!}co_Qa>@Q~{3?ahZ*+^_OJ4O?`wBa+ZEGKt`bkG4~VPv051I50p z_+{p=rn7YFX%JGAd(3=l4!rLHOFt=J20Y=j(v_Pq3_IPgoAQubjQ$3DE?>`O<&QrT ziJ9Ry`ZPkQjQChf-2adZutq+|#SJM7Q5?_W4LOugxvg~vP1aCDX7fi)!q7TJY&;JF zyZw;$7aK^Af87n6RDxqPS;O>}dXUp4*#jUopfy=xv8ssW7}2miTCuM0Z~nni)FoxD zzG*p+mdzDtR~osXcdzNQ`vB=Xk}hoDx1w4s=SWq;k*Y4=NNaPYbF04`_l5J&ukah{2hfF{zqBfHqx)@Gu2$u zIrs#>D03t@4`+3)(JcHK8$7XLsv4#>zw_Z8BXVGNt8QOjvQ_$V`E@;}mw4EuR@tDr-+nIcufmaVI_82VA-tEU{wAb*>nRz6(p{&Lv7 z`~C#hfF{!}2tp+qHH9H7Po!O2Z+lNQAw!RoDzklHI!3M}>$R{3!8*_uPNdOx!VBn1ltsD6Q>V7*}RI|sg znqakiR(rm9@GasE2YhY!GjZPUb~_EpCJI&`+5KA5e6l{1xpNbgoSotUuZ!Z{_MlI= z^fQ^RZ4)y%Bt0`6tjkSOCzVMg1P4fD2O(`7GSEc_P;i4E)XLz;c{up-ABz8SyMM2E z9v(;<;s3lNrzxN(-vsQ+iJShkOEzJEg#xrgj#=v={x6MjI_H0f{l8h9|KE!L({}&M zD*b23_|L|Jvz|k|v8^E-{m$SGCG>=XX6GGi%Gu)%`&3utjN}EmOwpkAOsmSxZid~@ zAim{wA;-Ig&q77I%eRuPL}(Cf^?Q^g^zSHB&bCe`He1u2EIDeByM>Sl!>^QVC)27i zM@UixkH_yzyWqJ9Ed&mtji&MZj4}eCDp|O#+Sl#h+`P5#fGqbhW z(%H2fF+r@pPkShcIpa(K_vO3af_;N)bY!Hr`NIatSS7|{#eI}Fc%zA!;&+@;_{()` zrU`57y5+0z|*pUS8qI{TG zvD6^1)eYhU(v~Z<2cn43C%&)6ruIf*G0{tDrB&NYom^ZPlVG;$GRtllfvAX{(bL_rjkfxN$| zBFX9^h#}cCStseEv560q8GxyGp=FDi>Ew z9%_1`z=d$5Fx1MBmb`Rna=su*_ySyedQB3`pOPwI$!LNWVu9~n1r5l*AGWLSLHzVI zDI1_O9|b5Cacno$15c(XgVn$uH#p(iRZWD1+MWfk;q94bKG{-nVESEFix~ONa!hH( zWc-%f32J5ZPc$w0j^SOvC6USttsk4j^+dXbpLe)95GC*r-6;U=@x^*xA`H&KSVP|* z-veyP#fL->j)F1mRZHrCEwFqfFTxbZv=AYwP1YQ{Tm$Z>QUc@~0bR-nM>6RlI*k(P z5xX#kjXBTpIFpWt?ncotV^N?Z9jLVIC0wn^^IJFD4*b4~Qo9BKS)we?&djo6S!ZR% zDyvvFF2bdyG5bwKJ@f2c=R>Qw`n`QC5oY-O)sJ7~IywQ(BpT&I>{Il~+%x3KQ0jH? 
zJn{aGtGpS1cjPUr((O?mpaXf*5d;+wfJQa{Dcb17zdYQwN*j#JLlZs(>{ZMuiP0mJ znr?pW5BzwsB+z1t&IJSDR+!W2SK8)ZBQcvxl>z(HO7B|(D|~aYOla^9Q?h=eJ% zLAGtK8Q4FV)TlZ~@EOk)nOo9?Sdrqt&TE%VhK=8}#Ib z(|SV9d)ERp$ugHlRNuVsA;rM}7V76x@B>AB@|4#kg1W{)6AN zb072C)NqT^z&97>C^_WC--*29$09z&E8LmOSFbF?)VN?{!2^Q|6g5 zgaF8`{vv!t^b>|hbZrJO#x;y^kY1Q<+c6IOnW|OE^Nn4?Bo6PO8)!P5i@B1o6IhH6 zjAAD}qgEyzG;rV=Ng%lK>>}Ke8IBHgS-L?c?q-e7ICp7$xRDo;`2Ya+u0YINfkq-P zeB8L<%eBWUj{~G_F_PfmkYnyM*48>t)+`YhryrHKL@ULE2F-I|Uo3E{ZCAifE_Rxh z4oGApGdF%PW5)_!Z)JegCPMs5sJ;sV73<@RDm6n5ScK@MI=^b8HOmK51)8AcPZ>}& zhJ4Ei6P$iTG*raqoE!;GPt(Q)g7BV)+g8zwx)5#LcPw3_1Ciw71;1ixAGv0xv|n5o zTTXblXc7WWh#Tl$Lh4jSEdPEnxh(?OhJm2OyYSs7CWs-Ba$J2|r^^DVtt943@>0*MTWqUq^JuB3Au4hY+7J6^?0uwY6Cnj#>@sz9GCwnJxf|d zi1Dvy{P6>ozqUR^ro_P~j%3dwoTRwPcIY6rnm{<1BB=+=LZl2Nj!6~qRSvxWoIlw2 z(Z^L4jt-&_KW|Op)_&m>4P>PeF|TxA6pvs{HQrYKIH5rZLr5ovGGK@r)_mZo31wid z(p~0UhSU68{c}!YPR(k)-|v#`{5Y#zOa=k0o5;l1zjZP!FpAtleGTk!z9_6^o>_~2es_^vtgsR3 z%aq6H89l85oGG@e9H_z`cUC%3I{rL=IxIz&efNm&)mvNHLvsmwYrd`_X+bDH@OX$z z|1}YGZn^qNW;o#F*=#)RTT}+fuc9g!pVsjv(BfkiXp{j!$_)q>ys`FaS%0?66VcKT z&=0B2WxDNtNuYXE*%~Aqoq$K4`o^F2`uNq86|*T=gHSf@y@=VO=9>G<7eDi*)m$eQ^+x`I0fe>4Qd_Gda$Wq^ho?fAwwexCg3?#Rn^@R) zWQ|5m(C$F(36~zGBdOm)_jD33fM6r9Y1-5hCiBeQ^ye*#M@_9-E}Vpuv9b=%CmqbN zpw0Zm0hK<~lh;}?F|B>=FGb=8bCuA22W(cj#j!FgB`;5R*_+dgn<#m= z8=e(+UPNId+sfK>r=K$$Vm##&X%en0i;=4)cW}ai&Gf;9VX!}B^PGI(eE)(pQK67o z@2G%iaedlnf(Ef|M#;9>-T#e*P&P?`d1nV_S)@w$g$sp4E8&Oii7M?WV4E(6`H1aKT-kgby`dH+sL@HvxSG z6kY>5l!8~LeUS>-KKhlprIu#sU(5;1;bDb6(wnf*sFIxganh3T(2yca!qa8+6<71H z&{=WJ-4t+}So5B-p3QFndYEofZQjqMQ|@IzM6kg-SlJk_F0it(DfyC%f3{@>6I(m- zZ0}u=o>z(d@ImQvj-XM3JGpMeM$;Ol^?hxKbbaO9QE2qJ_uGEGI9>C|z!gdw8pLbh zAYj$k%Cvs3y{3t`j#!U5G}GS24DY%CGXXj%e1aali?PKtk4G89X*5OB21`RAwW}6{ zjcn7ab0R}V{J=<#5kz3I{^nKg=`pBwZdOmTI^)dGAs4{ma|~lTF9tYx->oy{7-k!! z$lpBYTT<*p?Y3|xt3j~GwZ)4w__4kh6|sO0p^kHQ0h)!AqA<{#)_xl?hcdLfc#XZt zB8SIB4wmU`5(k>f1f~KF_C*OF_bWoTFTrQLuX@<~GTcN7+Re}JLk%6HL(rN}B zy$(RxSnm_RFdZN#>FHrp`KMzh0XVn55-F1))DpW8rM2kN#Ti-n)tSY{V&5;B*hcAU z0dnygYioBv5uFPo$_#!mT~J*Z-+hCW$t~J*ac8rq&<&Q&;e}s5V;vAgzN4$E+InbF zhraCB>M!3-vXh_%fa&WKKcwc2U9O-|x$vgqIFZ%H%?4K{;cJYw>*r8)9k9>lK=_h< zW7&`1;dwKihP0yS+ZW*7Q9!OXL513;rK$^v*F&rkujRr=wirr6 zn2Q453bDL~9wuPIF4HFewxPqtHKmeKS@CiJc0MkTSKG3sfWQl$px>SVU2)stsMOt$ zbmcwidGI8P*;F-%U+4v88R<=A&=@9o{$&0o@@XzSiC&ox<{52pX104AfmBrdvP!U? zm1VMSHMw1Yx`;;~rDLN(iqDdT`0al(e$TfM-4vE%VX12*LspCF)m2Fqy+h?TTUs9o zbQL_vC2qPPh5I3NC$K?RlM&<&8<K=?(I`*z?@FyZ!gl>j2 zvO12DyhU%8$aT*#ISju^PwyxWw*{1pl2XYM&8h}Uq92B)HYQ0$Ri?k#Fi%@HbQ2OK zU)aX?j!@2Oc4rWHp?#c0=GzT3jbx48|KRfxP|sL~(Qx7F^^0qK=5Xg)NjJULik{tq zJS37=0rFoyO%_2WJ2@-Nx$b3hZaX=1j3fo z$aZQp$alC;WyoV|$4Sv*UQ5|Ct4?%5L)9s~f6W2YM|&S$Ur_F*LAYWe$9Zgt8G&E? 
zD0W(c6$%Aw_8`Z_MS@?&NaFr^I75_k(IlH{^0Sn`wN2iW>rqC}A;-NnV(Lt1?*r+f z(#^l4fus9r_8RvAWIa1(Z?DQ6P%Kmr5R)YNR3OLJHzz0VhgVi57bqG#Q_hB{p9-GE z3jZd~T?772<|GM}B(4K$4*#!g3BLJ9=KIeooj(=xWFF3CO$z*4MtRr!<+7O6eraI7(2FO1m$>NgJ(_Nc?Ny0ju8P4;j|MNt| z0G3V<4ICe!CpsqkYL6cy2^$KCm!z%F|i@Jlui%yoRWtmG%?( z(#iIkr7e%Y6ylON07R|+_yG#lHq@nE5%)#nS>&%YMMFN(Yxdos?b;M&wg2AIBgdt+ zZM6zoYTq9G_*1!j$(oHz?RJ$6K2Iy}T<*3ba7OC-^mO=83syLs&f7~aQ`1A590bE} zTwL)J&AZBhdG36>L5#9$@{e7tEM*$KlY-}iHOp1QSiz-0;Z6nnT53=i5@RdhZP)zS znINMd8TaRN&og<8xXrTMMk9)`KIX6Lv^pQn@U{wfgLK@tvYO zKtCPQ64W1PY@Z7Igg`*7oTc4ae6UcNcz5K*a3rrl!Oe+3y5p`-F3m1RRrCtFAEDz8 zxB#FX@=3wJJTE~Kou5~=ve>yPOOYk4{Bn=(kz#leDa=9nv2+``fDVBze|fP}x$#ET zpzfNh1UIAG#rAuILD1S99|f`N1;DHsb_IHD^a(`%bc(i%SBa#xO62Fx57-FHe=%!h zV%9>^F01|`)BylgW5~b)h<=dVK$81U?akca4*V}F{!iQeyFvSOfd5eZ9{~2>!1*r^ z@GnegHkitq`~28haP8=M2e|jPL3=(R^}figotj$}4*q7d2gCHZaB`#+!1e?beg zDZfvS>A{tEiU6jj<%AA=I2MgS1eHR_#0xmfMM9sUqxXeKr~)A0@_Q3SjqwqrUGm2b zFueKK1M=Gy+&@`TpE^umE06-;_)9aFakKI0GEHBZ5 zo`0+Hs>S$Y5{K9ANV_(XpFe(hazsCZf851t8B8G#Wu_9WNgz>|_cW}6-ZjmnhO7`p zZJlM`fl}b$2<_v%vmlCz-P+ioO*X3bDl#%v#8tzWvpa7Jt2tuMNe}Ex5ma2+R?Zf`G5s}v zedzjZR4MwvDf1C{;}o8<-JcEboxZ*cOkuTm_mE`eNEVK~>&{Ze1gB9n5th>Yy~lNJX_m()UhKU&IYGlBHziXuWIj##)eg3ab!+? zMoZ^3u1|EB+EM6g0vdBqPr$(bKc$7NtA`C@AH6ho2%{PaWNt&-mA_G6?fW!QDW81% z|H?UxcV=_>KDv{v8M0Ne*Dy-H$3&?-U z1hhHlqc4M&$2CIt@<~4?Q@YK3#|j05Na?TGcNQH6@+ZBuXw^O1lnR(9ivw{=2RYwMfxA>P71x6s5O+?SRJ-sqbiF zCu<-y=xpf?h~EH1`Qk^@#;(joF$x8Pec}+1E3W4d_eS4Lp~(s#0Z|1OmTx$^yfcX^9+p*kBZo;yuY;l*7**6d`O##*iA0crhHP)avfQm0Hm3D=B%1q$TIzOA-uWN5CC zi-V@?X#xuzQ{YqK;5o+D8MogQcY#m3tYJ}dx7n&zDJdzy?>zzZyH-uX_R^GcY`Q!Y z&X(4%0PEr9_DK!4?ZAOLx-jUSD8sgNcCIonBQ4|%cmrF__t1H~ED+%AL2@rD&lSNc zo}+I54E=-i{y7(RnM^lKI9})>$c;(v|j+>3VJ5%NB%e zg)NcoWSuYv<&ae1S4UG|j;C>OzS~F5IZdfbW?(}@{{>jLoX{^|dy>U@Cku_sH#&0@xszz6(4P6;Er(G5bK*{ATGdgnhqcUIbUiaBP33YveNn zUz56ek`Xh+?_>^NYCw4b<7Qy#?c<#Iuo-&jQ6P{Df-F5#uwW z3Uxe)%7EWyLnUgcBs1#z7aX77s6hn3h)L71FW(|@>zZ{pp4HdV>4yS-YCzNnoBb!+ zCOeK|1)S&&lrF&%q29zu-Ws~-qHuxui@Q~5igs*#q$N~5@03yXHaOk(`3{;*XBL`W ze^~uc3zHXl)c0MXWwCGhh@tX$)TIrlrGwS$sQ0D!3T@GtqYv?A7d)rzo;h2=MCt;V zLW1sjjsQ;`)YBfbR96BsY9W2*Sy0!{(qVfAb=p4KOVSLADc+n^Ax#@9I5U{dhy`J! 
zEqnwv%lyp6-?0!G)%mMC8py{;o|T~oPrcb-%B+0O^462UO@vhx=d^!gE8C6+m`mPy z>yW(HA=9;HF+JFGFjwGk27^g_zqHNRNx$Hq^y+<3;qOgcAog`a<-91@ZfKYx!qDVJXAb5a6}VSs35g6~!**XJ9xzktsZ$Gew=1 zMsY;z7C~Yj##qua>V-eKbCSd^RG;pir36d0c8rs|pJYJZOIw|@Xk{Y=4HsMJ%^$Cb zd2P|AxF^c9&SQ3AGZ;pnY;0VXMxO}feF~oMh0Z|!`eIvQoKLmQ8Z8G^`8&Q9`S(>q zPj_ven2gOiD}2jWXt3WeS$9^drVRL zW|XIZU2?44l%1SfdJ%=^%M!Kdm2GgW@$-46o+VBYEO?=%a&D%_O%Z zrIlFsOH6{Gl=?twvnLJbW=%Q3%?>zTgi}pJYL_Hj>gMViu2h?05a3heJd#xLzo#1f zUl1ct<-dP5+TtJa6Lh}XlajURzTqc#ybN#fVW$F>mR_0Cg@^z*XEN~tun1?ZyxlDW z2W$Picz7zoofb<6L6?;sNKd+QfA0VoIQVN_9>PXapWu-OaQcE(B}uR;+;mRB+DD5_ zmva&$DnP0(grhKzyP#Y`)Xuo_62ywKb8ThS5QWpXJH&t}EqekfwZ`qacqxU|Yu0?K zaQe&iz%TMwKL)-J2z-bJJIG=ZmU2k`)fs-awETi{9#LZAxj@egR*{>Ssy2}6G@`;g=r;x8MA`X$$HIr?UkZB;+NAlAVgd9KjT=0Okh5oiUc}#DJ=Xfc zyt_qUH>Jh*62d5Oi+}PiYmymVgujX_O1>{GNrIjC!YWQDPb&RvVJEfq?BX*!@orKQ zMrEzbSzjNs$*>s5T|g*5&z0;W8BUcWFIyiC>%H++qB1uDwC%OUNw=bP9U)gZa1B4I z*dR_M`y}8{1dNIUL_>M97k@`Q#N&lP?OPXH!VOf6&?Df~2+(*^QLIgs4c0}xF@vw& z=7982Wl`s^R#*4Y9?rH^zeuM9+bH)^X05h3zlr2261~Q%w6v)PJ_Cxc0{hIb-V@{o zKMh{|e3E6$Dz_l~j2^UoAfT*}slT7xyYem1if7-JAY~8+AF@Dhea*C67bK=hr?>2g0F85sK_Lh-#E${;Y3t9rJ+7aF{LlR_Bw?HzFyep@#%O-)>!~7H zx!^!!=mo?HErO1uLZfOl);fz&vL!Ut1qP*O{?8V6M)_;E5 zS~yRg#jbPf<)c%ZPbsd0c{?>T<(oD2K)R$TV?f_8!uR5(=tV=5mx(fFk)03xLLV0a zUdYTCDH1tTBTQ)#Z_zEjHU3-ofk>fn%e%pydyJCR8rc5YZ)Rn-(TnFglb2?GyqymI z?f|*MEZMb|vG841L|Jk*ov{;E1zO}f08L@^82;+#glYDnsjaZKY4&|ia^39vEz_=s z9`x^De)?Yf*i`U~N&P0K*<;-HU^lF5K$}7Ns($V7Oxcvac&AJ4jYP#?~ z&)co+X3XxH!;8;0x65Msq$-Lkym#}h;2!%&F@27c)m7e6!crlX9rsPu!>gnmMmQl6 zzGKgWE}Pf!oZ0+YKiyKf7-84IXCP%h71XOCqG8EjfAed+Xsdp zMf9TkZ-g5Zvpw(popbpNINS6nDxnj-cx|=6eiQ=@J;%k!M1QoHc<*XOOL9O6Un2xy z`$nAavpTXByDWJ8LArcL+kSmHp{yjv~Boz6PYf$zI+{5asY`o^B57x)1ao zFHS@QtZOf9dExGu*9|9dE>y-i$pvG*a49Q>bz@yJF@2X&=gNIg*FoggAtQV}lx7^p zkPs-42QY>WH4!HzAjY5{e)pwi_}kF03F)vET%u@ldyaiq@>A%GjRyX|{$h^cUrukz?@UPkvY^UZYqTCkp;J&dfUgX`$ca-JS`RXl zQ7>MsW|%hv?U%o~-nxa04mgWQO;1l}o~mx}7Y2cpCe-9qtf=UT(xq=P_)E4A4-dbY z{L`xqq&S0w;v6UE9Ai}-RMN!@@6N2LQMsM+E7sQfUf%eYLN`ZeI$*&HpJ#4zt@M(W z$uR3MDPA3wpJ+*htXh2DWawLJ@g10E7oO;cIfnbM5JMIkQ5={0*|=Kn9NLHi zTEu35%7<%B^x#ogx=U7C_d^7+NBb*`faU7r>|FyR8bmzvmY2^IcRm+A_qNf!DYavn zohvBAhwA8-y**1ne{Z@+Pp?*uB|UvlXOPVm(TrGz%D0B#isr=!c&=<=9IF=lejfCj zb-Q+ntUL+WkXm^lVCv*6^Ri-Au)Sof^bwgZP9<9Cq&;U_xjH8BMYicCp_$LMKrr*H zbTTEx7`I;-51WxbKzi#=vB}gZ7@y2hOuwA4Vx`<3HV=rh8gz_6yBg~xHY>AVZ(dYK zgO0mJDPkT3*nu-=xlgV&WMh4OlUoXwj;&fJcl|6VXAua*6g<_J-1IAMet97;t=H2t zZ;GAnSQ4>7o_FRV&2EqS=eKNdatVOTyT#JZ8(~111)a&b0(cqu`Vbhk&k33t{;!fPr zMQD9Yx%FUidq?8rR}`(w80cg5&-96_Xuv+kKD>V9EzRNr)VscyO924RW5Pb|&Pksa z1{%^1O**jJqMA*eKZ}gE6qGI79=ne2Ldy7Jd!MKHo}Hafs}Y=qFN-j5Y11pM2kqZD z1I}}XpNNyo``nY>S~eD?5h6#m-KO5}rc=N2$mzIojUL6a^1N(pK+&=J+9IyXyp1-x zBP29^L6ddHDDGD|FWlqu&kqF5+rHLs`_s?l_=n>duVb8q|2|za)X>~1mb<_VUha;f z*9mYP?s1)9@%_+6i#X=n#fLs;UJ&Zhl&->yOWnFK>HZVLlR?BM(Ejo)8-sqqq)Y^J zO>#`S-;FmO=xnLBD64p;wBla6h1gAsPiEj$YF>E#xZZKDOfG#(wH2zjTg|0Ncv4eE ztVNUYo&-1z-*Y-KyuNzrYlc0_eYt$*ThtO?~m_&#{uF?cLp_jZc2MOmTSV<3F1b?&_{$`1KS zM`#X|@t5>QnS&SyQ-EkU6T^-gA z*Y26{@zdvOSWOpRZ}hHv`G`wSOxho^iu0>huoXyZ_-&k)#vBBzhOVC+=G{kA_2z~q z?98|}DahVlps+ruFj+d$)4{JKCQV;J*xNUhD0v6nn7S&8KOT=P&Dlsyz#;qJ*O)XG zK;7Kukrhpyc^Lr?>wlqI>3fvn3G6}r{&$Br)LkMJz40fos6e-$9KY#UxBGAOVQUqu zS~^Jz5Pq{@ah2+B;@dNT((&jh-A>~%G6&9K^hv8p+sY_J{P zHU8kNRcrq{=Sk3gO)(Zo{K#mU9J%;Ni6DmrbM(f^TPUYlGH@xU+VgJRe2L1cyhF`O zsDQxpit5WTPbVa0#wDMcJdac9QlPE0@2Qp*4NpJ#Bt0uF<7#HHA`m*c_3ncQyf>39 zW3%dL>sC@zManR2TkxnT$f8%>GEjxW8r>81|FHIzQE>!Ym_3682_bm!Bm{SNNw5SB z!GgPMu)&!au7Myy0}1Z#kl^m_?hNiQ+vL5oXZM^vd-li9pXsjds;;iORb6$z@7`M^ 
zJ~mH|`J?@seVUuHFTgXps$VeRA^LpvzKCt(Tym1f3$mN3xZ2{UaW;(eyfDrntNOgj zIB`ocmur4yKD5SBl2&Zu6N^@GEH4pnBsStzDjPT|qj}4|=?^>~@~Q1gg+8*sWD+@k zNpSlBn3O=f=vd~I81jn_7Dwuc@Vvi?_G3 zHFf^^dH``!6}_r3FLmXQ5r3m!?Ax*RTms4aRW5lw3glZUT0LZL4<;URi{=xer3nlZ z*(|BFG&%B9I{#3N)#^=AUMo6yF9fyZ*>I42KJXxE@5B5nAzpLVB>veplcdjIuW{a_ z#7$NWjPnO2EA{GJ5%HgE|8FMleGaqf64Zq3#F}Vm zWdQbt@cLP950}KltFQwSuf*Y@jYj;o4b;UwP0vqTC$X^Fmg~>vGpnn0bKDzrsZB9l zal6|cYt9Z|lnrd;vSxg%`8VqqL51x1lnwf15)oKSZT*T=^s!n(N{sP{K z4Di{mNN|Af10Dvr5(tNM-__|?BCW-NrisbwTjwdC#3J9mhEbZTlrnMtUDPz#k2G z=3P6dPp(G8onZhOK}OlWqJLZhlrP1rrS>N90?G49(YX86NV@4XC2(;uw#fE8D09R9 zi?}7%=VX!}tq)y__jbD28|~1;POj&iCfpr+blU2!{7e(Y_8U)sG0m60)}*;T#JPTh zKwC;M=(c(Pqte@~Z#PV+$h*bbN_nplyVN=+=8*N|^uDKgnco^w>s(9aG}``Ra|`>sDIEetwO$ z^PS-ajuR+HN=U0)j}MJ~s=ZTV&u=t6n1wHWlK&|?R7(J%me)IYab%+)PM+eym8_Es z#!+&wedCMVn2U85L&-ukRzKBf#KR4xJEg5#`)c`zivz8$(xYx2Iyjxmri@s1=2DB+4YSc#a)Jfrr_U7}R!_`eyT;&9Hp_ci&pVmC)5-_;r3@2)4`qGh`YHcUL0_uBdO8+JG3;a<0EuyKkH z*9q^#`NDHf(cA{{qeiqpO+Nb7{IRCy<4eT;Q~n4m8v!Uo(coKRHquK@jgi+l=T$my ze10kbnd_`J>AM)GHxiqH-q4DNu!&)^^IRNm7e@~!(l0?jw>vgnJ@aAntzR3@ z2quOHnv4(wsUn-GY3a=4z3d}v`)m(DHOVY|{z)1Gs2qx=M&^AEkw6zCdvam9KbrKu zot<;B5XD_RnjQjzQq>j@x&@7DT4oum$Y;hmfVJXeHuji4-kC@fs z`ynKufZH0bP5Z4YK-5;d8I$kjwCDu6;N}NDw}F0be41&xdJuaeO{zkEVzm9JNmET# z8&Y75SAH~ad33|ALbM51w2|b2`Ta-q?Bd3R;Xq>y_%TQpZM^}jpftwUH}AdZ+P z{KaUMv1gssb$S>LGi+RxjgWe3grc#;26{}C5SI;?IM8mjaBbK?S8Jc*;GjNPyu|^W zEvr2`GcAptk&z69IB(I>YqQ9+Nxy#r&#XokyZ#@B)o- zNRJX4FPx6E!HHT@J`=t&&r@LSo^G5&V&kVe$|2QQ1BnL#1NVtfsuXSYMMih-8p^(S zEFupMwARM+RL;>`{-g$V8MIt(5d68(tpU?($d8~#Sl||qvja#}pYp4UYe*_%BmQ*e ze~$l*b#zsKO_*N8tG_xA$c+dtLwQhSj>Z1aUf%M4h!-X|ym$IEUoYHACktd8raJ@F zdYyV3IzmteV}#t0Kw^F9#PH1NS<14!G?xE%QZMNvuc~KVVJ~Z8(cHs1kJ_d+L4Ib* zaj;kIPy9o6s8wY?o234S;r)~y`t;JNI)cu>88RZg%tbQ#cOsot-|S7>){J{E0b5Ul zn$zGRO-DWZqgXFD!x>Fb0_5FTpFmvLPj2@$$Jab1Khd?k6o`3O;BXT9I=7%*1Z zmPe{M!UeWBSqcinj#YWa;t9XUH-n}bc4O3ocez=Ps2tk`DAlH@gAFkzwq4Vq?L_hu z(R%Gis?8sqd)Y#{9n>w6`i*Z8GeM2m=RFSGO#V;XrrGUd+EtHZon{~RJYX`87G|~` z*l<9jrw<}=V-v>Ql}a#9j@`KqDyh{4+Sj!E3_4ZMd%?(??i#3usntO<|SCqpMwA!pIcXn&d~3KANQOmJl8puAKX`L~MxU<0@U zZOeo9#P8ssVp#ya`tPNmD?Z%!u1lqP)^lLJG7a_zv4Uc}L1&>l)7cqF{&mF$G@N<; zt1Hm8#Kl)*9@(N`pk3@a5K(dR#Oax(;O`&S9}uYD3@~a3$oIIy2)^3XtQzmP=Kp5y zIEF#+gk8Qg&?c_|iyxSWO$hbn@#jppI&RG#>^<_sx~OL(_Tmil)d}S$BXK>qr#2VL zd?CDg{^PZfS7%9!T1bQB{Hy*tg20-AfoUiDh+5W4+0;lvU2po}EhTs2vYd1|bPH|wJCSL-;!_dB6YJx{}3IzeJCg@{^y zY=i%?T4Ht4>u7K4r3751PDu*ftEx{SjlbS2JJNChhA82X%m7nlSV47?PO<`|PB821 zI{K%!h0XO6K@Z;7szc)&=;QX35;dsKeuu7g!Yq0bLBi^~RwXfKD^XK1PwYn8a$JVg z0?c4thp$Eg$#MzcCd*&=&;N!gR*DQ1o{+nEQ_BU zahb!u&#WT?{8VvLgfSGn56ef1sS^+CUuIR(G;_NMYRQvDR>WD*uMgdnJmy@f}HRokHlaY%H>BCQ&(o>-Q z>hNV|zbZ9q!!c=UDp5!E-RS6p<^JX-ybhu&V$z1YR^HE`gMj9Ca+DkKy2P*xxe>z( z3iuIfQ#xLAr0%Z6l{Ooacta`Z&iR!ChdAma9dVd`na&?pQ3qx7(YEkFJDp?6oGy=^ zZTzgoQyov1@s#q@JIZ;H!XaL>nKwP9wq9x^w?vec*yvYB4|v|~jVom=$}MFjney-MOAoUhNB^1yZbwPU=Wq&)P^L@Ch?RN-j*wfC><&uCnkWG*duPf`#g z^hn8xx&O#8aWOSUyNdbf`VQoj9Py*@NpDt&&Ul;Vn;6T=sZCBPa#gJotmDF{Np~Em zrkQ)Hwra}y?ORCF#i_^|pK7>0A``dYM+nf{;~Pu)O(xCo`W;kK@9)<(6bYUMR&AFZ z)*?R{UR}a)=csnBcIN32`uyh6=VJ5D3muW})R}mb`g>hyMziOHFa+`$GUf@r zQ{uAu{_Sav!LQhwhf-hkIVgFrKkZYOW|s5ouu!2UCegPJf61)2N<98LMFLB4JCBOP z5*`pRcp&fAM_tcoXYX1`Hos?!rT_GNj{wOr$FFiA_UZ8E+>t6z1tk3x<8=Ui->-|@ zP~H+$xkZ1}#P{#_Ydup1S4cGwn2&@Y2Lu0lm8Yb?^FDe(Fyu)hlc>W$a$pSonP6&F zz3&*0G(eVF$GNW;ts>DsFuj*^)WhBFjEYS~)Ks|RscFUEt)4VTX=cB^@V?$KS~z~a ziN0wZi@$5G!I|*3DE796AVw3>-uFp3XjL5dJSgp4cd!Yo-Hsdaa+XCvW{{_cYATeZ zq50MzV{tp?lS7;zHS9Qx8vGcM1yu+`HZ2tE>uG&6Kx3Y&K`LY=1ED!GjjwJtj=a#X{R4Vf%ia;H%InkjV^U08#mf`$r1;^| 
zB9$eU9F4oadD-hGfbr;vDGF}Xo26y3*+^SY!nZ`1#8_}d{D1)#=ZDuulR0SHKTxdbFb~kjnHJPHuGXt>eFz zng5%a=KtX&trE(i;|CGACv5}J1AQ4lxZ-VkBlIo#J*6FhR9^w1Rf;yQ-~BM{YNHd~ z@6kiYQ;20d3HMK_xlo7@f~QQc-sV2J_#2Sak7j6(QEZPNc~2<@A|7age zaclxrP~tz1;=jlAT?B!qWvV3QThjO2Xb{|VfH07A%=5jYSL&>ihu&8QEb(CsY8qo< zPT)A^p?(*L3WC@wxSWz2!bsDSy+;WT(#}#z`_K&Ca6p5Jm$G1$N3|g`6ZWVaZ@nKq z1cUymEXYB;!u_1P8Ab_Th8tsW zaLYW<^7H%=pUN^fxEsjs1nkA0N1grQKJN*zlBw;X672t7>)ZcKGgoZx-FkApOupaUB2 znI#f-IlnfU9!lWfb42^Ei;e)bwu|TBZQDFZ4FWh9RQ>*epKRBx#+$Q1c5rdYKLWx% z-u%CK`1r?1khqtd{};CGzZ>t1`85yEycgPgE&%ukzQX^p3|9)c)+P0QJ>crUwALW; zV5>_gYOMgPC}54Uk5O&Ykls2w2P3{{`8W<;Y%%#b1c3d)h;>*byx; zhMES*XE;lZ_RAS>Ge8o8w^l4pr0QcOJyhetnSDET;_!5kUm&?F349j`yG~Ow+1Zy8 zu8L5O$CFn(f_u?_qCoZn*dS5LF166!>HYCxBoq|7G~z&>5MR&jiF>{S zG5oXhOtJ&sYV`fv{e*&pF2GZ%eZmx#<6uX@CTR$ib@;TL1b3t8i08R;^LI2ulg#K# zP}cq<(SDkLzJiI1E}3EA@&HU^ZazsPfscXqk~1#rsP|-mwFJPaV5B^cs|SU=#iuk31EU*_#}W_8!~ulwhX#BWj;zOZg@Q5 z)BDatumWwrwqKhtsM6zy(dagT6AwmnH%c)l3EY^cfcGcuE7#{gtv+*UgMEO}JQfbL z3rqfB$n$(7p-66o1xn;&_&EoZfjFr!^@`wU?rqE)!+SXZDGvua!3MT_x)d}4fmEN;NAaW zK-}{j2+u?Ex>;8=JYJkjpBJe0ZQnM7EeGWH04~~{kHwc89z<+N2ZlRsG^3^jUojjAJVeTiWVIy zB=k-l%|g7N`c-)s{W4~>XT>%Oe?SSsh4>~**H%{95C=^|lE z@FY?lByshc%(e$IM|wgTg%&a$Y;zYtou@q^V#}d%1GYlFp`RzD|6Qt~4yKw6M*V4S zf7IMs8iC)lt5z=hVZ&tJ83;ZsR$S_&%}Bpv*3?{n-4g5jp@ycb_;pqvm+}+JGw)o_ z=u)Lo(soB#@B5hOn*HY*VO!#{^z)_$M`wm6zo8zVpj>4Ld5(w9LqOv#yuF_} zTHfH9%=m7&dkxK6rcMMx7X%2%BG}%SFrB3iadQqCX>Ek9Wx}uRH~~vCl0byHNQ=ZCm(H_R^K11ul7gw8tIp~hE!T^rS!5MOunAb0XU}mW+yE?PA zXG3)!%9)2{G}rw>c=gS7!mUkmDUaqobl-|NhxR_NsD!?~i+q_*2vBXt)*Tz<&o6kGi=w8v32Z*{M(eBWZ%ehd9|`KmwxfQS z35K%aqt;U}oy{fCn!Ko)&6nxxZ~Njl$q|A!(-@a?MCd5O>i|>d0W43XVgh*$GFR)P zDoUHmM^{(DFu!>jt7hpE^fX(>*t1Qr6&^bG2 zpy6`ox3m4BSOii6e<#Y?y1cp&jPYwi(q z2Z9G0FE{rA^&qwBA%Xzse0+1WF_RXI(i;Dyu>~W|%KIkW^(rA5q4(Y#m18pI4mssT zy+~ImvJj`18MExvHEJ4CtCPd|HfWghg|OwGO=t1%R`Mz7;_rcfZP4_vva%mmlHxPn zE>9|BU$Xex8F>CCLRaid)Mh6Dda-X~1ZH2lQ(v^Dp?@>vL z|DwKUv@LibLxR(rS1wZVLz#c$L)2JB`;qMAtwxEb2iLGKkVYf1D+A=6QX$}bdzajx zDuOmUt)aI|Xj8yEu0`FO0)3Q8V{>`4(k9{jR0i}@f}uGgkki6a(W*Ln;T~Bkfg0(y zF>L(d(16tAIXwRlJj>@((rWgw>c&pO)7O?JP)n0-*D6bRnmr!}9uSEyKr zm(_1Bqrc@y6ec=%QYD81(~sUO#WWynLYyu&96XXlyEj<7;sda+=>qi2?3bo)G*ic0dEz$ zzPneVCGfayCyP9ay23ZkTqw*J1e!wEqHW*l7@t5)8z|4CLg&|HfCLEB3Q0S0gk34r zGGaP3-gPvEZ3|!BTwZ)8nS4r4lWug*BE@aRavwgvQ-nNm^}Sh}AQksW+J zU1rzz;-N3qFi7)G@l|w9!AQd=TOa9~Vn%UQUXE(3%)xJdIv@~a*4?<80A*9-plvYW zoT!p;AI57=V{=sTl0j?=m`8WAf4|CK%U>r}_|d&?<@2nEOhBIC`eXE}e`Hpuk=baU z+O`lYJd-KB#;h0z5~@8NNN9SzQ~p)k+CEXHd2iHlz>?=W#wcEkK5W72kBiP(#NHxa zsY+@Ye{y$?1mW_XmTd(a-;QDGs>?V|EY0ZSoGSx=_C9^KT%}T+U8s`jbx!@BWagW1wvj8?Vr3Y3?)y?Nt5H;wtLd+~+OacCn+scZeygcy27AV0=dnTK^F#7lAV@&o^W`R9*(HGTfV;4FogPbC_s!@^Jn6iV- zaZ0p(n7iG2@Mm53O{?iI^f)8y2C9(qgl>E?#ZAsDZ58Cf4T+Z7+mmc$e z@>Q9VCXwKKq#i1Z=yV9r+=9N2DCKkG`{IH_KCdMsbtya^@OgC0O zLRPDCVCuU`JTYxw$y@A9T%FMjBkCh@st*pM9Z7<1Yi~pUxI7qfyqB<82zvHHpY^-r zCx$|I^h$Bzz};c z1g+^PNPW3)J|_f_@xU~Xh__zxyBuy3SLfBk4A-<*X?-cZ&_W}JJX8NHs_5(_RhYzH zK;Rm4s{d(=e(bfwziy={NrGI9B$oIv zn0PmUmhxelV^1iK0AjkA(ILccc13Ld=K;}Ln{|AutK2vC)wMACP=INtwl2a6MU8ym zZm(OyxkouBZb7J0F*r$l+-ED>%xR?Zrl2`%;s>)N3zjUqh}W=>$!bA2^Of<-jIQp? 
zb@`u$E_1*Ag`8I2!OnpQ;A(>RfsoZXUt3_u)Xf7FXCajDvU>>_X+D=abKgO`E;ex+ z7gHh$UQdp}(NXW1uIU~J*Q&{{u$j1~zxFX8@-sPiM4&S@C8+pO-tC!1upcGDNg~3F zYFA9=0TD*|VCt8qfK+R|tW8;A;`E4>5xO&#J*);Jcmr1HsKxXJ%~j(i_SZM_vU4~S zZx%}S_nFh(ssw1&V&{#TmBdXJ^=NNh9=z=axA1Z*KnHJg_LWp2^7|K{aPoSM{CSxP z5wc>XgL?bBL4WH6C;J zbiZIFi|AnYKu_-*+e_)ElEz~rbZj-|G-zrmeHg;1nJkKf@1vFxEQ6S$s{8fe$@k5YqqBJWw|NYnBH`55 zVIDtDqIRI!Jh)|15BY!7gtDxdhFg6!P(mG)7Eh7f=eG-!O+InAu*t9RP8)9#`{~3o zzCUWJl}TeA;i~im63T>Rn=>BCJ+qY5b3fEHI9;$-A&;o0)rt)~DRcQqLOghsN?y2? z#TsZLe4Mrrfax%5t`_xEF$TM3ShGCnS}V~nqFwmeQkcRS=x2r0+@f8(C@}EpX)f!; zOyf2(w+Ueht}RoWWUVt088Wg}3Yf&NfDMNz;t_Iz%8|NQpET)e-|39*KZGND47ruqM7tA|J4ajk)L4Dry1o*Zv(w7Eu;7Ld5f{5r$Iqw~ z=8ktdzhm<^Y3J8muV2MEKdtHPrk4`@O5a40w z6Tv%Leu_Am$;b-J+6e%6ED~LqhC=A?Q|#d0cU9LL9;?MXsBe33Vs9G;YbN>~|70LH zpEc9p*%o2ocpBX8_M~R$N=A+3L3;LJBLc#(gjU8t##LSArr#WFVbs_ zZ|$kAq9=W=yceq7lq)2k?`h#lP-&@tX?Gp=W3^>Bk@2X+2FH^+CGBq z%aPmN+3l;$OQeG$m+}j@VR|hWJAl&pl1prD*0pah+pR$G=#WSfm=b-xpdv1+)gr`{ zMR{2O6>z#{4(Q=kX#1A?6Im|ho8vWAdC=j^ef;NG*^-}(?>2wBVvP<{-1RFfx5(W3 zeg^dc-B5qoA~^7u8p{%ATX@NdW_ArmqGFmI3cUXtAf z-yYcR*bisDz3cBP_R0{g_STp3e$7WVQ!8{EvuS>5S5Wk`U6ch;&bp8(K9{<;4+p!w z8P)@gQ6ok8?=?g>CrjLU$k<8L@6u7eKYC6uJVmhm0k1&siJ_W$L*KWGM?4|)l0C`X>#8{;bOMD66?%dbzHS2jcGu z`Z}hJPn7PrMW#F>uZSC~p=7m%%*nF4dER9Rorl3DL_hFY``M)kJKbfcUy)aheQ}^L zlgGu?r#JcjEOFalg5^0}fyrW|jOA7QmSqJklp$;^%r0U?g>UqAR84TfA;wk)tg4Ke zM}I;H#MXt2&Kqp9<`!cAl5ahzz&WjG-P(IJ9#z7BqX6)B9E8PAyH=>o*mhV8;a42K z2rB0gzHRVQn|_j>(V(~*e;1rfhaD4}>=F?+Xir}Cx@vQBd@N-~itjAn>v^k_K98H6 zS=Q$9Pw1wb!1W}ht>$Ug_0be@Oekm=iU@}swK)5xz?|IIu@{qZ$Tc>(8>pQiyBB+@ zq%%!>MDyp(jAP^PO-B;#hwvp#M99Bb$E`;YO)kQ4T&p|5L+9g&_`0ampt;$S;&D>A_U-K z#L2c@Gl+2^??Sfep&pK+Hlsc-V;sqfYF!qccQz7jcm+?qCHpJRvU5^rXCJy~y@$>KuUzsqA| zWc=!V*?tTxG7@z-3xC^$N8g`2zB&S)}pbcT230$SZ zN~7_@025=;!S8ybXr|%Su~bP@(SkB!D-l%DTZqY%VBB0)HO+7|;7N$7Y%3uC5aWz@ zl>3qxyo1eJe5n%o;*u+}I>w4wfxmOc0?n2k2d&Y`xoeD`mo@M#KdS27WP1$xwN2L) z|Dj-7Xglo2?SqA1S?&U>#A`b2_?-l?Ozo`fGYUKYM-~1-1CJ#ZLadXBY99R{Y9DdD<|*D)tuZxpnc7qNrW{L5;lFggf_I|!#Ed;` zC@pg7)z`M`TpT|bP$bEP3InfLxw2Lyk$8g$Rc`!*{|1dN$?xRs^Y^7mV`pmCRax5YVUrsMW zoXo5c9zy`Q2!HZaT&@2R=n-i(a+neI_B28>Xl|+T&Mq7%OlKd+ul+WKu?i#41T{Uq zOQa?KeiZEN2qTZCT&j6vX=u>sF2AXK?XQm2(IkwtVG46kOXUZsJvEYyb1w+=dhnVV z*bg-+dt*$E=G}wLX5Q`OiDeEbVncX*F~E%jkEgD|Lk5Mg@1=*78-I%Ivqb2h@IHs< z%G>aMzv(eeLJbK5hG(kA%U+_{bedFrVH(ZL+z@rPo>PriJI|LyYPkT3EIho zOS~hxpE9HNp>oW1>bpgUCgTSEvaNdtw@NKLPVYQct`=`h=a9AL#InVP+?cK4DPB-F z9Ca3yBtL5y4b}o2M0;FA$rgy$4Q5~FxK5ErIEvzc3{mpB^v&;Ae3;T13n`oQ4+eZa z=lUKvct6#L$f|sgn@CQkX(zpHpwcX7Z=!Uo|JgKb&l1L@$v=1XyPIq9^EQ%}6MGzu zqmypPK{PM*N1s^I2Tt-&Yg?e=ltB^Yp-h8!je+Yl;EKdvReZ!o1i2ZC7g#f;e>qnqV4`o$mR4FhnuMD3wJEEi&K&!|Di|wgJZhiKr(6lQZx>UUrX*Bl;ojp8FBjzeHZ2WAMq!1}q0 z@w6;~JnLlbTkQ#W96^KM@l#uRLO>E=Kgdy6xoc};rAWx@6@P}x8c!^7emP?=8!}jz zJ^WOtGy(QPiR9Dz&%p&}xyVfd2?sc`r^9tC7X~>Ph8+YJ{ezOvF;}bdLix5{JWe_G zTJKF52*p|SturE{f!mI=Jf?b-!2hxgrWs+at>;={8x90p=r5#Hrn5jy8`asc2dKhR;d?QPZt&En_^>QTj`ESW|ncZ+qsA*X;C4YMBdnlhecG@A{C1N82ZH zPPADbve-4qF;=tvUz)Gd(m+Ocv*UZ`o)SUPgukE9Y2c$ON(;YI?ROuuW5vq?KE39< zZoaHK=jH==5*pKM4bf805y=*H*cPaf1qEIjC+(<_wLkVMX2=F|`!Ec@Fr~(y^|=t@ zQJP_<1-`N;fCvy@sixn}h@icQXNBM7qR{66(-~zB)^^kH^RFmrpxJ_x4~(&K*hj7G z>`F6gMF=3?E!2wGhM@B04!w%Pp1{(mN*W*!q}ZQOuBn)N1yGy7Fw8~uZk}NQ!0%jm zEhJ?%kzJ}`7`i7l8%;8eHP;3`Hc9=QZrFrJXDPC6`VX1 zhGQsVoDx8RhB2EH4xW>qFN-b&$Ep}l49uf+iEjE<~Rgh-qXNK`~v z{vfm=7OXLcC@5cho^T`}zr6@MV<^|D0%wdpf_AjGZeH;$!MyPF&`8D=({E4)YXWrR z(sj3M-HPCn5m1bxCox+eOPHI7Is4P5&l!Fcq%}Z&`=uloH8SkM-fnDosJb&L#8kBo*_~yzr*>;UIR$rP9s(U5c$TXj z@!`puDcjAuJrzY$K#LS-_jEQm{@`ids0AEL1GhLXp~??2!f_~Ye$c>|I+a#N0Z}t4 
z;PP|Hwb!sR$vuF?V1%Of6;k>>a+$0HY3_^PnSgt_Z|_Zl>(hR|bB@@|0}z2HhF88V z==x90i%Nt$ol16Q$}tRi8tmD*pY062-WBMX@;2s?1+-pA@T}>ebe1_vdzDF1nk2Lq z@;SdW%L9(Sf3i!-o=f`Hla1qU`YNy|zLppD?M;glS1eoSYu7G z@wuU$WCuyaXJe)ON-yBuH9PxzEQh*jrkO35BIjl6sxqj99ImvUN`AE$1R(q=i-u9Q#Yb0fFL4^3r>~=7p_W#O(o&k5{!-GSr^GU}gn-+jJqM zKC=CTLF0?ZjCx!gURVO}3%*K~7rZ zKT4w|E^gSO#VXMwadW#AJAM{HMx(=aBHB)qC06Bq$Eyov-)!Jk)x=m1;We8SYHOg< zjZ#Dh+V^g;7*+}x9pOntg?Xigi@3HqNCApk&3Dg4e}5+(u?qSg*iQmkO}v{iHC6kkUx<>j7;o%a$UY?JYVY;ylr)P73B_1s*i;ZlGxGTSc2r zmJV?n@cHX=xSXmdIsUDLM-vB+G|r&#XjT?{&URR{rd2UNE4-M+a6aX1BF6%9zF9Xk z-kq=9h<kFJpf(JFH@_s%-C^Sq=(&FnAtWiYH{REJQNBY}#Wzxo7HZr77h)u44Grh=uX*^Rm z^+=ySUCCfn?Lj0}X#P67r3)cnpVToIN4?V19Tg|!HwUBLg|i*JX)Q)DIh#4Y$3kJ z>;m9a;EJ-!+3ZKQfRr<47AOfJpQz#XH1E7gy_?JMyDQATnQ-^#-*>;fl@-6<9g>wM z>l9aBAZunYsIr(QkR%+jA@Zv_z&zF4ARhkIO-^tY`jqdE0i6$=-7T z9&2T2Fw}(!2gbevMjE~(Et!wFio|3+IR8}P>Q7Pnkihr*yE`IzNpqU5(Ts1d&mVSV zWql>N5T3~$F*77AggtlcqEE_W#Xf6gZ~;~Kr%-N?c#I{;n}MLy3;4XH!`xAR_6B>A zIc}Hf_x1~z@ETLdSC88*K}QW(o1%8aFrBU6Ln^9Xuvd1NB=e&$b>35o*vp$ZxNk*t zf?>-@XZr0I#bKho8|FC#-x`_uJhStK%Lp)dMTmvf#_b}G!9Qy8x2*g7nbW713b@FTc@0mMEd_)+P6Q z=%sz}ssr5oc1#BFq-uAe`~oMALUBEZ4ENfL6%a0#n*pSG;}36(OrOKkw|jyc3>u?+ zvMwz&N@|Q6Wif7Tl-S8Grphs&;%>cJclFZsdSOwJI%ZS%BDxl`!7%n53^^^}bFv+x zmW>`-FkcOf@6bjb%Z#Us*hncMjxuG@^5(vn*=+XALVpNKbgqk{sI5-wzc}$N<;?5N zuq*MD&WU)#end8)1e+tUwRu!YfH7cvSXKcSwGB#;tB6Ls`*fw@IC?evg+AS}Ks0l8 zmt7Gp^1}Laf&yVQfZnECIiuC{_h;||5#RW&bPih&O0FKO1^FV;g*KA{poJDQ;;on5 zv@}tm_^~;QzNN}HXY=bt|G-v~%{2B!^BdUY(9~#osgiT*M#V+!fU^$jpxfK+I%168 zspml+@5~q|wrs3(%a5-(qQ)~b={$qSu1t7Q9O}BB>=kaUcBqwrgU0qUHC2v~S7OaD zEQiLKG`^&cH{2$l{BJkL(HA_VU9L;hNg^|gGsGonccD{8zMAq%IUs1|tzrM3&V~aS zv~&1}beIj7jI!P3>7~#!_{@te74-IGVu`<8Lho;13fT%(9?^tMReo?oVph$PF&GpC zs%~savWO24Fz?Jh+EH!38`;XLqkQueq%KhgLe9K-qmbWY04@}fxDRGzE!m_ zUt}l-X(pN@LJidMSv!A{d7FPjQ&^4f_vtEP^0X6{@Db<5Q3~bIvYB2-Jmdj0=^+7x2}N_~lDywaC@eE2s(zJ0^b#Y!9tU zi0HE|hZ?glnvC{#znT0^f6LWZM;OtODC=R?!XR%-CAzN9t4rJsi3zk z9a^Y^dAZM5NlaBZ>EQars4J;O$#MrJ?i}xxh1r!Ik`(kRYAYMH$ikV?Xy8Z{?OwOB zxBk;tvXe=OqU$DW8=YD4vwVhD{yG_(>)fWcRLsBA-IbtL^!(g!?aXeYXK~l;m{{Bl zuen|E*5K5};1pb7zHH2H_@JLS-gMzN{r>m&Xd^c}zh9@p@84u^&av(z>i#m(vYT^P z+KN})rB+OiCkS^-0T#$wzf&tK&W7N)hoddO%4k!d#=h<%n5J`fR~}b>Po|zMf1bl1 zznW=u60h5z2;o0^7~4jjici|sbN+f)4S5;I&$>>Im3pxqO9e?fSC=Ab-U-oaj%=LN z$T%Gt580w0nz*w%krXTVp)|K$_c!_%i8$@BLd?|i)?K8ipzw3}eIIi)S89niY99O{rfy_x_8Wag-)`Qtm<)fLJ2pkA^C?F6CvM-JJk|Y*wpL%d0+9Q z_KdauE|?+%2z&6YQ1FjiU-WTCcFpgRj+YA=B+4qEywwl2((dJ+rsF3iZ$m{9QzUoo zwBu5~3WHVRhWQz*YnsqZrV9S7!%_k3B-UlQj3WYWb-Tsui##-5EIH52J8 zMQC?>A>4Cu6W%#au^h-UPt5+d3lHAKxjypmm#g!jkKANtUZ|PbuN^lYp&up>5TTmb ze!3uh?$9H;HPIf~kCIOR14FP;}a{AV_=It2{%aA;Sgrt=5DK z2*L!XSUuXJP2aP17KCZo7d&q6Sm&>Io*41yqLagB%R*=i>)3{66;*KH2v1B=a9I;H zYAT6OSkD{@x_Fmh}X)X-grJ$fDo-(m{NQyMeMN^}Xx5 zycl;qtjz*(Cnb>Pcd4vdlFRAYd4MzCcI`#ur}*f2Sc zaClzFg`!Vk_Vllc^?cRcU#VvC6)r+P=`W1Se^4!FxLmdJ{gqR-P##D+GrBVzmM7bK z@6Oe-&+gO|9YwAn^2^$M?vTayDPKDF7|+ix zws+qxwu!!HtrzFA+fEQBGH{t>$?i-wrT@)1*}q`bk}L|`91@!vmh{Y>-|Qr;G&T8s z8^r0Ty2f3NCy({I_R-S@{n;pF>sFpcdaeO-@r3wl@zO^8T6dX{xBMgr4Ed)|s{AZB zB8+mG{>iJ#NG>b)5tLZDuI^}mPAN+=>EubPOw%5WUnMN78JKw#dF zNR)LO#Xjx9mW|s1w{Hw3-kuiaA6(6XNe)+N=b2$ydtoadnQvud$~5%$J8N6nu@Kz0 zRI|(W(@*n3>_V4~kWVGL^JzKSE@!TNMzWQn(nR75A@66|Ek{F)I@HCUHVBl9y`?>F z2sCf+C*L|(39VzYU0eLUEAZP);p)BGF-O7YBj#}z-6nSpw75(s{)(Lh+T{X0yc{~W zTNe)NG4{HgutPCxkQ5;VZw@6E=9Rn1%xT-OV7OlPL%eD0+wMP&r?4S>Ww(-}4DGG$o{V}w ze%%Usr`yd%Q__F@>~>wl|?hoPWOT%H$o{95jykTGzABV3D@PU_9#fQ)abbm7qC$ ziV%CDPwdaekI{f$!kZpFC&L(YDfD|{i66y>b|nbO!C#{G)qQ=@o)*IAJoOH3^EuV( zON@&tDVUWCW_}~>GqgFXq{>e3F^V_D_zc3(ztdZP4>*hCC0{4NnoYZSV4+Dk^HJot 
z`Y<%m0^|$=(0DsfFK#rzIox9V? z?mHE#A0kqxMxWAe9egDXmGmx%7pT~FdNJ?4Qvn!@!Pnng8U#D`mCu5M0rU(4EppL? zS;90PJu-F_FMLrVu4YX0v#b+lV+F&{&(Q{(EJ`w^Q~DSSWYG>*HP^9t!9oC3$IQ3x z?2BDb-#x2mDrIWwfKD!2o)>=_tWj-drXpHwEY}H~7fv2*I@vD&dkDtjEw=;yzIV2*GRS$^)E!GLdE&OMbokPvP#=c+HP_ zK&27@82VotjHVJdd#SR*%_8PsGK$`(!bvUQBTpc|pn?DT6=+sC7N>$JfNjEm3cY~A z^kwKkb}i^SP=TO%G1c#4gfRl-!$Yfg{N_AvslGAZUjxiRiV*djV-jW>YMqij0f zRDP@UFQ1|L)USAwmmTtr>0egvw!2NF-_d6Bb0-NpR`?ag z!66a3K5neirN#YqI6~x)R}XTne?e50IOz%_4Fj@&wG}LkJla?Obm!&!uZ#m$XGDtl z6VKZrTlZ|-%P+6*`N$ee|A0BPh~Y{ zFAoO(mC;-^5C9Xqb`WPJI21?3C__RXgZ*+fowD-pvLX7QDmnX%dJ>X|7tQvG;S}4y zWDZpZ^GkyvJPA?sPD)TE3jTUAKApQD>-a^X@fs5UQaGX+WH$dJI=}++EweswsV!mo z^!{3NZ&cIsTZHd_MKXr#e+ucpoN(|ITx_(`@7APIb7Se|Nj zxAVEvA0QrMQ~$6aqe0@4Lpd66K`me~Ui|xei1e`t_P-p_hWmU~OGxIvN&|H_q(0Y? zE7$JpMj)FP{KRA2Pt~ezGBOlOgQXflK-e`k{kFnL@X)UPmZ1(NptqUvUaynB{vI#) zMN<+6^biP_KT^D_zJVG2+~5ks@-{<~^Sx3ch~SS6i?##VJE*cLTTU*R|*+A!vv@Z%x> zDqyLjLhkohy+q#gVXXzE^_E$9+uNv_;d=k|$P+nWN4^Nm$p_^hOvNWa$M(Smd6F!; zH^*)AU0ni15I4+weNU3+msln%%55N&7Y02&Jr5Z)q~vD1D+|0p8RlRp*>*no4VpCe zANm+TYdx&#*T^{#9c2&iRQM4B9S0E_`dS`HBWbO<%j*&1f)LS{a~zil6pI8rqPWeU z0**}ur&;zJ25SDpSco?TWJ+J_x~AEvAVkjKHjyas55z&;^^K#3pL*T~{txv2|3K&4 zB(l+eIjiop0d=>51->rcXww~-mp{{m{c|Hg?Y4rC5N`<)6UBM#iNAf0IUjlED#UQf zX_q%tcp8*bzjCcl6z9IIzD-C0r(uSVyjyyF^{iXw;7W(kjkkepnJ_yVwRfk%$Z&xq zp*3ScK8j2;x@5kF^tP@LCPRoygZF$k6^a8)?Pox;){2i#jv8*AHfi$UPOCfC(JGtn z^2@f$WA%mXNjj|i6*Y|gib2qKwYWow+YC@0_SLG>ZmgTvL<2WZ);@4L^jwom8u?&VyqwfGMsazrl&c>Rr3HZ9PEiB~Yc|-F_vU3o@@1 ze*bk4^d*0WqAkEr3U#mT zWW8{Q5|NSU10SoM28*U=ej2rZyPhH4_F{SkW4qkbIik9TgI%6Y-Z2d0rW`weX7Dj0 zWF^kCs5*`jzCm6Mgc0bujXo2QZh0pR*(^e8byOw4SrRRq z{mzPsMbX1sCdXUAScHSflpad1sv|=#ISFLz0VCycv|q-9pUCL}UIBn!ugK@iS?JMh z%M}UhJIGVwNQ*!GJrA|KmK zEshD-p{}9hf*~ye2O=fqf!!I6jdzeg^lDJR99g*Un1nhKBa-Gas^$re-)ruZjXQo< zmIP5oSXDIa);IoAUkV;7@0>U_O;=Ut)kpoE)z*F_pl!a#_bUMQW-GRWUEv)no8RL^ zj9VPHRkuEOn|K&;V4FsIJ-*%5_d~%Gleea(rP_&WI|pk@N|i_ac8Vrf0nDUuStAqo zv~rg!+e)?GLF2g_-cL8Afmi5m)zsu?-!tf$OrEwc(P!1`GnH1JJll}m$Wa8MUld&m z`zoA0^rU49f15%F%|)KSI;hQ%t0qXJW(V1SL(65w9DQ0S8mo&(3LgQoYTd@|Y$&hB z>C1sws)mujCifkTAcBIQ0lWI6%ETKdy0d9YIM_+DBl}nb@@A)nba<(LUG5aT!N5${ zxH-Am@;zV6nf1M}JhoTH?Yudtgkz?=@6C??(G@t`kUpL@r-=`Sy(xOuH1TU)wXC>O)z(!}tpLpIe%U!0 z;>pamn>Aul-Bq4EhGeeG#C|5dGkv!?wXpf8tSG>ks$4g?iq&-ns!hnPLwisiVE1j45mXwXF_LM29s>+4@t-e7l4A z#8yp_^Mb!%hk6&-2N8ATIqwmDyOTdiABl8ttRv57$u=~5Xd0L&4RBC*-RCeD9%qO3 z7O$>5YaDz(+ZxtV#=Ym#-gPg1D`_b8*mdAIG~5eWh-BRwU$nH(3R)e_w^>bL=E~ej zaYI(dk8(~ml(yqpA3f!_5-1(yVv^MAnRRqLdFvG@xOLdu`PSIIn!POEbjpaRmt1{bDKcTc8R}!dB2_(~$ZJ#jFjIDef(;PK1_#sXj7_mm$zN#XG!k=oohqHt*T&1=2v+RpWPGBoR4HyJcA$oLA1GYewHb+SZg(E z;d=e6+&rmd@{^CDW%(0Hi4fHHi?!{7s2;E7mQ{K+@te-O|(yg|Y*VF55 z<;>q@nSWI%504x2kdXd^FZSGaE-MeNGb4CDqvB=BGt^Qek&_>Phb~6d3u`v_va#l< zH77p`X5LHpnAR(IQgLd+JFV108d0){4-5V!@>vPN>B}3~d8p#**6??V9d11sS@oB( zPeE*=LAg+F(!0E!^FFfin#1R?^lUFCY2Qpj84t_)rC zW^m~CI2GBjE++(QE}1qQueoI&R@|ddE^AWWtVk}dJ7Us)(ittg`%2-y18?(9Odzn<(;< zO~Ar+YOFvUAAqe2col$dbrD|_X<1^H^oKt`NuXppZC~*n=?`n588HhsQG~c9QdxJ+ z6f03p6>2#p`0CX`iQFu*ms}llkajB~N(^@G{^d{7)lJXeG??NpIyE~J84eg;e|S{J zRTA3S#0s7tag`lDk(;IBqFa=(9sGqX{9;lf2}E!C?^3?i=Eg7>#Cv$rM0Bz?VK(|E zW}|V;tJZBi_#PqRtcI&Y?OZg9P(P_d4Bl961IhfQ>SX=wpqCwbszpldB|f#gf#+{E zNikG=aNd^B0=F7L*D|Scfh}6*2U~p!U#yDu$22JCQOcRtsQ1`4Dz913mg*qNobcGN zbR01x8<*2#GN%5wwX*xj816`bA{Ygh*?IH&6_EFP%kS@=-1}x}29Q!HtOEiiQ*Z-| zd_#M=muhu*V7yKilxPk+nLB21?Ux)-K?T>0}xeFy|z_V98J1DBRsDE3X~gD0Bl?=VBeU1b#C9OUoXT@h?u@z{p^1Yyx^RI!6{OKPWP_C= zpQLT5Htu!z#}`^MP2_07*vN*=#SSnHTus7hCfS1yD>bVVdI&^aTD1bc$dBXddzk7} zC9DHlu+lE(Hukz&xvsIp$`?;nn?Fkfg$rx0?X%GQD=kE~#*{*% 
z)}pXI2`5(ij-9-T+j0LpbnB@bP5bOo$k@{e@KobJF2Ls4Vt*JlxJ(#fg0I)dxERLb zc!y%nnaP2-VFT{*_w$1^@9#2ZAf`cPB?2nP>g;n1wU~H>hGl7|tFFC_`XJ0IB znaM+x1vsRN)NqAaQ&{WbG1G%$;AO)^HV?5^x+XiT`9gtq(!f)K2K;1@VvdhUmjr6+ zOw)!CaXiq@`!+p&rBdf=lc#D9fq>l~D)lfW<`t;>x8+!m8$AYi29J!18!{q!hH6@l z5aC5P_0E&X#m5h1NsVAhPuTHk>&BpH|5E%@f_*o(i-$w(mS2b(_mM~3G^(WUYsq|l zKM&=A@+SFGG!A|43NLun&?NL2a(_enYGF$WT zaHCI6OcrXQ1ZZYwyULDFe#!0XwjyV|QYU4gfG;Jut$%t}mK9fYBEV3@UmU6|HhBo)P~&0{{$c{eIk~E0e-FJ z$j(nzGq30@8oCEP(A%s*HlO5jcE^jco>2go1r8EW>^eHz`tti4q(9zn$hoiT3>(8) z%T=$rV52uNpMWO;>Expgxt$?^FIGVM2yH=XnW}HGUX@#E4dcKQQ34FC$B#h#((0{K z-`w5U*f+sYcg-!XXSNR4W1r~>IcYQb&ezn*2=8SIUkW@7%T$DzizOtfQo?bQu8&G1 zzBW~|-~1>trTbz7k;~3$;&D5hlC*`n&7l}Sm`nGZ=a+?+Whr~g&)4ZdOn6Ssw8Nc^ z*OpDp9?Uh55(T(^XuNUokFl`)QCuZ#risE^eMDXy-DwpHIldP6%u6SNYoJ)b)5If=i~5)P-Vr=?K898YsGn6i6E7foY2z8IVHXnU+x)|F>m}f#(i*Y;7 z?=rUoH|q(nP5AjJm2_7-jTr74dei9>V2@kyLEJK$cAm@z3;Cw(V-huE{P z@0X7DPuU|+_nL7vQBIF1^H*IRHWnj9d|y{#&b<}FIvxHMRE5SyR#7}jVl(1o8SVYR zV3bQenFPtTH@8;MUpcOU^^#8efT^frJo9no;kg&5vG3^|3x!hcbSEItCKqjC2*O;{Ld_?#y9kU)q- zZlc_N!(%SkRMIJ2LaQX#YJ79Un1_pC)fjf!UX7ife=aXBP=I4$EhuK!dvu)Wd_I*} zXEXX4i|5Ra+(w|TNqNQ~YKMy5)gmw7uF2tTA@(M7(h98)bLA0FP7MjvUCH*#n3@AI zN+p+FNau>;AIIckM`WfO)G@47NJ5xzx1%3e?e*GCs#B0iaY<)BO6Lit2N5(^EfDN7!`Y>R6K(k@9Rf66d~*km zkgwjC8WlGAs~tv$TM1YVGP+Hji5-tQhPBo3hFls`Y)8hG#Dm?V^!QD7K7FMh!+nQq zu5%RV{ry^hFr%xFfh*d*p(zMmcHj%+*|n+sxs`+us{z@kuMMqIWKku^SkPLljuyB^ znYS!3sPxSxaF_}tA-UjDHfh+Ljw+@IeJ3r4wdo;nqzmZOPBzl|OkdV{X-`FQEKk}! zbGn~j;U`3p@xY3+OwRsV(DP;QpM@-QZp&7j&^D_Hs(}griIm|f8T8wP!goGv4u!{GXv#i-5RB-x_8@txcb$Rs2jhL4EpPCYjNtbY9$|>j0vH9oY79k7_vS(n-U~l#Qscl zQPeYbiTug#1Cjd-!NQ?=YO>kGkuxSP8VAnU&bS_5zr-_UV$&0(wZ@&Smp(>vx^3jO z%O`5A*PKnsc=`|4=sx?TGVK~v)z&c7WY*vroz^_UFvw}Mv0mvgIiOZiJ3Yc2#cR5G zWF}C$!oSmBmYBcTTrF8$&bo2QY`B$Iw+tRe9yF`zXB8s69=Bs{7`Y5gx)gJp{nV*q zXCi-|x6U;~yGbVCb#f?Dr7E9au}0(Ue2r%tS(jid4Q^7wEzR83`cALasc=Ni;M#qd zykq-M8L=#S-iaxrIviADr1Oit6Vq^vQyem>nSAk^$e)$Bw_E`uO&YSq(l$|XSu9&`sNA}yRMqoEn$(r@flFevoX$JH zBwC)FS$!ufi9r)Z{s4)hqtgkCoz@mEF0zb=LsH4QGZ0}lf?pd^2iVa&EOM$UQ>^0* z7g7mC;pdr$yNSt#_KF;l#xEk?w5@Xwm^3w1m-oDnq1GiF!AhDMm~fXDJtFbU&1>Lh z{%u-<%7ET05^B!E^BYZ{$m{)Bs>hD=5>O*~q&J`~&L};YGxkk!MxgPXmwNK}5=WJE zfE1c_1XAm(+SPK*&|&>d-%*u5QN`H64_uA$sjD^vv!@H()h#x?`eWQ3l5XuZti+AC z<^=432@Diw8cH}5+Uen>-(LHmp6axqd!^~okYYu?AAWOrI-`EuB=cdPz95e+j;La) zim?R^;pbPn3bfkNg~q z%GF(}>M(u$7Fx8b{-`^xycX_{VH6lDd(EXcez*pIg6%x6lJP8^Ab!gEd0PB3)|ylp zMH<6r%|wkt$7HneoG@g4N>ZY2KhHzuXvg-*5w)APY$GV>@d(jk@~heHHE5jv2v-zo zOft5)P%68&#QKiQ8P|5$kVh!$BGHqC)06XU+@s_b=({9$9sNjFUhxw;&}Cu!KFPgk zr*PEe*elAzZx}gg%F@r5eoP?vwJCy3Pj!Ta$_-x+Tc%P&xzjsqbfUR?c1=2Jnngc} zPWX(~VSnKg&#;4Jmkt!X!RrTQCa7N+<|cKvR#~&DX?q{c7P}*o@IM2tRm;r5qe*h> z%HOd+kS@T zqJB~N`UO@pSFf{Q!a%fhps{@PUUSPwDhsoN{$dRK#~cW-aNv{-bI!9d^e~%z<3wA- zo=?ygntM!3=Uc2htq`n&?HqoyOE;w4v=P-6bWTd_7H?%Q$ayyZyE-~#nRM*k7S_bZ zykel0hLZWE)U8>osRnw=MhyDG3x36aj*-oA$Z&;z4lYqZ(;OvymgGa%%*?ZOx;rD9=zn%*)JM0B^xORWZzs6_ zlHPs;kN!(S0UrYG8?s>`MM z77{go*0J}GPJz1n9_DZ`hYXcbAxw-Hw9>aD zF{rp+N&KAM+u;T@?Gh0VLEY`wVGfib^mlpn ziV~>xPIC$2AatAaHU1-x#9K1?`1)9k&dZ$ZD?#1e)h({Bm6OUc?&OV-TUc1`U?hl< z_e86G4*8+3c3dD$peu3z&ctd!`z>`s-_dlR0}%}}G6YdI2=os-zh@XE{TEWs(NVZUG2d~B(YZ7-SC(EB(dfiTTB~Vm9Q@FOu~To#T(YiDp~y-9#Un5 zB@CSJ-WDI!auO1JgHH$^mIZkxvy2V53Ii&tZa4euUlxG|p|96#w2T zz4EHW9Wp!$_53IC8tkERqdpq~GLqZGh!d>;(XYFQ|F6gg|5u6sYheobT{)n+`R@Dw z{P(pau;a0FS8;eki!&BQ>YbK~S~AWt-Como`vN_H`E=0D7Q=s1_16uA&Lj~){`$nl z!027?3fbKfZ!9bD60M* zkX~G;lgu^J&#Y%I_yk$m#!#uGVL(F$Hjkb#SIlRzLI(YMV*-qC(L9@Ha zge>^*pOAP|me;_?lI_X&(*Gz5L-%(N2(RC$ax>$IRzr7X{vS#Muj{54SFJTa72Nbl 
z;MnKk?aBzGPgpt&o_$oejZQ^jQ3K-sYb3LD;M%^rd@TgHhW?X18y@WUe~+9BW6m|^ z;UmhY=;u^0-rL`)VG}x(ciI|I&eIpH%aCy2cY027n>`2-4DdXdVpCm-iTivPc*OEH z6hw+QK-$!a7{>c2w}xo|4hW|MuHJfYuTOp@jkP_z4Fl024A>q!S;or3hf3gx--dw? ziUP`Mak1Aggr#p!Fy1Bui4AX)AZC>RgKXHea2f)rH{QRR-=nx&``$vI#6KYU|7Mc? zpXm&jYBF*ccU>3`^fN)^vfe5mX4KB6X4?QKJiSKtX`Y>nchRU0Auu+V<3p9;k-Bde zA>mnp{R(wT_++oBj%MD(ocZQqc?B*74*v~I%?}CpgsT3N3RI&T8gH?q&nGFoxHjuV zE|gX8{ny6ob&e`t-sfK^GDU0p#`W3^qMR#<1$n_2f8}x0JpYnc`$2E--vuR2!w3%^ z)|6r42I9JBhhPcGWHmWeXI|QQXT=KNfb6$7$mi?34XAE@k44qast}bLUHK4TX3FuGF1)^ByC3+CVwFj7so(Gw{n%#i*ycvxWB@GT_Pw8A%g z^|_OX-lDWRih@4Ab({ftWvz7VdMEi8CTDN0Gc~(glACuB%pncK%)mo&SD7_a|F3P7 zQ?M^*O&a=Rgv7c~UFX^8uveLqqB=uOPC^+4BpSTw9dRQv2!y9w2F*e^D{ zu(jJ)4fC{B)x!zN6 z!`X)i7a~`N3{-II%H!4vBh>GI)?&F7Q}Mjo|9n?rs5*Yfv}3CJ!uB%eqojgnc0LQL zRF}m!nWf>^eU>;)Ft}T`Q zX2l6bJ%2OX_Mu9b4@p+&ORL?*Onc^x~;=!(mqR~0XgH6y^Tln#x5ot=`8B@`GY1BJSYjThcfm0tgcW^mN_j-SafS z*Vo@h4xfG5sLd{lur!J60{{hqMM=)g=WP%VmpE)UV4z6X`EpI(^}&cxG_k9@-%r%t z$3AB+?Q_RlM7^Bjn^zm;@w0@F5TDTr5y2qeq3&<8;%D9dKLwkeTAu+=sU<;ZZF;`0 zcJvcPC06$kCYERoQPUz(RJO<`?C6)LwbQXEd6b%Z(L%6Zx$$T4B}uGwkh(V=fLfZr zpS0k+ajClWY-8+q()QhSC|>DW-NjAT=+Yf*prU@Qa^q#p^w9=?fka8*=w7-iFgsOa zT)_pk@DTwImy=avJx+or<<}<`h~!C`Z@1>1b;Q0dqO1djnL@KKy@)yY8zf<%kAXh^ z&)wKFeuqYbO*FRh`J|#~CvSX7ectUUx!%>r&_Jp_lsiHDbt7Gie$n5rEcS&zN zX|q0l_6!(Xl}Xo!uYY<(5 zTtta}=i7Rpit&JEkDnTHov>Y=i|qV|2Kienb*@JPyu!x4M*xx29J( zy@Ut$*Qb%Q95=ibJMH|(fA2F!DvJ(yKWByqZ(>(+^DMSTp+?;-mVE>n9!7&Q6c}V@ zI$u0pl(hgndf(?Dk;PN_1#;MaeagqwTG`r3QIjlnHB^5Wf+yMoi)9QGCxDnF@SQ4y z12N)01()2FLcLb0KS}6KU%ob526(>;(ruQ!)q8hTU=T-C8DTdFJj-f7(tMV$PInhkTWjOLs8 zVZQ0IuSJF_siw7LQP6#eoF;lHeCIq;_g#AwD%G|xR|AHSyNES_mj_c-FY%39?UVpV z!~FaLUDf*P1XPD{Tvx%Lv#LG^SPbw171h0*WP_aI^&F=6uU#+ks&&d3_OJ-pIayv6 z6aKZEV7v13 z?j`G#2p&3e$W&yC`lX35>$M3}+6`IhTyd{J_ zu>K@x6zfaCB>TgSn?vr!VLp~Qih0SCndhhuKTSQS#{&@o&Mhk6b-n0fsDG|GkdZo4 zlt#fY`n21aPgiYP>RL^~{fSS*TnfK#kg1dG43V!k2-8SzJI9a@$l zr;Vm!{FTX>w-2a8dE+j^=nT+XAd*3Ir;LEmzE$KY!h!(5 zuL;oW_hs*;UxM03u5ohO$qHAl#QS79P3Ggom)*6K+~9r2bQNR!AnJKDW3*D*@>71V zi9DRfc{){9EQKCE98o6|W+fMODr?-zpel1y4^vf7Q@zi?et^rG?W?QPv zmuv-p^ z__h(LLL^_s^C_GYw-e2XngH1<;LjRp`-n-;+c z?>SUcRA5WTKxO+$(3+J46ii@@4@JJt;-87e7`U(C;{Hhvsp68!U!N?&N|^GRS%ct9 zIXF7_23E>L2ez2>^>ZJ5Efe0-KH}`x3 zXk(RBr)oo)6EBnL4zhnRVfeF3>$ut}pMOQFo2_4EH?Y>lmTYSySWZQoDO?Qk>h{{o zhbL4zq9*?$k4-W1{q4&4l1#1FzQI(_A3Ln`(8-v1Vct5UH8AS{@JQn(_b+Ba!)A$_ z$q3}~++Hs}_u*VbJHF`00dhHD++?e}7uA9E4>D8Z;if9N?AKSn$^Ci^CjW5w)49QkihxaI?H80(Unn<(q!6?mR=%T%ds;2 z6%BG`3`07(KYjbf-z`-PTC3NEt)+ialRTT)Z)D;Qg8+8=;9{L4!G--efBh+0I zSO3&^zfpowv0=Vt+>tn40AK(mGd;Kx}8uq5~cuBC@j=#NCJqx$0Z zGKQa~MRaTAXJqu4n{;~)IyUzR69_O;fQyU((BW63!*j_LnOKo0>VUe{R)P>uyv-Sl z+wSHfSNRK$LM(&&a1Leez?mh^#-62{w8o2w@Qoik-mC~Ld`w-5MaCK|V|v5JLI2<6 z_*n0Ssy`|{RxXsZ5;RVlkFUs4390>_)y?LPYiUL=#<}slawHe0hSAuPZQ=L31--1| z1rrkp6=@va!@MW+V%o0>X9UM*QX#hZ4&SDc#v59k;H5vQ2gb|2*l(Z7ziv6bdd=k6 zpG=M|9d^`pa>ivj7Q`O@i%iKv6}VW?rSi-&7^)F&@vB`$e;!?jd%E!E=N!KxP`RI- z;ywITjvHKB-T4K;LPBgX;z?`b(F(s$stT0yZ2;LEvr{gMXP>yZ_!^3DF0Ji=-eduy zOFId9VroDfc(&wJo5DsPQm2Y zI)i?90#@|?Gw?P{-u=@9|s zjbz?!XUd8No(t9qjz%c;xy-&_{Mhz;Pp-OK{9hg6GVV+8^gz&aY_Bd@|9>3zeUzC;MZnuRwWKO z@4uoVoU`rA9bT9hEFOle85ib^2X;*Sk*s7xfQBfA#f6+TT zY;*)P47pdi`x5L*^d9D5a-XH79x9r=ULnwZK7loc2XMI1ANl0)#EfJ7Ra4z`Dw$Ab z+s7B(;xl|YVI>*+8C$)+zi@k`qpM@}3Ge~f?mk=r0rxfx6`7)g? 
zuq3#Nmeu!{LyxHm_k#C^fb86i6N zw3vSij<1B96Z*$LeS1|h`Whu&8H14=7y*86CH#8%zE3Q3FkJsWVa~78lT3yk?q^8i z1UcCQx#9bX@^y8Tl8{nb{q1P%J!x(BfSNv4;A&y;Fw;D#&N(|rM(!(y&;hFGnNP$$ z0f0abs@bWBU;t7XR-p>!P4mrr5>w-;61H!mu;6uN@B7b&^=|= z7){RU_A@Nz0;8L=e;Q_^D%|4+nJtIbZq3YHxegR)U1#aT5H9R}rh{u!58Lbj@56eJ zIscW~M#9|^o^kQ>8G82uwl?cY;XUn-Xah^M?{!WTpf7fAx@lF>y&08xF`hNq#nn!T zgZH7uydzcD99&gP$ccEs+YM}aiU!Q(GEnKspDEF

uzLidUaid0&$Aa{We`u?gQ$^H|oKOt1?SC<# zDJ~;fdXrV~I18;Zyr_v8NQTZ_07dO10lHV+_a|dySlK~>YG9EotknLU5`<0Ydf=tk zevwvBn-=<;Kn}!JGxYp!ixzjddm-)zND1Vpj0SFJ} zj!Kb`uMx!6u>{U>+2bm4xcM7=L%2r*7%IX@IS*H;(vUH2Ur=%F@HQr`By)Cwlt zRo``**1X>_z5kkC=M?~+UXkQxj_z6=I3huc{XSRxurkYM$ZS=lzupC^BQkY^@WRiQ zbd)7;dUJ4c=Dsf{t$sGg&6CF$H~JSvj6SJ#FIt$Ay z7H7yMueP&Mb_P7ml`^fAhDViE{D;!4UvBYA8zs54(CFvn*_*N|{N$>uiR%_-W}&#xL_5nhA9{3} zcveM@)a+$@VyM+sN8ac$a%k@uq>V(|G>$keeVW(p+j%#a(i9sT>q34~K;MLfmE>X# zF5WciOH6usW{+}VSQm+mv@wxi$SDrYIptjUCwhDzTs;-Ra$ILDgrNYrtxpUxqHhM* zM(_Y37LlxCk9-GG#H(k7fYZh`ei2Ii#?w?cQpD6A4Ju;6(MzqXSXQ!Ui+CIW4} zkj3CdVfX8zUp6kdO0;iMGYviR%JKEG0SpaUSn7`zx}vjSbI#*I%~3N{b0* zy*IDGsMN(cK^S4L&WObyQ;T$_%^>}pNUi3e{1M}-Xun-cme-I=*;+}mw|3-@?I#v4 zH>x4k+KIFrkgm6{i&&rBc*7CzG&|$3pP9{dh7%ts;+e&w4iWU}SuujH3h9TT#fTw=l3G7j_{;a~KQIIzeJ~2M2 z$*KOsrl2s4+Kc$amry>asUe5ft1-I!iC!z^aCmUkt*B+kKCu?*)|Wu8Q(R8_F`80v z64A9$JVF#WZ@;yXRE_0zz3TjDcpk%$wzYS+)uGoJspp8zlVVeK?pze8$AhW2a_Ucu)>o+k#6V7Qw_!RPr`k9qP-a3T=d)7U)1OlSw;~KBY8|h zSbwvUVt5M@Klm{ez0p-oM)DvtRk0_i$&!cG>Fky5Tnyf&HX86Im1MPE}{s6}1yLM>TNBQ9@4s&H=1DwV08$*DH)NUXp+ znb=lnlk_VO4qmoMvUVlEQ?AR=A?`g@9eRzL=>PTp_Po73fgCdUYmdUvr(+GCKFdDT7>Pi^)})JxWs(H-6L38fPA}=L7Kfv zIax(TS$>wq7U$3yoL|30a7v=ld6u@7d8UVkIE0FPRfi-F`53K}v^;+VjkpXUU51#C zpP6vU*E|nR>2KxOcNB67(3F-v;eGIKNG@gi&As0F_n|z&>XM9{@30vE$r+p4!b(phc}HT>`OP~@_UX0J^OKA_0k8N ze(FTP00Wo&bl|kS14XfKFR<~WbXVk{@_tUAV;YSeEUw>~ru*yGpqf;vRQkAe*jNuj zqhxS~XudpWx}S^m`?EgxV&$YD=dUtSb|jcN-Z`P=p+0Ya@|{(Ck;nS2({TMU{tU#| z5wJ;iv#1kr4LR8#6;$Es*Hc@CV8!fZr0ET6gZ4WhjtfwWt?K}g4_MPbWLX-{?}YR= zzkQ{vJ{q6w-zKYr6~9pN&x{k z{NcbW0JPCj0J#4<3a#?7F9F>d)b6o@bK#ph=qo@XZF?w_8TO0%@LedE|HbLv|}r5%LM-zNh8))e2fr zSfg+5Dpr98hB~qOp6)a3&4+@ED)3z|dm#?Wv`*VW1+8`W&#-TWOh^J0$U@V@;Crec zPQfjedvICMb6^x`oD+qiBQ&8D;1ApG!91Or;X^SJ@5-y{{M{G-vD?9)%n_DpH7uq#U!GuSeV#Dd`H-x>iezTEVa|qHNFwk?rbi-tkDC1K9eu%QgzXAiIXTir;HtTT@ewdhW#D!K zjvjzY;P(1DUTCmi=zX{=RK@8f81{xzfp@8SJO%@UcZ{2um$Vfz58>XOV0=s*RGkgjzjeV}c#K+WhH++Z z%ms!sv|{i;`-@)zmD{?yKFhuOH%F{s?HpFFjuJRVMnajO#hT970#0UgIc|QpUKe8K zw(Hm1q!3)8EH@)p^XH4rpe*4byQhIiE51J2o-b&?Cx`FCF*t z&-r3RNr|=!R7VH)X1RHh(9cyf0(>@|%3?82Uu2sWF>d+*-hbNMz-o6YrgN|c7M6QY zSpMK@e(wkcX)|Y940H>m=3XD2cMP&AkC z31St;T1=j+7l~yj)tDO-=q2&3+JJaT z2DsRIU$u?k1FY8;B;e}wHc$^T%u-T*NryML#QR^mr@&%dL`gMpdP?{YkWT9`;T^Yc z^Zy22{|8jNjVj&u{dCxt+!F*s2E^eA9kC?KDccb3I_YPDsc$*H`70^Cp9bt0Akt1l z*hmLs>3{o+jnnAzq)#L7X-xHQB0vp--TZiPd}Q_Hw$9KwhKKNaSHo;u&DUC<)ePK? 
zrvhujKIR<=cEXXv-^n|x9(bLkmc~x;sd?n zxMuTLS>3-4M#K2{UV%Lu4%B+07*xNOAS5>L&S4aqWD1);n4wQ)CIPhjhRfA}it$Ye z>7J*|7^ZaoFCx8^M<-|{)qnClYi^8^tt!kG!YdvsWEC$Hr>ine!+a-A8^`ty21?@c zDVnHiHHQE}+)d@rVD6&c6fq7n_kK%?db0hsAoQN4=ASZ3F}l?h->sR&#f!uaO~sum zsck$l;grPM8Ma!_{XBAacHwH?11;g=^9DokBdW}i__E*SHx%ZJ6p#h~ylVB$6d(6iGuEkQ`?~0VOK}k`a_Vq#@^^ z@GqF_;MT zuR6_cx`q;x`Y|Jd+zeMezf_fFN8Rai;!ywxzLmwR)2#Q)$q!H|ERuw>QW6i7j083Q zaUxI3t8CSfpP(m~`}jG~BwUFo-g`Q1r|!0N?dq{Q&p2u#>VtWf0K@exCjwsBf@7(6 z|HXu+J=BpNfn=lEN#thZk@|hCMKa89_5>B)!PA4PZ}(4_fS0_ww!Q_7X;6iqiYnw4 zbguiC&LrISy=F&na9l!E@^P!rI&M0%CEtpirM8%xQ+wOQuzW0KUr>8M7>xexyg`SR z##&ptynjCZNBIt|bCQWAkvh)>#X*CyeMdK3CLHHX?Gu=0dUE*7_<$1?oL|a;Fm8H?ANV4 z8Q#2!%1o?46%g<$`K^N0i&GKq1gwjm)k<0?2>i z>C(1;G|*kV@CdSZ;CK_k*~duABW_la4ekDVUf4ZtLR3(j|0+`uFbdpkm86qb<0j#G znz?4Qn?faPy2-0;he?5Grj_`z>FGWdRS%=Og|I#YM5RUR{k14!X|~YSG100-&F_Oq z!SPThvwJUt5rRu`es#R)PpAG%ThtEF))aCgmhtDe3&DVki)-{uGJB9s57Cd6n%y5( zsHrFm&Qny?#qJlES4Co9mdG+ub7a41Q_`;`&-*#nPdTx4}baP&cu5Se0C((zsJlO89_!X0zVH!JPB=1jVQwl zfqq7yr&kn=dqc&japRDZ<#7lvoZ%#De7Dx@FTtv;G?1^qV-)U#HNO>xZ?waz&wP1* z^WJoa`}Sv0qahNyzr#^b=F6=HLgxx=Z20v_o2Oi9eDg&;JaRck+%uX>>L?34>FHj&i*M`wKL z@%NZGnJp02jweUxw&Z$tBS-K|T`esuxj6-(E&hcb)D+MDGQGpkIP%-gAFCao(J5bw z5FmNm2pc#z2&+RFJBhjVqke^sn zN~rK`)YdE1yQMyNr|b&|qprb1_Yl6%b{Gdj=TST9M?|uwLXmI?1lYMPb(z4ALDKcd z^C*2-(7UYDJQX?JAVF;M1njl!(`Xnd!`glySOOw`87A1zhuKHapYef1{>XgboA0Cd zK}+q-_RG|L2^AFUPG3LoXE|1L>fD!wzLVQr5bndhOqKwQ3U2#UvJCmFla=AKtlDwE z_SPNuWzI=8ggHEsTq0=J9Cwl(AL&(e{ZRrK7(jINKb+IEzuZafTLDF?RwZLO-m0^f zD-@mAI`Nb|Ev6~byk@k(YBMP`wAQCgx~jEm`_))*Q5&j;G-HJ!B}R!&-MDLO`p=)u z1s%+vHgdfS|B0l~IyU;U;0isdy6sno!TF!iEa}o-Ua9pKRlb3KCa8MD9q)X&@!aC` ziQ^G4YGprDbbqcf_}}rdC3o)AoSI))X=)YG*9E?s%b3}~rIct&PzDyS#X*&43ks%l zK_0(JIeD<908*FRW0s8krs{_aKT4#)pYOtIs5+X0SEUA=NiV3588(envrq+@1pN2hJ1Z`nUy)2WPL+|M^yi9am3aurADue>05}T`dtwA3caOs zu|`M)%$5?^jsXmdKaJZ$7D9WLnsjLZ9wU^b9|QG0AiCHADEBCLRv$Wl)9Uc@Cz~2r zqk-XNVhFFqn@<*kw)BknNlr>-&R)OF>u+WhnQA`nHu}Xr%CT#y-oG{eJQCt})OzrtwyZ*pv;`iy(Qdf?aB z^m1vW88;DpO^G*pb!y0Cx{yD1g$o8A8u_c!lDT)ncs6K+(oz?Q#BXZLs|jqZ2RUOm&V<=eTmiJD_>BH#zzd*2I?i~@D?dx&o5S{VANO8{I+NqgaI#mxx6 z9gWI-e5Imr2AGvd6vj7TSo2px8OwGLbF#>27p}!q*hi+_JwSV|!ZQ0xWq&`NV3{z$ z@>M60i$;&a6g@pU@7c~>=^sB(+4M-vtgKk&I=O5pKtW z!H5ypq`r1`P^%YJL@aUHPkDvjM!F={F8GJ=eP%AvqX$rwX^t=6Qhu;?APHVt#2z=x zno`*59^`8}S%(xcrU@n|5sxZc+jC6(&Hm(;EAnLP(NFZ#`D!c?gIrqUVxZ1`M69N9 z!ABpqG#ljjMD_cdOOFJexs)NTY87dnfUltv^5yoj=CZ)(Aw*Lb7$A=DRJWhnS7E+@ zNv4k-ZB@L_JVtJKRqg~~_9P&C&2H|JJI0<3SEEen(xTjQ(}9&OEhEy~2MsL+1L-mB z%=?&5jXB*DsvP<<7w?$~!3L}I;ev&){wk8|-I;&Jn&;jDfN92{e z`%_fSF}votg?mRJLiuPMklW8MJA>&GHZRgd!!3#rPn2kOy=Y44yI1-@#yw-V_?T2p zYV|&(gxY$j?DcEfzNy0aqb}R!hwOA+wJ-53D!a7}$?PaYG)43%ZuqXW#;@buN=BYh z4nG$7N&Ri8oSv^{CQ<8)E@V)^GP6pVNO-EhaGWIw3VCMFrs7s5c@K6kpn#4O=(fmv zV;7noe4w1iWl+5ok=78yOE%s@5?mex6`3E6^b}40m;7 zRP&VUSR6AR|LOfZjwa3U4=!3Tv=siquci`fDt)t0H1sc>wmcfhhkf8ETrDpCP(m_sON{~3Rf)KF(>t0J7(8s1bE~zw`Ut}R3Mhsx(I^C zV86w3iFyjQ#|+tlc3nV6+ZPpWCW3Kk#6&LgyEJB;Nv}7Zn?2n$q-xGt zHe_y)y~GD}p5vuxy6nD~-DbS`n*oW{e>zYIV$T=H6T%MFZB(*xRjROz@v_gLPw6Z= z_VyluEZ3KtKR(cQgFOTxNK=U|nm|ap$8hX!H-T(oRz12nzhm-a-DGrH*mQtvu({|Lr1GtP{&JYf57~bc9jf&Ui|fF|F5r! 
zY*%OOl8+`<&x3Q(zSG^MX#j`uUcGV8U);%M0X)YvK)?$+N(r)~#o^`3exuD!|9}Ck z7l=^Zd4^IOo0UWJ&J%{)U@+n`tJuHfkGei!RYux9zi1dV;8`sPQm;e7pdOa;W0&Ah zvEK!)KyHH0xK2XJp2k~a#a|yh6*|5EldpYFPBmklgYCkyquLs0t$tHA)3c;}>VcBb z!OQz{Dpu47g?zhY3dp=Tp!Bi+bG1uHTE^iM)%|p{_C$?VO{L2@w33-3`%hBRUW@s+n!I4qXF9PA^}$j??e3gvm8`Ha!`&iHIZzS=+(; zh2B{y-F8TRny7oKH5@k);}4fU1wE#dHa8dL6v()f>?~o02G)vnM?P!#LM;~FCbPj9 zZc1Efw4iUQ=>oqa9Db=f#W9%niLP^`R%%sKzR&_jjTEtOa={8vG06D?h$fFdq_;>r zmR|?JZU>QCbe1<_-UK?bE#190vXTm&VNV+Dq@OKQ`oPNwaEF#V?2gSWRZwJOl{pt!bF}M#fNX^6;tgB0#*yknf zld*_D{|TNve7CqX1CN9Gja6%+AD=3U3lVUpXDh#S>_2kkkp9{HUG_h~(-J%bT7Vcd zzEa%*6)wco>N!mE&mkz*4C*u`0z}S>%ex=7Ee$Jjx@s5r5D8f5O^b1@Q$g+my{|hm zLixV6h+-3p#)Z*W8l5xd{?BHPt@pC{V1Oh?njY(o^b87GKvPSoSzm6-p#!W^FGSc( zPV;lN-HBP0>p5Jr{?)wWjq4pj5q4!8$IEt62CdyJ*M;a%Iv+qveky-lDT8RLEprqV zt~a{9q>W@0?f)%lPaB$4Lj)e58 zS(6-*i&Dv2J}4Nca@ZJ~{X6+uoJ<-#bSB>&3OYHiCQ!I*BN~{x%JV^8$y_K!HfaVO*l^^g0Z{Klv`h>~ zD%vw%f7Gd+x3&Kh79aOIPH-QwE?7@GwSt42`BWH=!ww_V6@+qB5h+_?z`qO zLx2bncBoN6c=4Rmn{iDiQ9YNg`g_dE1*UbEpqaPc5BJdP5?KgsRh$Z87_gCp(tXoLZv=Xz zRgOc-sY3%I%`8A%Z47|(!7q)G5KSiRXQ3Os@6Kj6&1Gzsn>wq|N`=mVj#T5PAi7d$ z3`X?@o2)Nmb4%kVaHzsK3+UqU_UvD?N<^CwkzMFLu5t+thai&{fBbdHZ zz_1QG{+jBh+s&XwqN3Ow&o;6?7S}T1rA~g7a<~eJt^4ypso@vv@4rF;>aK z*&x&@(O&V_Rm0ZBL#8?1$=erfZ54xXjKDG5O>O8j<@~?}k`;%B(I#o7B{e`JLm$o6wI`UqB3wdz)c*eK=nhC1{$%5PB zN~2y9M=mzR>}b13de8W&hRZd(Wpb@W%L(L1-1NwO3hq1`({P9;bcm&ay42C;UHBkL zGw7x0b@I61CfuU1U&hZ!H2mH?;J098-gCsJ2*nMQaxVI@m(_3y+ubo<>W~=IrSUKXt2870`G)b7YZeN=kM#kHe zP*zBk*DC-%7Hr9%i+jkCsZ@|uKyR)l`T3}Dg=SV}bj;-FlcwC|5~0kU4ZOImQS~;7 zf{=o!)4pjU``gRt;%;K$TNLJ}{Yp@t`Aw);VkDH|OSD-A4XC#slay?Oes0s4F=WK^+;sS_}0BFPQgK2 zkI50)TN+8Mb<0PQty|6;pMxY%w+=1kxu_v3}W?hb7p zbdK*@%T1qtHa&QIh(o@)B5U*`-1{O^IwpwGF!8s_Iy&zeCnVO9H<@@_eBU(y{$9HQ zyUuqY+r% z4KE|iS7Gtk&C&B8TOyCBZ`NuBk8ch=!Q3K_q+@9<&7gA}FH9twmRC|!dPHX;3w6rUlxh;|gi>(ZQAht`GGk%O8B3x~KMG`*+%r zgL${0puXCr){nvu_4W%!EYuez2b#UQ#w4+Xx(oc(=|6w-ec1c6B8>D@C8v`s=4j z@KYARBrAt5GWJy>4{ST;LJAK*`=5z<2TpU!qs=N`uWES**6A&r*0!^jI;&k!yJzBV z9t}1`!t6k>O7{0Cz;Lm$n`A@<*(+EaUS1%b4_P#?`#N5jW49<%cPcbsEHj;5WRORi z;W3;9)1sXss%>Re%WJpe9{m=kcABF6kK<8bv#M@512;W;a%P|o=h(f-Y@@fd-DWSA z^w+sgV`maK`l1U*LL%7Q4WSRk?FQB(si**58`!4&jJ$ou<3aJy$4Cnjq086(DsQXk@al^6$%^=ImwM5lK1%b5dYdP z-nPl*2?JAAn;C!De>T3UW&|3#3nAhCc!=MD>P%Pj6isJ+u>7&_WZ5^&CCx5_mTw^R zASDm=Hv36k4=ySK#1 zb{Zf4?ew`os~`m-?$4lKtQ^cgr)D_D!x;W@#XqIUYLN^UMgOGg$Gx2RKo$^rkOk`H zAcIv1x|`pN$aHme+af8eXoG5g$+{-H4r2R8j}&eqj5jRAOzSQzg8qHXo;pY+`6zLE z$CzzeBYJe_(Z8(}f~zrpLhPHIDvo=XDG&M=H{B z{WVs_K)(@WuQ#XT%D_?tq=g_}hJ(<;<=@^)#V|YxOA#|tuytg@LQ$+ImzA$I35~i`P)`#y5#z!S^GEz&)|vj-f{^Y`LIWfKelm%`kZxaFR)d=+ZctE7uj~; zw8b>=J?8Zbo|!6g?7{LIy^E>KatKmxVs;1-G`+-_O>jgl68+gW0G2BKP#N6 z+`&G+rkpjW@|IcmC0$Cy0|^2^{s(uTi@9-~^2bS}%#OQ9^;N457jvlxk+Rj<4K~UU z=ce-y*3pdQ#F>G6b2cxfi%V^4qeASn)L0L?a#E}HZ3jxfpBrQ-o0*MN_!a@xRu=-5 z+frBukgXEe1NY>GJhcSAVw!iHM+M(MztMC%+panfs^RPP3)b|Skz&`(54f36YT^45 zJhxk6gKihq#D9;hQ^I(+3zLN;f8=PNn_%;|&XMxZMj6Ce*nzYzu|4WWLo)y#F4I?g zroq-c;>qUp#2oMw9==WeOhRL-w*=RJoV#|ybV97Kxb+4<<6~d{xUUmWeM6-$E?6UG zAK9??zNmp`SpNkwIoHzAA;sog$`X| z$=XSP_U6ENvg@p+@R&IkV3}@Uc7r*(%;5RH1OacJjUT@PhzPIW`TGIpqT%z1=Tpd2 zbHIV7m4g|O2vTR$#ue}Q&Y-n?$ZjE&RCac)CE450knGhkdZ``Cw8+2mDn+1A&x(4z zDp?wPP5CnjKz*JP*~W@6Ouc#3JSPzYBya=LizllR7F3_f5J%4nv@0L;^55l6F-m<= z6OMVp3(&`ITpmZdvh;WL>O}~s$w^y0ocElgs2vGOk4(9>RJ1SQct{N5$TrKLbe%34 zrk-E#3kE~rBl6~!0~$E-=N*>ZirU$N@{-!y+s|bO8Sy7ylir!})r*_8&hZR0A#wK5 zf`F8*%dYYE_N=@N2BG)9Xuy^fUTtrL{(er!K5|CxNqyi@C_|%`yb&!dJpSW@p%Ek{ z@~`@|=ds3d3}|^YhpotL>W-u;BYJcp4({w4A19q8y0*R9;u!EJr(>@gp(&kX8}(+k zfKI5oF|+B#+agUHa{=Sc;x3H~_Y0TbF^5wg%Vl;M===+Xd|!|FJfN0G2g83&YAkfP 
zM6JA^O(dv(0&(UXwiMMBb8#D=QRsrtVg#Q&o(Cw7yK%H)tcX z1?j#fiM#s5p+2l2ZC|g)=H*h8NchBMog9g?cP%2d(<<($ zTLgpRb1q;6BJD0lK_>e^s~@?O-i#rerDDG*gtz2(Sm@EStocB~zHWV>4$Xs~vknje z09VYF4i2WTY*33&;@nr}CsVUvI58WUj(%5rGCAj*@4;U+e1HnSfq3&adezKnKBA9W z`n%1=kTdj`>E>Da#y=iycDyD^1*VNMHgH;*(dQ8y^yTNK4iL)4<#q74# zkw&~@oum%hj52E4x3ES9a!x}7T#y|Y`!_DEoxapp>h%v|02oG53|oAm%Qha>kiq*K z?(I~^o}d4pCi}olmv_%BS3KC8K_hxVF(if4bUTe7rlC+dDo7$wMO0q;gEV965AKG2 zP%szdcqbz1x5n*bjyCjVjW6QRw3)HgA|M+ykec~dn+%(};hzs4?_M`XhyM*zTs2!a zAb-)|dm09kp)AxM%0%$;kER=!7_Rdl)Dc{u>9Z(-)Qunc#L^NrfFO2fA&~suwUR6= z*hxoS=8R5~IJ*^o{=re&b?;DqLG`)MDvm$O zvjdgMWH%6(Ur{j?7Npnf>k=zsj?-Xt)FIicRFgN&25hOfcUOky6}j%%Z$UBsVEvr% z^gKL+-BfpM1Osp6-dxePL}f~;b}9p~{pP#Ov;>B>xj}B{?P5E>*d0(!ty?2WG0CXI zQgKu5PAY>rZcdi-{y5fzM|T+8utpOf3uJL>wfOu6V3)IGmt9mm)f;h;5ME&&AIZ z1WA5Lka=k|8&3b!m4;P)#u)eeAVh$6))Do~P39!n-4|K)kpgbKdsNqyMgud-fmwbH zH40?D@<02XhU{+%?O$Rz4L>}p2dB~IcfwSsz|J=rs*Tzd*~H{!W2r9`;G|cIzSjED zjnrJ6BKc)_Bjz;%>>*)qK>G<7H=y{ArAG!lJOuakTrWk8_L6aDsQ8KF-H$AltFQ)b zFU}|-oX0GKt4sr$5QVU5b1Jdtnf3b300iGe?CAY7P6k>4%Mox?kIPFd!sZtR3}vs3 z9}GxKSOqt|_+fM@e($f-l5jFc*lP&l{5Jfkr4!CJ`@>pI+@&%;xtzv1u`iBx8xUn! z@8-VAEw{GyHB~ir089h5VU3LK^JLM{`Y|SKeGcz+~cQu;7^1alWZ!`-GS5k6?PIaK3rCZFFZeScd7d)W#Y zq>@sX_pMQoR=iPr4d_rR`piyeigrhXItR3iT0P+VW=$cQTvbae76uG~od6Bv9Tf(A zjxeX)9f>j<`K9#1-2`QUc~jy+hUZd_WLwu(HksvnQ;f+_{uAPJ%h{G91UK|K= z45&9iGt#^ThgsNq2%D!ffyoUbS(_-OQ?82a)fH>bgh6X#DcFrtsW&;|O$@-YcaEjC zmY>7)D>k5C(Pz7oF1GN$_66K3UM)tHqUtQ)@5t6ZZ4={E&EmQ{mB`E8p@T?{bf_|C z%N*mp-LD8unRvZgBxJljk=JQZjp_Q*`gcNqmCFq!^x)*fT4`sZ(I$0hM2LxeM6ZVe z_|{1e{Mvo8#=VUq{Wlzb^te6TPzJ>a0mh68Xz6AcZr$LLrVBJsdw-!0!+jJ2oPWqM zZjkQkv`T^Lj~(8IuI5i{?-M9ma%{-B+U$78`5)T`7q@V~#awsg z>VWVn@3HMaMs3jKkL??tGx`!aJ2zni{xtn}ybA-F!7USXDiPdu=~~%jt@=NV7}2J< zDHtZhY0fqKZzSK*hqSYBkFgQG8ISbrO3=f(f#|WS#7>~*FgA=eB4p+=5(Ar4?*?j) zC}&r{+9msL==?bCeDWC7QlLxw4bqdHW2K_wI%mtG@y`?}&V{nq=J5X_*B$Xc8D$`dv4ju)Ww3n5 znf%&_nLnh0B$V8C^Zb{U-|>cWKI!+04BeGF*i^=mRmZ>0AQNB=^giUEKBq@j%cbVD`H> zS!NYJR0d-UI+}P}0c)R`1yk0{5=rMVbABbs8_eK~SSd6Q?O>YD=36fiG*m#rZtWQ_ zt_f@{jvA1-+V`a4yW`~o*&`NV&cp*(yrvzA^M*1z@DxtCHz8AfhIdVJ<wDSOw!7o-%|L`-A8Uk{|PXRA|Frf=N{XTljcm1<2 z7^J&kDu2!e_7&m`mQ36UWuD6UG6wyGxH$F?e`*1~9Duz^C0jB7cS9M@nLCGVS|b1_ z9ZU)sCd9T<97~lY)tB#n4t)%=`~fibgKx3gcA=*Y(X2T`s$ctrVEjc4=nvRjVJ+yL zq4+mS00w$=0DW_yUio@e{dcb8e~T;q8>#vK{8V@fh~^)M8^?1m#(S|}WX{CvC?)JN z{M#K}I>3thWj<^lMVzU%DjeQ4VtY`jSoTwx@>-7JmYfJnJ-|51qQd5}=NJ8mN3m5X zJcC))IB6y257$@7utPN2(wF9N+M%JmzwTPEcx|W$3HmqF=1EUa$$5l`PZyu zGJzfI)uR<-LB~dc>rssN={mD{9jaf1_pR{mknfl9Zn)J*oaF4&2vnE%f%%D!P0i+) zBWtL`6v&!U*s*HTIR+*~i{v zgMb3?h2!Lv`qPB*0fuPuvy_@^O95t{mY`sLQeaY{<7al z69O?`4Ld?-LGOVdYewzZ1c%AGDzlFhD=*x#vR(_A17Dk%C2B<+q9Wpo0XEP3XkWqm zR11*qv^BFEEHD2et5{RdnN2HD?M18R0xq)!m4 zEu=`D&7T`=M~8pR`2~RD&Vv4s9nwd$Q=Nr^*TW0PP44!Vtr-}KHM!gZhBGi&ujGal{&fN&%oJQpH8y72lo} z85yaRuI^+uBUbBiirZyVVusEP&CEzNWVKv+O;py?`PYu%ZoNJ5U7M3RbIGBiM2=Ql z6?%5a>#A)U+o!&qPg|;Z4?mE9S3YQs#{yW8z^%~dJQu@Fc3dLB&V@$P>gDg#!S_wI z_893ZxF?3yQ>w-r%6WJFzW<(#c&P60-4)X-d)FLE$uY6>y<)TJlXOLR@m}!+#&=C- z&SZtu;w0^BjjI-6^IN%5X7cRrhhlYQ!bXk#lY@JTV?QPh-4b7BG-#oqY19!O?7B3| znE@Ytvh?z~&4F3F_Twd^*Kxn*gnr!*R7E>zoJeW8gafuBu5OL##$!{`-W)!ED=dZD zSvCWzE_+3KHt7|AEe+$%PY3Q2P%x4*oexhxyZU~Dk-3L;S(O`*N#vmz9FK>b*`Fn+M2c}E^ZV_s;@*Hb#}h6c=v92FZ@#O6Y>PcRM8Wr{rQhv z&bVSW>9v2SLWV#e@{l@s{-7%QqtyZ#^ljRDf0dD$#(9N4I?BWDha%e!nl6~m%Tj0l_iSwkD^JOst6H|zh1MA!;Wej+wb$UV%^Wt z|9fh&CRl&>$>Xc8rnN7JSa%tyJKK(OQ9S9qP75RRdGW>`DdfIdBcJysz_Dnm_O9`2^^4@@iAG~~=IYw$RM%w| znKF9(s39V<8dCt%pq?xNkR zO8SMI#?=aL;-;&)wUwo|k-lx7h{%dmKk2+z3tT*IP62E4<0n6nE8e<#PfCu6z%9At 
zE5T$<+pN3C4ngiEG-<=}d?bG<|0d$|23#|^F%wJk=~Rb&@2AP!rcMJ`o+ZFPmC)HjK&;J&cLp46lqv)3}&`|^VzcU z=4qRT5zo%_$#9tmgnjjXm59dI$EdHS%2vo(AqfRLLS8XFdn!OlWy9%sq4(e5?81M(;T{x`#u=9*LTH14MbY&=1T5D)G=zKWMXjV-3WFuXY zFHY6NXKD2YK2TB(Og1XAH5XM_QD%&JdkkD0L=@$LWZ$a+jQ@o%9HxJGIb-Rh1Us^X z!FRWJ{db8y=&@tcRVH2oGx57PTsp3e^}v697v3Yj=!vAcFq^JQp=w^T4{>JOq3#u4 zKQ3g{ckYx^I}dY7mpoVE1%1yn_Fzq zc%57uPT`GbvF_a!!e-xcR&9WVR5M83Q+^!hHY2 zzpT4YamQPLmqyuqA^!yTV?f~0kv9=tm zas@o(P{H#Im`pVe9z^FBd))*U;^9LLTFqPAfT7LTrFbM7=e@YqwTvZ|?IoO}uM20$pR zHW@!`9?GjcQ8~bH-atTCicRsb1MtIdN4{8$|A67y@^-O=f15htUemU#-bGI_lOu6+ za!&)u)24yR2WwYqf6lC{g4#G-^%sTwXux(&PL}T3qj$%5F*Q{p>q~J(I#1%EFGqc@ zyb3$1)~Yz|4>pr@3|^+Hbr*Yd^F%#b%>|;27_;=QvPaO#_hU6?%rVmAOiU=@$I9E$ z#|)RY4p*1BQcLwfKC$!ZA?GI(ssrCSS7RuF*cmGoj}UF$8dA%$*S}^rbtI835e>d= zf0BIrr%?!`9Qq_zq9XeG0n-x=pJkN&J4(OZ5yS@K_|BL&XSA4=%f8#&JaK@#Wp!Bw zWMLTVYNrf$I;Cyky=Q)CnGoOO5Tx9(3Kig6H!$8A(irb{!u`~t<^G8N=))K+ZS#`2 z{oRiuz`1nI{$JZe_roj<6XOi4B&_~9CDZayzke=C;0_` z?fX7gIHX2_OmJT+`^7F;x$C5AKBDcFH&p7;vT2IsS^@i924z_mtXL&--( z#i87{y*l@jk4oimLjfbX%%}y^cg4LrtO0kp@&UG!BvwoHzI?Vvs;>NO`13=ugMx2d ziJG144`XJ9A3~osBJ4)T^N-#n>HK``Ef^CPd5RmFo}M1<^vH=nR#g3_cJQMsD@(a* z{or$;5lD=(vf*I|8j}x_L-9)UP1rrlx^Grn?&{1Io-p6lLN#?Ef$l;YJ z&L?t(w#4yO!%6jk2)&=T5)U+aJB10yrd0k<#IYQm2ALK$i* z-V?*>h1~m4c&dFL|1`Y>UBW_HyuV9LJ7c;K@QmuT^L@-*rR7|D1MXw&a*E%{p5+CT zD;v%UJ4P`&J;)j$dTZKU^%`8-Z^+=Bo>}1A7Ww^rDPiIBkY0VEouzLe)t9ID4bfId;?a&~(@s)=)*@q`NbQJVz8S6m zpInS;S8rnBoscVoGTy5$*v~v%@X!#S_p1hU;Zmvc=8XO1;~+h4FzEnJDnak@<3od0 zn!__Bov3w1MpHnMm}C}Odl946(~QM-d`>^7DAes~miJ2ozMY=`Oiw>Y21Zi|nm5r$ zs-FkniQG9~qaZR$-w+ETPCt2bRy`;m>ytjU zhHq0wfV1)hhFAVYX)HaSETjMon0^iHUthF8`G6JI*IJvl;{vQ|RNA+l;E6P;n!qVE7^~E2GS# z_m<8EDRTH5tsX@#~U-x!(wl^JY z%ifn9aah*XdMET+auO5(Yw42NJNTl@DpuNF znoC2*swwHN2d#P#sGWH4r#LiP>-VG!tM(Bw#Ao7}C1DunU7cJfJE8B4|72!%MqLQ= zZX4!NtUnL%*_}g)$eONm`U-8h(Z>kY4a|5Ssx&;*Ri^TjK|eg+-xnJY^LUkxO=i$?N)M`SUysgogkI2gG?lW9kBYMi#i*tAd|iC@DDaW^x>#C>5j~GE@SME<;am zi|&VBVP`3mIT8KtzS;hLAI`qiqMm!oFL2iVad6Vhk29!Ry({bS@OK4trEUBxxFB39N%nMruL zJ0qcQ>JbrA0H?a)%+-pJcj~hLJMw=7S_)G(tjoq8<8{h?c`)7)63_G{@Ffev+L||Z zn3K$~?EK@Cl&NO`SIsv*);XC+PT#7P9_T)NYddYI4NUdT;~%5EJ$wl$C8loFy766N zzqqLLz0yL;U$Jfl@*gHjkezO4!AKkn{(4=x;cToV+|Tx}`bi6O zH_ofSe_cg)p`0K~@v)?al;2tz$Rw*OQ$F?e)Y!2gRzjPH^{Q$1Z!mp2K-nhw#D~w) z<}PIR=P@q)E8H$a*n#`MbAL8@)oW zFo(E3uWh`ty9b%->Iow^bvq#L=&p z*je(Tp!YJhH_p@@Ff69B-qHz*tG(aiZ&aWsetF+J&F?Hp=?Xh6vw1~njPG^wQ-ATK zj&ShsXb8QFdzZ$4(wp8-cs`Xilzviz$3MUnh4LBj!S0L}^uzY$2Y&G_9uVsQJ$G?3 z@kMIC)jU63mjbE=PkU0Q?ufaZczjvqNU#?SZ}9r;bXWPG6?$2vyEg{T8oc%w>dd=E zuzzaQz_T~j+f)VD))b#H_lD}WXVCke($MnFl=?r*h|E#B2YK!vZqH2>nF3@w&B}HtwUVWqp2xM(qANH0gANN~*+Hk2bhr~gIaSt+AcoB1*^r=^syH62_RcG! 
z_I|udqn4VU=0S7w#x${Wnny4Hro7^x+|5Hv)pTIMN_2nOJ&B-k9T@m=OZrail6_oS zhq->IN{0=T-w1oxQ_hL9gFn9io_->dg%}28I>oTXmRQ&(**sZAUS3n(E{%q%Uu44j zeFL??8gy5`GyIf`>6YKZ<7|ZIr`z+c$#q17W>xG@im3H&_KDC8v2OSf#Q4ibbtdI` zJfS!ZUwiYT00{dK8g%F$?|oleIq!#V(`~Bb6g&<$jwgBqWjOZ6|3ceY2i4JZ4Swzo zP6#1baDuyA@DN;rySuylO#%cbxLa_y;2zuwPH+hB?ywi0=l!a-YIke5YQO#mrhEEM zPs{1)bAH_?qlH5~_gh$SftDo*F9L`{OP5Y!ibwMcE%)Z$F$+#qvP2@@ol;T7ab|z3 z6rGw2*`7@1DX^lf1(G^K<+8^2QhyEdixj;!?}go{i!Cf-b{*Xr#dg7Kg@Q2Du>F3F0j-B{#xG+5t2q5>L= zekdM0En@vmx;~v%=98KfS>ENJQdatC1%9%FEa&1gJ>XGzhpc|vMS7?`j}~NT>27R1 zW*~IvzKm$SQYi)Bsbs^CZ;sf%@LCNEDGPF1?pc~?qSj06mq+403)RpHy1J||Z@S&F z2KUCLR!}c%B?BioN?kh}Ca`mA6!u)}Rh+d58R9~O_BG-8M-rXA!)X3;b8LAwGN72r z*s22rz0q)>zgI(j0;IHcZ5dK&Bl)SOdHiA)eOS8K4Q7-X_RgOjcCdGy+Xyr;kH%-Z zKN0Qt36y+4m#q6BdG9>19*#*P0y-)QTO zJu=8J!m=I64{_51K)fEg92vpv2kilK~Rtn)RZ2H#v0}lrzmaE-!p1kiW1y_A9 zeS5Wqfun~!H77ZWhnC|%BSxNeGO1uf`{KDZoDvQ1aLVK~WgfE6OqtsWFu%}MIt~=b zNEAsL#rH6M=)qJ`-}|u9ho>#oJ%THB5|J&FCd)gZ-VXM@XRE!SVsXwIW{3O`?AMZU z0sn4E&~t`$pT$lTNI`nWv{-}FM2Pu{t@EZ6ujF6GgP|}af zE~K2;`aAjkTk3%~eiBn$5i;v1cZ>5e7=jgWqjdRPsuiII+ zGQuBHp3i$Vut!HQyIU?1o68j)yvSE+Cm$^A=wdq>53T%SuambD-5%%RH`6xw4j$a- z&WZ6xN9F2;U`IID+Wy5CB%-Y zzeqF>#29JFOuua#o1YFt$kKDMB#3p(Z)kXt)tEKcVo5U@xv{?xMx7_P>#<=l5y)bF zVKA*^?`gkwHl3v~nc28iyYEOnBjO;p*Eslv%V;BxsZ>W$=HKC5rVUuCh?sGFF{-s` zi#?f(4@UsqX}ODh0=3>1Q$xN^f8<|+&#^p|@k-#HjWz^x9X||45~>W4Z&Js5s9zi_QL`8p~3+M{f7tV(wYk!Dgv~gF$;Nv_UOQ6yBvUcFgbd24{Xi1Ww9a z5r|&ZUf)v^tN^V3WdbLtmPr8$sGD>t3qpetr3@2G%U&bVdH>T&nR6CI_M0lsrEI*G@ zyQ7DksI^~MskUx^XvL+SYNEjjGKa78QjmDOQ&S&q~dKmoRat# z%w=?>e_;C`7J$*zQDkbc14TKA6+XU|kqqguf*37%pDhLSD<$6|MXy<$=BA}VpF*1H zw(wQ#Kx`pPk0mdBR(ln9vdVeT0hv4IZM&yLrmTmtl#8!tp%6q{<*J0tT!5`hF~HP0{{et zQ}Dj!o62dcH92GuV64GizQ^{x>~vHBtPdKggaYD#mWoKUHV3M8ty;?9L9`_~cQck^ zx%7TBbAH(3Sc6C{n_)&%K6NAUGjML}E(y>Zd|yoy7#Nt7ibKS`tC@@Psq*r;exNYY z9`;p1FLhE}LSYPTWP-l3{;;w^KwQB@WkEUDlGlP-O5Wi}KYyJQWBP}?Q|O{iVLcXw zmM*g1hhumbTVZ=lU7R5~s@lNnU9&8GLEoRzmahEV{2+G@S)RPgS1li_uPtaSb?vfC z^CRm@2k39NJ_U)FaHz6p_m1#-gk+G>4iTnx(y{(uGm9%1Tn94g| zj~_Ia2yMaYvzUluZPyFae23`sp)BV$hIPofhTQL&>c6YEY{nJ2bEI!s#73`K58Us> zrhQfP72ABTvuaz4?LdDOYest6Ap%+PF|h_OM})9pU4EfEy#_xZpqIF+ahG*h{N}=-;F)AyoT02 ziusrEMyi#4}xgi+`T zBrOZN+<81*SA2 z6mCI6R96EU$G~)XCHLr*nP70L=7 zV5(E!2}$m0kj%0X7+aYRz5kM5rf|RkyZ7GB$#n!K`Qd*1cXV1M11ZG#eo27^IU#Ii zzi3)_(w*|VMy8_qji6>xpk1*SzB+XF)U{`g)KqVHS8x_rX1J6X8+~txXaY z)lBFKE3xu=X*b5%`s2~s9OU)$#H`C=$He3dul@1r;lq-c{|l9ebLtIWuTx#$SyKEX z|DHq7VCAsLcOB`)_4hf-d`f87pM z8}0_1pMPNAy1a!~uI>+`UI{NwH0?$4FAV3fTleGWUEI*cFAhmr{E0C2KK0mYE2K}z z%%15j62ffsVVF@-yHlk?9PMa7dyS&UrqL`-ylS|S!Gj+6yA5+o<=8apx4cCorxk@O z!7bzBsv0b@vyo91BCU)LHA!4}L4lY)vDbFcXQyKNBW$;=!@^ItH^`R0$mY48`E*-s z;jg8%U8DBlRgm$EbeZh0A(=v545(_fenmcx3DR$(&m`|!uC>VJOHBSI^@~U8NU;xy zwt;h2H7L%jKKRgYC*kL^e_=ptOAA^nfJgPg#!GR(=}7cF2$Lp?XeoSTLr=Kv`LLF5 zjPq7y>bi(DtVdgeCb5%)BH(nnFw!Gw|PXy08z+Ed)9R8-TCjtt;lr3=EYN6 z@*JlF!jbkY)&Tt@LSij|mTxhlbwWU1O-teLRh*|KI_B@!b(=$nuO3etvkGDam~v#B z?r$bz^vl`wIbeA^V2_SjSv9n&wOi!^0O(?PM(;)OPxyqIZ$R*rjncYSMI)y$pIIRU z;8wcq|~_1G+=I2vK_T-p;fuREVQojT9z4-w=R|4gaeU$*{Q=!Dx|*ugee^7CcY zd1)Q|=(@(AfLZeQEqM-itfzdn#|@8zbYI!{c5#>Dpm!S{eD#Y6=|Kkt(qg%!eCW?U$X^SZ7LU*XGc^nS zNIt7!oEjV_!N21WT7F+sajGITG$mCZU$h29l1N&uBVL`LT&Hh^+ZVOGb4U;<7Qh-@ z8l=)MWe*kC#bEtYeBfB>$P`I=eQ`8liiHy651)v+;c?~X9x({+Q=WRngibI(OBuM1 z zyU3@^%b(JwwR>?zO7ldTrIed#K>-g?Q3(NT-Zfl;b&$Ca0lewsHRx_GC}!41^aW^$ zMmwU1(u7d*?daQUC}aT*|7W+p|MM+1O#sI`$WtyrfDSYtfYuiB+ zt3h#F3)}Qr{MWZS*li7bwUs(zT?XYlUHQW)2%j=TfW%T z3j9xjVC=lXSEb|-zF5ii%Q*|o%@M zL1zuHcFBm!u>bWVRA8?4Q7RM29Q;2{=Ud7iR1?c#%IklFx0+8iW5%AVZ^i+RtHK>k 
zkOuOv2Y3kmhOZXzNJIe{3d3&my}3C2E&*){=!UdO7^chVGHWlG@;=z3PT%W`!(=sS zC}s*x>bfg)*mN}IG)KDX9BFN?`r<(I6c1=hJthe}UMwuK-!3)4d>;a}cH{1lAs%k;V z$cJ(KadGL&f4py*(&~Zo{2TLx50ZD6`;D!SLDP+dkD9VDa-}cM(ze1|iO8=Ns&j)ir1i_*!@6mP+68BR92!dszjW77M=Jy3mKNm^=>x zE6nchVS)L}wR58{dumJIHln2pWLKQ0{Cbwu%Zp++2k%$39TDvKz-~94p576AU3-jA zscmrv=6fUFo>LV_c0C>@5LvH@TIACwXc^jd!u-FXHoLB#nFs^Ti+tXj_Fw;Ldi=%% zwI2MV80@S(lpM<9v0PKbi=5p~#z9wQ9ECxRdT9_kvN{G~z&wr)6S$JMA z6|`VGd+|NTfjTjs>|ap%{rVq?1?`egUX!5#jPU;;&lf*|=DB1hY`5oZ&Y7Mo3Ut?k zphs3IFT@txT0y#QS*z4%z|d{N(5X@_i3| zIS7D6S@m7>M(ZcP>j8A6yv1RAvpfD(!=P>r!(zy`m<(5XfF+f`^~(=T4uxovNsIFA z^~Np?%SjnjV8bxej*Ax`$^TbSeYdr8n?|w5Wav6(&vND#mh?hEF5_R#+_3b!jX&2| z)JBah*Ec+#v+uUnHhM_0@oha$XL@xQ{OOl%>7ef^zy{je^)wnO)nfCgE6&< z7Ha;9Pv<-2?{A-F>F|!Oe0lW!lU>qJHRxO5f2> zHuP|nGO*=kwj&`@8!ONuVE{Md8h@H{hLKkD`R+QhMFyj-K+oLWo12Y2Kc8%23qu|6 zkPvFZdqav8)$t z;2d=_1_HS^YFcOiLutv!xmIYGF%;Yb3D=4c=pxQC7sCgB)Kfy zEf@;ao|}q=9x-1?9;*g(6?bOv7?`|;N*9#7@{#x01lE+GJ)UaLgPLX+5U*1atekAj zM1ix9=s>^+u(jCK3tHi+Gmy-rqy{C3E+F(Q7}Tbrzd-#f-343l9{hWQ!xt9{YwPFz zUW?2SVyU%XtdiG~0Up2E&+3+_7*?oJVO-EtzHq!Qz*Xkou_H<;LS|E*-12oE^7@o| zKZFX^F%a@J+}2TNAZe2Gy?K4;|ENzohK;GZ%u9uH&B%I(pJ3QyfDjFW%jx;jl zLDE}F!>V2DXCD4e+71~J>Zj_4!*lnH80|<7GRPG zwt3AcAX!#0DBg+R_4YX)BF9wG+t#`xNzVB=q(-7tik&^6ZZ0jpGr!g1mE+D69=YEB z7=H6}@yN^j>?TABxb$)8d(3Icc#N1RvUf~P7n^?UNL+C3a=y9Iwzs_3y*rf(e472t z90T1mYsMuXCP^$kRMtTKN(HXM^uJ-zWep^4)_WiwKu*1F3si9KKitIaV;m>m%H}u z{oY+dR+G%#!Tb9P*2YWz_jX)5bo|%t=dMVgPx{-tZT_aEEtm*i#~hf#yLQNFvJCH@ zCesXUpmnaou;;4(LG1EMaLEU3mb5T`TBeZp7>}Kb*48m1^@`w4>rzKe%aQK&8s9e# zSpbi=WN!DNbU3nll*Sg>IS|-EizQRVHzth>w$_7T`g!sjmsa<0l8zH{#y37TM*FoW z?J1vZc42^MRFykLOyEjpNBl5i(c|zc$MRbVZpircUEcK_rI@QPTF~FyyffMGH!brc zOfB55(-DL>78@xnr=wN=tWY|F7V-Hrnwy!!SgtjsWYt$!q$VA>XzoSzxS50}bX&gO zB+_f;juL#>eF3I{thTq7mr&}gL>zKCe~r~s!4~8boWRRRE3Vyz4{5NKpz6I?Ood>InQd6N7 zL-=9;fTip8L`Q=$HEBXr(SKg%jwwk}zC&{_3c2aE>BHq*+|01c>$a9}?!ZxJ(crO~ zwJFVK*Ye)9*KOML0n@c1*4i`UuF)d!&<@)z-d|xOJAECKPOa;~`zig@x+bE6AUA4z zMf6*5k|8#;*vuGmRy6PlR%-Yqq9u$YihDlt2k1fiJ-^QbET#98p*9mw&hT>PC`)g= zRvY`8`WaaFMM1hl!WbKwXpUTP$85mNU~Alz`QFRf8fv|UdMt@%-Q{r2(kj;X zBb#-!7YO_)ekr6|Owzk2!N=^yAgjHy`dMAg)tkHs8A1V8$CGj$BlR|sk#@tJtzId1 zkFAOk_%|wh`* zG?KP3R?*>Rj=NF=YMV5ywS$JJ;q#Q~-A8=w~BEWQHgQ_fq9i*C;b z;v&6dtSs@EA?EGw9F~X}|GuVmL2H%pDx{xLmHkIp!uAIt;CvQ?uXWWZB) z&n7}<#FphNkkwX|FwULQJeM&=E;cf=4DzeMNRNCoi@k^7S2T{Fkq6E@V;IGMTKuvq z{4ou70-TVE+P@G|9h1UtBm8oL(Ps^6WRm{2ty|Nvc9X)GZ?-LH#ttIWzze=1xnJFV zFZLsFlbt}H2BqehCj$s#ec;s%G!U~;__wt{` z@v{bXtRv6*oSyK52Z#KreD+c=uY`vi_Zxy|B~>ZhRo*9NL2s?j#~NLNwids~JqxCjrk#Z>PDdL?NS={8p!&KDDJFJ;B%vkGp2U;w02$uWC zKu34r{6vBj9AW`n?p9WDH<1%|Thh{OKsH)OkTKPCRSMWzdwO+cZ3_keDykW{9GtBV~6XX zU6{~?Xvv(#7KJd*dh}&j@pGF$QzoY#L?=vA4KZ0<&yUK2Lbn*x3rPYVNdXWGLAmj^ zQHp(|Jc{7G6e#p7OinTKAlg|N5;Eb{6p(@OgPK;!YN7c#;xVlz$xX0XaxmoCz zh|D720r)AR1_J`=+y74}Ad}v#XSBj?6gy*?^P~Jg1wUW4CX!K7=3yhk<@WzruY4tjE3+l9J zI5?IlivrJ#z7VG6QjE(>0SWR+NRsY}pS?6P`Vb=>?m#pdqAcX+%Wg2yxOJVkg+(}! z{jmYMQPNV0)QXYg!7cj}d@&=&TAzYlaLF($DWrj)O&S4@vgq@#11U2Km1Ko&g1>9W zcp>gLlKgMirO@%h^nb)o0Ib$KrN!Z)gmORH5gH%F%h|ZdZTn&Um8uFXj4~xB9b54? 
zU~BwHWF!>|s>oS{yz3&hm4Pt$vmPCv3-bk(H_ z{X1L&Z9`u0nN!cz5(k=~iUGY3qvlzy<>7U6%;_c+#8rVanWo2VTeYLvupzOu6-p?qS^oS`|N$MDM60uHRhXgZ&zYa)sZIz5z__jJOF)8|ExG7`USXYx_ zM~ulMKl5-gY2QI+qq>{Q+CR}T6v}6JC>CgKcg;4`SS?ad;g4u2_QBe52yn)q%Mf$Mh#qp1yRI*n~h1l|mw3EL7tLBf7256=X}2k!?{NHL0d zHARPQ-C`NFN&^#mTO2|LZDqGf1fusM)hCoUrqQ(=pzOu9m%v!ol_mGC;P3q51X}p> z>|O0JG?y*dQ#$M|b&C)>jvkJ#mmg!aHo$KqU{X&9MAGb6sotYg7-@0JzJnQsVpfKZ zAfvcCwJHWlicg^ z!+E}wqLv9>6D{;5&t|vPdsE$WHhhew7N?Wa@xeorQi6+ep^?q=9;}G737`9us>>~@ zR>CG9wUi5i6nD$JoyWTkWdpk(cRp`LcWy!qucV+WZ^0fSiCZZv0=*72zJ8S88%6?^ z*VeSq^*~Q)oek2mbvSVI(o~{JG;=g?w@af$j?=QEQ>|B`5WNBeN;RB=T$xSzK8Oce z2Ge$A$pCofq?%6dSv7->4k7NA>lvDWdS%dL1A}FnNaP7CrsPN>jrvIKB-ey#Dh{RM zhpwO1(yky3RfAUoRy%3AFj~4@N$F;=9>mlcyx{zDgk`PH$iiPFkYNm$!VL9=$^nqJ zp9cB`#e>}eI9b_df+X&9rMvz#!WO1lwZkuR{?)+3@4s+ij~EF@;}0IYQySJmZK^p& zj0=wsgT5iV4Qsm8tv|rx>`ISURRzo=tOK9x{l~kYy5LUE0#)acO|jYjRs%ziP1mHT zV8Zuq=Ow}cQM_1ueXG8S(!jGmVZi|MaS%YA5isBzd{j^**qyW?0^PBO04-)>0BUL! zxUS8_f8>y_byEdNPnf(k%I(-~|JO;NNsQW0SyyDe5{n=Sco5XCDeTF~fr=w(+@tbZ zuvt*cTKo)Ik%Sqlf&AxO0Ke~gHEkwtC9wwd;DkULlLf(xeuJ>vFh2ZdGZA3+hv+oy z^qpvTBv)}FEwO4KP{|xIFsdpp@5hvU%)gmqq|OzIv|rc5&AlDbsM@rxzKV;c+N2dm z;OVO@pP=^#YwGeZC!8jTs_yaLG=fa~CI0KOowE}k?~Pf{H3xknsYkV>tD~{V9DF_)YMlOc z*NtHKhy$G;j1kA|5JR;W(DjENIhG$NcQ9}X>9}?OC){^--`%wZzKaIfBXf25hC?i< z1E8t^skO5t#sroLIbwB6C^klcRkZ*kH&SO=AGi>G9Jo`8vVa8&#i93p1b;Ai+nMf+ zk!O<#&doI}46BE|4@9pU+(#PMWW^5{eu+yCx+PN`>MaKfD5O zW%lD0ud^!_)B{+-`*B-V@mk1lN^DAtSWOD)!CY1@!vjivp&U^-&Y}Q2v~S(JyizO) zN@lI5fWji(Zb1)y4UhK|)y;-dUF7#2WAz;>nESU&lsRQugRdDNv8E=btUVaMt824^ zdwb(&g!sH32VIY6Csk!Tc_EjN+YP=c&Y%k~@8!qEWJ?UZv_{6}ADnSVu6R&A45lH( z5G4;(`ucH>!dTA@>*yD$_qR{S9K?(@NEuteG-X4}!W&QuQp40f=4pFWB%yIowgo(> zxRe-D+MD!pLO$n&Y$0IQ!r~?$C##Z^J*vWEf>@(&N(3O7rMiI?LDKvA6|m@b5VWUf zE>=#J0$&?BWY_%wf;*9083&KAyV0Tg6J8zFa*-K%gg^9<4jYit*i?H*9~S6kkefce zXsBM)x@pAsQZ&IEiR+#ceFn}Mc5_RNw%gW~q6=R55#b2Bb*wYlQc>NCWb36zA16Pg zG#OD*pjpgR&AcNjl11V77+S-6Cnsx}mk(qzf?9>2fA9J{abeRjO*V#9%FxgS=JhUg zk1tT%85IhuEROh-hzX_G3tyi#@4YTwv6X^3;=uSag>J)VSacU~?NROZjUB=+@Gg-7 z`sE@;4-!n{$=oocfB&{5vq7%VI7}x?Wg;X}y;ZariP2iIt;X}WD#m+Qt32WmL{Sw} zaueAfNuZB;uPPBT;tp%z%1z=v>u)!Qv!xnvg8p_$<88#aLVd7{0`3|?x64q(j+7*X z)QHLlx@TWBh%R6O7nEIFgW*CT!C3qrd^MA}E3mfNatpY3W%+tfa&4>SP)-AXI#G32 z!3nqPE53KcH?#Be^II)YGjQ7K7oS=Ocj(!!Z^lMG$9|^Xd34!sPxn#Hc`3?RwRFd* zkMyLf*^i%@$O$VgqqJyb9TVxQO$7(uJor~&x)^4lM^$nj@I%Vs- z{`ZkDv<4ADDO?wml1`;el&_#)P_tt}YnQc>^x?efRO7bFkU=RMsnfk1zPuGik8 zm>c4j3&;t2#5pXqSA-R^TJ@~Tj18acTLS95=A+?#^)Pa+%fZF!e{!iyq)s(RAo|cV zMlvHtdgIp|lURp)psKJF6CAn)m*L^4-Ypc=qB<5+hGyDpJdwuNY|2Em#@T{O(@c0% zp%&k$^;9$hpu{ri#;*^2c!RFkf(zJ_R%qJMfWt9c1p3;(Dn7WEfp$bF&4f%=7ziPN z9i>s|HIhq>W|qcj2`eFkAm8-FhX=+)xrU7C-CmAX>Dw7*ht__Q&`HyKs<*7C^Rs{u-oHZ z%;r5ra|&Qd9tzZ-`T|!*MfInG4cDWkMS=2luC0lu&V5C@V}Cue{pPL`^l%qR1t3p= zkZrj6)cn+K2N(HW+C|efNWRqDe24T9<^)Pqx_Wz?cWNoVeRO*^lj&h%on^k5e>71! 
zY28Ny@t(CcTAN;-X12s}Lz+~3OI{1|jaJ&o5>c63@as&Ri>kP^D=O(J=uSZgmIB0VQtC90{*Y09Zkca<$HA(Py> zIUhAFkOQ}AOPa53I?q@##O0k?s#Kcv`Q#=W`>MJ9vI1Y5Dmrfx|$(c?i=J)(rs1Sjp~1{;S9xE&6l>1>Lu2 z$1qBTuY!|OlKN>xnGUxyi>QC#Y4-Fcb!^NhZ+uBoK~?)*7DwYpQazauM^Y^t-5!%V z#T>^jZdR%%LYFrN*C%S>^pzry>m1qn<1yxm|Mz!0bh~KEMI%9-h$3j;a6eu1(tiwv zL7|UKAFvQcBBqu8XyE9Ws8>)L`=vsjpJr&Z((zi59ka?bhrI`-{ET6J)L_FJTLYG9 z-bMCJryRPYyGJ`o8?>dRq-BhEniw4-YtNwPuZy!-n+YSri)4)6qxE`b7$aW`dYA}L z{YX>XuDC@eC8r6upqt$%QtwfGTH*_zAD$Wp+aIL*-nB6?w6bolTsNGldOI96HNJK_ zJ(cBaO42P|2@<$QVeakg=&A_V?7Qr%SA{Z>eAncbstz7O)n7avj&8<}%NPkNSj3;< zgmMlnSE@+Nd)s7Tpa8`G_~U@f=Q!(|y1Jlj2s$^^dY4QA@yPUE(4jBJonQ{_5(;<= zr+Xj@J1WK(GH6yIyN(yX`;|rlN0cu=Sj3X9a%vkR-GcQMXbk<2o_RNuLO`KI#29)h z2bqK1SD#5awl29If|b7Ra?z!NA&rr$!Vhd8ar)+tYZACwKgtMO73Q#eQ22B2&J*Jm zy)i+n_h_*WjeN^f$^51U#xB@K0*y2QF{ZAgV@{dDDh88-d+1CmdKg^Ia-h|^Mab$)4UFVDlHx5{gzS-0I zt|KKXTWR2H0P~4{vm-PYV|Qvh63xY`5?>Ow=s`FhST;2MQKrDU>%GzVV(>3-!)1p+ zZ0G>#!1W)YQVx-oR<43$dMOdvyR*5TQk}9_xup^KQ%iRNBfJ_sNt{OGW>yxsQPQ%cIYm0yhpHOpUp7`8{qv!>diUwnS)& zukU)M33$4VL(LFMo~xw~<__eoAC@<0E83C51A!fDhMYmXmdKz;e7K!)KA zI|?m3rH(?}#Ao$ha`@6>`#yQSB44AN9T4BAl^eJGyuRnqMxcY2EJa6|htJXH(*HLk zoZe|K$kjU;}bvl~8J)VSg1wLW_07rd9V6Rd}WKB^5D992>y)2al> z2qPb!Ow+|d%Dy^Q(Fnw-VE&`FBK)d-v87CStw709Evyn#JVwY3Ea{B?LRwFuC^oh` z0A=)Rca|`_q+DJ`!vaBA+YV86w~4pj1FWQB;Y8K9P8Z2@96tw%csSKJBUhBQM`-EE zO;g|Ry%g?1O%41%{@HS{xUnX<9UEH1DEpInWa_KuY3G`LM|!RJfyVYXUhN z1Q0oG)7&qnqhBX?A?SA?EoQA4I?C zu8$XnZEGBP+j#k)i^|95c>ilUzI1=cy`{xz>?Pqv%fsvbHfWccA^tee0mT%K+)G`~ zID4?*OG~^t-762!+8&%XivbnUPj|z%bm(nd-j&OXBEM=y4OVQ@rLxXP-=Mudg>MKQ zn(t_~){jLwkeLb^R1`JHJTMJ8_0se9KgA}P!=+dK_2bKjtR#DXHcgi;yBAhi_?oN5 zFQuETUmlF){63e{J{U*p&haJGq50Hw;JX<@-m|>&vzfAc3pUTF-M5G$h4}fMK+iX;3d6e{#SPQEAyC&(i1bSM-X-)m06v@2gw5dXY%8mu&AJN z0NaF%^7zv7oLkY_Yot|xRQLBT8EZRU>#&A6Y5O4DOL7j<0EIb1-=knD+QbF(yUBk{ z?E5g~W*c5UP}{IcoaJ%ufwIgJ$;T(dKQRP~Hq@0x9X?`jZIn4?9gLx_$30;buiNNp zAke?i{{;W#USR%%t$_Zey99w}%g_JfmhyF&qfw5m!>$T)FM;P@0o~~%61V&Z|MCOU z{+K|`$y>bt;0v$Y@UYtq>_=SC+xQZIr`?rML=B3S&JBSK;h-ypX96P>z&hN|LUOav z6g)4p#cqMFDW9zSi`f9y<)R)`>8IwVdSYd|T{X$W!eCHMqzT_(|OM9$^o)r3lO0 z=!)Myr4oG<(|$^Xlyr{GG<$nCS*C$>;sK84#fXEMD!;eHPl;_R277m;-nymUaBf53 z-c5+Cr={n?jK8qnQ!0c$+gD`AmnaBWh7$vf5$twWcnD{meV+LmUh$r!Yb|JISZtuV|&A6)ps}K3G0e@!2u<0 zo`Rj{@gGU7B|4DPKWGgw!%{4@HAM?y5`0RA?C<+R>%Rtzsb#V1DFT2DK?asQhOFQZ zo*T9e75TsV-E+F&liY-cf1B(7}!LR#UtuTyu+|w()i(Gcub2s zH%EN0?PA;g;%vSBn3fYWFIa>M6EAG|<)-B2(!l~e1ex9A`yRR%lS1kWL(fRwFz^p(Le&DsTGHHOk9$382 zQXb?4H)rxy8T-NK*g-KL2WP94li5R1fn?Aock{N9d8rW%Jk!`KW~-!KxOrYxVwC#_ zy1f57S0v0_d3M5j2c*hfE_4dJ_>!>B1--282icDaZlYgZ6?J+|rkyTOGGIM-Zs^=q z{{H7;tlm&=t*ZlDGyzd~&etUUxAGPONRFXa*dVK5su=W*;ETnbd>%E9g`bPFWz_RF zrR7k^zpj+s?kgQ@x?9{I{TmoixE$V%h;>^ffr?`xKDiDc=roZ)r*l zw_bj;eX0)Oit4m6Qk@9k{h(&$#?@E)Z$X$~N0YH=J16?8wxA! zk_dk|!Z6M4h-96un|NvLZ%L6^N04i+e%lyNev!Q>Qno`@HI_e(3rfMfH9xrTMmav{ zi)-dVTplh(Yr7c|2C8y~;ZZ8pU2!stJ*G(@Lxmqg$5i>B$HdV`OhK$KiZRveVhIe! 
zF=eq-ROBnQ3Dq5svwP|KfYmgSRb{z#civ03rc^wITQcx`!!8a6p5n*$5 z!OOcd3gnEp#1f@+5Lg#_4_-!-`rGGuw+&SZNIni#Dy&8##%%iZCbt|>{}&dh6d$Ct zYoXmV73SBO$HO9SN)aro|xMuGxg~f0%B>T-s#EjHVheUG+;oCe{sk=wAnfyab3Ld+XWvO{2Wx<^<{iByATk(1=Fd~1 z^yW01q6X-{Oi5cjH1T+J(s)@&xRq+-?yO;i>e*2tXEwtK`<>sU6E4XG3#ovD0488H zywHy%+ya?cUc2(`idY>Y$f~=CsPZrztJ+3Ya+f_iW)>#}eK@%3oeYj}j;ocOi!U#~ z-cg8tnKl}jD^y?!Dn!~)B>``fmp1!gF23HK3mj32PhJ+C)4s-kEDQQ2!FAF6NFv6c z#01eJt1a4J#f7OXgdF86cX@HMF%cZ@j~t?xV(yLT_HEAk{@Pa)mzszT8Y$Y*LoQSf z&N+|G;2<=uXkBbrDVNxegCh!fFT0O1iL=_DXr7xz53bX79mLYDzeZR-Gkd)AY7G^+ z*;@Pk;C*U^&X=7?xvwLJlgjQ{byYmTqLdy_M2c(ZzH}-C7!Hjuj^nezKFxeBmtZqx zKJ{r_LEG;z;}t8%RtmwenpfD^)&5X~MxAv-e52qoannV55$mrRl1bkid!!mY>nS<( zVjOtgVX@*UFCnyQvnkwVD)XMSLYohf=o!sl3C1+UL(_~~3!OAPYnPl=$7n?XN?Q&v z)vWC<6W``uantfJEvdT3nHkexDGu+8-2h(=CGLqG3|bAdA6G|WPn!#_(vUOG;-L0~ zlO{bKn&xi3 zpuptOy>hDYH*GKH3aDYfzxqnY5lu^GZwd4KU8yPaXtO)(YPGn|c0L@(!%@?Jo20F) zsVTDQ*2!GIjn%dx*<+T3)4)jrVNr<-rNpErzpL$faGT#HWtsaC-#W=AtrH`H#V9Y@ zg;DfglZ(fd?&{AeMNuk2~XK1Bz$Z*ls zka^~LvUX~@Q-*~S{lFPf0||NpS=RL^E)5@VIpP7Db3wmql5jF3mP#Ce>|Nnh=tJmM z0~-FDc;ZUQ7@0#D0}is>j`eqI5{}N%# zAcNP&f^kr3tG@f{>c@*x6}!p{7b{-rM{IP(gLeDq6|#p?==+w(-q~)36Vy%uoC*MI zVP>6bQqcYjICJ%3+iXvIQ?u^8@c^ENzr9}P0g|QDq3zdoEx+D&XNyPv)cRzCxU9x& zUYt|gje@8bFd|s#czivVdnWVku>e$rm5ShCUPE+pLphZ~H^3L3AfkR3sy9Uqk>5Px zaRNtH_0XfWHK{#PB^PxGlgZJB4a7zaU1d{s35o2%nqpOxLvF=x-=E&kH)7C#Bnn1Z z#Czx;ji9C9u)nW}RO8mxW=oFf{JUVd^XKOq&rX7kwXCtd*KOFsNMST_h=1h~pgT6& z;w}UTAMu*P;OY!J7;?5Pe{)FzJai+RV>N8TUvlC!oN@t$*ut8cVDdj({l^r#5=gSS z#z-@jxQzs2@luFT-;6kpNExgiP?4hAS%tc{5Pt@%37e= z(K523%8r#2hXO|_q+7OU-O zQYZGLz4r(ebauuZ>lEO``5~7khYBS~0RFE>fbH5L2C&`>sOMml02;~*oA!6Fb88h~ zO~1AJ@nwZ)6jxZM99EO|B1Cg?X0@-}I{z#@CDuL+utF@Z1)#Tt)Mb}ED!b<8ZM#y* z&P|v`_#gV9G>QT;jK`9HFOB=zwA1d{erD5F9X1H9xC$eAkp;$EQN#5m0XqJM=+-80 zX(qo1X>^}O@ir?^l&zBi$m#~XjgON|>P%wqh_q@5$zKihIF8Fk;GF# zk~Kq1_wc%-<2x0jJK4%ph-)#Q8`d#k2$5BY40ddI%N5N5DTy~YQ#~cV_s`03_F}W&$?K;TZg@f*N z5j5#i30&v<^@A$Dw74*TwtPo~YbJqD=V@x@9*pN|Y)^zv7TFL}PK40V*J?>9uRXzU zg>s~i&j5BjO>cmWZuoW;6W~HRXuPe1WoR`5$e*zzbY9tTB^`Sb^xH?6Sf6&8l@jAQ zXg2u0f&9a27=VgL**YtvU z?C!nlpK5b~vTpZmk37yxr@!15J#C?-f#Hv1--d59^dH8K?EMKk{IuD3eshQzE#tiS zd)w9Fl1!kvG;-18T}l`VQ?mf~_F$rm9s6~=A=zVrqW#$|cPWVQQ?_`%X&J%bXFA$~ zQ_-c#JQpm9vACNayX?sigV}HM8jPlT9H_42erj?&iuUS^Ca;W8s+=pR!1u~|!oMj0 z1}@;!-+t}x!*(L??Rnm9CR^|IOB=`Y5u&g0_l@a05r&N(?75`v${N=XXxb*)#pso( z!QjTkDAXC-OSQ2K@~|{2%G4w3x2KfDexElUO#4-Ez7$$E3!;}G^7bJjWM_U)kAt^H zIEg}rR>yO96Zzwtw?_6F(!|H|tc@V-kEh2&_PlN;8ws>ex4TBJ0?ldR09{<3@zN=> z3{bMNlrXH}8fa*{MRJY#%L%fCWrYTh_2e*qn}6KTO+!z2A`P63^GUfY=p*a{H8Rau zj~U5~DtH3Qg&yFeS{zkFQXR1~r&v`lm#>$-rW!`PpZ%7C-XtgT15_7z+uWRs0WPq9vRuPJ-{$q*w7%i_`A0_-4P|8 zPjVS&9d+!x{tS?KQ`Us)xGuArJS+sv?8dt7%BcKCGOM@t17~vpK3k(FpidRox6bV? 
zUjtesIcUBo?0r-H!vR73uJ>suvtsIyH5{-WP4FA9eSsU6AP-i{PY{*e7eC(2kIgey zNOJAz4GaS33B>F*nla?aX~^^pGN{DTG&(YM99hlKYPdMF!5Jp{V>VAb@3O5P6YoHY zcPv9vR#E+kp#(q-g|X~y3Mn8R7fZ`&CO#)7us*lxhY;gBju~MzCy(S?4pSV?W`SO< zgHqCX`EHIl5#J`7U}8UO{5ht(h#)uKaf2Y)-&Zd2vTLK6ZSS@Z;ATCvd)@RVY=;Ek zpoB{4bJuJ@?1*^BjNi1dV9_g4g$Zk&Drs7E&x$F>>V1kSKGtF)0{gxVQGZ`5nRB5F zmg(?=pu%dP%)+g#tn6OZ$*Nmm?Edj{Zq73%N1hkC050=+)QQokw^W~4%Lf`2*T6pM z?^v@vBxfer-&|FT4&~uJi}L`D7zY-b7OMrx!2r#NpWdMfRyrKa-NWuO!1qYTbx<># zK@Uc|rlerH?jcp&(5O+Iq!`1}q0YTxl}C|v1Tk>R*)CeQW_4w1^40tTEPaTo{a+LO z@%GZ2&?sU@0^1BJ>10$dyD5L9r%(TcL|&%}c2Td@+LsHIh-1VQHm!Q|M3 zg%i8cY&nz+OM5zrU55dAys!yGxDSLAr#k>YvchIyrPZURwm<@QsDoyu<>@W-+1+6p z%^D7E&RrI7>eR!~H4v%ZRwX_NzB#lJgr1T{X7Qdt3EWWfy-xP{R5aEGZ7=ZD6d1B* zCC5u{4g*1gH*cS^qCD5_s_H!uUT`&A=b5`BK6EqJb%#=sEXsBhlIPoms=UiSD<_hH zm%!eEoa?g|T32#B1#TnJsq`OaYEEpMtVFNFmFj)(R1FI+xtU?AsTYo^U#y8FvC(6$ zmdEeghwP?ezrGBQbzf9-X6~uib*W z&JbFj!Th9Z)aM%n2g#Jsx||p;)RQHjjT_=-J}jgKHH&J(Oi|~PATCLl7_6_?7lD^U zq|UzFRf+&Q0=plnpe-*M0<(`ZL%;-GZ$o!mzO7pl+ zAI(p6yNw7)!Fe;kRMSDXtc$I=D@IDjuxx|^X=s@dsbP=C?EEaFLB4Syb+^jMxM6M$ zC$uHAq$z{`wlro%H!EE zultN{Mc4zb`E6NeX?v>Xw4zgDUjcF#K?PxEGFu0)jU+P`DFtySe;j?>rTrtmLR;3% zgc>G+vPRf-2%oK(b@%6a>AeGd_Iz-XiIMRb$?ABso5KK?_ifDxyuPWYqa!yh30UIx zvym9UE<9s8GB1fe!z9vON9kin0T1Hlt7nM|mbSy*aXYxn6JjR62JSaxA?jH&t}h-E zNEcA8gZCAhu+24A0EkH4(69G0HIG$+kdSbOa3dx8w|S~pf4ZWH;v9!t@~@^65RO8^ zEyC$M3=`wgLH|A+OZu`f2{SELO~K)|hA7Fbb7&G*tN*?4I%>pVhp;CD!Vtxd#bG0cmzuBbMTn9|4>KT~_FG&Q>Og_$d+J-xS4yCH%BnbCEZ1Wsc*pe1}M zEoG5K%=vYu)*s4$wE@LOy-Z!3x$X4$%4>W(+g*ZZ&v-q>_OKxu4}1XsP4FH4V{_;(J`VPoZ6<1-bW!Pxr0- zk{_tCX0K1(ZT-%|OP0VC)w{DBY8r&8bs!JJ#M4Wvh)lYnEVain z2__HLEcC@KQUcgyV?&l0yj$Ww`lDY1N?l7jv6x*8Jp~b$nVr@pcw=2UYDW}A^+~FU z{%CL~|I0=cg_1{zWGQl8Dxp^nTKXF+=Qr8dI-bjNP_YP0h|dCdL+~=@1Tpwls(!?? zoMltyy{eap^5}u{f|3ib3@KETET9*6H3bjjo2|_pVsj0kmTujC$2>m4{}7oo7~qJ# zaJcSt3-VP)>%?FzHE-8Z41Y`hN-`k_dsTQV zQa28!hZ&6vl-nmR_ZqBF9crO>qgOOMHDDEli08&=jZu85;9Ihg8?#eJIsznqj}f<@ z{5a?mgg^dyf&uK0ux4pNXX-@mkRqzoeQj)GBX+m2eriD)Kx4aFK19MpoAp`qr^eul z=FgNi+=|Vfs}O)<$Oz6G5vs*p1ax%52JH}Q8M9_g+i&56%QuDD;tyz4~H%-TH_^okt}n_c>W~?(!!Y28wWf-GceG)z#xYw6gQOjW(E$ z*+?LM5gN#*pZj}ihhZHTxINR@>Un`M?JK~j)dp=|8|w3tJyQ1RZ8*Gs6Y0rt(Qyjk z)>Kp267hwGMt5|5jQO!km;z_W*pu)nOXOD7{&u{hWKYNRUqZ7x4x_t6^whXgkniW2 z(cteVn$(HVcS#kNyIy}YiMI@{qJ8wMy(Xkjb#b12}@#GGbl&*Xcw zrguH)qtmAmlSiVMMfq4)5y}~G2(BPCy(U>HaHp?5)7=adp1Y5910q^cnwmY=Rw2>F zuXI{6qU97+$j25k@F!Ol?7;Gf`t*&j7WnlD*$I*}3&MbLMgEBg*ROzM;SpUpCe7Oc zny?SmjR-_#@40Mb6`C|>w=_*elTrWWt9Uhl&T8d!+T&w#yE^1!Ka6TA`VE;YsI82tb&)XL#;(lLkURSe74 zA?gpPAE}Qeqc_BMsHAnIt$kumFlcS4rmm@)L7(NYVL3X@*u+NL$8eskF|79oa5IyF zZNw0Bt9pLsC_G){%CSWeS!xjTnT+q37f9txX>Uot> z_?K&{_J)prg++d7NFgwLu(McOI7rfv6^1}Ls#YenJf*`-`=#u`OfHh1%0TC*`p3Rd z*d|n~+8t|TNc|3#0`|xm`^qy%kc*PlTKpv}sdDDM@o8VSd57vrVct6aX~Z1Oig>@c z-CU<;b!V;wDbP7zlJgu!yDD~N%+{)Bz4s~IYXSPgT$)pAgLguJ@Lop43Q(lgC2riY z3RisAAhQHlHH=$?yb5h!WpukC3Hn0tmu{ht025uJBw|Md_R6c`dM}(tl`LwGEjVbf zQj=;W&U$0=J8UolpeoDO7P3euA-#)522P7D&qZ`0vD!6s0)9N%A^=zPP`-xyWULoD z3_8(8Dxs#@t_;{_qIS2y*vBaxav5BWoBNMie?F$FH-jh20!^@m{iIJf5|uf;#PQ>9 z^`0Y`OE*dq9&m)rNwIqW%ipA_ehG9m`-jy^t}y1~_9{A+FhdzLid*43+z8Wtyl&4g z)1fsSs}`fLhHJ;Ib-bh6AgZ*7Dup_|ka9Q9w!l1oTKE>q)@*8hx7Exq%>G)g&J}6Q zFdx^A3R2WXl%E?sG@6=pJ=iTEowZiI>453{1}17vH|BdjMOJY5w6NO~W(LB5DDv8M z@mcV1Z>~#CF_N=2?BM$fG4i9I_pF_oCrj^3e;DDe7FdIl&m*;*2@&6=+)STWR8-ve z{?!4YYXz`~QHdF6yuO;6r&g-GkE6}+;a-*GEO<9|~G5WgdT=D1Gink^aaS zXv5Xo60FdAj48*7Mu*jamWF$J*K#)mXnXEfO02MmFau3B+UmlVvjx?=>v}Jx{&cKD zuQsi(-_tlwEk=!L3ri1MOK#IXu|q?|8cnHGdFMVMG#2`{RWQ|HRo1uQ!|V4c!2Mb; zm(8~Nt}q7r`&hyl!0A87Tm?|t9vaO!aJ%g153VzPtjjjq%u-dpad1DT5~t+Z43c!> 
z@Qe_0H4YL~NOeX)%97wb$KuXX{D}68o25Goy=GHF>7H1|DZtyc1-P$A5FuSn6OL2Y zEGGN5q6nd>h6vl%)mwYgafz0@d`rCi^HD9tO)@iO=NRbduEP0>PK;4fR=!tBGDT(% zsog`LnD|M%{JSw__LlphxQf}53vHPQb}rP@;xX_aUd)H+g=&=sfJj07qlxHSKu=Bg_GwHm-Zt>lJ5||C zc0^o{Ej+q?8BS-N6)IU9*A1HJg zBYM`TmoSr+JYXER!`Ksbmx!~6ut-%c$yAOp&K)(GuvjH%SFpISN?q}%CYOmqp$(2P zYN6>DCCk_(A4Ii~h+ zxvM1b?OogTAFMjC_1{qaBCtt>fB+4be27`8TdvxmOD$#<;gc7#RPJIIE~vo@{Dp555oq=L)lMj16u%&?NJ%0`%sMZf z#Y(SiT3EqY$4Vr_O&_sexLGdAiza5s)y`C|n3J-wq3oKxgYxrz@T7MfR@oFf1+13S zY=ha?tjHw3P?^YGsFY-W1#@jUdnH2#&r5{ABrhUxgmquW%!^lQM{=|WXKQi?@=bkk zLZB8N?TYU9uJziezLmZji~WgtLC&(V8Dr&=SS;iqml6xVorSs2$AZ%A zkx`HAF?co{5Z%&7*4zFl^4q0-aX62CzAnaHo9$_Fx6q)cTWdV;aUIIqX`e00+{Yo5 zwU_>Bbfk9o)KHU?rI6M(?$o130S-pQCy6bNzT3aA8~mmb&ZgD+V(Nce-{2j{w;WGX6H~uc*-|zna;DSjpfGL0d8?8PUONTP3nfh-ZW5`7(uo5YH z#}EPd0Sgg#hFtm&%uAu{;D3MrADY|$;ihf40MiTK{<}>g+^>m5Wx!=PBwqpmB<4W> zQ)Go`u7JbHzyp78xc^Um?m~Di>T#*0va?smkEPHbn-64uzfsy-)@b9c@o#{DQUBW}+dIDJCRJN- z7#)}ws_zuJqSgCp*ZH3o7l00*qtesei#UZyj-Co|;Cpu==g3u>$^{bz$`Mqx+A&V5#Y$8eyeoH%)gD77LWO`RXpA zEa~IF1*>V-NqIX;zOtM~MLf0kVvhfYuZM zg#X2R><$BT`Ad`Q)++-t*g#iPfZOrjOH^Sv{>QZx1Q5yrTHcUe4pGWfq5bY>Mfr`r z`TzYgP&*k==_m{lZ#+8EVs?J{@K>|`@B6te3|LNWCFDOj)?^OnZ6C+}Pc#v7D`rq} zzFxs2XRYCLRoU@Yqs4zpiA3NiI&f%i`H>da>)`upyZO=NzwMg;l5wYDn3`(J>|h{L zXWgAKP5-@HU;uwBpm4);siDXH-V*P>$wBlU5I3(!(7Q+fvqwPy>VHe|{~}Wf;WP{W z{b9%~{-6nx7hSw*1-Sj5o*+YhT##1LXvhC=SoVRU08AmQ;U7yW@nXDZtIvJBzyDVZ z?f8SB2yx)IRvaPWNeu2Z-*|YU!{S&wW{`N0#79&ytPjeW=BqoD|B+@v_iJd~U~emC zA;W8FUFw<(R_c7s%PgJx?2m5Lt}F$^RG6^|EPEfT4NqhZ8$J^pG`Gbm-^mEd(V`;i0YH!gEyfL}An%%Un4 zsFwi;!l@(8J zH);OtflCwHj+u+wDL10QK6a(PJEiB%MXowgj?;Idn)lQ`4%@H(JoyZ~2P@N}H<8va z`kT(%C_EZIe%OE0rk$gx-700TME6N5Ma{=4nTvPTR`YVcou{{Que(vVr48938KEWSh4vQ5ha`U6y36#{}%kJhvB(_vg+w`a=-CjG(CQRNm z&5g3$ysx^RYct2XxKpJz78-13^AVjMh*nO`vcYGyMV?Xb1!bwuw%!iS1HS zficj=IZ*{IiF1<6O?W%zYt?3YG;-;3fqS_*IGBPgcijH#mIkwlM(g@y^P~xq$0)7* zkDf405?M!XzATuvH~C)y z56@+TFY6Q75z9u7f&zx)9BpUhi*@May)Q=#YX!!`f?aK+>}`NT#@dV<5`mqw$bfmfSM=aOBIPY+S{ifbZ-lV7};a>pGW9NHr^F7q<;etW$oYoaapfsobKh z#3tlyWQsBmE}gIhdtXL4y_$=_6S;M)`gogch%Zj+G>%(zri|ZL{#oJ>xGEcmctH&= zUWgE}h~%8gd6TD!;PUa$qLITQ8ewx?EzWn~%p)7TrQ9IoT^=k9VE!9zU%e@+;7sB0 z?h4a#Q3Q?oeF)?{aFnv70`q1{z%b?1%5|v4?JCp;zZOAQ92dOMQ&-a-9zwY@`NzY| z8-ir^!wlfVH=Sz{k{BHH?(4wZLd5#tsEyBZ4mNxeE|YZ*Ui#X9U_Ng+uj_-%_4v)< zv+2$Eh=n^C9?tR+q);xKCxZkqV?@+a&pNWQ4vv{=z~??6ukpM|$H~DvNl)$TD6)}A z?uUy$dMp}wLF$|%Fws1e8a}&$Jj#H<^?DINO1$q}UR@Z~PTpM)oQ`))_O@)iZtd;} z>fib#{V!dyygMz(YLyRq$=-&A(T6$?sDe@($!{GRb}qjFPpZoeOpkJ1mKZ?qyRD($ z2-!Wf)nkdxt2@QQjVID8^1?J?*&&0|6<;B~xnLSGn2zIf&bT7akIs#ho07nncP3v( z22celv<3p|eBgH}KLYbXOJpGPcSNkJj;=xrwP6~)sV3k_O3;CYUv~d9)X$gH^N2{| zs8X~jQc&`KTnmB^=>t9$`*MFSEBcF%-XEGdhS%0%IlF|3Qc z)aJSkTp*qEiMu?n<82rtH*DFvmKCnsT{{nb=2=(G%L}eO0RbKnn2Q}mIkA37OP#>w zE4v5H0IxPZ_tdkBQj5t}GdEq5j=fZo+Y&5PLGF_o_9zI~TW0+C67)r@Q-9R?2j*GY zh#KO$-k}a0MW%pWhLFZ7eiXhHRQb zhk)zy3!@FEpq1CtaveV3^g$~HsJR(cFm~Eq^rQ!5O=D3Fx=0-8QIf1%p^evX)&+>5 zu_RS#l-?f3h(H<;Hn!6xWb|rdfj)~tNFtljagK|sl1gQ}Mo=8fl{VA}EB!R=rG zg&tX^oNJeT(1`ly_R)Sr*Q1*!B|5XO4mH@~PyX9Q!$-7j&1k;Po!8WpCl^#56qy8c zAoG_YBHHoWpokbo> zYw_CCAbhL8o*h#WpfJ<(sKlp*g1`HPQTFb0S1h#sn(2sN=)wAFkaOO>>$a2g6-pMC zp?hgnSyh;w-(JgT5DMX|_f^{4by=lS_;}Ghf7iyCANPDyGO8f?qLx;ZSy?gh^QyM3 z#;P%6>sE1M;e~P}xJJpxANi_T&<9!`y157U>rgs7UpXOMY)ne8<5>AJCp8B!U4cJJ z<}+mlv${yO73j0mR745BP@G!E6MHgxy{J5TcdOGXOtmGv{@Sq9#Lv)gJV-$)Wo1<6 z@cOnBB;P;noxf|br0X3W{~)O5iRe-lH4#+BM@5j2oY>jwwI3Fsg@#xYBC2!Y(P{Yb!G z@wLkg>Dj4iTj8D6ALt}$nZ!u9{~O1z(p}1V(?!vA^y~AiWN2A--n$TXv>@HYF4LxW*}Zx@7v=)LESieeq{& zGzle4fAgI4&IMtA^=G3l|8ooc(@W>%du)EU`#*c**MXyHVy*WKNdo(Dzg~f>FZLVa 
z8;0-nxpQRF$$v~-k`Q|TOfXFO@RZXyJOZ~HXF37U*T9lG?)&tdoPHCrrMoir*DolC z){~JBN4>~fn(=I)!zR5JJLY{>9!QSYL@sO@Mg`WR2&VfeC;ln4!!s*I$Y!Eo;Q9R{ zBtrWXQLk9C%Trpa4oQ#=kAlP|G5FLJb9+U0!b?FsjT*e5TR4&e8bZ7kPhD&-t1rbf zVqeF5B9-r(8aLSc_@|tVc#)3p>7&f7dGcv( zq#gM^LG5LF5AnGAwNucSw%{mr8^s6)lo*gP@h1WP2(hC`CIyR?OF!@U-p4B1RM?)T z9I2JQdw8pCchKK|b9Xg+%eo5+qmwh1M#H5zc0>Ji&_-xF`V=nkY3In+791LEpGJt0 zK<=M!ga7Zy3wo>3a~uN<;xF{EznpLIs4AlkwdzgVAw@W)m2`A2;YN#`0S-#4E^{Au@+4*=gBJkf6R#bw-Qe=isC{r++ZpZ%F-tv;Y>kODk# z_XFVA46t2$xy-muBz41Ze6h0GO#2<7cfmJ&v&Q+Tg^0bBE0C!gqpGb|P_>lH*Mm+| zxWYX`Kydxmbdt&aDw5(O(0X{x3{e-X^-x=^+Mw-t*l?{I_27jlKk$1d=>6%7`jiRH zLSX@0%Q1+-BqeFESmfH?+B}-@DmcqfV+cOwQ<|gUJAC1JLWy73^Vt<}O;6{}l>7C# z|766tw@MY_dmCPr_1@2Au>7S5^sS7CU9YS3bDSVXCiG0$Un*5{7my|q82gB$Pyf1@ zQ(4Tyqzp(ox|H8&)eEVbgzw5@aeLSw_;nE+40xR-B#aCm0h5|~2oi&%W`b;0fyS*F zoQtP5Zv}|-)EcvfG4D}^sdxM6&-*HbmKb1;VerLaDbzRUd=jVX6q_{veq&iMA zLVb-6QM`?mO5M3jtSF{J=s$$_rguOEVib);&MIA^F2xDLBbaYtU^WEEijgHecNoX( zmJg1`HP6lqX9wZQlaD4j$09!l6vtAMun?#$Q&siZMP=|hjzV?{qpeFzuBERiJ9ZAe zQBtfxjJj!v9-A)xr%U$(($SXOGM<}@r7(sB5z3N&3r+RC(c7WK32F-0r!cV!}U-^UzwM35%P&IG(4xTwndiV`@^+(7FQ+m}g@pi>*qUfBDo)i5OdIxUHbgh=mra zuFg-)VDyYMPY(l`?1Ajcx>;RNflCu!r$eA&FK#W>H9GCUs~jhM|K^Ur^Ga{)F6(gF zWoragP1B_uc%5IhzG&B;(zy4c0d^X-wK-`Hl7U*J(GgRd{a%rGvB(Voj8Q@l`FbQ3 z@7_?|RDhD-JUpJ|Tw1q6g8_JW`<--(K25rXzJmuxt7Dwj`78S@q5a{>r(eQ#-UBfN z=q#b9G@Ayq?7T^E5eK$WWvZBpvPp`L6$82Dy0I4q#YFauxZ;{I;~SKH$f86R-bc`y z6^AIJ5mWH8ycA9e12Cs~`z*f-tuf;u7u7C@X3lvneGuozw+k^}ap(9HWybR!&L)Lw z_)BYCCYluRV4yrb++&+sCdxhxl35LWpP*)!+pEnx^vphQHNFyCD^Q{W^3`E20!y$L zlfURPu6;Tom$c&&w<+0GO&l?4m%p`iZ2Z`YIh8M-PlhCHtTT#ZBv}nK_9=f1Q4Loq zvk7UWWA78_TIqKw>&t0w}zr+@MtU7e5v_ z>mrPl!5D;%yi+T&e`P^?W^u|~l#3RY`NG=m#bv_>;;DH-SD$0b(0fguuy zqAEg~8lCQv9VlLpJGuagTP-!gGM6PD^s4d^0KGhjBQduUeG#x3!uv$Nh84FoBival zIc~lMzqLYK$X#+PEOcGe7jT9cqBr<67t7%5ou%g#r0*RC8JAsoA9^Qlo#aII4H$h( zJ#&ZGqsv$tcqi&SK5D8^m-;EY>yL`3pd3{yKKv*_Y3lT$pyRZ(LZUNA8_$R?#${v3 z1%Lo7gvlJ6Yc^CuQ&#gaY}7IBXdWW9_M^2jeXHq9LL%c)-Z$F7mprl}GD9JVc3LA2 zumk&*K^_=iFPXIBb$McG zA5zv;u5L+E&ELc(WW{SUkFqV3XX>TnSk^Od>6g#W&aAz=uz1#O?ES3O(rsS;u!R8% z3CW_`I9$nEyBHF^2l)sIK4;Nc)Slm4#EzCM@^l&MIHf=u>%>`g;lRUo9k$mDpmnD! 
zz=~pgD@>|`pu`aY?>19!y`K`UX$n3l`%PA80UiZX3iOD{l-Bp|=F;QhTVN8&Wpiy93Hi8(p_Bs|dBWT$(-ZbAkAaYcIK_fLb3JFW z9VX>KG6xyNP%>n0RV+Nlx5nv`|F{prZ>)My^vld4g+( zn29COHAA`0!suox#e-<;N}Ll}Wl)s*os8_Lc)kjzhjdCAI5;+<%XQ@ZW(bk7hW0lF zHJ>ftkejmkkxIECNgA=lrO&KoSiQCjJmem)pOtX>b$E678f`}8G&O0rZLd3TJqusb zuCpaIC>ep=iFlcBEi@f!zWJcJ^=Lv*slu;pHEV5(4B|jHB~YGR+%nOMttMJI+Au0@nVR9#JR5zzt*}`1Xf-^de}=I zI~F1mgj7C{9O&Jyo>JQUHmHJ*=R0aPQ8G{^IU0okSp?AwyuA+cr5-V?-q!B3+8P2=8 z)OL0>!B{3CNKe7Tvu#7-R7Gl&J{}wR><5aS}_Z#T-5-$1aIyOcq!SAU6L^ z50VkbB_i4GyHN0S(1nUBg1v$~%ZbU*lCwwzem2uijRefit5RYC?9Z_XY16IQh5Zk^ z&%ZzO=hv1d$M9(U)>Jo<6o1Dj;SsN+Rgj9iIoKqRa*P?q96_fa$nBPf^F{S@P6Mnx z6}r@j2D9AQ`;;g~F(H`SXf#N{!7gqmszIe>suef$37=HBJ)vK38NUv8r$^#?jr!MQ zbJdV&F#y!MKGVUAXqXP7WBbo1BPi?D&Nm0jap>r(r8XS%JVTM0{e`ZoQj6NlrgAj> zCn(jMs1KITJW!N+Of(1N4>N7cMd04Wk%vE-e-h)IDJLU8WvHPVqjswWKkeXHq4wa_ zT~^wxPfFQmbt`6EK8%`Rj+Ac^~UmHe1l@co!)lnDz%& z1mW~(O8??Wvgqn~mmS|| zqy8X7b+PX(9BPjr^U@$nCGyL7Eo9KXe1oA*5Mm$K`stzEdC(d{LP5)-8o6igH^cQ5E4PFA^f)!XI?Ltb;&~>v09A!aV zy``*%(4xc#Y)KlbE`_CJOVG7408Ei_%h+J~KTX+|3x|7?ERrKB;H<3lNvk^e>?vs_ zz;d`t_Yf?zs&Z>hVGG&Cb(!L6MY$d7#Ui zATQDFORK~4(yiTD*99Bxs&|IW3*OpR06CIj#(h zkR_g8^^w8>V$d%rb~GxE7!UDO9@hwxKmgIuRF@9XYRf{v0wg5)Zbl7bNo*>^%oz@# zLG>au?5vN#3$J`=hcT3WTSO#Ez$INv4thNh3~|t`I~SRN2nW!OdRs328I@id`D~zI z+SQjhpi!|Y{LEmLPj`h_GZ>d^i+!ntkB?;UgMlhYTTj73o~#+8eVteiC%u*j8Va*s zJgfW|l2Ct1Cq<8p1Iy5a+6Jh-3GB%+WY$=WYYMJFCX!p(!js(qXRPXQ}0HhUJu&s(h;*gP^^Yp%@(oa z3?8q(ELNRIb+nuKh6r;Pr$l0pFN2f}?tn}7A-L9VT zL;)N~PDYn%&wVhTanAzz#pf6{FsUgWIOs08vj9ewpoS+n&~66@Pd*R4CS!~x2)fgC97B|CZ8@k zixL2{2=FI4n;QI%Qb)yz3IUk^x}ZS z5afe4$VX(@p>R6*erfx_&KYtAK$NbJ3sD*_fgwyrl@z1O1eU~P>9--~xg;O(KMv$B(4;dSx^ zdFFMnlN?=8;akg0S{QK5|=Ql>8XmV*FW)shGMmr(~B1A=En-v zQZjZ=l)7;Z3)fSXx^?$4m=Mo~9hb_K*}AOr!`XrXs^4b<7-YyA;&HG3=Ad+`Db*$L zGGWFDJ))8g&29kfR`1-kLsI>X5~yye^?kLdW1vaGu2HWPU|q)j5;=qFDE&W^g7@BY zHsRE`+5oVCzueG-<#Rz#_2Q3QG@bP%=zN;wrtzbn!I48OG{fz> zEL6Y8evz?3+GItsz2Ob$AVTko1xvW{NFX zyZzngDcJ_L{pl8FgXoz|%e9VWDpUp_QpT10icSkXPOGvn?XZ_nJg!v)GB3NK!y}N> zQN&zasEQMv{M&XT3Ipo-E|Ti5OYw^e-eeKo8w*n!r`;!J@5yv5-9da`=NlU=^iF)p z(O99`pwajI>V7|7fF+2K$+0K zow~-0;IDvXE*|5SUWVX&?Jceb;g;Q`@#hFlXeMQot|U8&*s0GHishvt#ULeiT7tpR z0gP(iR0;yql`yulZvcgv~tg#Mg5=cnu#;iduSFQ%HA zZgDqsw4QxAjFocmany_J8Po|Kj6?UC#eiTOm*`d&`&7^!KQ_98*cYT0_i`Yph)(5k zM!QxOLs((QFn%UiDlqI%7)flmL`T76*9e+RXOGwl+| zFV$69*i7nefL}%6LG$eh`B*2YYXk_`R4@X&v1Ziuzp08-Ml($J_Z}f&>)f3U?T(_f z1-x~<{QBC;+Tv4szSY|qB-#`mfs$nXF8RrSg68M^NLju#4-*NvB^B@|v>&aCTagh= zH#cV*RjyHlG8)UQu}TIK|27QnnB&BZR}_4%RPhjxOa|dZUa&rn1_Y4=&LS-4Add&B`^cs>}9+OV-z7a(Rr8$C!0z6Q>2_x_N?dl)>q4L{N4PF4Q}ss31DL==sM z1uc=hBT7XT#Cn&W0v*dSQ}AGqRsgTCmjrFtUQm8XZ zT59Jf@7a44AUfx4WiZ;Q{)W6jpL_!qlV6o7yXfLskNJ(m#R+0&m;9-qVyen{X+Tx` zg{s)#GJRXr^(W0*9CKy7x>o)2KWU*ZNC17&HJkuKZ8Y)5JN6}cZr$zxA)^~5G z&2+B1cSomL&pVWF)M);T3|n}DokR@V!9hsXA zB_Ed``_D`_$4Xmz{LW!5QK?WhX;&k+vB*fGLeY+v_Dlvz2u^_D1Pv|;5;VBGyOZF-okTUroVmG;YQg#ow{}Ze*0nYx#xlGiSE&?uc^o zeP|Z?Nx~{6F0(tW+A&5B0RLXTEU~YBR)yt5G?C2ALl=;na- zPPJBi)nQ=7P#+wVmO>HQ0v)(}*lh$Skaz(dyB{*-40Gdm&7SpfN*nSB z+P--7UM`{|4=L}h-m^>IT4%vnLivoNz)t=%k&Kp>z71dP(2zk39Tw0k2F`jJgC;i} zUDx=qZmpTMO62XDTH&7$zT)=yq;!k#h*)C6zJR3Os#pn6>GWouCxjEtDB7%F!nbMA zn4Ygu=*;_eG{Ytug;=hf82!DS*mf^CMl-Go^6>%#0G$K=2>L8CGY1m7jSvxwX`ydL zlM<$Z?sy4YyBhKpvdN19l(0D>m_F3s@Mk?a51pX4GFCXl>G!)^--+|h-%_mNr2}2N zcxzt`jzRO*yx zg-jx;vj5r`m1aXa-Y2!)lOeKV4755gGo)w^gC$2x*uw^m8&dLknx4joMp-Q zFWLB#RG_gyT4CXZJuWj_q#|p%QXU?~#imT?qt?0^C#A)f@)Tn)=5v9*@9;&t9H9tl zr%LH^^yIfaKcXGP)p%O|miw!*kQQz#R5Sf3*(~Q&pZk)GL zM@D9T)tN`_SBVm8zy4ri* zzeV&o;guHCf`0E(uP_2E*nS3nj`#nFQRN+)Cb%XWuppu{PHs*81J!JF93Je7zjTof 
z*K1tD#+=p@L6_2W{;K_Jb*Qaeg5E`>q9|4Fta8Cvg}V9(xbR)}2|d%Pc98L=chF`z zIh@C6^jIzVmmH(d!;!JLMWUx_)w2<}Vu5UbPFIyiM$*fYjWjXk9uOjmETh^c)b_0e zLH0-Su3YsM!IxKg{E1!@CB>oIz2CVzs`kHnc0@@o?ELMTcfQk7;~GA&-E<}cy_7|C zCjAsf-6b`Q+D7+Ey#uk+Cr%{@aWREYZ%=@`;*5{G-ca$)h=-TOttWHew68^7%B-?; zY8c_^;aIT%(Ai1glN!)1Sm&zC$6|yc(nJ$7$Lt;Do|oa)dp}_0OmdQ%Yp&$RlaI){ z%xDJyYNa5Wjg&~di3iQ+;BG+|N6>22uUPgL^{A%v4K?_5ueGc;$#a2D=w zghPNx%>f6uH&O4icQE65lXK!*@BE&p#=6F+M3SY$v%e1sNG2Lbq+JhFb}lWlDC8|@ zfIRQXs@A~gz=(Bg6W5mAE>jb-|1d&^lDAQC>grAxr!q{{s9fMKS&SDhSD`&$#|Bol3KzZ--Ds>3gIK zM|m_OhSj`!W2+Ez765&H*!MR2Op1qgCnVDA6n#s0DuGO$vvB0=5YDrDt*}vBUwbEC{!KP%VAb8hK!4&E(VIH&zLtb6z1zEAH+i7Gr z&Q()5whF2oYDK&+g$s8(r8z6}HTKX<;91A5N!I7cqmcwNWu{)X!=LX6y@89U1Y;}16p%tyN8AbuWp1CDjvW%Mm^(IV!d_RWjT?nl%! z@7C+OEAw2LF=NQ7N43Y+)uVlV<-5)3cFDTrrCg`9wwZmmqVU-Q>Z*Ga^X2-J^(ENS zR8aX50Jj+N(xyn@$JBNngCR((D#Z3CEHq>mKuwu;^t-Q?s%g32Uu)+cotUV)Iw5X8 zV+qKYH(n_i)aJFgeuNiJsn73eHX@SDVWVr?68;f+5eGwXP^?*S3-zhkSWa22(=pRi z4qlVVP2%vH!fe!CvX(S2mc+~~lpj#YF4;1^emmv7 zJSF9?2B&o?LvJ3i%Ucx3uIY;-y*1kp2LCdX$?tYHCW$Pf0;i=c&pSoM z<8k|iCgU(j4Kh#4FAZYqj_DcMO;~7T;<_U=mm63P^ts8~(hB{To&CR?$p$oANt6m4 zc+ZK|$M;YQH8ZskG`W(x#f0)3vE&p4IJ4=j7nO=nCA=qg$}Y^jaxdAVM|^#hqg6jG za!_9+vDrVw^@ebq>13S=7Q~#ssVSJcQd&U$K6P2ycHCVKnG8y_Mm*>jR9F}gF}{t) z?}LjBY`-@7H0DO*sDSHFA$sdGE>x8~Q{=f7bD#&* zA1mR(TKvj-Hf^G@c5a79_x5sNJM(X>D($eBHn~F`hvv=uWbfr5=Zd51UuL1TjLD;2 zKO0#n#=45juy81$e|J<1Oh56L*No*WhN9zWk8TFKS-@_az{`U{6Bd!X-u)8c)JvOe;H{nsG!|7C zhC*8(+?p5^m0}>~z2%&vu67JL4SY+l0>u2`J*9b$Lhnm+%@U>oSOdfA$Y>`o2o?ObXnUNAGb<3Uti(& zDQV2K7Vc_v@%O9zm7ssd-jy(YgoHlsA-#6vh#SyW7gtl5edW_{fsFmTe2xjf{?ATk zu1J{f%8yg{PN(neP)*vv(jlZR^y(;QsokImCKCA1iSf-w4LF`NLd;~L<4vV0BO-B) zpGK#J%&b5DPz%wrUH8Vco)6Z~4l&@UQd&zyFEh0EuXBs4ovKI3Dl<=>HejQHUZ@xG zX+oGx!{*c-0O2=~gu=VP6r+jHZ54doR2%JGJZvCxR=C62YwvtNYS_k6qvZP_Eop4^ zv{+{11EdSxor7Z3g-f5E>$@*0gQ@|LrZ^$5uBoBzxvcvsJiQGKB*8{i0=J*Uv&h6- zs-1{6?n$?S8(`Q{dgEeDKHx6>PAgJk87)f4mOs*1JoIXwX&!TMJSt0pgKfw$p7GYu zZoSxEzt_{tw9LN4+Y#28ht%-7qd{#F==|IE39re>285v^U8n===w!oPQq~n^N5>{* zVJGB4xFGV3Xp5$*v3ldUq&7r-FmC_Yc-h1^?)Bu=UH;Ovhqc|Hg`KbrJ}UPB`=p1l z`Z~^eTRTn5kNU-)mQ7Fw=dstoKU&=u!65(eqw=0~QYkN_|xYyB;&w z{a+l}lIIHcC!PFcK`FFjn>Qx_52Yk|ImS~40WyypYE z!^s-wNdf+4^uKHo%IU|}g{VSqY4;?t=vAlwi?qz(Gy!exH;{fa zL9XD*k?nX(H z$%ow-a$(w9xD$7M_{!MTvbxPqP+ix<3F5Q4GYCtI>`cx%;8h4295Od>AUNSx@jp8V3d{rz3zh;vf#TpNb%RGa{{JYG{|8YacHwMX*PbsL|0@b(jJ*aCe0;=L z!&r9>WYD@0Z4X)rK=itvEuD_}F%_1D0UCJh*Bp%nCfM56^lAR@@-XgLIp<WGkR8`f!7r%L@^b~AC@9J4Hx?Xun3005FOhM9)35E8Gu%nrKKgsnOP(?P%R1QT@0vx z@P12I=85(QHU5?FHKW2)N%Ejs3Mf3|7GnJS;;B^3Gp&J+>z1saU~KsAZ_uYSy(^}s z@XG~QWK@xt;gXA$Zg@}0-0HLMDPXWXU>fUb#K3r**yQ&AsD99cmS4*eAA^%S!sg%6hS~eYSC2b($%2VzVcI_w%Y0)#0~099~!m; zIXKEdC*&}V3!kmA!s&qNsk9oyE}^GqyJXY;Y@;BI7EBFH-V9$fquxDTSIwmHw8=}m zmHbcua@YYYsnhoCUz<&K?=0HG+~y=PK=us}4$r{Qr_=R@CNNso+HH0y2my2Zv25*hy%ue3iUW^miGKK*6pW11kVg$k_d8}6X3lH-x&Hr&}FZciJgv)^!i>J#kQfKbG z{u>Rwioi`v5#r)}LP;T-m4Mb`^A1(%p$p?!hy4dFr%S%Ce)%zaJiO$68i+7+Q4BO6 z5BnnXX_8t@9uCu9K1oOb;7ez)^Lo~+3@;rNq0J*P;(n=Ko-PS=p$w6P_rI_@Pg>ZI=l_3=&-4F~rPY`c8k7^TUE@BxtR<7k$Q@Mn)#0rAhjhEbu&-cx^sX55=B%Q28Dr5Wy79~%Q} z^bBTej}l9>n6+@`504-1z6KheWh*8XNsMYQrY|gDP(VGK=8eQ%qVQ8Ohq9Dk!}sXF z7BFBXAE~=tE4`8mGwHbBKG2xQiE>2YO&EFy?NFOJRDKSZAq@WPxKea|wz$eoAW=m5 zau5HMYGu1I z0)Ff}Fz#OjY)WcrS-er&u=EdOU*;Nq-m|}cvF0(I>YCki*i>I)hNGwMxS2D(iBgaz zm%wO3kPUyw{8#P%Y2wB{WYtXb8l+6g7Ja`Nu8bv;7|(?_Qgwli#38# zkt9wSY-wG|nb9c_bmfAZ7;aqTBE>wKF|*B7k{i!kd$yx zukGk1B^e$M!!hK+{^ivcVkIN7IA9l5Jf?a-|K4R}-z%jfgjoCHgLPqE5URpfbDmDD zllB0UT57k{~{jNiToowCDXb(k{6qkfu($yD|5Tm@D# 
zf$a0J{}!mCh?w&YSKOW&U6O=7pVzA4b6MR=TY8RR$5Y`XNwT^6?jGYA06pN2qFOH-eDSIkna{IJnXBa*XWaomxmx$6&lgpj z75+Gum@y5=_eMeiR6OFf61UzJFLVZW#V6;{W8`)tD~m~gH`N8@is0Zi2dbE&w~~7O zO%pV8Y62vVr9cFsz6R5mT40Wo*SUYIsc510kSRD3AZ%R}`8=4Ywmj&5V!5x>HcinG z{-@pC%-s=}BWGr;n$!M%al*E;N33MiH0M~$M3u&MSWeN&yb>b|$oW7}-aOup-fo+J^PYI*mUY8o%*xcLUpyV- z_0MqpydfhA)V|`}uYCf1!Y$?uP_sr{E}mH}z|XJcM`fINb5y%`S*bMxNNMG4P|x;K z9BFa&uniV!>$LSc@@sL$Ym2$tqkW{3-KMMaugjxB#Y+i0M4#@VA>E8t$+-8qIrjZ9 zJ0BIL7(2?kg}P7Vr2de}l@W-z=yO?6nVO9im>4T$MB+hFcUWqHO}Mftb6=ZxWAEc$Nrzd8^!pTFJB{sS2b1oqcVNyd(}jZETTscRJqUk7uj|J^a@a}Xzy?qWs- zSscEkOsNE9WEj){KPp)%8I*z)ZZRU4we4u{t-GEQ2Kl6f#CP+MiNB-{bDnqIwAE)` zf01?`=GW$j3Hine7fJ{Nx>)DtoJQ^9Riw7XwI>et>z$16u1mEV8$2g`hO62rgJhF^ zUy8N7LhQVy~wm7Ekugwb%Ih=(RRW8FD(qbR7 zU7>R&wDXZ*&*o}r0bmf9OrG$l_BxU^6fw27X=vy$n7QBZ#pL-*i!M&Mp>rl=yK-@? zp!z(i3WBg%PuvGa;*3(}F3;`Y_RF382~sGlQ~bx!{mL``jnSh7c@=mQ*R%cM2uah9 z8{6fVQC}RY-l_Bguami1Jr?&D8g#ta@?N#aQ%VYJGs+ZQZQYegDjmTy+Bb*u&DC#= zC*Lh;XTaNjVvqaQwDX-@L8J8@#BSov0u+caX6xNLPGSY2N;f)$-$~P;jTLhaBJm~^lEtC;Y!~0DiAE|MF zkU}LWnx6mJ^OxI$rhVJ|%H81r%?;^W8P?dzcCw_4Azw6aeWQcz&gbmbw*lA_nzR6>OO6XSOZJr`@6{hK<5t}qTm-ik4s=kKDlrGydUA1L)Q-{BDu z1aUr8k>yM|B|qydQ~fzs9OGDLVGf{v4}-i3UgW)gU)pd=`M7k2{xN{|{DOz);NQomf>Ssmp+ z95_QQpZCyhE_Zk+CX9^0Pkh;uHlN6=>Z;qs$-Ns`+{~9vc+~t+`zVL13g6t9YOoeX ztBx%ZpZALiV=p*))|~QdqsJ27S^MTTqu=2KY&HzlkM0B>U>pLO{3zXu%M?@Rwdbi3 zvp3Ux?wgx;@|dj$oC%)plHg6Xit`2~c_b}70;x;;W*Tc-UEvqK%yTK0JLfwl7qA`b z{HWLJ1%tg+qT`f)@Z@h>v#F^Vdu*#eOu=wW@qUx}Xuj?}M}3QWvnitnS}%Oz5d_tx zC6QTP-sFn}x{?n%M)nK(DY!C+6ZduWCASFxRDK9|8pf4o_~zv5v0u{jEz9E5QBai& zqU?VOP6*T z=drwV`7ChkHz4JyUU+L|Ys-?`^QsP4oSa6VmIHsZYH?FfWju(vZZmKS~=gy zJ)PTufp#mpLyuZkHkq(GIZ#9v>+icMG=7JnlCEX%jIVD9Ju6UscIG{rR*5e=bN%J2 z131<;llR4BKW`);>?^!J_sCwiy01SGspd-PcT6GPQB{W=dN|k)uiY>HLBe_G1v7Rz z4<&Y|ZPvN%>?1c(jK_bZ8_`b5d%-GH*2%S3*F8y2=Q8v5q>r06t%0E8682Vchuo;n z2kv%SdBCerk^h`2?dGuSfzRy#f9AKt`a`{Sqm8n)bnBGUDe~|c9s7(h1Ze=M(Mv~) z^AQ^yAdwfgVT$-m2UahB^|gXF!e5<2AV0d8o#D;xDvl6VP%xN7j9}Fd5{f6TlNxNn zSx2$>jjfT7@K&c!XKhe8P)}28`8`&;+aTwOAL}huPlQF!m1niNqTs+`Uz(A}apDPg zSUYVopmW*U#@5_W3F#A1*-x%A-u5D`PNDVGq2g{;r77bz*$X;S;KrbQB~M%Rdac~VYNe7yk3r@*u=M0RomZM`te z)KRwo@^gB^Hb6%EV=2{&J2O6!ep-o6QWrto-SCz=gP2*S&_U9b&Rd~Vi(PIWYat31 z5$1$`{c6EdRh%DkJGm$5mFLJPvN6}Wq4csb$Lg+mm;2(b@877^hJ4h(OXwF0b3fZWiHe#NC7SNPaBjZo z`@#jAFf>O(bKMsj+`&=Jip{0@gSyI4_PdW=oshra@K{? zuJt+~ceg2aPKHFO|5TnA?;NF2Ymr4&ZCq_lA)p!cq*@^oQUAlS@y(i+bsREdg+S22 zhV9p)k2~D1ZaQ1|#;SegzQ65{W426V-cHS{El!PmVSk=Ew|LRUK!MnATe33J782$$ zHywEpC7Cr0+zfu(MkjBvG~;~<;uZ*ca7|K0P362|i*gg-$IMVAv3;7`Mt0&hC9~OY<9IMPH@cQ)w+8Z;9MrB&)`-u0h z%gn9@nYm7`C=-Z!Z?|^@r6{P+BgUR*x4#l(PyFO&W}anbPi&WD%*(bjWHT;DgU+9? 
zvwbxTH#if5G|Tsp^|=J)fZvtL{;DruoBtfL&>TEHMT zKWteQRdc_--n#0r)X_hu7_c(>N|yKZlgq^!YK!ZSf|WR&%<(AHgQQr<8zz`pbSeC*HnYZ-C^Sj(%6IFpZh|X(B<;&|?{)qglu9F_eY#pX}^LpL25wb<|2M~d2*HZlB`-1PV2x5a)|m*|@aG1=Ym zBSb@pI&6so&@I0>YtrSAPhC@3Rb2PVN}`2Qz;K%~$9J;f6<+)GheZ z#h5;xm#mZ@&w;XwRWg}{DdVl+VGUxDPI)6{GkMkRrRZjX=h;iTcfDm_J@vhf;oy`MBjx5R`EBg zk!)v`w#DRY(tDgDrWzpWZ8b_L&^3$l3~Ws+n2p?l)$puywu@KE28db}tW|1Q@@R_| ze-tXqbqNq#(?A8r2Za^Lpz}P?UkJHl4k_D;DlOzifAZ6*Jz^aPcbi>T$69JVSI1D{ z8GaP!Q%$FHngy&m6~I0E0CGD3T6i_QrF-^LJGjFw)xwyTzRm6Diwk@^J6B)WA*E)6 zcnf>kqBtvA3YoRP5cMpL`UB;J5u(j#Pl-}P6|&?4sEJ`$hc2Cf*p4n`~*v#tNYxGtHa;Y=MHgIYvFden>il6Nv`n-wEoDM`xe8l27^NA7~ z^$TG$Wu#;wHNGIQ#s5%}T>I>MTc9|+Z!R)QNda+ z0k|mRHoOmrg6nf6O?D~~r-<1|#44m)TV>C=- zu4v(hmCT=-ES!}=)}xz4y09JTPKL+?$;XNMGU1%O7lF&%}0DkmJ$x*@A z9tN%7*O0+(@?GI8aIn`Q$qwXLLK$+%sC&f~M7&VN18u)Qn_Bx}Zpsa=ZLGCGTJ#a` zC7jLtN&})>+YuW78HqR*+5?lhnl*LbM+H!3#M^tb!bS_J#=K$o?C0O!?d`L=tj}uw zhAP-TJ~z7<6j_ogA|-Rrzh?z-L|aKTiKOkPn<|2gu1%6u{zUx-tFa=`ZJQjFR;uQ@ zOtocjOQ_>HVB5rM#x<624_52MM$Xge?f(cW5-H2L817!q)O^_h+iR_RO{@i^=vGqF zw3AuyAK!75tQkrxeJZUPl>)*L>4Y`}$QOuOQ7N5d9f?WRo8y?=cEpLsZ%V!*HIxw5 zogE#C&(k_Hx?M@1Xxko5$j}otLmJ&=&xN+my$(W7w&$N^^_B=e`eH7Bmc1<9x;}uh z#&S|T&((A0dwoQSJx)!ONk3iSEb7kFg9yTu_huApw}uu*kHjK6M6jxd{@A{fh*D8D zaDsD=d!E|Gi>+DVP(4o1Aor|sl`py5^~6QgM*<&XCf$F2@OBlf{L0BAJ^(Y8wo}hq z5pPYm{7;{yz_?i%b6@YTQo&xh`!@|3XXUEqTZ(7VIcBJf$K*Jz#uxjSsfBMpXW_Mrqn_BgF97LB- zC6>@aOtG!jy~AJ%tqtVRtlSo% zl&ZkV-x_X7jA*3X`Mxd?xtw`bntpqfaNW7Os2O98KsC!&AKRx>o{ z(;3^;nn^W%8^T-S8V!vIOlGK-P+FJRAY4hq9$^;L9eK@W@x`!cQDC<%NJ2)xosMVV z+s`k}zw@K2Ia*pQyz8m53>Aa{bwN%_=uuB36@)_u)vvrBjYwUmmX%&12$i+20~mb8 zm7POB2YlehqnDfM&VO4>tptn}4v%Xxl?T#u2Ss#WmXdrQtdRaJ{l+{?^25=%UfPC7 zvEql#D;0mK!)nqpgKy*sWD*CTN+Kyz+{WJF6UnHYlb|4Z7d2%N*2~*4FeU%NTM9KK zGz&!aA&we ziqocRfgW0t7w(FOdd7SELTz^6gu5a6&B@z^!e_Vq0m6Xn{H5z*K~oFi)n@iu-)R@? 
zea@~b9o6L(Qp3s!vq5+TV<_VS-QE?8+qpc=SKYGj;XO!MSF@5wIPkKw?)&Fne03aF z%ujmya`?g2gaz~B)vR#g#~H+TcNxjLxx{xZ3vO#$+32lb=`mXsvIm-*U|Q$z#GIYK zWWJ}ZV$^^|bj|-wvbEzNv)f&!t&4GIDEHtWi_g3*I*x6qE{k8Qu&M8g*U4l%E?{QB zq|TFjVs^7JD*ZAeH~1P}B^*9r8M}xf$yycJ8!J?~{7sD8t0cnmVf7C5x!6@DpbSwh#Wh@Nv%Rja3_3~2ELmjRVRXZ%H+(4|m zTwd*Ckwm{bTZ^hmrsHvvu%*S2XcK$_X;ihONScoY&bz^(_t)yzEjcw3uqm^{`Chd1ZIFiY=Pw;waCDWOIAjrFH2Ixw&Qpi> z-fM+j+%9|(8m^hzt8ks8a2Kf@V353cIdr_&H2;AVkE~5olxsmG(&?Y z4|3nV0FG_n!NJVTKC#K#bdWt-FE`6?I*E%;S^U0_pRdT+l7b?{Yt~=zz1zPFtv2b2 zqrKU~p$$``T~uLrP&M9J3{ock%0m@&yGG%7(Mz$=5#lD=YL#u~>A~G{vz>#={yp~S zW+_CnVJ{^RsezBnMIN~evNG26EJ{?>L{Ze1UFU{`XjeftI0+-Tg1FT2w_7v$6ejIK z4Ff@)!>!W(ZofDU6i$ae{=Uo=^61!G48{7AgGdq$bDlYm>%2bKuL76N_;7@LWYY#D zo9gOs&L#GLH)G(SP(qbo=p45;6BO+QEH#&}Pe-3na`6^nPSXFqu-E;&+XCKh{O#7w z%DV$1%$_bi68=nX5k7k5TC?GHGfd7HTFXtqMt<x@#u zYX(QXV(IGm^SN>pvkGOg$!b)pt7o*YEuA;Z?&FoJAe`!Y-w2^kLQ+ zifLVmgR_z1x}L|vTt+>5bH{cUU8rx!_)!Csy0jys$9F zm_*{Ad5R@_$*VIE#vk=3Uh+iv`VH{jNvA_66T0@+K^9 zzYJxXX^2ngeAT<70p7}nSxEKOO*9)z;xyWH%)Hc=`!6OX;VoLcOb_>qxze@Yqb_Gw zved&pFC3GPJ=&vZ`DwVe<~?X(F9#^#X$oh|C`49`~-Sy!sY#ADhyXB+4}Sat<~ZA;%Wxpygt11W*6~d!xMf8t<{wfoZQImy!-7v=+pv$Dzp?)ON^6(92f7F_VTK(anq zZQdPp>sC4PsMrro`f@veI*5xa@&^VIr`$HJnOh!{fte80ko^V;MPDua3{r(|J zVFT-_5Nsqp)$ISnpO1zR_91~j{rlge3H1<=^x_`=yK&F<`TN-zpvF(-N{c4VKpiii zTC!^aHw*U<8Fy**cCk3*X5lNzQ?5z2#4_a4Mhft!orS(WmVaqPeSHvb{22jcXME3Sy=#5FD#N?Z?rMShf$ln3j z$TI!rb-%SWa^rbl@^ofc_uYBTTy)$!72+q%7SJdr4nr*I#zk%I^UZygNo@p0Rk%#pTg<39%mq?+D4(f7~8j2G-< zfm3NjBA#H+2XgQvw8ECz2J01uW; zy8m@O0c9tLie4l&QyPRmb#8SWD7)pVz=?9A3BFMtmmm(;Q*sNq_6(h0y0P5a%Jli-EbnOPQ(GX1nnFOQ`Hd5v z-2@u=$%;Y_2jG}I_D*FmTdA5+|5+HwAr}QS=U=1#*H-rbzz{!Xne|){nikOs5{HA; zj0DQ13$PD<+TWFMYBDbK`WgQzeuo(BR^;epg85i;v{rfyeohjr$cpZA7tsyr04HUT zaX+7w;(lDR#>Iu^pnc7xQQQ81aaKT$=HT1_mn=8MblAR@_?gA6#OZe-vH8{7I7)VR z5!MD3)kbTo4J7hdW@F2}ufq?qhMe!2=emKN{$EXDh;JyL!!8M{<7tDRyvzx#dQVt@ zUrHP}(`tyN_Y54{B6(Wrc^0F5vWY_>#Z`Caf-9^e2JQGUs&;`YuXAu&zOO1=Xk?6n zDQhHL(kYB{Y0pdx9Z@D-j+?NULc;VaO6ulws`l2~p4yJaPgHJ3x_$Z^GW2ezNr$z; zpLg_BB3oS*h!W8^W1Zv2>d%sM@{Z_Uk4$B(lQlC4lhAOlS35WtQhdjS=_h<2E}%e) zh}oYH0X#`_22G0-`DyQ79y56iQMztlEl5OE{!B0ti=X7-OTp?Ex+}#Y^ff3{36{$K zlYQ{NZMk-|xDVg%a@|ayJ{o00V$e7zjJNi$vo9CJx#VfwE^WT&V5enTBtf#HfAoGtGaWK=zdJ96eHpe;ic zt#1||M^zR$sNf?hCX(N^@zy?AK1s(;{|~D{*}lc~I#fq0EeCxqSsb-$36AaCrby$? zv4pJ8t3e#vjoR+suM2#)WuJs0(kMwmy_o3NR;nsO{f*a$>WZuj8O2mOTyb3cjqiD^ z&zE9<23)r|$SFTMlJ97sIScj6HhVX+dn+%TT^Go9cRZ?5Jdebno@b-8TV72H{Vn@9 z1_hAtJ#|(L{HX-tXaNf0JW6mj+&f*PBAB4telm7d=!?$=vVGmwW?Et~ZkuR1_;sJY z`5e$Ne_*BQoLw{6_LdZ4#CzXH-YY4LkzDRaz|uD7U|L~{q#bP~K zVZ6mYXSymiHf>l|tdt6Kmz3^Jtfm|N(GF2OE}ogol9H6Mtd7gY@)yNMv&2+X>xFtO zvbDGr~O;nggmKqutEX`d*X-T zm+QI6Zyx(BDtP0qCO|tF*7;H(#th!5ICuD)D6k_8djvsAi*7V9RAX8QBFP=Jq=bg8 z;7?mbP(k3ynOOqvPo4Vu%6IlRznQO9RZSN!yz8_EQdzTcbZ?5VFBQ)VzC^*?l&vYe zx7#7mx{Y>Bgh~m>p{y35dzprJvbhkU`M3tV93X=q&qvG&NT=Ml02xWJ*F2mygRZHy z$)FSa>40s5VkPq3eO6#59l800MJLK*Ck43o-hPuS5Qv{7WQ(VlQxG1*#6NlVq0JY) zH9k0s29(K&EAdKWJHRlakOB&FJ<8_{?<{tMMJLFZ3kRl6)iW13uZEmcBf;qb2fP+9 zQBd#{9EX^Qc=hjf%%sUrBr)d{%M1S!FF5h>+z=)^uAM{uY5f_o_ zCQ!6uEE#%h9!>CJQrJVlNZZbcLB-v6`PAM0fktCnDJC4#eF+^{O&2&Yfyl9PIp}}N zuq6pBS97z;606744Jaa@%))y${LW5CAprtxA(`0yfv@<4FAAs+l&to0hX@J$NQ6`Cc-S9!9eOze#R9o$^XhT$RMy`1U=6SvM-ogN5=afe2U8RfVnXn`< zMkTG@2-_|{ImQwlXOPu)A-#xn|M_BZLCIF3qjgU+<#8Ug*btJaGg-Cc)c;+G)y=cU ze4;1mE%d9?{biRE!%0xiLNZLt-)`X++OF%kM|gg?p;-f=qm193F{d*I6zeQbA#Wab!VLX z2(YQVJ?Jd=Brp}6Im?JB&+bli_*yN8e?~yPB}g2 z2yJijzvfR|Ww&yHQ-JZf1`Xp|zo?FnIJ*c_raJGYylAyNNiK##u46o+$u0^;;7u)! 
zn=sm(HlH?uC66VMv~UgTTf0RzdBZZbOY@Hp$5knbjoqx`xqyU6g?#K(38TSt7>>Jn z=hGJUwo}We&w5anu!p^4KUT+ZU_aqzIcVhJ;I2EZI7^=}q^p%rh z=722=P5el^izuf?!)}aJ6ENoLGjjFi)Ks<=IS~b>daJe?TN2+lRDZ5V=O+?*DLWP? z&4v*DV&vR~2vli37%#=V_X{siJgi5M=e*x}dm~`HVz)N8jIm=s?RkHF;#LRSUfu~g z$H|G5HDrkWn+bSwn(uDv^wgBcO~!LfocQm9+hUyL?VBz0626nnw>memra!O85EljX zKMq-AVPj)!@LjF1cKZL;l6T%r&DKtiG_@-s{Q07h#Q6Ur?yaNZ3Z6gFI}^pD%7`fB;c6W7k zb#?Wps_co8{-XH$(QH+vw7-qba)rN?ZYtYwx@#4{oW}0K3+{pEtU1yd3R9T<@F2EEN`Q09jk`qS@uoI6)Rj{=Xn}S8aN(wJ5tO=AtK|uBo_(ZY$Iox+#AH}yc4G)iIg~^jh zW?!Aso6aLVp@vZjD@W0C`;52mTnm)xZ&%jZ$O;Tt=BYQ}JS(VkD!4+Y2WyLGDSh*4 zlzlb2bAYcfT>&>w8&>QP!ZE^?i;bQ6oqGKTKaJ19D(6{%bpP8{y5tDtPqj0P~mQ7N2J2r0}u{6_!rL1^DT zJ}}>pSSJlnwVvox3R74T>3Sf`0qrMt&D~!PXP6}j#3QXP@xfH$Vvsqz-1LKL(@kv~+;PyOX)~Pr6d9y&J-NXor2d;V z1A}9qopZ!7O+0TDawJ)J#Mr_Zvi@{@?W_Ba1!YFaxBzcB`&ednthnNyq}=w6!mM(L z28hl7DcSWUg4`kw6KdPL zI}*mF=Se*_!r3jmEi<)oa(q0ZF z_Kcf%9nk^T&PIt}VCaxg|+(w#7Sq=P-c(d|dHJ&FBGq z+fq9P?v{G1dQm8Y6Rh>!YV~00HH zwng_8*rj&lg)u)L$`M?{3#Zl3>?TJtl}$a9N+AT8RRl52effL^^bAUQ)Bcsp-nl=4 za-_6YIwaQ!tvdqhJ9)Y0Wu^p`#W(AnqIp^h(S`=QpXk9f)w!;AJ*slEoQQ>2LFg&Z z)f8b--K4v>ZD+a%c;7J8f1Hrheb>rr}&(Ek~(@Yn{z!)dcbMagO%Ww-m8+Y5A z%}tI9QZE>@jV)7E=@4W|Vjz34>C)lwZnN&dcl!49CA0Yv$S}Vr-?F?6Q z>2${5#H%3wD9Q5Pr&)<7?4fJEOWR}1I4c9*g01V(b3jB`561TA`Vc$U6r$d#Ytb?+ zln)gH9yyl_79h(L!*+!U2?~|3DQ0pb0g<#K%fy}gqUigsEE+{XpmqfJW;o5Eyb>%7 zJE#V>Igq2UOLis=@Mti9~d_C;dC#AOyC z6iARD-$1i33&!#q|Eb-cB)Jih((I%IGGvjio?+7KAb=s6R2=d;T5Q$%5c`v_Q^Xrk zQClMPO-7GZI;7`v&;z&KplpSpI1*5~dSmSkfC(lI- zj0cp&dw%SXVa>d&JqVdUnU^I!yV%+4%#4!sR3Hn09SHizxRflNO}vyV6&kuA7TiM{ zV8elm7T~>Qh9i2+*$-f|!EkqKZydEhSDFK{l$v~!ZgDO@4}sMBo0B%}osp+DNuBMt zZEa;uPGB3I_XWW}qz(UFdC_fmL5Hq0+K16&*M2>}dE|is-TT7N)L{*jkXJ$18+@Ng z&0lUR$w`snOnQKG)oPg=Q+iiI3e8s0b(wI4y^)VJRZvJB>0l6|YW>239%OaH zYBKJ3t5|}p!f-YCC?46Q6;WFHs=62rs`C_K`a_uE87WufHg2(n)x@}@1%@D1WYYW2 zU;NRPB7=&E!$~ILVqWOuJct@;JPdE|N9laiHn78uiwa*5Lo8T~z669Rv*hEUiUx09 z&AnAm+~Ag6nh60;Aq=5_2Flu5#`mgeqEgMuvRx*-%hA3(p-(k%wn9_qW)|x)z^S<2 z_sf0sfiMcT!#NWZ&>y_R>*F;xuXUv5^O)k8{<~PbP{*pu$}?6_CY2kz+bR&mPRuXJ zc)YI4IdQ;1xD_QFwNCCY}0g>apvkTvSPu=Uy$1V)R}5P4iiVgzs@Y^|NZA z86zzH>7d0N)%F`U5Ng1;dJHI}@NT%_hq;@^yC#u4za2cm=GwaTzF$bD!q4d)GfG}?Wd#y} z9Ly)aT847ThI7k)7ng)GAsNupg#r;uqZMvp%ILkG&9_xo~&_^JL zCx@E5q{Z`OiJFRvy9<1@`7^Pc5~6XF7>ReD5!Uoo1l=d&CL=FviOXd#lCM=b=qzDM zb*|G`9rrleIh8AAGunu_mVHoCkNjynC`j}y>X~t6z~ezC1$sk8qLHJ$&AmVMrfL-_ z%tlS4vJ+p3Z}_>DO*BPGvjR@bv=**Ee>IQ(Vg9+jIQ2?fPtp=f0sPxtGvU^Q!OX|0 zNI8}Tq#kw*L7i@j-5_98vovYA>1PETLzvp7Qe0mc*003UDG>`e+ae@AZBWu_)ys<^ zkh3Noq`Uz!jB)t}#@*qRN0P2_>kUEQg$k)!tH9!+8}QyV5);G8e+pnixfzAF8csS~ zcNR9a=j`C+4JTaaIw}%77MPq{hUc0_$5=0BV)2+AFXcPyN%T-^3cH6shaZ8YEzHIm zPob=juWexm3P&65dCbfgl1YuEU6CSv;p+;VQ;0m0ZK0rx9cOTgOvsZ-u6%Q`#Nc|52kSDa=5x~}0 zTvZNHFaH3;3rT)Z)rPI^`WdbkL`xglLrmyiWn8JisRh%u>x0aIQ<<2_sff^2h)Yh= zm!JdZ1R9kCP`{MJYnUw8e6wyg^Ak{T)7MbS>nEeK4-nK728*}kQ`k!vD3F8JM?{xi zOLP6%xBweRJ0$Tzr0F$9R0c$c;onhINP0}}oX0@^M9fCe?E0F9Quj>LL&iA``cJOx zV2s?WLCM~!4K{WI_=0w1R;?NArfv`>dd>u1U&9>zr_%7Z%>{-c1&ceHUA>>{t;-c{ zJwq&RdKj*S(Z50aZa2Kb%PM=|s%lF2{zud01i6payhWD2rX$dCDZ6+jVB=*j{{#B) z+IkJ>elKXOAH_Op zi1rydlW)C;I+Qn~!#)JQ!IF)&h0LpH0$F6u?Est*=~^&F?>Wx~Hr+ZzC?xWs;57=( z(*O!KA#yqGm(m(*CSF4tea-GPqUqg1&832Gh#G|8hx2=3@YU6ow^eGBqp^;N#dx4h zvKgU+tHdd8S=8@ zO2L-(Qm41s$!)XIS|}{YJohaCu2AuDdadRRw$gn z2$9WO&x8Nf7wIDnZxnM}&X&O7)P*`{GpzsvsH+EWPQPLstKf}Ph{x+TFgnOCOww@5 zh~F}!EELiEGP{nW??X8j&XJX3qJ*5LjW|qPTNMM#+w{j*DHXV}t}%a6=xagW<`}(D zMd{Wki+ahf@0e;(p%eEmp7BzxJjPHR`Yv=>!$?mpSLJ0+)b^;uWz=?*e&q#F5Bl9m z4te+3UmsilEasvN>?aLBLmty>Ws+^0aB9284QUKNAwke&j#rgiyxD>X%=jof8cRXf 
zREfiM-aWEhvIH^KgTJ0zWV{Fs_$J_SH9^LlVjqH>mON^JoOa#n&kS$>!0%!9X1V;u)7klXZA`&@u?`%Y?!W{@`f~{m!isEJFpV(p~wT2E8jov=~b?-GF-DcQi%8K$lasAK+6 zA;?y4m#=PkMvrMGfe5COT1DzJCY4Ww(;xV@)AxPHxTRv zk2Sy;uqrZ=fU`TH)G;P)wIhO47@!ex7pZ%TX1DQjM)N>dsw9hD{UYH&!1L=xO(S}x z%l&$R!8pPW8xL4q&du=Qe7<7^&u0=T^&HxO`36t75;Y7(@8&t5|MTS&$b{lL>R&n+ zfn*=8L#~!y`~(hDZ&%BYi1>zJXhW3)^);>+6cj6TtPuQ4&2LPle=cooYLLZ{$(+Su7zEaH|xqsn$ljI;<0IXTRC8TJ-U?(^|guH zvtWjySG=8_Zn${a&oYwmg5gTyUL#SUDQsuHE5oj+KgoVspfZLUnWf`1YDwwnB%h~x zC2O##9srcDD^Wohq$a-b{F45C*lie;T*E?LaZxWOLA)|jHlS0#Os{gF7kb|v4ghkXR>pjnVl1Z!pq4(KRe!sb? zgU-lHXOVKoGWb3gI3T!gw;Sn?Uz|Qq&!8EUG_3j z;&m!j&NF5tpZQugN74aF9nC&y`PHCAzKsbi;|6Nu@zGRhDcn*Z@Q&Hba3~g`?kb^& zg6}wWv}nqjkdrB%x4aMjc(=#D(*~HuR`5{2SiuiHv_g|LB^Y?gAb@8tAxEpRB z(y1)mZlfMqBR-5>g3Gam(I1^^L?m(HhU3VOoldaQFic&Ve8`b1dRG}9( zA;Lf{V#Jdt)Q5*AER*L=G2};EQ;8)M;T0NgWga=i1Wt+xtz4?pV%U<+!^?~%+YJDx8qJ^7y)&qG#}5C66D3L zf1BBpU0`QXvJ4lUia{-7wiEq@lQZU{#{=3=`P&;myvr8fu+A`2&cVZ^&KQ{i-y&s< z&u;S zi5bF!>&0jR&AzIr)cHfVpC+KtF_J;3Rm!raIygp@3ZjekO>5tR$5iLqo5CaM;dhHT zo-PlxPf`(t+L(BIRR_aDuRM;l_VjxpKN&#fy8t<~co{FgeI(FbOPr2Y0c_3)+*f$j zTOK^HmfX1?nxy0`>eu5;XCRUG)#NVLe5A}UVlTuM!dg|L(b|4KEkQaK5SP&F%+rZE zq0C-gimoz2CPg#c?|$E{A*h1OD?znd2$eOU-gRe9PkOgGkPnikm!g-FR12ZiR$PSJ zI5d3|)7x#07L`Zo8YGi;$TK7;<-(O4Ak^xlik8+$>2zSKZQkp_QU4HegVc8j>{ z@2@O$6YP$Xh9X#1EuCf(_x?~3A$efaGi5cj{iF>t(+TLb%yWm^t^8BhEfoOoWTxJa z-TJ11Me=20ZH!)X>9|e201WQ)4+}6hW+j7t8@5w+NcBJ+7~4>i5{M>g!hGI{&hzf- z_@zx{wnA^-@ZH^h))~3t6i>sWgO6n5>uK`F8-&na9h^A8JliGf)^~cjDAdik<>73i z!=i@r^aM2EPAw*`yMEd}ZE_^__$T$J);vAJgCK%HRS&W_7-j^L-@3G~uX{Xz10~G~ zbnQKkv=rbRmm~SGYw7BxlX|wVqj)o1DGQY2qI*O~i0-kC>}vOAc&un3moZtpE)5aHZH2MQm)%ev zzPT-cI8yK1)(EV%do4-)dcQ=jx|E`JegWEA9v5kqhvnSL6F_9&Quoh49891b5wI<@ zLuu6Pj-vT|t z+gIlCW9aqE*GRmAB%8U@0aMLU51n*QRudpp=^-XVM4KuBv7=MljDy{&&W@=UrN>7v z7h@5IWK*NcJBw207_8c1j%e`$0e@9O?v}81c2BN-A0$#{;JvEcW^<|Z&}d8Dc~Thx zcv^J#V9vgm5pn-9K$alRPuZKjOjoYQ`LxV0*p$eme6Cf+5wYUuwGM9XD@CLIbFm)t zS~cI8JwC!X{ep((Y{#lU8D@jW>)$`%`ZZ$oBG@nU3^|+L*S7_-ymCz;OzpW)u8te{ zm^VYBGls!d04@qpCGPgUeP@6u?a6gRqNn=eQd;-Gb!9`bV0>#Gnliq-TSnT@nbN$Z zu|zW!nxr?fyhp;1b%_7DC`o-J&g*V?_O?qwSa#vP=Z4eQZ~5ZV6LMiAi@V z30hA1$sn+8>tQ>(^LKz_1En`ctM)VIkM~Vv#YAq=ir>RHr@np~s{HzqWKN_)xm)Jr zempv%a|f2&yOEvq7ay&AoO>UTHj)Q_yEZp;Skom1JRIhwWWljSoW?Xn7!&Jg82MF~X@Udr8*XFvY93Z*yxzhLY?(At6a2&L4p(Z|z*h5Suc$dm`fHxs6dW zr6*_*1h6gxW)mw(iRPBwlDyEbC}0<1HN$LCPhbAuM5g>-hZ$gtFRE8n1Uz0C9+3x%FA_QHU^+4Isumz029m+4fV(DrPul7;BiaM;==>Zzi5+( zCZI-o3_(e5TO)sT^p(HRQ&^{!0PiYr?x&Jj0=O_(h6~sWY1#NG$ewZ=MZ_8vX+jT5`hA zb6zg?#}}!A`C0!wo0NTq@E7t}T68AY=;XSTIz~+{|#0LRKtT=4-I$zq16lUJ z);kF{p5A-N?I)IlA&HHM!z@se#`E_1X5GJkL#%$!-KM<1FUUh<0BmT$-B{neSrwP| z=N{YtQ{VuKWplrqtoIL^n%4C4?fXAlZAVR)f6KXzQ<}(u&FN)4?-}Z z>0|ACYCNz^EaVt$rvA1T<91FDZuloCrqAU2-+%PnY@q=U(Xh}0ME|1t0ds*N_zMW| z=>MTg#ktKIxcd?;^z3gtt3&Rr#vc#=`;f>Y{0A@kw9vy|e|LDW&;RQ`A;8~ZfPt}} zs(~@=|5rc%F&H5DkCXm?2H3(Nf`7aFZ^W_DPUrE#UnzsM ze6xSFEN&vOF%V9QSA>?7M8GyVq<8Y=!t*yjdv7!UShAI==&qgAiB>#h+ZLRW?8zJsMZRxrgaQfoidH-=AQjeH4qzk>_X(6f_<#b5LFZ&sruvaYy zpa}g5WU>&&>5DJbjOrYo*|piQBF|4lA&&tfn(Yt7S{Kd!xtui9Kg2GaH)OdPLlS8r z{Ut9hOt=+RY-@)-3kNJ1e|OEE?gzjd)mjz)RxbLLO<5Rn5U%6lK~LImOv z_|n&2T8^BLMiVi!m*(W$>TvcN)Vj-1KK#OY^6asFyjcpjKU#X~quJ#s zz0y&Fy$C(6@Ks-rj@}xee(pxx8+FkYtajVG zJPW?6#d0mwzTdFHVh@bKBq?A3X*S@%PXxNVYW!aDhlABqdvtg7`cDh~J!6aQ5UTAM z8N_Bcx*6Arp5r6HL*XPXZ?ICDZ&DCRlcdl1J-q_2bVDDe+6TCL%`;BDKNDh6={ z|0)WQ+u_H~+Q$?85js|AjGG>$fn!H!u!BzhB??ixJWaZBG9RRe;Z5GkgEzzY#E#F# zJi4BEIXfd;r;crLjqrqvb`pq3tdlqRT%HHNs@YIkdy=&w|6W$1!Gpof|5b>Hdos)u zp)jVZ0&;+)_bba7Hz7)B5h;!R-IV2DfeZCG^fK!6j=1awIO)ODZ@3;}cWcJCmrV;8 
z1(Ah!3g^{EnZD{@n%J9Hg&E*@^{V+M_Ej@PjDOlg=E?r(HYy*UbHSQ;P@TY*=YGRL%X^ z^P1@m{37zk??Y6XUrpHu5JT6Tf+UfG>^rHZEuoEN@Jom*Y}pFN&FZ!KtNKE2XCHf` zAM{4tU_n(lwygbDGw$G8f#*NNdI8V)0qd+QK~^zd zHRa3QP$H`W@!tMx`mQNvh{Xh`A3u{=IXbBc-}j2^H>qom9I$p`Pb7wz{O#MfQwTM( zArvg|Bv_&MYhEbLlU3JlpCO4AaQovd#yFR|p-be>MC}p-;$8IV92wQh>c8$$Sw&4w z96E~EW6e0El=A%pBE!jWz7xWtEweiUk?H0Ui`1|5aru^giAMapDJky4{ zAtwU-%12mLz#;47ZB*}~`Ha`G4*qhnOkFYtkEU?%6bYX$`U~RVvn{N=@7m;|M>VV8 ztyO*ubGlY$K%SjoG*Lybrr}Jp`mKd{>pwfO_lV~1S5|&Dg_JXaq?dl-)VuiZtuqBg z+WO;nWeliJ7Ws&E`t(YlQiDDTz8K$=cwgo}7QlVvQMSJ##$Pr@(wn8N>o>=X-P_eI zWz6@v4$NJL+k#_)M4K}fP6s9IGdDDl9Rx)PJ;cfI8*-(Oj99+^mA_v7+AMVjYFU3C z7l{LM_oRm#e(vuI6G<;O#3$9MoGuh+WzM_x-uy^?a{AKix+MvEq_h}chxODLX<3lh zU>g47=cLE)k(IYBKjgE7oZyCItv`neQEf{joW$T%mWEbx5@Ziq7}#KT=A%T5{2HjXW_li+UpiKaaBkKF1^ADn*{ZnlRM4 z_tA;{OyJ?{s1|hi7SzvS9g6WOSxv+IN-bMpwR8JsLHehj&VZ}xkBUZ$U@TB!$S*WH zG{n^_k`dMuG>b~@sDv|^wzOw&8zM_u1y(cupyX8&4hvYJP4`#vhY%fj8MK(LdKH5f ztD2#S@%kkok!yo}GBBq?=d_y=LAt@keO_cgw!&%b6drXMo$D)*5@SQKi6j;H8MjS?m?< z*GaAfv@I@p$Z8au(Cc1lo`GSZ96|-ESC8?BSaHu5F}9%iz0kj-tO2O<-?LSSkp!`% z=aE_SfTX)_lcAb4I{B`8YV597QB;lNffE45EN6290m+&KFni|p>6U_&&qO8=LK)dtNP<#~S@U%s3CKY)+`^(-t*7U}>831myv1LZDF-ZNbitO< z*Tsh+NKHe#ktl=s*O&;@KEj8MYL4wq&Gb<3M2nG%;01n$AyV#WB_fhkTIw;%nRhSF z(}a37KvMgByC`XMp~cNWCtudR_yQ(V))fo=CZ3LA-_XSZKJun{G#}*6mi25xl%2cu z)R7j@_eBm<+{pHc`GGlEV2wa%7fFxnA^Y)dk5BL`(3!y*yw%5qPv%*&rWBAKOPHOA zmJLU+=H-u5Ev`l55~aEG6g~;k>Lg@WuOiSaAwe=R70t6JV+-U~3s}3)|3nS$p7l}E z_x_F%NF8AFQm+cY{IzgWVb_>cIMI6pW6MLwJ-sXshv~JxNefD?fVmCz?7$-16?+xj zQbPg7pCnc65ywX6vCA2AMMeAj_J#q>dstAr_CB>wZ25^rif#=1+Tq-=phF z?^5hp&qX4YsoMRi_lUy3Df`;U*A;6Y@`*|QW1HAyH1D^Rmf_7J6s#lWFv(OsA(Da- z1Yc~d3&*|To*Ha124VIx|hYO)k<;)tUHWsjWZFSZ}`woF+QG(178hJcdKa%S{#eJFd zf=_R%DQanH89=_-SQ~OF7^6u^hr>hFpEIT;%r26s6HEn)VZKi4|2E>)zW|M-J)6C( z$Eh6~W4qFpnxdkX5pH9y(H^;7l0PxNPF3%8h(G641bOzontfJ??%-|VptfrK+TOeg zDU&h&4;@j@TO-DfUrtsQxXU~{Z z1DxrM=!0rm`Q?8iMl<|r`C7UhuxAv?h0%YN_&p^rPz*uy9vsK$)H$|?1MTX5wa$I~ z&)XbaJM+**M=%?0(R*U&Ke5}kV@n-o$+3dbyf~OJVM^J>s!Wk@q3(^;t2VY_-U5hW zXEo7EBBY`}u@@;sQ;^W2WmD7d1)2s%t$xAio|l!(w^$t>m(n?CFE4dmRg!O2}g|UDY){DNbq-*<}}K+XN()@ZJSQbEec-0 zF?i8_nq<)U{AWNk_mz||Ja->S!TYihaxU}N8=hF7z#P$o`nJm{(P0Sf6F-ZKH+8^T z8%XCWiSi%Q0d(^xm^4a##CR8M9?@bb;sa8mW~*w``*{`yD)*|E9ZSR*yDk{wlV`{ zW%>e;wc6(gtP`6qywv+Afc~Vv$;bSk%0;<^U)v=6yp)HUwG{t6wRX}!ApwuS)B=(Y zdkl4fQ=`0M#rbVXDy*?k*W(+M65guY85~L|4m`9cKOZl}_4-`xDQv)G!1URrg?S2J z_4h9jLX~ll`+h}2Y|Y+Nf~-Q48(s570xpiaeavM)92{UvCy#5+-dhE>H&>H$)%m)6 zq&ke;7y+m`?$0FPnSO?i+qv;A&r!cJU(9no<%y-K#zkQi7&!_#na6n>?)aCZ;h{zB zCT6-%C9}6~C)B!S1%|9|CaCprtPcyPgBJq!B4*Qa8TH4*Z6x2)>YW^-T=*t zI@d?Z2~&>V?bfgVab+S{;Ungi@jD&=cONm0OEM)$+Ned)@g!;2^L@uMC z;fpe2wc8XgnCXj96UXB_#bo4PB~rSZl$q|vwbrHwbFezmfmAIZDUpbzP%Uvd8oxwy z;H9lNtHt?F)xEq=Ol|++N!o4ol+x}HGx@VUmHecXSK@IiVjfoixmOPuZwNjf?gcNC z{aQZL5GI_AO>OC&1rU4o(-f2>ejt#?4&r`-j=Wlup@!OELMGquBHd215Pgt%QN-YhW@`Mkwmset z$LM4=mUv9K%3~pmn1HQdsvO9D>C4`R#C(x`Mmvz}$d~Q>ir=~Ft@05kmakcXiTo#2 za{KtzKocq5GJZ>%&|kp&4c6#E9FpkX`jKj{zKgcHxz*p*h&J4K+&7a_Hmd768*L$T zFI9sB3~=>-wUNOiqsTdU@)wFmab=5mPU zY?2o4WiM1N23z)vU4lVxCv1vs`)9?ck#4al>GWVE90|bA3lHxsF8!3xJ-N-wGsVU# zlk=)A2ob8L?RhQRg5``Th_Oksws7))WPQU8U!Q1@5zzx!6Dvz3wUVGg0sw2qv9urH(o9qEC_=4jp;HlnrD5_ z{`?}QurOJ7U)t4VG$&3Y56E3&@gTRj3&2+Z97e6fpfj8hY?F0>T`6|D(30DSZ?K81cCYL7(oKtb2vGYoxOdI0m&_~x zq%7QLKGS2UJ!P>V|Gj&Id7a}q^6d+Y1?qxGl>!|Q2z4~c>l?SI!jUV`CIdQMQH+zY z5d=MI1z6-)Ytu_vZik;QM_7?Q)3sz-;|LR%deI8$4`_rs z8gyZqXPuga9BfKap8^w?wxM6_bKReS^uNWni@$ z6@y8KBjG8#JXT3KXx;8A$Qj=Myr@yE?km8M@I?U|z;p5HLxwD7{aJA6Zr(?L 
zv6IcNb|0G?vd#}1w&;Z}Je&+0CYv~MYnN(l1}7eQCT%_3EbX{C2qGf|2X~oeqWyir}Vn_=I`j}j2e`&q9Cvd0ga#U7RL@3u((F=EV4A3C9Z){kZ_?aG7 zd+0u}N*05srXz|9ji?hk6P>Cj>67FFdxd)fRrK>;wig$FoZmmg9pz{bOM zk@YORxOiu^S0fwPu1q~lVsf_4gLO2{ML>4zxdYFyADZ@#egbh`#29|3%oW>AJb>(8 zy8H^^E`tV?vII;M`U6b{o52nigqQgjc-!0?vxwF@YZs0D(|l)L;wr78y=(SCQojk` z=fk^ptr1aQ;(FqWX4_TyyR$^jv+4%T>OEyvw1>!s!TKrZ=IY1A;;@waaH8T`MzG#l zF!LwvWYf3Bs?_a|56;W?d=0Mca)y>(=ejSJ!i-w)HTkDm_$JXwhU$zK$zi&IiuSW8 zWZO2u9#Qw?8{H2N^g+NjHjv{P#?w`xqCWHj!aUw+&_l(^16UWiG@^Xi3RNF7h;7*Q z{(V!-#i#f=A)hc1&CUD#S-fxRdGVoM=^UqycHHns_b(I_bVrhjndrjhIKfn-(z2x&_j~N^1cMsag;KdwObGLvrjlR zEJ|4wn{U@2_c=Nu|EjJoF1DxO(9+!^Dle#9y$37=?AYgtl4(3bCt^7ZRfZdA9l)lhDG@H zvGz7Kou-0o2=~wR?IH!ut+ZEL9-Wey^Z@8DD(I$se!H*$5@;fMf z)Q+1>O`7vbIFF7S@KB8}lxr&@*6>D61*Sa&e4Dcb@0DY>k6v2%xhd(J%SX!ST~%t~ z9Y^Vp<63bfscMfC?y<4%Bu+>VnALx?b<|fGV>^*GZj+TO<5$Qzr|j^ZJD8ic!q*rr ziQBYvo=3mzg?Bys{6%KMZ*{R)B((sKx)}Fyf_V|GaZT{g)XA7?3R=(e)U6OjoJmfi zuFKHWX3yR3`i)7=ofOdmt71$zA@RJ~xLf98*U-ne+v=ict9SBIwr2`eQeuL$y4`}?J}PA)x) zU{ZM@nqAw5GuSCUR7f#l^SNXPNS?$A4>;FoTbhfLM5_v_wV$CC>O^S&RGjX9qolUp zSf^x=EO2P}@!owDbt;`wO8~L$z@FX@=l?|C+E~Pb`Q-NHhyb< zrr+~i$?*V#GN(3GP5h>Lt6}ou@+>Ej%FVRTv$qIK=tpUjZ!zu1W;lOw-t@1x4=IE+ z#b3zaCvd#QWW%m#DpNn<*<0FvbL|I{-JRx&JFg|~`z;vCLUfm}HFLc09h;n7Y=$ko ziUb@s`fo3tLKgXoXf|E=vJ3n#l3tmco4-oYo4J88>hv#0Ld$2!xo_nKAxCy6T zu|GyX#{h;5alLyGpg9{yFECJ-Ca3fo^Z@|5v=$-mx#RuEyc=B?IH+pjXO%~(7dLL# zzQINCebDW4%qzHOX^T4v7(82w!N0?lWykiLMjZHPR>fnPKCU|wp78L+3t7hMWA__P zG-$1DE8MsYlp?k*uq?Q>MCSqs3v*#VzQ%;nmu{E2l5<5LI1CDyw{N|a>v<4&;ik9k ziDy{(>@r2vHdEM^2h-lyckE;7YVMn~hT%NEFnYa`W)J{eH)h#0kk4d0Z~%kfwFJ$D z%!>%fZMii}gG37s1b{XpXY3D)^5VQboi+heBXQB!*_W>*$#8o4W)i-wK8M)DP71N> zR7fT(J7Q0YO&k7*u3w*=#9B)*HyL_(nB7e_2exo5ydU}$+2Qv9{u$u?Dyu!b zV0d%qp6IxE$E%9ZuIhsZCAdbQsPmA1FxX932eCWn?%RgF4vsW$TISd}Kr-OW``<_-1k+jQ9 z2XHu11?xSu4j%$EZAJ=7v@!Xn{W9Qyj`~&&j{{7ruw9PZ?~!)=M?aVejTPZR@(3#Q zl!2VQ7FE+)Rc}m;`sjajXvs|yE`a2Trx+6@Y&c@)T|`Qdcw4r{$4mRh<56zu)|J)* z)y_PZT<(w2=mC}OdK`e7o`ddjO>FM|s;Q=C&+g6kaHn6%EC0F0CA;b^v9ygvoteeF zz@Xb22O`%$V}4^2iP`G9uVl-G-vW9v7LdGQpT>gql(G#hz$nda*Ej62zy3~YL}*x! 
z4cL9=BDKmsF@e@2shZ1T_I_>PyWi@1X*e!rgPy||0Q+|~Wl1@eqK2hHYA<1}N(NXh zwWd9_J48ag2W!MQt-%3ap6ZPijWd7v_`1fVtbm=Bnwe8ZX0Vk`#eXC2Eu-RUzCYj7 zXz&m$2^t6zJh(%G1}C_?OXDt0coKqz5Znm_r-9%>JHdjxOXKeD%;EXXnwj-qv(~*c zFYbK>eY9$yI(2sK?`Kzui%+MMK})jBN!Fe$pMry$-#gy26ED9`ekI*!-Z)^j3F)X` zh#N~eQsRv-1;QN)@McER-OEfzyRfsqTcBh$+W^G2Lz`PvMSLHG6E33MCD9NA6|w$s z+GAcq03IZz)WX05d;JVhxjjLQ24`mcWQK787<=0L;q6~|D8;Kq^MV3(qaB-rpiq_e z-P`l#jTkqDFTFfA-3r&!&y^Xka!GTiuTnN1qGeRCAk(YOgNr+q=|^9}R7*-w7tV2* zVGYPjKGX$+KRfPyHKeA}D!iU!r`J^m=$xP_ZHwH7fTXSdvXaf#a80&jP`zH#dJ3F= zx47eb8Im{tUOaBtW1_+f9a7$-S`qORR|o6eK2I9{dXg9yaSsNfk$XOQT36XSp| z-(!=zZcZNvCkYR(pS&|D%c52c>!<>(4k4;c9=~duTlCsSh9RnFakY^aR55H?KT80n z?L6g2~3%}uGD%<)GTBM+ETPNXu@TO0<+`e{bu%wWWxK*@q3HA zthbrJXoNo$_oKD!3Bxrr!D%s2V%)-3sc3xd^paKv*a5`zmk1NnJB4^hV3a$xW$b2x z;~(-%h;vyb{sM~%ClJo6$w}vFH@MEF9#m{{hhy#hq(W4sFq5L?Du-!M| zXDHwp&)^{7uMn)=k&|bGTM;gvWWWQ2ucU}Bm*;(sFf;;oxT_v+;=uKWd8x7{PtX&!2~itqDy_3d>);U)`o0jh+3yJya)&{aMt&F<0y~>8dpW z<^P;NU(O#qqK*s34NG)Ub_RNa=sWJ?m4GTMNG5#B@VLL&H9vad71VN5YM(5{Ww@denonn!2?5iIsbg7uDdG^V5&h@Qo4h(qh|AB55NbFC5R zh^tw)0G*%pNvrZh=o}m@Fqszr(rY?(>+$%ji&m&f<5v__UhWKnL~?!Hw8i4kxSV68T5J8i6|NvhDM} zMUD23CH4i;b&p@1ed}82U^~~*^fSDkxQjh?9ZM2Va_wEPhn}R15Q|?{d6;};PYezV z%fH)bPi9~I+M*x0>6;*43jp?Uz`e6To?qg|JzFAhQR2~GSvIL3IO#J;2_SujAqSzq z8(-U37{3;zA0Dm}#hHA*xR!!OTcNRQW{B`;j~EJjnd70gzRzv&6lfPb_6o?@E{V@q zQL4625nRYXGuy|>2V3_JD+MZ(81Gq9T}=7+v8^Lj?V9b~VwTH8@F5Ekeb4<~9u$Uc z5=g$$8lIO|CyKgzki&5T#9l}D1%EV#6_X2XjT`Z7DcqJkZ2vx$k zdk~6SL8K7QyilpOSUTYJeQo3r{l8x?Rqm5;19iKUKSZhOU0V;&N#3f8&^* zE&k`~3Pen2iILB*XdoH<9LzwhCBRNq z`DuSi!g6DOg*BvM#Ji-$#IiLT@Xlo!aM1>KggKt1QlJWKklYKuL|Hho52`=#+&PFE zT9;0VP|7>uCM&5xR^2g5x?saz`@OL7*|UZk3tV0wx3*TV6QqfI+YIdMU_A%QNwLbc zWLoqIx`I}J68$aq#Nr$UbIr;cZ@R}nyrT)~D#g2P>j1`2@z!`fgZR>QP0eitEI+<2 z#_`ZQYqTO=PGvWEG_9XqVWnBPgdVZCii!ATH^Kam=My(yDfOQ*q$6WPEXG2)1oIVF zX4{sT4&7aE#(&^XDcN)ICk+el+R`VdOXOAUZ1K`op__xt4Q^p_{Zx^uYHxty6i57a z@QwwaEgs&?`qP@XKW%>NACauQ!mKph=@)`2pdogqX5dveO^b!Op{N*pL^;*}`A_X=yybu9q)=5M#QrOOTy=RV@R>G^1uAqR|xR(*K(^VGG_I{EBv zg|%yuo>PZ?`_nny01kCA1wj&oMomRtTtP|hN}ko;wR)jW!>k6KTiSelE%tZyZF1+1 z8T!871P<3%R>>i4Snv@;r`P^kY>I997beoZd!!kZL+NQ2O~pN005LRgoKI|lsEU2!DF&(7s2e~KUtLgfNd?O@~ZI(&0kU}UaZA<$(ceU`) zUsr0vZ9VW)bN2U5)5yU@XBxIiiOmZeYf$_f31b*4K_@!Z2tp#B6mH8bzz5z7_k&(gWG@)Qv*4Z|9QfPS1{Ic_ zhBz;O+{6A=WYS%8fbn2LEv$q5`=2KlEv{HF-Uj2_x$SnyLHaPn`Fyzy7$^HOL-hUa zGZ6}fC|o;QY)^V3u-A&12>Zbu_gAViiK^lZMY-+m)wg)w3GD7ON5jPALH=(bzrn-= zbe!lZDd6|EpG+F0Cl`BB;&)U}Q|Ae#ziPZHw_^Czc9@=`yWS{KX8STv3gg*Xa{5sx zNlL8GE8^7gRo406Uwg~@0)7uu)5Zw9K~%lYP!d{^T-CMSb$s!5ifz1XBAoHA^KM4o zY)+^#Qc+hL?I&`Tf9)38jq^Np$R;rG^Xt>5I9!d*k#XJ}hAGu#yP_(`1d$QuU1=@!8Vb@UsD%)K5eLNTEl+l@DnlJdZes%&*f%Eqb zgVi|qUk%MtK*`3W?@5-JyTaK$yI@TtD>IB1a<@yh#-TPIZ(4D^1XakX8b@9&WsT1W3cM!MZ>-2uSzU9H!^B7> za%ug1y*ImAF=EtM@BON&ZYm#rJ47#&LtAwu)IU#ccYC}4q(xaXRmQ7fKm((4O7@>O zBNNnoQOeuuBbwGvWy;;8T4n{w`KzwgK~|*k zvibW-KsEod!>?=w#TEwpKrWgsoQb;yZQSLRRO&V(;SrMze<5ci)L@GJ%T7#cn`i)h zajx>J8i%nm2WOxAAs+Y%tNa!rd8hX(n%yKB2s)oSQ^ZK4nmP^1pO4vYU1>^mKv$g? 
zxV}JHFgdIf4Y0_XAuFYF2=DU=7}L7Jdak#tYN#MEB>1gR6whwaB4S`2CzO|^eIq?W zWBmPZ-H9Evrm07@y;uH~&E-~>`5Lu?YR)cKs^y*qd>c5|AFAE7};8>@w$zW&h03VnKEM-gMVxl+unUO zwThu`I45QtU~X>aTyW^xUC$UJ9#StwzTsxLL~FgBZ&vtwYAbl>*3IilbDI?3)mGu6 z(A0*c@*CA=_n@~19(DEl^K$qR25E{6aLl&E9Z!F^YLBr+JD02WD5|YJ4!E$(R>xxa z!2a`mxPv`>81L59>F_Z>KV3oaDb3w_RGL-c)k|cL*iLE>D7-nmb~~K z)D?Wlk|+Fs(jyg0ExC6!vb&B0^%T$%=f+#)9EJPi+A2LBX=RyZe@o{Y4{qvsNu)|o zhxuFze&002uD7#jG`VcGQ7_#E9M!&Q-w2?)+nZA9SUzm74dj+Q&^c!Y--q9(QZzUJ?EveT&`Y8W_L)+G2X;Na8UrOyOR(eLjEs^X-$80XH_U*xeP ze^yQw$v6fMLkU5B`U0+R_be%N!tVW>KA!7Hi@9NyVjat|^><_u34u+a1U!GSNJMLw zpzx#IP*OiXk{cgXP!~;b1PnfnB23)ejwzuNUQ3d3zKR2E>iiex=%5&Rql|&ju?prF zGks9NFDS7g475G?D;*R+{x_;O%Ks3=SDyi1iODGweiz>0fhaU2693B|lP;jH zVFzVF_dztL0ZPSyjyz-b$|oj$EHjtx4+k#Lp&>H%6ZHZA;Qt&WCJ#g^LD`DYbSmTj z`xZSQ(h;eruFmjG)qv>|WFbK1UJ&RoD}x5)ihz+x{d?DtXpnROxdC5bCrl|Z?CJ6Y z0S$3(hm;p>m3@4`6M$_xU@zePzr`zw1~xPRvVUO`Bpi@?SVVH6VL^ruU@r*(X7$_> zNFTrn=mR)0Wo~Z>F(^k__&H$LtnzTANz<>N)oT&vvC%m=-ba&Uyd0HjAUuE&8gZ*x zTczw_2TGX$c8%T1E8+(R0R%1V`>Zc)YeMNV2U=d8*lQZFQCTb{2_f)hs@4s>dIj({0J$}4C@y_6qyUlG}qxi zlxQI&rpR3Ret#M~=Sh|#OtbSJ#82=Xpjn#sOP5Z~x5XgP?-b%ieEitO3~nwVh)v!? z{#nL?C8L2z1kA&dKFsMO2HpbA6J!E;b963^Zmd(!bGhm>osz3%d!+ebn97Y`+^1@5Aj}+}A^q8gQn6;h>qb z+5YvwfK!m90wEye-OX%&rnYVq!kz+vKcmr%%^BFe8OtpI30j!P@|GJHgguDi?*q(J z9N2uUO~tzedAKBDm{eJy_epYBk!2#F{RD_sauPmYjP42oue!~GBGXcPoI?_cGFzTa zwE*?Ji@5>YQTe=quhU3Pmw9v!iVp8(YL2$19ti527>0R|X71m3UxHu{HcU7}_{#j@ zl|%R{z=n+Pjgf5$bC`+`=D$dN1RK!+zydU@Lk{jm5&#|PhwVD+5a-38+JGjn9)rdQ z7VtoJG(_h-ddc0zM9WnH>&@Ck96NXxuOtR{YyfaTWa&+V-UncAe{k4(O}@I zS_|RNB8N2LD>njr9aI3;G6!6sF~edupgl}??_^uZ=pWLGQ5JGE0C6EbBgnz5TjGB@ zn}s4&6RD>9!W{!kI`apz#vlNl)A!Wo*J?{nOD+Le#m7L4I>ebV z;w@|WF`Rbg<$dUh^d256sNc!S89I3nRkyqbF|bFlZ84yTl5(@Ui`7nF)>pGLDk?8? z0W)ubi>_$BEz-;3*{?od8Ix?EKn=rVKJXc$AsER3I=cYZK&2ur!fIW6LmxJdC9IcL z!s9hgHKSd$o7~e9d(Q}~bC8_XmnW^hf9Pmmbw`j4;195#J&8Ffng%Zd5_YdJM{gO#d>nsDo~F3>eA;P=v%eW%kUMW}rtcT|7Me0xQ-945qLR9}~wn{qVnvbslciBfbU^VHsa zQ(}`?D8+efgEsgCM(EVDP3Ux%p^C$91XmO}NN!fUejJmW&RUnv=S>W6BFbSAD@!r@ zXUkwWKeW`rQ~CK<4;4)hDvgnf_g5Y1%!ksNbX&Q}3(i7`$z@daN7)zeul_A4C3T>M z&tA@YhM$x5n#fXz?0{v)VeW;I$l@%+c5%eEU^G3Dp{onwT*9hK&1?Bppu+jB^rV5= zzFqfa%31SCR>6$gEN%(`iUJ9st)=?}tF*~j?VEWn3oORH`1>y_f2m=jy97S%HwS-9 zZ0$Y1&@cS>31iQzHr-!8J0_jn}w!v0tUNJH!so3+eotX?9XFpu1NhVY|S-#a@6WNURBNyGuT{rgSCMy^##@p}r5lK`g_Kp^h z0)XNux0rabTndzhA!tRuCdchrbm~g-7qJOCCz(vQEkENs@uMMdMkn9u4SA7)fXg~? 
z)Mw==O$W3Sn$kO*U?!B7kkn5Ji4$o=n*6x%XXG&tEVTVgD_nXYNlb}HzWB4f9(HX= z2f%g7trYnBq*q{V7GZB<>`ttPv0$EMmm96c5%xz$JCQdCT>Gwg4>nmTMRNYio4eWb z<%#>b=M05;px1bwa?zjTWPeVzFM6`DeQ|No#37P#A83!XA|z;;{%%x%KOA3maW=;x zQVMu`^+ZsenzvJUy#~ec5iDFlC*A#Ok+;}J5K}^UC~Va0Bxg|Q5p46>J$7e_v`Al` zZ58R+b26>*Bf17(865jOD>Ma*Y|qM|E|i6h->EEkij=12G1w+j>HJj;6PDq?#sIoR z@*-6deUUe0KxZXX5CR1ok%=@Wn@zQ4~Z}U{-ExL2w>Ph@gU-ER{R(|Kq%#0ra zeZ$P^#4lrmi-yFG+mtf#rzd>AqbkU0w=LdclnWVTmtgdj&&^7UEXtXH_~A$+GgVjt zHNU~WXvWaCsQV)EL#RP$E9~dVa8RFgwj(^4ivae0LsoV|8l5xy3xk%J6&vS5I)}@C zNb^~Hv@Q8|-6etvA4Yg;HT+cP2Aek6eN{mLv+YS{c1=4tdTvJRYvi(c1H_T^hpjC= zCR6LgW`^a~!xEiP1(80pWIBEBWT#oRuI(c*%UZur>zAT_+{pa)mWu3_NvO>yQ=9M$ zj=McF>izqo?S-%}YP@XLfl|Yn{EgSYq36)4pNDTx$?Oov$m>924~(m@pd4vwKqT$U zU5#naTc;c2lVcp{!=R^_@N;0{P3`F&2HMPfGR-Bm<2)cQR znw17|achso8Mdppu-t81f=Hb4pIs@SOOz$o)mg|mg4BK^f*vI+&Bb-_R?ZZtTH`CA zXE$fPDF-gf`M#cxFEt-)@?UZ>hsHQMesLT?`nio>-icn4vvToTaF&O)(-zoi^UaWoFxh26*A z_Z4`{`{vxsYwDpPD1Y%c@2>UL)X-(_BZo)F$6bz_PC9O7GI@Ik8c+Ac#qNhM>hdC6 zs@4|*2?4s&f1T%YW?KA{Fn=$bq2J2E8sj>p-)qmXtzj(4ng zIJhq2LSjo5)Zyyr5KTgG`NhOsvLO}wQnydc=j#{3+CRo5y1J(&vn{%SNe2dl?Nkfx z?>!yGHnD3Qk6@*_Tk^ShAE%f0pz$##J{9rVHXU-79YxHyBe zrczmIy4sOwW6wS&e~}rJpn-QQF%8@C^-G~R!hg@-#5gQndvZVhD?pIdd=>(&C`6JC_h)cpGHKDZ}p zL*k%i=GiUp`+@{r5^Q9LGYv8FKw$03_SDKWLa5i5V=9jbx+wLB^9dPUse>BYhmdzBD+hBXM0QqP`zTt!5yczCTFq#jUppV}H8qWUfwN&PU2FLHFUYK} z*sBc$+!v|}F{7HWYXt6zj?CV47%pF>C$CxU4|0QWW>h$8#+0$>w=Ys7FZ^&9I`Q&( z^#xu9s&#ClU;riWO~TNg{=5#lUzVy@WDM4UIFIx@o}D8Dfyf^O&!@v2{*m+2z-SF8 zEwyj5iT|wJOYML^;B``Cvxtf68PERhEG7Iq(=s|`GNFfJ>8jb>vkiXB=$sZ_%8l~~ zs4JvQ3Q$z3v$G|P_FeLh$g4i-KMyaVUrsURzE%Fq$-H|U5%f~=B@g|q7`3zEl~mAs zEKL2~h8>`wi2Fz$jHLA$T20#Y-LFg1?COo_8Z?;MbX>S-Gib_UQoXF63JW$sSKXr@ zR?|WEn5Jpyl9_>xrMzj_w6Gaf1ZloItZSbo-FR?OQN}J>^Y`3MyLuHt&8_6hS5_!z z^*pW>p9<&PIgAAC(wl;;y2ie14jg}?_>=EDs$h(qNlhSMkrs~xn`G_`qw;K^v$eyH zVym{2IpaJLwSg z-f^a8Xa-d&$5E5KwHkDD5nDtHY=Yfo0uUM6A*+Tv?Z;x>tU%!9GKmHdEjEu0e!^ybLa?PqXWJ&8@&=@b_M#oqF2aoPG z&6DBZMtyP`R>C_GC|T7mpZ>m0j==TC3ks^#ZZ2o-mLBzi9!m{!`uBa9*eup!X1%M#oIK+GrE?o?`{tmE9*wp^nLN;MsGn#tRf^@ z%)AY8B%|qjFC5C-iGOp>pwGteq~d6wxYY)84i4v|r>2I4Cx1yox{?MuFy+N6=-`5lSca=!%B-Cuj(Nu0!cpojjXC1rUy!G=pK1Wlz z;|DPiXKe&63v8S}737Pv#oQM66-PdwpGKv+j5K<SZOhmyK%-$clzH7UlAXm=R z9?3-V?xlazt{4)zOd%;R+p8UiI80Ww%IA&fAdRQQ!~ zRr%3-BZ%{QneWbz+R3K~MlDE{*Y7@eRo})y$u`FC?|*8^e~GL)B~O{+;>h64y42tK z@uC5LJ?aC%l<&I2Y;RXJ#W|frt07JBd@*&%;q5DY*rxmh-^&WQgtSD>qVe)?Ra=I3 zdQn>8Ic?fw87bsy*0{^R2@ih+|49DY8J-`e5H!fJx5c|dwcGm?vC){_x4w4L&0BJN zpTLhkIie71OLF&~}E}^A)>*0(!@kHUE5F$idS0`m?jMdS7Un0Vb!8aLs*{KPPLDU=#n= z)WzM!%)rjQ9q8Svr)DCT)k~#7u(@?MvF7fX^VmGy57Qn~dq@(fViQ-Ne_Vdx+@{CD zbO1lqqC0JHAi42NiTZ9*_x^Hb59N#Z=zO!u%fu)i-HULvl|TNTld2)KU)=w;jE;2T zwNxU~z5l?zQ3LR%A=maYm@lpEUjwqks?=S(QdW(p06dDj!q4fmzSuVFlB}qS&6xxC zp&dw;r0Ei#HNOZ`)8$bdUz(D+XB&Sq?6nOHNq<71MRwfHrlKH*w^pV z&WD*hfwlRFM~T@1?Qol7tItUr&(e7#!>SrT2&w3aO$mL!Ne2CLO8Ita%$52Ph23nh zBI(d?XHO;k-XUf>LwBHot`pd>Zd)Y`gbG3XzDLv9;awU2{nG8>UUDq_x@CG1%X{w{ zS%td5eOakAK+!FN0TWK5$pwZO=_2w(hRSVl&uu?^%k0i>Cd60o~R8 zTG^JiO*4;#!Z5FRQyYySGO^Z>XfZYJkF3v)flIM|#Uxwlj5SA&BApe+oPjQhLc!YYq_l@j)a;(`6C?O)zI z$>*KL=S8kXod#FQ2?43bjaTDM1SMm@dG|O;^lZTS#T<(^Ij*0*pk&01esB66LXv|M zSWqKcSlbcsJFM-eE;%)pXWLc#BpmE5CQ>`8Oao#yzUlq%uE#fS{ArjO8>xCV0MR#+ zw$*N-4jaANQ40@3_^@u&tXH z$QJ@o(L=r%J@(nZmGFe=>8-Ws=N=ggVjzmCspd``wE_6e>wTv|8zyIGhNpbEY#e~h z4&}1N&mwmq(i@xEUQ6dm{y|l=-(18z2?pY%v7v-MwC35TC3`XN*TV9p0zm&$8Crfa zZ5giWDQtPLQyDW7A1ngq9r!GU-S9RZ_Ly_Rx6v=UJ9}{D1&~hgynE)-Crj}?9b>!P~*G)03cUtlx}8d3PkF}R}VGb z`4-D^PY-4D{K9dPj+OY}1M;W)diuB{JWu*$E7V~}s9Cak-1!c#j&q*u9sK<=YT`+` 
zF)IYyb=^$TI)5I5dN&(MRbowlr~Q7-c-ZQpvK*_BxL9oDN1&|`W3MhWWTbH_6>|AI9AH@ z0i=6q-@J+*j6-=F=vJfIoRg3Z*sPC!R7B32vIhnZ}ce{b&-WTX!xaoKmFw+?}`MKNb}RqVG3S{)R0v4P=J$_Y4Kc zY{;z2YbcX@SwV_NK~2S83zZn0ysoi&@}h+c2X@8|T~7;`6La^vqPmT0jOl;b%=j3v zo;~cPTAiT!s&6Y$38V8dL{z$p$$@B(Srjr+ysCc>wOLpDaUC`bY&;>ZG`!2FhpMr zcEyQWe}zCVfTC|kOvlkfo6HhgiS*`V*5~HOYSVKd{mCA?Y@g2kNN&z!HR;D^awX0;o$wYX&-!9@86r`Ksg za+;D>tuL_z=-$c_jG-=(dhch#^xHWw7|d1cj^eIrd{(j*c8A6L_+4@EI4RqRU|K4= z$n&rEgai7Cn+6q+bO~8rSk)3W3%Jp$LquW{xMf^92p@IRTWb*myY_pU5gh<28sZP% z*NGn=q1`Ga+&BUY_%K?Po{tU|V;tPy+kJ`168SL(Z)<3`5I66S{nAmG_I;KDk~Q zrw4@Ze#{aLb=qwTSacVlIP0tVY)syv!gl6I48TZ60c0#MLLa`*A$C``gOYXKpMHiw zAf1Hjk2eIbulFr84kl4ed=ZLuox5Fc0^c-PgL$@k%a^;l2tq+ACAB}w^31rfTxNSh zkB+ie6R(y z(_b%i)?Prp4`xzEKY|6<-iU`A`QBy7c@=SENwJ@-Cx%iU`_uAY#Am{SxNfewG8tHa zB5maKaXUZRnWkgx%x}jT&%=uh{z5R$FkXUDXWDgT{0tMJWK2~#@z3Qvb&E>^X0Oxj zSrV#A8}~E^4j3xAruth)7z09D*b9@9UZe6l)+;D}T4I|X3|2aZiKUx#pw+ax2K7Ep zksOHZ-p$&G6%S7SAjPGKiyf>T)f@+_6eVwCplop;s2j@k+Z)&3Mfr}bIoM*_5Qz!O$ zeD6lRT>I-`#RHjq%3r)uK-@p^uKlho@PdQvOt%$SMd5}<{tHul14eSXC66C0nbX4L z-L-FNyy$|7IPt%G%TOV%iGz&v9o|GdYqt?3RMvy2VtGl4+Er=x;JNg$qNQWGVn!{2 zi`6|I|9J^fK|=(poYh%@Q5$4@&ihg-remp-EGsM0^q4Ubqz%vX_!BW&Zmosj!fF{! z6`=zJwCt0$Qr1`D=G>9^GfN@;-@0?HDb zKhs{LWt=(UW}nZN<5g0KOp8f=N0G!?y+)U_gbyFyyl@0_t!CVhP0Er*UJNAfJ+kj2 z;u=D`BB5tWjMea4jf{OFihvM~|C!=p)ULMIj-0Pb@$t)fGhzdMp89yA^eI8(S@?(% z_v$%Vs1(=Lh7+LB{P~T=Aw4YuaY*@EAJ6@WlTf+<+gp|lS_TW z_S%~Jbm3T-x{+_-jpJd1yfTVfEoCF}883>PJYhuUU67?uuQKUXdFA;h-(J;qXQ+o)o0 z?5X!sK=1w&qZu#_HHW0coc4pJ-!M<>r_I#N*NVaiVZfxYJwvq%+ob8f&UfX10>t`= zqp$U@;J02XHeF97w`uJ{`NT2}KeP+=KTX^YztH;t5ylq%yWj8-pEGrFVq?AAru1+m zoGpz^4Yd5Te1aA2+nV={C+aS**KBBOGGwLBHt8FT@iH!-0`i1y+P2CJc`X-9lhae>~BVy^cj^V^PR;rHif$^z-%kI`M;I2Jpo z&tNomHlaF}24vAuotjj7WlQ#XQt7LtJUw2ya$K#BSWhe8$L8r#5ZQB7@cIi)@nfO0 z-S_s}{P7YcZ(zrRR2@rE09saC+r)M3eI=J$>odtAxQX6|GRFp=f2eO;hGinxXs09Ij`u8;KWPz zqI0XYb((1j5!tlBLX<0=?c@Y+B&hA5e&3?1PGPnQ^V1U>!3?dsH8N@UP}#81-WlUKYIx(tYse52V(10V6bvB_({ zd5w{Yz31Vk>ck-IU;^?ljP>q!ETMq6Q*0&5Lf#(bcFV#<#mL8@yRQ2+$me1KZEkx<03ozcM6VvB$WD~5E6NWzEOg z0{w1qTk{Z~`f=<6+E*$1p8iL}3d?@|L4k|KlrbkD4b&2!R)6bc>zYyI20%kR`Zk|v~9V#6hiW@oi&o5%>}m4fSim*wr1L3 zisIwymVlpE>RB7J!Lxy;@aACmA(e5T-E7MC@I98m_iar?x@T8h$U=VVDvqDlGTW$* z{t~5L>h8%?$6@-Ae6rg1w9}ejUIF?)@Q(D|boblG>VoCgm}ixmN5inm*#dT*&KC}T zRw;(Xz+|gCc#Rwy0^lBb#q#|pj_|ZX__trj!^s3YdwUAdooj@i=z&pycDzU0qk@4d z4V9^geyQQw>+`%aO6XYzawmBz75_(&>xPmwnF(*x&+8U7ux}Ol1=9gXi~2up$ee|o z4;OcRSK3O?&*bF4-9;_G?EeAt9NOOW~xF3OuR?M@TVR z@@1$-TZ$^JLj9&<(-6B|5jrZ?n4|1yZ*Yu+?~LE(PJb`qdY#-^k{l_RwDEwA2mvx8 z1T(d~OIW>MuJKy^ZM!_y5?u16zxY*;jQXz%0#E5`(TzyxE!K&ydjbED3ua@lHTIov|!!o1=nhovsV`KtljREjOiT647=X2YE+?s1Gp8z8qj-L-x>#26jHMLH}&DEpgaG@haq>090DtDzrEuGF|u-z7aza^=i@kQvz|DRyM z|2b0_?BRg||2E$LvhiQE@c#=k@xR>n|D-T&wQusV!16?d z9gA%|D55uZ{9UtwigU990cZd-bketmdga@4Z$1}s`&W7xDycB*DcMxs&IcnCzXWUE znr04=Deo}({t(=Zjiy-02Q33RXeio39IdREd<-1njK5nmXx_r&Wx)G=B3j60((uZN z-HGQTsXW>!WpiC)%0a|0VLyv8=!=zIbzI5U_L8F@p#kXa{7M;8`}p}u&vj(!*9N~+wND#t?s#$I7NRMfD?NS9I$T}51qsp2VvN9< zhGpgoFTAH@)eCI9Xb7(SP|wk8D{E^aSuKqzy~hw|!MGLGzoNK<_BvkF&P|g8{#rEK z#yXW=8e`z#A>!Gx$BTd$Cq_DiW$OeW4E+JPFbfJQZ=PTw4}E9>kOSzOXi#(zo1$Z8 z@epMZ;v)j0+KDkGL{H4Z$|oE|M+)q__l2BN3{Gc?iumO7)N+g%CQ!pqZ%sC7&5eLj zN&cw1S3HFO*7r_nP@2oW#O<4&s|PqT#3VB(nLa{Ucw5ke@7G$2l4ban$SdWtszZ~Y zrZ~lr8Ss3r{8iuE1~7Ourl)e&^Z*{h0G+#%6Sp7Q5!Ur~+GIdfo7uF_om?5PpSl(A3H4> zD`HvU_^V=7S*gb&A2AK7Uy}#v`QJMLD(g-Sj0skKTH@c7Y2s%y5eBjSd0W2cekg zKI6m=nPx`Ks@PyP$t^&!VY{$KBYTT;8=&W0$4u;sqE$DM%KNm+dQ(e$tA4QmAEIJk zGT$L#3#9$TNw{ztx#SDzpBcdwDsh4N*_Ga}U0OWp`;JN_Sa@JNDyKlrI<$Fs>f7=x 
z8R$2aq}v(D(6_Ch(oVO9A8*x2PL~MFjgcqI&lDlkOZuCvt$U>61qS?@rt40lC_WXi zl7fDH2i!;-;Q7OWpQPNaNh~>3ft=BlTz%+^V2lg;yPkd&5LM-@4c(pKP>Otxc{u{4 zOe+@bQ_NtTY${sCq$NACBFZKfZx2eAv2sDVTc)SQpJa#wuGO_>It%xQ=&RVh_;oEF zLVhx#*g$D+A$Z*yo9G&t3i~lDhKC^yDEMrAJ3nYfM_iq~S^~1T%iTf9t>Q8dVAN3m z2ipo`-S9aXzvSUEPCKKq2=M$aVfs5rgyD!@eS5=21bDG3CM2PW{3@pt2ey>oDK|WG zO)%qQOo>y}RC4oGCwnXDxu{C+`y=!0?Z)|d*s{^DBn7Y8S4Wm>fHfJV`;p-+&zFCS zuJHQA%T3TFa&iqX@2C9kC-eH7xt{md?AiSG2GeReIwqSAzjUz0x(T5A z^h7-h^IdZeFNi)pw%kP1Z|Bw7;URopB-6RkP5V-Lky$gfXq_wZJxwH!6AfZ6K|oJ> zYNAX=dpm=8G9||@lg*3C%>TS;|GJdgs7L&pz{?lOmx0r-?yARqC&*DVedf=7sJqg=!xgTH>|`SI4AHt zi2(;rCJtIy+wO)rnPI^T7(7D)F;a^QA+}RvfCX$BHH_o?NqICwsFIc=;3a&K>FwOs zW5jq!91g&3hKCvdsNFgt&>-^Yn8jt++%DbvXNW$prucWwD4;@_ChEGH-Qxqjp3RmH z%D=O1j(6VII0#;@)4+ar@Wn9MX6Vp7&dm09Z@^ha%RIw)flcTazDAEUdA-L+oybn2 zVCoo9DRt-)^u0wMH7erb9UX!2d*LE>m@D}?#S#-cE$j~$r!Ov9j@@N^+7Bn8m%7n< zTGATlQ$igy z&hIQ5yYewMv!^RcYBlHR<$jRC%f9s%CbL0QXdZvIvX(j(+{C(I9f4cr9CVKwd<+wC z;-`hRFO+XZy*O>fs($QF9moNG2!ZXLphV+rNNEN@O1W9Ff-)!fHa^NFUZQYj?;`Sj zj;Y@3$)}p#skW@LKZ^DsqgwI`hQi$)0~H>-tv#)!3>S8~&pIZv&rHk{%2}fp0@+?m z7Qdr=)K7LK&Rm`C^NUXj_w~5KuZ`NV@1L&qE@H)K3B5vffw6y9+zpycb4b#LcJW)- zAfRV=de9QQu+qbg7$ZVJa z{O*dgBRk`zJ}Xw%c%JfC($Sl^Z@|(y9cYMhvAf!+Fyqv_h1?xf1766^yF?3FJTuOj)SmM^=-9zP~A+kxQPzEAcXsYOXq%SC001h#g$iV`l# zzf25pju^e!C&*5?WWnf^WW7v$o@&GDkMn-zBc8$@y#h}`?6*9uTxG_Q)XX1l)Oaaq zs``+AJ1n73U<@8~+5>K&@6W~Ahy{+Hgrhj==K9(4^8E9eEnuI$N(gl+iwdIgxq)r! zFZImwRzGi7KC}(Tb}V%+z;3AVuo>7k`K=|2$4{|<*;4%R;P1Bx#2A>7W}&B!ZPn!wT%@L#bc|rzBm#w zL_5@C`weKl1b^5q$Iansxs=Nm{{ue%hHZEoVaGoz^cEA-aAeuhRQdrp<8=rpE8WvY68wVsm&MF%%CZKnry00FN4+t&@h5)mFkxf~;jq70Wq6K3OBh-1 z0@H90BO*O*oxscpuETjvEylsd=x=a2Gg>z)ULIU7vIe;mJAI5f2o14??2YAyhju5K zD6DM8-gioR4=gPehnr@dk8M~$e;vJzuX(!k+;?PIZLQB5TXcEoy#*qfs%)w9AV&^? zm`yS|c9)^_R@ z!k0+{pWB^^l%GADQ}|e?XJf{@LKur!DX~oZP8xT#({&vGoTqH=oG^eP$~apF*y4+x zCb;{h0g|Ww*Mcbz3c9_Nm?9p??9g#+=-c^at96UbfV91^mF0-}$+aSBqf(w+)AhhX7tDJOws2Fzk`mtZ9UW|<`$e3(zYLfWYS>%lqfpQk= zeWwOCH!F%8W@$_gDy%8+o;telK_}ac7EXV#&A+ascW?701&yizu(=l^&kz3ihE0$D zw9s+FWUML%G1a95bOUo_(W%BiT+avzeve$P_m0hY!7!oVCBwBEWu0$O%E8fA9qaSY z*j?)AQ4D)W#!^2kiTYGiwHP2JvgFRfHIX3_2$2Snb`*Om;uWp+t;8|ks@5r8w5cG9 zJ$nS)rwhNYoqkw~Xg=Zq5bo2qiC_ggHcG^LBJc}N909AiO{Utfl~lzT;_{3+$V(Qq z?^M6dvfK21by0oON5oZ_al#UZP)P-1E(*l{p)#oeGhh=!UJZ>A3Vqv_H)bShqsBXr zzAC%bC>+>eI>3R=9EUpoGbm^L7I#VSg$zX=g3K8nG0WK$5hxVP>hJo(uiW+DZ{1;+ z31K55;Nh;QG9H;5dg=iix#F-<0cLvZ+`w^NnM>-757s_qSoiOt8Om39s}v{XrYfmy ze+I8CAm2?}&Ebo2wsq+?Yf38=Q>5U0NNYp+yR(Lx@TiqZ`!vE12Q9p=f&0SjPj!2( ze3}dtC2k~(x3DtPIVl9QcyO<%3B`)6H)nXWhQz?22-FAS%=il> zR1M~GQ%H&|aHj50BsAfV9CNFZM5zfvnNQfbm2Cp1DZ_`=zes#P ztT@8Ck!iAG2@(P-&0M-Zbii;d05UFSd#yS8fCl%eWODImanrMkU?IHmScD*&Q&(N- z008mxO3d&G&zp(yi2@6;N_^C;qX}+2QOb?P-M_Q9h^Ms10nZppM`%`~OVC1Mv}3rd z;iFBWocHm^26=0$kBpZ>?xdgNi)LpBYx3m}dz5g4rxL=pb+}b(>VlVe43W8(U0=e= zQLzo)oT^+KKBQt*MkAQ5vuS8Hgwq{oUQ+E}k0$o08Sy3)hZLbucu&r*PyC^063S&Z z#wqZXfbB-{b)YL@QNk9n-;OBh^VvkXrgP~hQARiSgy%e}2XFaR5aUt>b}HH;!K|#1 zuWTo|m+(gTWwekkxh}O-x$~?EWE$1#VzxBK-b(}6y-A>?5#aI3b+#vC(K&KeHp*|Ma4p9CY z#_=kQ(yxwD#kP0usxD(DGb)SOT<pL_>rXKk1>-7`}i!o=vTAE_v6V}oUN&tI}7 zws3L7D;o~H6Z<$q`1gA&S&P|xML~fB_e6i$3uUez)dM#1&O9521zy30DTm${`VkB- zgC)V}e)?MpbgL7ZmfWMG={#FIf8jiw-{daK*1%)6KprQuHfc>s;N~+%%kzpUqOoP? 
zQ)jlj!JNU=cx5gPSxtnB4{Y3<JXZyo|YX4{r#khh7 z0_y=7tTFKt(!rOVp%*bxomG#81ixn7X`z6nh-D35!A{KU+%Uugo5A`%2#-$kTM)=* zygt)0-U{>PhPPK$9Iw4V8HH;;)&ubc~sJhK)hegf9yZhLo62 z9rV3lh$~*993Z0svgyYzv}vOK=M}=57Qzgl$Vw~rnr`LDjY=*S*ikFi)32`4uD*!l z&vgLBekRQt+22xnq*?21NkhKXC))sR6rQO_Br@g15|jpJK2Gb!IRPvGg;2*BGT zfTyCk&!8G!!UU&O@&NJt*RF>7kCZHg0iiG_T7i04cs*%hZ?kn&^jK**N#~}b`Pw&e zm`AebsdlKN<;s}BA51gGK4@nhfMQ9GWYh>>&_Q&|4c5PUB-;|6AN_4j7mX2^lnt|c zH)lu}m_Vd=qq!rci35$cM*NK36N|9>W3mkHpEpDbNHGm<```0mM%rz~R#4a2WUo!UQVYI`{H&x*;hG+|e}{V|FE-q~4zGO9g zO2)MJ^Wlp5Q%ghDBS(v>&vKzwr98IpRNB65W1Z9g*YbPg217rdkv(}WGn^jEn-q3h zzeGd$N0ETHPeOd1t&RsaNa+s|W=m|qwj(ZB}-A#tnFQ(db)k@z2 z0dy{w9^d6DUXz@~^?$D}{lEd8!4|DiGHS$Z!+k6Q(vmD%1-r7jwJfARH-ls~<7K*@17zJVgBJ|L7=mZtp{qI5%$<6_5h+~8Xui3RrnX$uQs$5? zp2pt;(Zxsvxu^?#@N#>d)vLy{;}Vi(s;$AVOrlt?U|pgvmNMhkD5dA#Vqq0A;P~FG z9a_?nH1{A@H9Iq~;(cwZ&HBA<`42{Ht%)qlSJz~C`?j1Mh1hGB9X7OoPRd}N*-)Xa zXDv9yihP=p1U1c|!*Q(GM5|Gga;SSf-MH#MG3=@-FhFdHT7UJ%B(s0|1vA^t++X>_ z_!_Z+y)6gUVh!8C1@5ULk1m-yih9r8@!KMYuMEPYF5>O0p8J4I%(1go*B)Wnhs!sP z3&sVGAF2THY;G52%RdLcU~PIA#UC@p zHD~LpTLt%whC1qtK1X7VUA<9TI|Y=T0Pv3sZ~*UBlg41MeXe)>lk2Q-khy;WkLsL< z04}HG*|(#h_RHR9WOD~}as_()e@sGi#X>skQ*{8zcgUbnF&In=6DMPXbk&mc3Uy8; z?~Y`fnKt$8I+O7eHumr$}R~Sw@q;xMdSF{xLff;Y~mN9&tVuc!;V30DpgkG>N$CagR5fMwAI?x5dwN(ou10~ z4~ zCn>#|<*4ryN5VB|eXkd{=r5%+Fv7Y+MqUe~bG*g*Mm}&@bEAs+W)c5=+=p-LEFGvDLZ+SG;qi1Euy~b!RdB*I}0l zKkYMfSw{F5-tperYL|jn&d zcys%bnA)tp@?vN=r^dqGFxAJq5L-FsGq{goG?fjjs9~1P&dr}*PERdqqy3628EsVK zcoAB!ry#&4FlSRmflYiA1Jj1V=Cg|AQo>(r$Zmpu%I_)QIw`zSQ9_Wh{Pl`C0d=uoV8zwIXXD+28?J1l723#}Se z4q}H?IiIYIMPz@^$6#3W$3LP^0tfpj4A}nv<$#ccr_7W2FHF`=_ z!M@~Dn+N{J2h;z;9DhM@{;hcc1Y8yk5G7QMJ{rY%#BsYm)|Gbxbz@Z*NAj8J^YFJ%ffd-B6&V8ggvJhG@R5w`u10NF=TChORA9T^^p{Hw6uyDh zXoJn~T|b^b?=R;f|EUktTf-`Vgu&f*d_H;Jh-NRBcGMPCXXt)2zBBritFd^+!FAdC zy3wa8rP5a57!9k*`@D4B;O1KG-}}!)i@tZI>qcWS$d$RRz!IC`bewGe`>aQ?0eB+8 zhQ3W#0_n&&`p%%{Py3_mbr)wHFgEt%OPrHC5uP@k->guUvl1ZVrqTWRgzvv}M5A`3 z*Qay2HQ#`ly{6>y_x}IFNt%-D*nH9Jg+X4$?&Rw*?LG|T_{`4McHKL9Z&nVPWPxbI zE@r}hQy*S{Uwru=K{S#8_Rf6+|2^8sg9Nisj2-rZc_Xw!nuVEzzBb zSV74D8!@0;Ib5i_m&ju#07&A$@dK{c2Q^nT+#ZOXJmYA6I%z|If0H__nEz%a0Eyy% zV*igo{a>O$`(=E>gi#-JMOw;e0(y5mrnb=qUg+Or#a}b$5W~VDz(+3-384;)1>sTI zu6)n5b4_0GJGzyiRY%N$3qc1~p1HVzr>amub{HTm%R`KJG&s>tw7=J?6c)~If1XNX zpE%jEsnFy%@3u!*zc4!Ed;A`3@x6mrJ`e7V;s313?b04r9~X~LLN9>q&^uauldHYj zGELX@o=p9~cy#ES<%cVxj@W}EUh@_uc*o)igx(DbBbVCSU>B@jj-SE*vcpGs^}Vle zf^TKqwY^8k@@OGTtK^4gnHF-%v1=AJyP6ZNX82{N>6U`KEY4T|coj|Zb(<_p&8i=e zE{Z=JP%ksS4?@J=t2Dxm22?t_{{apHu~; z=46$;6(b`AQZ>*EWgM6t^8_^tq-!!3BPq~I%^`naRuE$2qialpt90Z-6~nO{I&ZyJ zWvF!6Q{8x$$UY5P@kT3Ga)aF`_##ilgW@G}s(-60(&0GoW5?Q=c~)g9^^7~{Uz95S z88ji8$3AeIprH6BobH05h3CZD-&$9CFOK7YuDPaGp^;@a<#6PJk@fRmn0sz zP-3b8yUVLGM}2mcA*JGFt1*Bl6Vj9Xj#mexH(cJX=0Z^Rph#2m$RjIS*@>YtMtc2X zHNDyl{5Ba+vSGOV=d>;0S)x}K3(F2gpg%pOU;_|Z@tW~HAg=vqvb zI*THj5xc_6C5jYcn9oUuB^{S*=q^hYVqrYi^1$+%DLNUP7@<39w?$sji9k7t@QYM7m)h5TXkL|= zzNfHyXbYvp?A&*oV?)$^&=zFs&ucavH)$RQtb5{1%E#Mz3dG3G=vRyG*y#{n<#Wu627D1%N5DU5?L&>=RBcr-VSDdt`62aKt|)2U z;dC>yw2Zbf`y1!Azb^cVWq-5=>k{jICDP;Dgbq)QBVmW}6KKc5^@g3V+vT?sCHHhx z7*ra%!(Qkc`&Tc0N&3(6RkK&^LQXfYg-8od%x`-O-n1K?@PElv{@8Km|T`3Rvk zjt51R2!OJ6RicBtb=TNgwydy41EUSNXz1t>cLuP_SRlud>hTV$l#p@jvR5E|Mlb^= zavondFLq&sP)gWbFPq1{1{PHP2PaqN@(228(Xw;?>W%N5Cxvq81_^P2i@7f8r^uFS zFW9^<;{}CagCfvkHNyL0d1HO@@Nx>%BQg^GwcFZZROYUS%jJ%-Wzsa}qtdRz1NTX*%==acr$=)bJv3mx0g{n>qtvcE6@1s$oMLOa3(+d1^0};*g9xgxFg;)9ayDD8{`o0zVrWsKl_;o)7#X?XylD9VA`> z)r{WA(8h@noH%mv&{c-qLSMcBYgy%q1dr*{KZ`z#H-%mo&@Rrmc;$9h;Y3FEhVgsU zj)z9x4>-I0R2TWHqpf*2l#0eT5(T-rcUA`wi)|Eq(K{+fxW{D%$~bb+uiR#>!+bBb 
z_&|TKjtgDKyp$RAKK(^(N8oMOOnHdewToz!$x<}<0*1_HxtWD)nu3t zo~kLe;1K4!!sjE3=%gR)i@1wAr1+l2;2Ca1+P6#jb17X&q|NcN4UHcC?GqBB8R0&C z^Nq-`+Wbpo*MpD7dEK@-7IeMN9jLg(c4|gF75p91c7zhLTL`W%?mJ9&{h^h=XyenK z^;r(tSoqbHNfd6C7rS`kgdtZ3y~qw)_3HDG5(VOkcR~0tmNI|C^w|y#Vs8sSCq;{nXi!5YHkW-EA>w95sY=2< z$(orbN1ZlT{hM`Euuact;(pA}QBL$9qo3>i&#j-OlL@Kv4{|YWa(w6o_~J6i--BN} zh{`Yti{)@>2%EWfX#NfSUf9~|s+1R@7#1pE$JfSxyVI(-z$8rBuYz|?o&o%m)m05R zFT=Zeiiih=o)k7@3GT+Fwq}(|&OLMmfXyDwNsk;`)81lew~Wj|czi^jq)K0InxpXcd(2S$1xB#8;TJZUa=LoGuZYQ0 z2&)8CFN1T$sAdKY$hLZ9{Wlxp*wE)^WBQRlC61bdV(IbuaeAt|`zIX!94DMqTH=}r z%h@SW5jqK3yT>brjE&*4O9IsLzmE%bonG}@=@n<^wPBU|FP5ZWxj1?YA1g@6hg2Cl z`-j<^Rfa93jm0JwX#s1?n=+K6o=O9459JXbdq$5}lHM#{>0PWE%Nn4urr)nGmkuV* zseH{_S1bJSM;#!JDJ|^@u=qKy^mdB+hI5VK=F{%#lGGbbeY$P}U$e=$jt#ifc&xiY zr$@{m!u<+bUUZMXp}gV?yZ@IW3Z$^v58n7L!u;NQZTg4H07gG}?zGF1GIMiv^hx^M zae@r9BrF;-&=^s+`q3NVN?d|!b3cCISU#1`p<=7+VpvZl-^NDndE0!d5+YjZa6p9( zou+dlj`s3rn3l+rQK?=Y8<5Cip}=zW|6Z;k)fu`fV~U?UxmhtBLmVp>WTQJwWv>fA zBqv2xEg{QDOONLB-az{!V!GK%h*%G~L$>>t`QamuW|Y8nzw6qzwv$kkEG zU_kLk#H&fv1uwTRcQ_N2?UCmpr!rKlQDHXn+w60E3Eh|i>_%@kY?1fqacZ7c=JNE)8Dijs1{OL60&tw zSpwKE_a+3xJp$Aio{Q2kyfsz8phG&U=(lYb-hP*B@G%a|AjJ%V(g&}uF3_X!ojE!( zzIZ3q^Y*-iggz%hgEe$Dw41cWkWD}VbvVnr2w2zoauUyX;F2@174N`-W2qW3exQg^ zixmU(F1SQzf9_p$M5P7g;4psqN{|HSLe={0)ra02GhS?p+$`R9?4obIXhMTFu?4>e z%<54MY#ceIiq6;BM45nDM;3y~eT=D@npcgB?alA%U9DUI_zs?J-~4 z@%lGi;zmE&Zz+hq}zHq^GH9*u@ZR-rqt2>*y)K8Qbu@{GNo*b5P zd2fX?Dv^v^C#BLWEZQm6;9td|sP=|F^6`jfZ30P>g6f*fC?WXda);{k$e zge3B>bU1nt@u9CX>o?%H<*5!Z`D~arXn5c&vBu!&E<0uhVXQGkvmgix3%(n zTDJ@B^}emo`QGF`?(RFh8@`Ug`2MV_pw#+thc$b)+1m|wpQU#Y5hM~zD7hxAsWc~n zXF4@)>f5SYswcZ zKl6BiHiQBeNgzXEv$Hm-1*`kIAoxQm0?=(H9!T)h8(ip^a2)TX8c?XNExX*)eyc1< zQQ7u43@MWpQl%s2VTH0}6T@Bk>-b$#o;zZ{j}|3lESp)gW5O{YFTJqYrpRBF4&rbf zq{_;ALZ4I=&6A4Ql8O=nc;H&D2d=*}?jM z3E6gcgID%ue?ig^*DcFzRBs5%adB<^E;4QxIU|D*NzPr4y0r0AoHyIx@(r%&w9iT7 z+~I4Fw@nRlHb*en=?(_)h#}ZbP1DKxQ!Qk)I9`VGCdqrW3DxTDa0rHo6v8TB5(LVV z;#%2xC^Embw#Wi_B}NJP#I zj0XhzIQz8;WzP5Co;Z3;}v&lsxJ7T;{9_1z?iliRlSv zp6=`G+@#s|8%tEibQKM>tJE`wSAJNh;l{S~`!|`-kjTURmJ}hC;N6+@=ZCX=@)w=} zla=@*?rCid3F&&^ks*AMjv&|{3_TR4Adf{|a!{FG3_6NNJ-+2!i|Dr=Hc$ zd!YB)Kx6#q-t}(u!1uOa<;p1TSchoo@SoiD%eUYqw^Y-x7BKa$ya!QxA!#7F9?^n9-@2Vmq_U=DD zbl*o|-%`SGQR6)mNJR9TeP8Ys-;_77ZCUei+mX|GTj7M?$1*(8;)c=|Abw9L2A4U` ze~>*Pj01EU?61O6p%0w2Xw*)e;j~lBP6&5sf?{J-lgMeSggyb4sjjsNGp;XmpjBSh z6n`!gxy4*}y>{V#ZCeYck5NNT=hUJLM`eVKS^28uL8kA?lrxx#KqWlSn6O0GQ_@Fl zC)prUX4GBZa^s$#3PF;AV`gKW_?tC$A#i=UNQRPuT*l&WRq;fu^QveL37j#iSLl_x zh;-ZzySKh>Z_&z79xjv)17UO*2)D5=r|8C-< z$hU*LlX)+>@hr9DbaV44Ww@2}WaDFm7sM5r;QReQ|8ma@72|P6q6m3ym2#3A9W|;? z3DMXvh8;aQ>HUWMJZM`~zUe+wE)xJ>Zna?J&gUU5W#6DD95bubI|tv7%4o3B`lbj! 
z;1Xaod&M0b%wF|fV24!kdfl{qI@_387I`kpSEIq#SanxuLBS=8jGp=g?rImmFc#s3 zvV{D8;OQZQuKcd)?mI1Ra@o5UtwmWn!>pbeYMQXdG!`!IT;l+(Y9=HcuzMz(HXVX( z?l{Yz{X}DDV}d7DPFL6eN)$p7Rj;oHB#mmHed~>Pix`KdD27$vHUV z>g(1u6;{kPdCqq(i>~!XavIR5vbkyUL_VZI@=DzI?jFV#Ot&s0a7gaJVEUl zOk|u?8}t1ZWYRs$T`5e+eyO&-n<5i6k#wtXS;PbFCOo`Ru0zII``fB^Y_R6lQFwLn zaI@4wzt~fFQSRO0^~$;M-GSLpRnf6gN080sL9bLTCY#}8HWc^SzYTCNu%`3Q@?n+) z8!B$+W#AA%j>(-c4q9yyn-B+>&P#fNs%boQRw}(BMCa5@U?d)>p&UY*>PtTt-P&sZ znEaw*WRVF*w{)(cWY&UM1f+477rK*S)jzUi%?5=25EU0vc6+HurM}^#-GCGJvr~JI zF49S9c%e9WGVTGY3Q(6J4VYtGi`K9Z-`BB0ejTS=VBhO_7~e?+GyVxGAu~{?hFr~H zL)9&j-K3X3_QgXyb?gk?zvz~3X#S#RGXj))^x^_ln4YC4!}d!H5x$k&-aZiAYg40O zR0<*c+jVksgO&%&e0zr`K^VF-MjEkQNals?R&uxHLI-)-ihZHF&HjW(5D^(~A4lKU z@kNVbkovAXM_&U^d9^b_Dq(Q@gg%fDYm0vK(H!0+rMJmQ+-dL^QGn-WD(O}f-Vcq4 zyBvRPneO#4g1Yo}E4`i7dQoPk=E~GhayYo${W~)Xw$fZLVlJVp} z$S9sM#^{E%l~A39_KiHJQOMo&vwX?EmS#0P)7Wz%@sbyRwUGg7Th>ZW=U33QK-jqL zLfIs>{o1JE7E+GP2gYe?8NN508dWhc6CKvAZj5k{vA>&l7zhgEs#7xA%rq}|P$&3g zR`Kopv8je%=29_WQ@*idr5r9iKrfu-+KQ!@3?ke!34vaDZSRvu3>a~=os%o_*jm_C z@IxZ@mGpfY56H<{DD$sxY8DN78-P5b)n6%lmNNCdMS2;sd$+H>xjp(^ebavf<3bQU z%-w|zyS92vY*E-cwgct*11Ms$ZTqHBgS*iVFQ4V1U$@_T_8lY>_ofcP1lNcYR8nhU z+Zm%aLZvcS=kYD;n~VmamyD;Lf>XXB5)l6(ZHmU^()>ph#RDUWHQUJ%Wznxh#x0y2Xrn z+>hw^>5J+FlM*-b?l3Sr#h`%+aBOjao!v28ggh=f=V`WE6P3PwKAoGEp{lpY+D$)E zzhpkMiH_c^sg;buHHqnF6)dca5Sd}ZRWMgMKP-v5825&l%cc1PPXI!hh4y%j* zO>&G?=*@NKtB3hZbga2Rs_9B2$6dTrqxW8Am=sz-DLZ67k%7a9?p0A&#okr9K7mXV zIDCV|;x-n51NFJ6?))lI6K zp_;(7QcrdPmeS;_1qri8g<{=)=7!Vy<)nuDrRQLKI)2sFB?*$q%FPXs>R64J59`(y ze1{P|e|*?6A_%$zaozTJ@@6@KV7&%dWF-SkuF`CleR$_nkjHR zN~eP1@$r#9Svta&+t!LMO&G#`oh7Mbfii>KLZ_W?4&r@|R>F}cM<|u#{s7*-5R6`( z6W4YL8XO0HYO{iJ<&7$lrsP%+bHm~oHIkM%k|fgmOX%q5=AB^g{hXy8+on{NaM%S8 z`J3>8d>>a1g#q%YlhNY^OibUqOK8_#Wy^eXll?!4IGJ%NntTcm+^y!f=q7G^b%f1? z=$&f)mzZEoc1WKf%%YeLh5%D(?gX_bd>?;SL{|x0Z9Rui38|Z7qhoT+OR%=6+t2G-3!#BTnYK?J`o5{i)u67a3JOQhY|nQ~owwUYU#L#g8){c$idGOUJMW$SG;CJImUOi+qCZ zx0JVl4)zC)({OUO7L#AcRwt;2B`)yxj41`z9>V1;0mnwBbdWdKrZd;rq~~0O=;PN7 ztACVi`I+$@q^v>}9=@E_Y|$E4G;`VNOSk3JO;&aDc26^zQEfBYPrnLtO4z4A(44~v z5vI>hFRD}V$=a%%919%%m3@rdt(9@SEvP6cGq%*@T6nd*l`X><>o6hkMcil z=;f}OhH-q_8bkzFj8)|ow0tzQk|x=-taLkOZp5Seg7$S-O-5xl4>5>yv3`8l#jV zUdjZHw5`wXRf9wk*W;L#b|Q7+?j3`VTYi|<^)LYb75B0k&F~2fPkGF$4G$D+CYSig zl7ijqyWC)fJpG3y`c%_LR>Xho0MP%|Qa?=!u(bb0fx-}H@s!YY%(g&nq8vLi=2G_$BGr0?MHsH&3u2#~=v@l&X1UUDtn1-D) z;r}HBd3{GR-X339$bt0tHyzIZ_Xz}qS7+M+>k1tKT1Vsmg#{o~_%0SVEzIHVdN`B+ zr=$mYIe>Cd>Mfzq|I>gsA`E?exIlRXj$uDyF(9GIM8|=_{`;(}xOMU87zR&q0JbFYvEF1O_Zw?ZQUPEGN0bHe!^8d#@1ps0uSeB7 z05nS!L=5}7dJ8$0u|5>*0fnFurcYdt0t7(9L<8^U_-r2*b(ulMh}zAj|JWwP-%Ny# z+ODY}x@eKVK_TZfw~gt zgG+kVliWeV+W8DXA=VTS$?=CxGgg)3gyW}pOSEFnp_WPWh88ehOUsc+k?(s?-zRPe zzyselZ9R!!QNIoTu(_DwGUs+s{?s7Qw@WR9ZI{uE#zO2cRtf^Jm*8`L<;m-EyR!Hc z0TKok*>^kGeeSu#0cCN%UQFN}xS#(l@I)d3Dk$yeueQQ3Ts|Ob>uZ2k2`SFy+T8bk zC9o#2JQ1c~U*<{J8CYzoTUm7HIb@Uq#z`SY%@-H!9#6v*s8B(I8MbP^Fe43to*^wY z$cYFvQQLfec>1^vGn|*b%kQ>XEdg*rDe$nhyV$h!k0?Z}0CJtX?9X+w#s*byw7tH8 z1`z(^!DwGUce&dtZ%eH$4+30vU*BN-V~7}M6B_RxJX|Y{{#k(kk1TR|g8e_!%DoBv zq3`4PH2_s>p-?ahR`H4Mb38WeJr%q?j5@_m$x+v?Hfj3 z?#~dPW{KmGn}QFd)>4`8vE)|WT6>9fb~ggu=L$QjY8)%sq5k1W9HC| za-d8cKGM)a?TVZmy7&tV{3PJ;_Fdo?F7wk4@12mB!bl6=J@q}(5~>7XfN3~uGv@}u zqIk@J0>bnhI`z3KMFAMR$NM+M{vSy(qK~Qm&%BxcqAdNvgC1_{ETatK8CoczCm6M+ zgP6}OU-Ld@0Ghi5Z!(rl44ULxIF{NCE+4;ZL^;ji4>vsJ(X!)t>OYV^H3+RqvD2U! 
zrxvB@ZCxn?us$=Xv+6i#r4C$0ZREXNi!7vF*$5m zFmAzy9y*zg)DZb#*9LHeUn|h=ZsYW%T5F)w>kD8!uWdl&AWxuS*-ehYV}G@+YxOQ6 zgVW}uPBIARMKPs2Q|f2hIX05>^0&q?!&ASXGKHcuLjL|l4(c>GyCpT^yyT9ejwx%j zrQA(E4azRls76aDe#P^*+`R!Is+uqXb@ta>FEsJ&{HKQER`8X;)o=aUVV5;?XCvl} z&lTqOAsJphvkv$sHFgEzCnbG*6YhSG3o8UDs{uwXgnCb|=loIL;L{@o8ogFZ3rkG$##;FC#yRsNv5SVXd+eQ~I-Nup+#{VEf_ zD7?-D17+8=Ol&Klh~9|AV{4x;P*2ybozJnQs)Eyo&}Pc+I;db}IR&&Is)I2nU}Hgk zk>Drd0V@fNL$lmc#m_72WA0ugBA|C&(CcaOQ1KPKNfak-@jaaTPlffeyJtc1&$mCz zJ`UHFo+~!m*AMNH6&!oKBgTHNsRV3YO2|+xOPqZldRNPo`@$g6ptYsAnu5fo-)ug{ zCq*MpA0rw!Hi`X?u&Q~Rp11$alo_#ez0;mwecqQ-k=Z%UBM*Owmsj+v+P5i#-y|Bh z_M4gH%|1s0DT~tY%-?k-?wu^PI!nv}u$mGQyF3J$L0(Rc(`+rb!!Kk);ne`WPTP2iAjc49kzG^FrF7qddtzm(~G zC1kC>nhD!@$J!||t?5IK>x2J`)}%T&W1wTU0utc-Qm z+1b@)Kp{8;m7=C`;6ooA&I3*=i)yDc`1@*%VuP&x2|GgVg3WPMfCa zhP4^@jp^ZO`@iI^d~Ktz&lG3lI-c3y3#7qepepTNR>9UoXwi?*xLgiOn2yTrTqzH? ziEMW%s+H}MQyeI-vJesSJWmUbnwn4quF`AVzNa&~mHi;3dg<3Y*)$W^Lu7%{v=09M zAv{tv4lt&VtLcjE)^M$O$wN0042-;MjN?)3j6pF@Mls&HQ;~^`4K58W({>H-ON6&C zx$cp3!m7Q3`)pEN`iiF`pQ-O)oL*7gw3BNq^8e@L8XGw3TCRipy z0fO2qX2x7eK<@X<43fJcJFU;`mrdC6TVoKp&WF^r{6#I%xjx!AYMD?1#rn87gjZ>3IHK8iUF zU{a5iDdBuOFj|D-SjMwH+=K)i(b`-kYPXA&>^JYr4cpfnoj8cy$ijs4A5aAc$r?s| z+@c=kLDZ2`^%Tu1chCMH5fXl+ICB7nF1q*9*oXI@9Q#gJrD2p zF>d`H0cYpK-@*%(YsIuUGHo-Jx!z7F#(xI{4!$_&H#wY#5Mhg^zu!^S3c2cdX3YSx z?UmP>M}AS(`*Zm3PadeH^J}`NtHo51ce4uve2N=E5dkMouH(O*!gek2tDHUF_hH%cwy;lr zF30fIRem4h!!M|{JK$Dw=($)rWtXQ7+*g+u(&7;rKX{M_A_{->ow;!+h_lK}9_@Rg zcWc{%=0=3qk74kTo+1HHE-)kfutxsT%SR6tv{1kbDZle`Sizi0xt-RH^ZQG@+&4S_ z^Ico+FpK9;BTmWH%ic`usP5!z>{^u=kz?YTLs;#m;{~=P+6Db9JXH3qd}jAba1I0S z;?puF*fpASP>n5Va?7=2<5GX1WUAtbo_%waK|RsXhF#5)C8~y1{Mo{qIk+1ouU;Pa zeKNnTDL${-y_$A9aE+?hMrj!!PV>fl3754JZJ}akpSMYBGgDu$tzpwy=!3{^&DEKc z$dro{@!f^9qpx1`N#2Po=DqaDXFQL~bZR|vv-3!}&z|QJWh5>xJJx-;s1Z*mHBI#E z=Oiah?+v)oluz3J-1ifLs#{!EeuWv`i;M8KOny{10-I+vIINT~LNXqrMQ%E*eYH_R ztDX~cc{mI_&-E6Zl6FIcVasJeG)Kb}M`m%cfIe+y9-*vXIo>b*ne4=Og5&T*1X&(h z!Sr9pu`jt1-YeB&8)X{vDNUJ^na3q?fliyoqVVKvd9~6VOu~R;?AgYqlJ~0{tPy}_ z1gn*ks2%lsKKOMJYtC0WdQ4YhsY=(|ZI+aeZ(H=+kMWH(;bDY3wX86Qy_}&=@@j7{ zd3@2fy*7+7ys*QtT;jmQpF)MzBA7Skd@?#hwhYf2dbot&eIgIdASUjY>2@aE!|vM;HHur~B2y%^RC#I;mi&M( z84vq0^LIu9aFH2hdQ|j^^3WnhJi@+S|+v< z$%)GqWNsBm@R~%%b!HxLoaUUHqs{+?UD$tWisv3-Q`q4rr3_6``cSSfL-UX>QQo;K zueYT(YK7=7{Q_`eCS6gek^uJKdSq$?4 zGL~ZxfnAkQ7M|Z^%|4eRDj;S&|Jq64#IWhIOJQUJ6D#;g@$tj7-`UyOxPxDcGd>MV z^#}~cX3Oi@Y9S877ak^@13ZP!W@40@`fsP^{g{nm3qY|2q%0Y6vu>{>^ubmaH1~rU zB-oS()}G3%Cia}r|9g2t5&{aMB%?kN0Dmv3WZBo`NYc-YYr(^Kj>yruLET!=wkz*p z8fK-;)1gJag5Vn~)N3;uQl^eXVg1M2N?a_Uy1yU8InaFOcjAWt4wNVwZZ65e6)oaI z+mOoVGvc_vhVEvwz~l`cyhT8xeoRtzv^m2`(?%;#d)SeH}NdoRQNdma?w z9g{-Y_?30Rte z=$Z7nz!*Z@!#t`LUA1HsB2yhc4`~)jpFeqoGA~)!U{VNOq}3{I{%|cj%fz67Z9g@5 zp;B>(o32+Pj}0jd8k0Y$A!gG9I&3`CIwA|oHpT||>EG5h#F2~&JwVZp)+8i9bkSvDQ$2AFK@abo^AU z(1#WEY^_mBik{?)gcSXzh(R(9qF3)p!?3jp9dCZ2`lOR2a-V|yNV_B4XaeL8caO%? 
zq_0|cr$<|E;0tO1pUSlP3lrWJA%6M5o|amf&wI86;VYV~OPFLsk&G|T3#4bNl3b(&X&7``3x~#~EEXM1vc@T2s$>)NS0Wq= zVm73^U99r9Wc+wm>dIy$Xd>Xc(IJ#1mDHDxhJW0bEfm<)#!q57&e)jxBER)I#Ug2e zDd#RFQ2a+m4eqi17h81Gm+dyv%@|93bP$-Kot<{Sc~To*N+NjJX-pdqj)a+4w9R_A zgYl?Mfeym^u{mG!%hlert|u^Qi=2p1DXaO{w+u__`;udqqIB!7Z7V*DC=EG&npnwf z=|#EmPHpo}2Ouu)2eXLu6!?mvv>13Zh@w5E zgjbCEm+9d8PyVF1PFNW#D&`GfH;CX&$B~TX*_*EwLJ(K7!KAIGJN%vs-nhPWDNi5L zD%+M+zU!ukKd$D*ho^n>pVV%~_ta4y&Zj=sswlI-lQJD5Q8Wd?h)8dDVSb^Z9$mm) zKv^}>15*UrM>^2$?Fp%GFpD9g`8<52?KfgBFr>DHN=3}(y|x?WZ?n&0X&ZcLhZSx1 zF#_ZfbyuhkFfG^VTI9>hUn?HX6SA)}s~cox>es;CL>93#krlC~+jcg3y6@EO60pyk z5`k-5U0r6mYGe9$_G!0 z5Zo;g2p-&mL~sl49^BnwB81=x?vUWjFu2>0Ai>==xV!5%@7Z%!&iC!^+5NM>V7jNf z>sH^XTlG}kdRj);CIqtLD6RAL399Q9i=lrwV}WvR{GSz)GMXQ<+q-A!>~2X5o^uLFrHrT97Sh&BcyE};7@vBS zo%bXg!*sdvntdsKlSd41Wfml?=4!X z^KT6TuAP)PSC@ns3Q7`wNF{nw@r9-)KBGgAVXYdV`=NNu{_P1e6}<%}YBdG&qd`Zj zm~<%#DsZduI=xlnp6Zipm00rs1Y-$n3-jL^dDUgGZzw^b&)17M1Z#n)jSRbwS}xJ>kS_5@{oOuN$YCTWm5 z|IR;SZsud(3rwC2xXb33%)1gdKk1z|(8t^#$-LU^y+!H4@tUi1t=*jbhhP)`=*+k~ z(M$e-!a_9#kmR<0_k5;gSuKON-!vb&YhH>v^LnCOZbnjaN#F=k4H9~3c64BK&m4r- z3CJZz0^xzdb*X(Xr;1phs0n&~uM|m7d0Q&{5eC4fZoI=bPr42}%E`1mi)^eIaQdJ~}yAyT=0R8o1Lgf1GN1=_O-#qBus^d%)gr;>c8 zOLGYy*%>#Q%>8;VjtQdKj^S8zJ#Cvuq}X@Cb^S>BXV6zTN%Fg!GMPl9?2CAH z!2IT~2q)q|lNQgZ1=VffPJ1<>XCEAQUCW~H%_}pjl zL;+t7i(~jqZNlvAsK3dFMQdPZ?Rv(2uckoxd)h`}yG?>6R&!tHI$}^<0-CQGF!Wa_ zi}ehCq}pjcS?N^5jxs6zEzb@hLKCK!34B+U~a6RzVhd!-;@bS-Mx?)LSfa_J@tJn7d z370w7+?tiaPFxkEPbNM~$k!Pt$4myfY2(dbe%A~J9pZ{tI<3iU$=8j7-;@;YzuiWg z3>BsF2Os=aa*2{hqR7F9FICL{# z@)3;*H*;%0=ZPre;f51n6h>*KsHe_T|Mu6fi<}LCY_u>OhKVH@Av;h6qC@-uDD4SUb zo^K2NYteN=`|gEu=rYM#eD=)?#ifbxXb^1TBdPL&D9P8kju8xl#@9b0yHK_LR4_Iq z&Lu3I9@B8#7zwxV?$j6(in%1*x$LD+a0PZcg=1$K-g7_Ti*{Df6E1n0d)T3NKa)Jl zWhV#?FCAC3G`3#*rrqA$t7?B9Q#nBYw@4~D4r(K#N>#m1bRsC>+vqqk8&Q3G_oFO) zEHDI%|H0pkk#?$Li9*86N{D>JTqdA_(}0cGOVyP-yez-+6ajrvJ@;-?V#%_eHP&S}g^kG)E=?4*-v=EJ&mc74;>2t~lqoEN1|w+zi-2pgg2?t+d&R(<(ae0Y8{x(@r4 z`Qs#$W8kc3tjpDPh|0z1)QPCFM|5oKRN?6Nj8wS`g*xTG| z7g#&X!A^JvJ7C3B&7Y<&GA`6*5S^H-sA)LcqrOhv_wqa6o15W}lE?+s0sjcO(nM_} zKPHtX4ZU|@g)|=a7~dW3JZUOUt=_oKxC*`hn?d9W&jbSDXo_+$gsR+AN0P~mmzn}P z><+qqz#oL2{M5Ke#=wu%a8{S_iZ~9)b$0iM)>+HRs#4@QgAC?0)(|19JicW03Yn(^ z!y7H*Z@K$}o%WiO-*zr<(K%3Lrwyi;N;3vrM$}w87@^@{NUSbvwk{fOYnCAElqSETjo~Z z&+`h$t3}wis1l8ers7ur>Skwm#{AAH`u3%Qg#Wt7Gpq)g$q#h~w)EB$3pAoF$K>3# ztT!eyj~}+c=)K6Yg~oz)WX59#uG99xSsu53AmCqg%#Hf$I>bvu%77WNLJuR%GqPom zN;q}Xe&2&ECDnvHw$nvsYgRyEGUFSYU&!Tlik?cUheQewi`&nCmQC+$Z5+|uzZ;D;R zj7fDa$$c}LUZr+8iHdpO1+YR}ggr05^>hsTZ>FXI)7W54c-ru3VXa*3_%H-!NL1ytLIU{1Nv{GWDezR(PO`>u|Yqmny8E!tZwn!ZK4#V zLF*Hy+opp$e1<5bx4(!MDzjND|3`Zab|aIRF(MJve9cLS{9WDsm!d*c?jRV>rH}TW zgTUICg0r;vi>Z-Kss4$S)3)|9%7)jR#uBFlW&X=fg^>gLfkU2`a9g_bwBKnlr!bVi zA|FdPey%CmCrxqd2cVFh!R)4PMo$d*(MzYP*xv%Aa5*}E-HaQn1i|ch2`Wn1TOhfX z+V84s-)w=V!B>U$vC7z^hCt8DO^pZ;tD)8Tu#c^9Foym;*<{T$x@NKbc1Cbr^rDm2 zfaw+bm&}hibhigVWle!D)yQKB`!WgY$y?DI@(1WgY4CBW<=s9LDRSb@S-&Y}w^ei+ zbs5!;9qJ!BUTGD{bzXj=Co#6SM};qgI@EMi1u0@MXoz^9V$5;$E3p7Zp}UM8_q+!o zl;E+%TWm|BhYlP5f+Oe?V(8gqs(QGAk5mn<`>{a$UWZQ&mekoX#d7H#Y@PTgf9$L0 zoWkVRxGMg-0WHhl$EG2s%iXkP^?9ys)Sz< ze-da}3vy)7g`pIHtTy{$k*TZ2gQEZDG+t#phEx~2yk;E#lCsreS^&Az+&5_0zf%sd zNx$z#fy#K*7XT#tuX<)%0(O?X1i-5O#`Cq+-Z7GWt$)AjjqJ_snq-3V@4nRmBs7&0 zbIRz>Z5<5ZrW5x<(EnVNsxZhNd)ms~MDw?M2Q$d4>pJ_Ul?8Hb9XAL&+fb)%k?gQR zujY#gappJISU<>>q4EZei>#C4Bt#sUpNUa6iDXCf;vuHH^Q;VZrWpuPUM)2e5<#=5 zO|FgAw{OTp>ZBh>vtv)~k!?%YL~b1Y4dWy%6D6S}F1LMcS>%VS-%6b7%(mwUR_w5C z$5NfNm6d%@@l-RxM+WN%8IZ*neC_b=2z9b_s$z}#_nVPL3Ygk`Qu^3(`Alb_fm7r~ zhzh|p^GzVf?vw*Ag;p}E8=KHE)qBX!sF8W`;y1sDIV-T_y3ls&RT$rh)IKAX6RB)L 
ztP1{ZWlEB|PSJ_{r13>+$%as>_$uv_Yh7dFJi8#boz0+fZpf_eh5k?-SQ*Ehs%r>1 zAb!uM?uIcMi2cs%wz!@+8QVA?eWv^Py&ZbZDRS?Ingrrb|B(GM*(H8CUi}WNvc_4o zv)aGA-6B{=Q+hFetn%Gmk3c0059S$B;-As|(Bq^YF$ zL{yW*{ojhe298hVpqP?90qOe+dgI0Es)9Up_)9@QXImtTTt+M^CMG;;(l@@!`7UL0 z6@JqgINO+HwKW>;B*;u}#ipS5i&jIS>ocY@&4yNhvLD}A0;f=MfWYrxKMUP=p5&*= zbLlhOGdD69-Re}GYS770Q9?Vm zDp9K8=%317k=(9-z10U3fizOLpkp(KHsdJTt{{QKg%e=%PPfxaSRO2#h&9Vd4)TbOe@0u+ur)GL0*UA#Q) zZR5Wc%uJ{!^)!a*xpthDtwx5-+-@$dUaG0uF;w4BLRfOGmhpYMrkKSgPZFbNYHo|0 zFip=*HpO+Bu^TYUbM+G`+^^?)iViKT>4J(+q|fb*rv(wlv"@UG*IVz%_|d|Fb+ z1FP4HS91Z2Njd3WXSc$q>YAr$S7v2)6UrIFZkCAKA1s~s_V?yT-$Zu??hlKSwm^~w zN3XjIu4uXDf5vwLtD;A13Y{7YKZ0C>&O|ePS%2J*a(NO8-`ozJ(|RowB%vz;Xn=iQ zW0~sfa7-k##1-=#p{7b0+C_Nu-3)VP^U1LJf`rQ^>B6)z5$4TEY^oaMaZON)a)<4f zsn2~YuT_H8vE%tpmj#1+xEviJs#QPLNn$f5D)WGjat1dE0*9{vUPb%LeBZp9+G{ZF zi$U3wP~8a8jSS~)S}#jvdWrQUeE!Rszp!&1+YF;ffYeO5%4K37-f@G~Kw71EnQMpK zYNvG7MjZdrfr%7var6k+YOno#kfo=Y+3$#M-w&02SqLvY3|o~I3H3lnq_@3)9bEAE z!)3>cRpKezki=-f!OCl|)xzH8t(n_Pzw^6S8Q6xIDS!9xj>J@CDT3}U*Mo|>e(q?8 zoYKzcy1N^1*^BSoxh_z&=@ej z945SNI=M^ESDQHA8`#T;o)4erWq;^kpQ?{$U{h5_@(7xkLBs~!{hkDJu5yiu3|S}c zQ>aVvAunka#**=2oiB!rUr3LwO2r(7NCEEQRz3S3>pc-mi2)c=78$+T#0A}*p~)lz zc;kg;q5R>I;yp#vCQ^j&@Jo-d^k<5rtEa63ie_%VlBBo_QsDXU3TJyGrhzOkrIBfwxOcF zj#`F$OV<+k;X0q0>%7Tp$EY$W(|u+0U_`L_SkBmRe{kWV%Sq)nTzCIw?zn{@yyND& zc8xMh=*V}&E_3E~d-QU~(Y$okiu7#G=jJ+qW%ow+UVUA{I_Romp;^E%1)Uv*ZX2xb zp)q-5AB?>iyMTQ<@^f=WleQE>jQrTF(G`JT$bLV17-?-$@|m0dz(qEV zRf&VY&l#JBREdr#V{GSalVZN>cr;eig-)840j;@WG;SB=*17 z&+2QVvAk|;k&gGS`PwI2_X`SEiG9X7c$oN!BPTm^W;U?WX|~)wS0z8-GM%h5UC*;_+fUEg}+_9wqKX zQ$|=7Wq4;WY=JcIo)oMuurC8@>tkZd)+pmaX+ct&Av8%abV8x~k6{FOU{3%&b^>r7 zgVYEA7pLqFu^+B@3hc81Mz(cey!{9Y0yseJ9zf>Va`Gi|I>#DCfgrQy6F=De;m&vW%S#g2%dtQ9YN+BJHTB7Q!?G} zm(=?^Fi1v2J}CewXeXXsw1VT52m^E`KRvJ}KplvH&kqBH(#iohfyXFxZZ@E-)yBZh zos)P5KBTntA4p!a&a9&d!k;GBmm!4F9E}w$#K_=iV^dN7hdVIs1a5XkTasoNS={NX z1O8T~XW0sZOtPRRfEiXm`R15S!dS-x@1O`ZOX#O6ivLn>=(z3;=y5NGD6{w8Y~(U~ zl{8~|pMsr0(KeiKZsM6~p49Xn4LnHd?s#ssU|fHaqzkay`vaqdi^f5LOKpVrt3wOm zc@t6ZlS~QU)voA?K}h2`pyMn=MJwU56yE}l0(s`VG%T@Wq<*LW7T{yZ`crl=(b;_e z(CMZ+B-%?9ez)+O2@Pq6?`%pBsuH?uYTEiwN8v`_9N;6&+Q7sBZy|74sd>j{uMAWY z&^=t2%+FZQk0JT+LDH_Xr3zE>*~7mIoh_$ml0!`fk5C?j(vjhtX*C4(e&Iq7fJgo# z`jG^{S@r*(4CY~G0dx;{gZ@wXX22N?e(C?_oBux-#=c-^D31%<&If#hP+*dR<-7iW zW0D|<`eETD^K%I1=l6uVDkPap(dD6f5>i_AqLoEcU2JGiwn-c3wN-bb;+0yN$QX5zAeRFTDN(RBbb{Q(Glmm)rc^-t+&yYzI6Z#Sq}7E(V)bkTNi8qDivM%#_vqDX&@n z19xvR2O@zNa>RW^uQ~bhF^66sQ>;l)l_j50V1^xj9)FJE+rHiqfnxI4P6M|O(8;$2 zjnTm$T<98|WHAvsWmKNs-=ttwT&QieSr_U4Ee6uWS+-cV;454h19k1=}!!;>zJ zSJr*Dd%T@^@g=*hgJ+wX6a zD2$fvbMpjT67vsMix$Y<(kY4W!7ej%P}96Pg65tC^sfumBmNCBc;GZ6{`6`<&uakDJ7Waz`9C9EUq|`}R#|Wcd=<{2{-92lt z&aa$mgPeU6YxbV;X_-u@u&ET!*L@`qFq|>EH{;uog+-z)=CIn7o4?S-g)+F6k2)uG zr+nsmunmYA>ui=s*?sv;NnFta{g!OZwI)9M(QiskEDHJYP~a4(FY*Xx9Nq!!N@tPe znK69XCI@|svU4|y#D+*;^U?k{WjH!P-)S6p6m)|+=^h4aZ;aitszGi&I$JM1{YKDMK_~9p&SNp6Q#*QTw-f4_h*=6YZ5jadI6B?O^ zYJBAh^RqNntdE43=ym&}_$O%`6^o~^81v+%+E=d>i`1*wAA$-~qrUc+DD1N&lAqEjIpjhlcmu$L?wJ{#9ZY~qB zZ{`tP8{(@v`IKWjR2bHij$NR4DI{j-6V65YiCV1OQq2A6ZD z=wCe(xP7$cG)zfc?YNkqIlX)JkRaK?GP^1UezFDre+?maXgOsLe9(aYbGRnkG zJS01W*yCMHIM?9=vz<*xzP%=-*j;M}659-7C_cgWhen~<+uGsC7oIOg zW2Jxh5JewJ?LY<%V)Xb0gTUAa0Q6YyZ|$6%w_2YgpTGdmoY-nQ^WK~n=#mT_NuNi; zuHCHXa#6+F4HDR!|6oIX3$O~Se>iCQC^`t-IBM7zl$F*95>$!H;Sentr11Ro-HJr1 zJTt-J@cEMa-XeF%0t1;S$jI&`nA%{<=bJC=!jzT3T=wlc$GDyY=qJS_peRw%g?yI) z;bVLAU=vfhl@^OzSe-sz5vlhdR_I5yyAciZq_mVuO@M!Dgz&Po#;Z-2b(2HC=d4}6 z%-*Pxp5)Qx2iPke_INqlb6F>+9HG3DiY~?PQU5CY)n{>ZPSL5_Kbg`Gx?5~S1u{6a zpLr#nylOU>*5okldy%0Mm*G2Aw&!#cawHFN?)6>|WT0f0(xQ2y97^8WvkH*8Ck2eh 
zXvRORQiBBYmnBbkzMquDy6I;Ef=obyc^bTX4sjbi^oD78mn{(u85tf-2J3|;&1W`JVCzt)IQ-xy;Y#iH2Wi#TTLx}naj6H5oLA~LtLm| zhh9&xh^WBK)WB`rRu3Re1DL#r*u_<0Z%*$t(G1#I37JSr5fRsi^JkRNlOGjN~9o&)s9zl zXJ;-M-XCXWZq67!UgI^3j~fD)e)rii)v}EfjmCUtan1eOA(N69HF8H9hqoX#CB08% z#KzjjrpuX>I4B1L149#owD_Lt^Gp?1Wi%h%+2FDz&(;o(@sq)w0v!ydja@B%?eB7+ z7g31@yYsT^ZuW4v|11lo97Q)UXlZ-Q`9!D7mK{Vfm#}2G0cp{q>dVrYUyO8ix@D^z(=L-NZSi!k4QcTq;A-Yml; z6^gC_u?PuvXl}X!F;aEelOvkguYbKa!-EwH&i$DitI_+8-5z?0EVNo~h;x5RvHn&x zyvgMAA5nr(V$#>#3W2$+=cCWfQ=^Z@QfGd}gB_THl*>#0DNU!2^?De{lfSgh)E_B* zDPVh6Q*;Y<`*^O|Cs`d3KnrpHZ7W+At<5X2%Ms!J?HyU@w;ut`t!nx1<(0Oko&GsI zz*eWf5D$+HS+XGJ<2+^#288fk4g@GoKgv|Shou}Jnh-2@pdo)OQH5Szn{NyV=RGt3 zXl*6SVZmb4#%^{RUy))}>P4@^dLBt+71FEQ=qJ-Fs}=Ozir1_oy}qnch7%#5r050T z@-sU%%g_GmPG^oTS?E+(bH2L7ZgO~f6M%utQrpSqK$pDzs5_L(PR1+IX5F&rU!FIs zTksf4P0zNMz)+&$3-3FYc>;!GarY5dnHo*>d#G|yQGu0DX5w~xZ}I|X2wkF+_P5k` zU2NKR2&*k#jOBaw51_-wLs(RrIA{=_C9`?+t!9kF_1Z2^C^upQQjR|r zdjOGhk%_skK-^f>558+6%S3X(Z--;bso#B*!|?<^1x7}tiw~UprL3rAQ<8)4EcV7~ zr7<8lB`Bk~doh6t8CeLN*^HA=;+z_JM8ww|7e9fvrk_#2)8JU)WA)5l9X4^O)K$vC z37j zIK+ruV-@a^&O~r!Y219%K$ontd5E=Up5LRl`!6hzY<-Er{Qh8+qLRt`wS$6J-MR0` z`Cn%05d)`+*;?DrWA zv51gtb2$E0#fG;2sI=v3Dby`vGKmE?wv7xcFk+Re17Rcu24Qt>L1j1>Zx6R}rqI)# z8Z$G_u& zk7(RJYgJV=ZW?d}f!^pO6U2Evin;|)Alxh1#5f!s8LIZ-u-(>DfEiwuy4+6v=F?|> zL*)3aw~r#H@@!w^@&qd2cCPzsE@n9wD~lJel`9|NjabNP3*xEpoZhU04d z^@V}y>QDM!oFM)oK|onaY+xgbXT_4mg3dsx_ZG-C;!P*+-6SH0lvef{+Kgt{_J&kd zKRy)Ry}lHX6I7o!DQz5*`AMaaT^$fAs>NP>b^*ugbEB`SMEhw_$MmzP=v)<|*mkW5 zsSugA5f5vCPbhSaG~>u00|mxS#K2`T24F(TN+htv0b(=LBo12F_LMbJ*sH>H$4f=h zIIU5+-2h0-vF|p-7S9UJZWU(}?M8&D9DDLvzhe}2qNUo{t@L_9>9l!#a_*RrpUJ(zBd{at%XMGIHgl!6R)>- z_pCvppxjnk;Q~rf!jEPEJ7YpPfr}e?(&T7QPiSG`e8%4D>h(rmNe!sf)TK8s|FQHN zCBuc|9`KTC8BhR+{GtqNx~GOQ{2W>NajU~ifrp8NP5s~XstErEM(6paHO<#G`86@* zR$cT)ke(AK3T)E4Z*uRf{x!)5`*UCfl=zNo@OO;}Q$20KO%$L#B*w81&?|g4xue$$ zkQE5ml-gVNR2nqBTDYC%X8;ls+px|O9Aj8zKJ{5bKooGZty1P8P?LN2S_43$;y``n zH(8u=Om}_R-i*F;At(E>Y{uy+{OxCMVJ!4~m;jsQ+Ycz>z~KtX_}nh_v*_vHP9#a}!u5QEs-gs4|UL*P%KdIdFUwy*RYQ2gs! z1$;eFvWjw>+Pz4_sCzxU;3E}V1ui5dWmgd*mQ(_IR4MB`oj|jJyu&X(uS_cHfOZla z@e13`ba`##QZAx0+R+@almIkRCJLxo-xTmwq|oBjI?Om*qpWR&GIDayNtk`Xy*p9% z7E42;NQ1&Rd9<$zY|p}y;zozvz^J=vs_0RX?a!aXd2GItC&^aB&-&K(rMFfaKJoVn z55^iD#dUw%X0ImBV_NS-NvBaM5SjjpQDN)Tl&p5{MW@OPkbDvyQBAG%z`k}xZ3bVcY8i^k`t~!;0w=0(6 zJG{Nym0+w@G54bb^`i0Erpvd)yMidvL&H6@UqO5*thv6mPn0JnDK#}k&@X*Vr?fO^ zZ}rDt|JltQtasNAe<+K&%(ro&HpRASX2RtOYMEAy-`U03&K1Y7pkmdL(Cr;`__l?h z30O^62NLa4;>=H)&{0HUbeO5PXXR~lz2qT_#Ko&W3l|g?QOrHf6iqM}>v%nP%rz0w zKf+)l<7DhL`pU*E*bs{2CqczS@p5+V{O84&Zh()UBr@vx7PSyX#KsgGQTG+x!cH1{ z=zT%0oQzuW9AVCDnIl_w*b(;V-Mc}2rG)KY2(z&f5+I!|W6dHE3F0+t7|6x?Ev4XV z|Hk=qt&~T~7Gp;Vv})s&kFC{?zYC}}ozB{}SJkhxOOq2ks0VegJ)&CH9VwV~x?fer zgV_A2G2BM9g1awIa?uRJ%P1y<)Sn@9i#oFw7>GB<1C4e*jxEdHtqCK&W52x4MEA`j zn|{Bs!Ehf7q#3eyS{)_6-ToOCoE1|3t1>ZiunycB{s@|CJ39R|=Wm$=S!2s1trw%I zqiPMVN~a+^=lz?F(udCO&;6HyhonfZqE;zp)0+yPPNKz!15fooPt!ao?0*1s|CfpR z{|I0IKS9XoF z^AKQiBG?Rt-Tn>0o^+{lpXB9!=XC=f_J*DaF_|U*DeItEjH(*HKe?#6|6I}no_Pf=&s5H@Q*3Q?Xu5T5ptO)FX+cw%Y;%*SFsq29Z;zgZ3Qh z$?bcbj95PtjVh@JFIPfG-VTVJ5H8Vz=gwS@>ktx+%aaG#S&yJN`~GvMAloFk?;d%^ z<6E{Jd>a*s04VGLk@?HFr09|ls$5@lGc<0gC?zMB3|X#2gwL8hXxs86IfN#tvrHIr z5R4FefONs+OKP_Y&DH0ivCm)<5R{YyY}i@&p!S;+7Is+dG;6@Qd5K? zB;tmU>V0Ye+6O!lz)_dR5%Hk~=5|Mxdb$YN+AU%9D%HrmDcTj%AyxPSfC_b9xLwNhY{rPWd zLLPts@PQAg3Mk)y<3yAEKb-Ra4sl*KTM`+_v3Gf|StJq2@kD2H?u+At3^U>YYb@f) zm2%~&^thMF2NyQ=*q8#_JTGe@cZ({Hzfb&pncN~(n&p-Z+zY1*V4~fwj z515Y7d$?v3np7O0=A@l@)qmh1(osU=pN|R8*W~y-nk^Z!L*QF!-^n;dLe_D_4%rBX67D_)be~82r1~SA03SiqO znD(sn=dmfy-^5%Szd-%r^2CA? 
zQ_`ZKJXzj&_)bUeZ;{^2pHydIiz*!j)o4<0&AB!1Mg%IFTAr0=pxmm61&wQ|-hNoP_PYqj1WJG0TXMjc#5gE`24xAi z3je#ImTv|Vr=_>N10Ly)Fq^-|Mzd`|0b{uxH6&?MqoPYh4pCID)ipLp_@vM_{SQ%M ziV`0O6~=^~8jj_gNYt$P-r8YCfU~>Yz>i*jkWwvC>oHVm|Km{QBFUX;`R&tyvpLRd zmGO<<)O+r`JD9Y3yi80`y^gTGY8_n*toJe1?MOzlV9=(YpQYuw)587d-FnZU8U;Kk zszL_#&kBc8bO^A0D`?kV^P+Y9G1PEi4t@bF`^Q5LmVOrFLLCnIqjl8{%D3d;I=V2T z&4@gUIwPR>$pjnoG=uFx3aQrRFBd?=tn0g1o(qtc917cV&O4J3A)q&22R-LYv8PYA z$*1cn7NNY3E*+iC*Q`c?-e=oG(9^ES;@W4S1P$vB?ZD)zqDLvL7%-aM!p`EE zD=7}>oj&=Hei6Gl>L@6o?$rbyW$98%G*SJU5sl#IQxe35Cc9feN(F#^MiRGE*9}%y zDBB!ryGKujCgi|-Mo`-+S1~0oJ(160*4LZOLq}L}xdj%Gdyg=X)mo_?EG&^ZWiHvP zzX+u{ZWi`k4=c~QQ57}YW^^QkI9EEDEFkdCNGJ_%bf?CU{MFU%YzZ)*N zOCWg+&Aky!xxCsIx)@9@5Psc>qq1JI%i~SOr2fa#=E@2gcKvSWy<`iIK$nA9-@}Y7i)s+&sd+Qqy(15H;oZsE#FzU6UB^zF5})qOt!T$l7ylRso>#lxtc`f>H|nty zY_(*Hy17qa;;f$lI-EEe>JQ5Hz(i(HKZCH=kQsMn zT*wEUGrdwUtF#w2v3(5=#h&nIz*-S3JicVDEoEkNc3cTf^>ORs$Lye#8^?){`y$4g zpVr(gKV0+NZuO3Mb%yX>hc~rcyDjWo%d2>-TasAzaxUWD_EbB`({uZ6{WCWXep3z) zI_(%TbQr&E>-1L!&;a<%{;7YBQ@tsQy*@#ScJnj^WN>Hec4^AWSa5^>0NoGZ%M92U zq(9coHn`~5U4A{`OSsq#c~4&SSfZn4#i6?GSFgryfZd&W=HYlTylQIxS6s6#1z=SV z+w$wcU$@6ID*~~z-4=Z-(fP2NB*=k5s@fO|_i~O1xt0ddSEHgbA$;C#h~yuW+Wr$i zjG73~>*1)XDlUqzmTBjLaZPzfQw9Z*x~@-4tK5luY%S&c>WGeHikvzL5;KI7^muj$ zuUtn*Cuo1OxqgQSbu!wEkk0-y6kwctYOG?2bvI19MS7?rl1UwSfl!I@7)Jca-e1^b zOP27A2Z1N(wLDbL=`))3=>1vMBKsm@J;H4-;LO8~!y5mDhM=zhF?2v98(?h3%ED)N{!+uPgQDv4Q6NLEOATMf)yTsly!@j8~eE5`{bO@R6lFEmNFu7#>aFtIz3=`CR$8@Msm#l^E zJZ&G!QM`sSy{H(Aoi)GI^|w?*SgCdw=XTk$xl{>-yhxv~a_+NhYV-T)XVR>rly3S7 zqq)OkT^!2F^yc&!hH^YxkkhcieUu6|5{J>Ml z1d}Oe(U!NXTNyU%|Cm=tbPUt5iBI+=Mshn$dKUFML<^0MjM!*Q#_h#{h$GsI48`^^ z3Bn(yW8Ug@abBDR@ev97=t8^z>)XN{0Yq+hK(pCI^cFd@yh*LZU@xF;{nqRFbo!ykQn{}LO=XxjN>MN~wuLnTt z$httKkf)q(Sm`?byLYEVeZqWBEU(pg?$!KRF4-ir?`Is$W`&F^x|;7jRfY+BRJdE~ z*Om*J5fZqoa&!pUw$x zGCoIAI!LDfxkCS@!7_H%WWq%ClTbmiJ#syRh>(u4@2aMRxq|scd8C9__qv;GgtE4O z5bZNR58odSZvM+5sf{7Swz&-z&_{oncu=s}6;WJM?bm4AMJ|>6cxvsnUw%HXFWBSz z<+JGXP6n|n4luo%+#5skS7TwOd&>+V6YI+)<$9c)C{%3R3 zSBk>?(A66nPNU?>sq6{U8}(N2pDFzMD`l3@r8{Np51jGpl=69hahGn@5Z~Ud)6LO9 zm#7Rb=4bhrlXvq|0?XmN`@0HVKP%L|L`Q4}U``q2y*GQ8E45j<%EYKy8Hd=UX8~0u zydpOrhpVq^w$i;$C8E0-mh*e>4PGXG974_F#&~(*v!Q*p9~7{RdUu2Z%!WMYg=kqu zq{S3w-s?RBnynQfKG`s{3a}=7)@|s+^Jb1M6cW@W`P-1Xe>q!3sNJ7p%Y`*HRZ)uI zTd!rXe8p^iioPzzfJJLOW+iYIZnjMp#0RUqDKg3Dh}_*n#ba-&TpJNXc}nZ>qh9nTU?67?YAa6YM!25)wdKr^J5>uMz8-k>NBDN;yZYLg+L~zoyPTAE zMOHSu`R^WIp(Hf{q;}xT4zi#5uILw|YMzc?Mx7cj=q8=Qp81yhD9bwk*{7l*RjTu3 zmCfDwzJaQGsCmjAtSMJ)jIH|xXEeJeg4d87;PpN6Ag`hOQ3WFKzrKFWPo4X`_d&pP zw%@?_mC+6P_{fLV^bsd$Ew+VaeFLwf2EL~7ub^4sp~sor;=C1%n}U)<`#omd$=g_s zGqvb%i%Jehx2r#1EZ^m;6|S@8I8xiBQr2w>P5G@MXaLgwO~}DmRuboNE&l%`?qLPf$C5kX#PnU^)&HQZg4iFe zB{uZ(el-prL3@D#${2WlT8bs;q?P4}0L zYgL~QVL27Bm}6n1C(eBC{JkIu=Y4H?x>6cbgRx3t;puOd|~nJ0ZtWnBmdm zBoS;GL^T-~r5<+Fj$-&0jWn4x;#y=uOtTC(HBexwKb2TW(}?x^& zTmg^+`(HIRf~KC3aA2e5dMrLUxh}R7u|G5e@6@}W6Ah7>>8ENsJ}tO#`cj8_{bBQx z0rk4yD=#>(%lsyTh6!BU_Y~eUB7-=zG+et+y#L%xyfFvv%|Jh{-6s<8p2mjKzH;<6 z#X-#?tI#TA%;2fmi0IZ+i_cXfb>;@b_*@&ng{c@G3M9OD{WIMN#&E1ZiUPh7()2Os zrej?zGiX9eOQ?RHXfqDXCzI#@5a`Rd??JNT+NsqqofvEM3b7>8!^YoKVj_<`wD-_r z;6|9GYcwk;DKv@=hQim+!aN$F4hv|v%iTC`m?tj_$cT~lnm}5d)IY97bw`-(u}9-6 z;B9e#G9R#&YznO$vEp|Jq?Sw8f(hwUMgM@U~K0N}!k-17d@%BF4!SJL-z zySEc~(7H-3V&ld+dL@PKORn$f_26nmPG%+PrpR$*vnD3=&>qCgRx_&GwI@i19DkNb{xuX3aY~gyK6rOt|BCPaoo6#3+l@18Tu{9~^*J_9q0C=0JRpc66n1 zv-;|wxMtthn<)n0t<>9HyAsg6-gd^P@&*O%T$z^1%^p?{?$|%^6>LSQCXTg5K!JDDuRktjo!LdX&U&~|=|^FOxq*g4BX#5%=YlTQ-U{ 
z8nRV#aiCd!?xSSZ^?3>z@5LHdrwX+~hVLZ2oYjmxiTt)WXaXN!;Wh0swacb)EzPjCO2xbG|~cZ%33^q1`#t#+SbDct|2u<2L`7=H4KgLI=3(lK-lh=}y5gbdx?-Jq0o=P;zy44uOeb8oz^b+7x)z5dU$ zo;MFKhP`$ldmsC#-{<>1{Qr=|I48y~H-*r@U(C=HFO>H~TD;_9ty41nUC>Q$@YA!m zT#Hp9jnc8ErFf8;T3O zvacT9`H>L*Wd7B17#uzq3fUun(aDDKGzW@O2#PNDQ52VypBkfDaMi+3VW)$Iz5V+& z{0sR77zJVbB7EV)wbrOs?7&27q|J)t6A_*oLr()U_B%kEub4esn>TJ&99R8{b}xNu z;I8T3vde?Ddz4g?>!ZV3A(nCI+{FPOFn%COF3Fco@h?Owj{e8J7u8AK#wvp zJH(YB@rLyKZ*8IPm$g|(bZ^fHPPE?wD_V{4TLWZ?=!1cHgdFq(wV$sI;0t7_@)CAYBa^;IZ|SV{T?E#q2=EfL@sx9tB1h+ zwvKowRf!q3a!h`6|65iNfP6iXF|AcMTxSW1mWgKTR9e?Z7gIzgRDX z0~KEU-43EVsE;a2{Pm6#kIy%U3cu?CRN+8g>iNDyNlF3~E*IH+!o&Fj9}wh$p!}r9 zW+8jS=~E`HACq;quuhfM^zkE!{6azSRIo@MkAA%xU+o_Y9egrhG@H5+X%W8eMfHAS zwMut-$mjj>%i&{An+G%UT0uX5X8B6)W@yfT>F}{IhF+|_Vp)O8qif6`JfwBAS%&sz zJ-P`AJw7&c9_=q1%&Hcwir2pTg!e`=|GQ$>2bbAcgB-0WNK(G>E&b=cO$3VdHz8n^ z@zpEc?`}C$3NuuZWa?|kqm0eLeO)AArK!?M94cZ%A-5kVYSf0T5G~11KXSY;i1eY^ zGZOIRGOf|S{rS)`dpkaa zfY^Y{M&_$E6ovWtW9}LP-mL2pq=+~Ey=mWYSWA5gz%AMD^0)@2is?%)DR2b>4X6Q| zuI(ojT5+W}EU8%eiWE|M{_URAlOcBseYO&#aQxDo?QzYg#E+azc@xD&$J5qdzSIal zd>^MXU1=wyQN*RNxqo-3K{#cMZX1v*&^RS@@{Vx464^x{^qP8NJhgKPyk1IFsnPwElX z8z}ASI&P!0@5wUz2TA6+qA70NU90<5u6Uh^lU0Kr9h+}L8djBfTx4;1@@zuUiOkxo zgP*Vd)PqA1CmRNgnU>F8ymU42-v#oc^Z)rcN13W>lk#st&*n!?AcymO$Y$rKag<;Q zSvL*)O8WkR_2=HK0#B@cBAR0DXy_?}{?*eJ{qdAHI?}j_jfp{}FGV&zuRe%8f6}n# zUG=YxXt^*w+W31R$5(1+Atu@qn32fMz5a3&q~RTzeNUu>?NhXzq61mjp9!l@@sV!BEwUlk zIG$bYstYF#1rlURj9BviKZpAG)QXQ$r)fF265iw{OZ+oZp1@qO@KHw;u8`At#L!tt zBc-T_@eDwb~O zf6Z&J4RA%q7B)qRIK1Oa&*fmPjDPA9$x;7l3>&=~iP-F%?=-%Ig@s`&AEXoB)0%7w z39ZJX^`<1c{N1sfG4&W4WX-7~va94M=Q9UgJvHokBwTU8SnUHoOSz;OGGH6Z4pxz>)7#b&xc(>LqsImvg zCsDbmrtYhlZ5OUE{k$bxqHLk(JcM_T6ZH1?L+eN6cib#TCuQvB0*9eU8~8^*?qez2 zY@2dzx#vID;q)hZiYMcuOF$e~bIcpWWydv;FQgyOWXx9~^q&Qd6sY!S4_1+$5^T*YjA@cD~A`=ai?f?K-MT z_RQ32C~A$}8;My+S1tfl79xubi`1#W{`#kZg?ReVVszq~E04W-{bNc7>jMj(((un$+RIg;HEZ9^Ql>bVGWp4c8!fN}^gT~k7S((hL>&NYO@8GSP zR+xp8c>1?zJ`2f}La!Y{33o3vuEbWIf3JvCSf1?Yu}+-~fF%M-zr|-==Q@afqYay0 zG{c|EnIhTj368FIwvN|-jK zdi)GF&>6es`jH`{F}qG$2aZ&eCj)DqhkuwaS6tR-EoM)`hi%ePSyyPnkL2(kKb{7g`rzK^aEz}&k=PZV#q z!I#_k{eIOf4f$OD3}jeitEB_O;TKLfrNM~x|ESPS1N#u^G-Jn;kEKA0#Q#4Sk3WEs zDFG@n{)(hV2}l9(goZ{y(#+vIP@!8gMtimip8H1I`7M9`p(=gA=L^g2>wTI^0grvC zBDjkT75MDH5dIb3;zp;8kx{Tp?Trx)IRK2&<1PB$lm^v1?hS>qNq##I&cHx7$Q1|y zsOxxd+}>m-{k4i0RA&hQqKvR&BL7T!pyMrLi9i2R$Fo-qamNYLH&b;dw6N|5T;1?6WC>N zD)=l-y*+$p&5m(2g8u+CVg9>KD_|nqPWA@HZg=Z1FWkdVg9EZsPp|JGgrM+3XF3V6 zVwJu=C%=t6)Q0pgm*GDd39}*he+91hkBfw%iTpFX>+gW*cbi*Tq*YB|BbdIum6rgx z=Kl4&^1o&)z)S@GfGxcL*DL=A^yn|;vo8gn*A^fB)dIY+p#MTY42g_D{ZDWMH#5Zj zUmUpLXGrAOEqaiO4=y9R#V;ch83Ui%rIZtj{{; z&-wM!CR@l$m%#8W0Zu`=Kq#E_rRv@*xHoA_O7LG_Y&qTgi_T9_ZU21 zO;8lB19*KddMSn2W)bb+)@kNh zBqfUp*6Q?^SRoH&M^_QzXm%2!*b2W@dwn z`ulbeJiyhD*0Zam_n;2o@29a%2jGS#3EJY|EM(Uo$Swb%2r~ajQAnA2+~*uekj(O< zf;d)k`kU3BcRA>Ph>l^f&*25{6Z}XCr9t^TX5&>DW`hOZ3VsnFutN{D?=Y|=*ByNb ztAj50EU4jwIYnv_h(~+^HvBd-<3eC^H+&9m5`nDHg(LnVjV5>in3Rg_BvS^lzePmU zU!{LX_9U_Y56O)Gf zjRvk$59V7)8LQvdUx|mt*O#6v0WqKl%3+@~9VZ^T&o`sqAD;>i`QLbToivaG!Xl6i zN6mY|Q|@f7h~`+SXA`tZr{s)DM#n>l@t=C`(Gqb}*HjLO%JkuxPWyT~n#vSto4Ip6 ztuEn@I6f1?=4R{@wV&Y!1fE!6-Q`v_xpqzRSIt&%mzVmuY3Ixvbc5Y^_~QTsOpQGn zptDakQV>DfgmNu47^Qg$hUVN%{E~|8t{?h68xVUql`#7w)lsO)6xXbI-_Sl6k)Iz( zXq#W`$9e2h6Q;QddoP)clG4RR7B7P(H~7Xtr1LD<`sB|nUK$;N&aTpw=9&H&{{+)V zTDPc=iugNZq{?SLEw!a1^zCOtkItkwJIl^Z<&k<6w1;Q2H*!s!ul`Ki?+c!GD^ z(On&}yFY8Nm19lmez<}KuyBKv1>u?2`3e>kmw#Ag7UoBnxUj*bNiYn_3Yui?eT3*$ zDTx51K?mKITeajJ?^N_8;HN^r#Ub4K_Bkd|wlje{2j0kS!XXTDVqLtRUyv;*Jm8aA zE8@wZ+G%Ctd|>gAdjyq(Z)58q#aiswtVCW}sY~=;@A|sK!|RLx%KlEt&NGu*=oNgK 
z%$i6K$o8PJyvcBG!3g(WeVD_*KN%HlMmBVR=Il>91h$O_7-t-(k-46NWnhj=CCV?j zC;GPaiP`BJJqa~0ZpJ|q?Q_Dpnp;9S*d1l9s@D#gZ>6#uo*VdNaSu##TL`ui8uTYS zbl4$7RU?yqXRV7Cc(IGf$*`TKE7T4NMevouJd+#DP2}lE&V+ekH>5}Q4(J4NAJBiZ zl{pk@FOH2;rK8yEpyUndR@L3#K`^8?AB|e#95*|Ow{FBSfBRhzyx4^boG zOY2FUw~(=+JHtByTz+SkmV~!Ik4x7Lo{7(1d~r*_N=-a8JyFVJ6wHyev;zZnzAaZ8 zSELL9!iC3LLROPb+=X5i#cX27zx7Y;YuttE2@R5Q#-ew`)GW@ua?u|TMVZC0vrBHR zdP|A%!_o6QftI&l9nre8#M`H0FdHRG-d8JiwaCkvw?nuBLv`1M=~KU)=F3X{Se3#3 zS_;54`J?@6x!jWDs#F;_7`UlTt-l;$KOT2qP7No_1~9OY*HJ0L`x?rMvqF3T%Ja3? zdu~?FhtCT12k7bwdi7mn9VGvJ6L>>5P84&h_}Pt6va2-7+35;6o^a{eV`# zr&;;d+fQ3AoPXDTAn`xu@E_Ru=~Zfe35z+3j~i9+X*rn6Kg2!Yr~oaCz7@V^3omtR zU5AQBAb55_n>s+yK}7@Kzh?*qkOl6G83yqDFAwm(14>(wzXY+HeRn9*6EXq-^lOu z_UccBHTxdYio0Ha2B#W!&Bf0$EC{FKIDhTrf1#G9m>)f{&<15^WK*LlXQJK7Jq4Tu zZEL4(x4x$P!}_kcnxTu%dbl&Rm?F6$Z1?IffIq^Je=mVIimUfoQ5FNKW{8=rAe zw+B%|7hH%~Ej1RJz0%|Z;tVvb zEe66VDTw+YYK-cQd}SUr4J37L)9;c|?rQHtc9W@n;0w&E0m zt+Hf+j^});t*o5E{k-CrQv*3t4=$}=hSz(%YS{UWhB(5eO~Mlz$(b4H=r9)>C7-Vl z2ZmRNnW#FtwROdLENVB^OEUI*_W0p$s}pp}58>Rft-T=d_ilyMu81-F*S0pv+}gbk>_c1{ibOMs z-BwAjTBvgRX!)%+aI{`{FAe7t0uEW-Gpeh=dptky=V<958=;mYmddLI3?3EH=O~w`U3W4tLdk^l{k8B2r(9KQ z3yin=nb)4V2J)ENsJHb(>qH=B&i(q++st1h$Zm_0_tUh$KJO{#-6qV?ehq|o0FbLZp=w6V++r15*rYHdk52ty0(H2B>wiN49kav_~jbDLlTWyMtNMw zkZEoGaaD(N(?#RQ0&(OB=ef4Q@}Dh%mKE80ZD*bWnw3kXm&{a&BErHoE~ch?S$?9Y z)orG;?27+5JqfU}Z{~g<@ID}~D7n9p+?b9*Y*EMsj=+8KBj-u76`G}QVWd06#2s2< zZ+7#ra2b_*pZNPVi~YF7|3S!m&Oq4K=x$5~D>#H%>!~*Zx%R>u0 z6|$ZL{Jyrc5a@oeEmW$OC@w*o2Ab>0adSU$d|(g z++(mLoyAjhn9V&2er_U9972V1`-1%;{+%nVq;D9q!ocFDO7O=?4;MbHVpEnff8=k~ zP!QB#q(kaYc$BL#lTFfj^XnVB%UB!n+D2qRO!b`%t4Kf3iY!q(*Ct$R^Ep|>io&5p zi$n9anWpKk*jdo2itHa{(m-KeB^R<4y>SIqfylY^#Ct|@rpp*F@KB8vaxROl2%q+g$S3Qu;c<)_&x~&-%<&H>h0j?l> zx_nEXe?irPOS!PHvEYwW{uy}ro;sp@}g$^GK{slSMl|p{+wn~P=J?Oj-seO z$+4!SKeiaB9JdiVK;6y-jUX+Eh;3TM=~48)@6MKHd(`rVE>w9ph!>sntGwJVr4(0S zl33F$=&1}f0GsiA``O_#*mvn-F1W=jws5n0imRKVcKcB1D9L}4V=+*Qd62{1Qm$fV zIpqR&-S-&M#h;4@m|Bmr^Kr-8#%_8*(`j&Vu)m2eftq>DNs@rhIKvBPF0}B92;5kc z;%C0j_lkc4=YC@h0bm`knfXKNKHpj`p2#3}9SRgw|?*jduZbk<;^r z{4IHJ_eSeSHJo^&<3@T-CFn~YtYW{z`)DQdpWl0aS1&h)XjJBbS8RS$s2?i*rT*YX zdxA{cm4~qkm6*kmNDvvP%9mhUhXQIu($gdMEJo{CS?+at(d0yi)ZXl73qZ5f<<;tJ z3hA6H`FL|NL3Mrxf6BoJO>@_zY5;?vJ#r6!D@CYknOD6-kXe4f{w{E_m+u|XZ_Noxh78ZWvRu8=KY|ZaOY3Yk zUtN*}G~H2Bx%u&U{cO|~xz3vKY;)s$ZzdDEJ(`?3)pBQP``mduW3bGdZ6f*zU9vF) z69sN3pr;R48@;l;dzQLs0nvvyD?I96_;zTlBwY>MId zLJ)jIW#ij9v^bB`8^xdrSGC15O@daCA3cl@`6+=PIsE$e4|1i7TJ=PIxv#vgmI&Ep z&}-QT#qGH){i%{p`)ya0nJ3&P$-VH!7^1WsJS)6KUS<4vi7AtVOy4EUJ^|qI<;Jun zmF{Y5YX}O)Kx49#0$>WkuwS80Lh)q_8rTS3vhL29c*p&Cz2AC#!iP$NQRw07D=U$b z%xJ}D#tUV9KteWSex&-Ct%lb_RBUpBSKxpS1kLl=*tuiFc*$ym2BfDnv%sWVNLJ^EXEr+pIj5 zHv6M8cl?8QO~yv-X%_>_N9pExOZE~ilvq)HP zo1TZ-tXyqpZmNVIK;6rM?^>b4w`K3YYog$C`_b628zA~Lm!u$_nV;o}=Ca5aGb%uA zUP8>e&+1+dPb%E1cBiUD^T!KiHpVE9WG|Ww(FwJn#h9y`A@sg(iM2%IkUfFiJlX&s zH{9&dv88M`DWtpEq6l|-&^b&>%F^O@)it`fcSXsboKbnIaW=f$fo>J?-IJ?xKN=4p z*O}Kl?||Zqe=ONMjy-3|+G%Zc)|4~od$arLOxSaqGE5Kb*jUQ@m2|NFN|mAt&0M!^sUi5x71=&i z{%dXKl+53$c&x~Aq_clb&HDw?Io81y;Ht(?n_=ioifO<{%`?s{FJxL#d^N^Wl+3lB zyz!=xN26gW;nrU1dN&@#ou*=p^Oz;0>;EZ|2z!qc&%@i8@)FqpW222Z*$ix*To(}+ z+@6WvH1Lj^e>1=tHnv&IfsH=vCC4m*Uvk_HOv4-hF(;|-+E-|yKA%&9w%CA`(-Yq* zYUyM-xyAO28iYxY=g%b%Dygc%c*ygma-%M=y~l^Gj^s@hxQ-z^ z*J3CDP(rR*0B+<4g8VX8k^KteMOUCYA>s8=$y&HGt0IDisdU|KweU%(I?heS_@eY6)qooFf`t>T4VrYLG z`il3jofEeGV|GmD&ruM={z}K)e7_}z($C0k_s0kmFmR);grLZ4b-#f<5c-gR)Ca6y z!&7q}s^*-A60_2VU;O1oD@b|6x zgzz9Q8hvGK6jsE_xa3A4T?_hR%Wdy-BsQOdhtu8=Mz z;=3NzcdnX5W>oHM64aGR2>W>EfLLGPH^|+p_ZzV;w!5~xxSRGgdVXc!#KxUa+Gb&& 
zpqari21O%zDf4=u)KVIAMkr04DksLiQC~03_WIRh=-{;Q4MPO&_ zO>i&eFXRIt=hIGS2WmjZMnEfivnqL37cy>er*`d6rxOQ!yZ^IxQwun1Vqg?#JS{$X zub)hjtaM0l%;2W^$FZ+sGMyE5!vr%i^rY3;mZ9e%*b@_;A?c}{FJFx|y$B9Bi5pH;V z^c*}d6N38R`Yw;gxc~!6?cp=NGT|E;7CtJ?kg`+eyh>&Efi7k<;qSGX;p;VD{M+j_ zLHFM0ePYkFZRiYy(q`xYCL^k1jM?LtCb|X=#}!SjK4{uF$`ZrTLa}}WKfx2HBkor0 zYU2{TrxGSw&?si23>&%auf4G#>1BM1IlQje>RZS;Gc}CECV|Er@+duiz^ zi{L@eMSC!LR2=UYQapU1CjF+BK2F(Q4~M9Rs7YY*;WsvL7K>x5t#?b^RUkg|5Vapg zy}RxM0M`aIr#&V699>*8D8fcHxFPAM690^ZJHZz)XfIR0m(YK#Jqa~DXwv56Bz~*C zo+Ms*tKJ0?s25=-n(@jPSo!+#Wi+vCqDU_gXrwQRpuGw+e^uz^i`0Y)1 zW_zTc05vf2Q7+jsL(<ntG7-9Plwmrjvy;eD>zS#6&A2M z&9gcVM-&nt+~k}(W5-y0Z1aJ-BNZ$w%oA)$1ThduPd&#fnlNe-%{CMWVI~;9H~lk~ zo3OwGTKM;hf#v+f@!P=m_V$@A#~RF4Nab~$9sxQXisH6sfjThPISv`Nw)jvBsuq zX7z5_!h1<;VCO)8OJ5+J#DHkw)z1mJ;exsR=}eWE?BgPpc!Ipq>rgiJbj@9Yb=1n1 zwKie8nyrf zx43ss9w>$b1+M;>#V;bIv9G88oSAf^;R7o7msC}xFb)nTQq-k5>Ph%B(0-^)naJ@E z%wL;Me0wfrFkziGw!U6K%b9U z4@I5eX|t?69j~oe`;tHSEPeA?-h(8Ggowy7qVo3)-|HqVy~$!NKWp_940u@I+Y;#F zvgC^wd`l~fB(W}xbG&Q(yjJCspW^q+9;c-I4^KtQV zY*IB~%(98Lj75;QG)8qD9VupfOPn_~-XIjw2930LXd-N=TKA;XYi2X=vKg?178@?I za=EkF*+b$4{l%fH7w%aelmo&|mZ@KWIZr4fVabn|v>|t6~i|f)D=|6hDAn zl_tXe7w9VgQG#jTfQ8P-`+hT zPt=osvs0Gn-B^woFh7XUw{DvG{XyQ#*bn-A*R#gh6#XSE`qE%T1kGe*Vfak}-F_6H zZZ@n(&LN=~Ec?;d!rnq+cXf(#W{{wFg|bJ^wR{?tqm$J0r`ft-97SyvqUE}ChEl6i z$<{p{7xO&(E@eKiX>k-9{Sv1Dw=d+PbRrsjQpLlY{L#Grv)48_SA-~^hWH5BMsZ-mB{QYLJv%ZtbW2SjH7ROVq|p6i-k6O?imhGGGRkjmLQV$xX;`2y z|G4`3y+X76^wh11P_JzHPTO*>mkUZEb6X4KOTm5Frz|(83QpPk+==1i)CxUwm$}5sSum2g@K877Gd#IdXPWoW0i1~|lx6J;NE}Q^-*j8Z_w;L5p za=@?GZEd2B<}f(zY^wU&6%>{olJ*{N)#6l|cCMc@NPD-sqqg#l4Jl?-w?EP8FzmvO zQbUrc6G*@A7Pfggdps~X&tuuWCiNrQ#=)2C_K@lV?+^U^TrxI(_%RFFmnGzQ|3x;X z<-~~4ZfSGQN)9L`+5nASg1d9J50Zt*eFS}vI#|e~jQ^!2uOo!wHM#5)dUJ%lre%)d z$sP*bJMr~Q?=imiORV9Ah%W3YnhR+H%J^+TK`ka~^lD=!MjqcE);t6Tt4;$qP$X%1yVvx& z`^rp%T?dO1k)Zp@Mef>ieUq%)#i^ORa+4Ka+p-&$HZQ7SyjD6~szPig#)?;C zr~tLgg}l|RxoN2D^Mx|k!1W(5yIVakSDj~-(AVKgd<(17Sob4s$K^e2JMY+8Sc6oW ztM`YY&Ba-Anaj}0l+2$$8FWni>V1~O5~o<2P+w|GQ>AJ}ONvS&O?=N-XWIb4*zn~X z8N`wEv}*mJH}Mpb#Ob?IQ;a&jY_Q|S8c2>*E4VCJEEn;I+ zQ1ud3VbyZ)aGMKsD1H!#>h~zuO~@yR$X8aGBXgLC*=&TpppYweRdSs$>YZ;9sS!Rb zFy0iasl%+LwTSdtN`|b7@wDP5*>k)XXTJ%xP726RQ7_Gp$gnGmx@j$PCXqU*k}CT+ zwI$$;Ha-XE!mLh!5oMy&rem>diyFK|cZWlO&hPCzCvNs=@=tAp;It`wAr5fgU$lwv zj=0N%&%07*n;5^{6wBpK<1p47V&nS^Wb_{JeeO>-ZfdF4Hms3)^M%U^E&dxBmt~$; z+f32NIA%%q1i}Ut;R`cwspf2qu#jZe_Ak|IIKq(~oQnrzw5`eZC5DBy#f*5Z_M#Z~ zsr{EwM1!cRpJe2U{g?#igh(xXh_qSixd{u6!7f45em_(tjJM71lx6PD59FZMU>xGi z$R#1+#z3baLD{eiLIHU;yt!?lYL7^2i^JeSC z2{>O6+Nee7FLL8}j(;EIQT`wJ^8DAbCv}AHKUj~X3_!)%$c@`J(EU3L1m0rpZT}Ko zX5DG?Gl#TTW5_kAqiO=t+^y{E^0$O3UK=Ie0aE_~^d}G6N)-Gn@%#SE9qQqwamMiu z?Ifr#2F(QM+#rw!@uwdQ01TuD{Nr?6oX^IL?gE4*YB+oVfQ^8jNu&_Ymc}N7TmNAX zCG7ylIXb{daqBKkd+Rj_SI6`hJiM;JJ)OHiYsO#haEcCKaqtE%E>~x$uT#=? zyAM`^LG6~B=$#$6PCpSQW7;!_uhhs zZQW|U7oXfLenExbrfLB35f{d&@7Lm!z(5$ypJ{2jC4e(fH5QkWgEt-N-|l^VGJeuR z3~dC?9!BsKMUY3ixcItv7hJz*G*-U#WMyGY3s||gxPre+27-g8mL9uKNjPSyRDh0M z`uciYs?i%M&w%e>TODxoyz)kY%1*qy*Ar5>DV=!tVor;%A06Pv|M%bZn+5(tT>k?@ t_ - -### Using etcd as service discovery example - -1. Start etcd service. - -2. Modify dtm configuration file. - -```yaml -MicroService: - Driver: 'dtm-driver-sponge' - Target: 'etcd://127.0.0.1:2379/dtmservice' - EndPoint: 'grpc://127.0.0.1:36790' -``` - -3. Start dtm service. - -```bash -dmt -c conf.yml -``` - -4. Modify transfer configuration. - -Open the configuration file `configs/transfer.yml`, modify the app and grpcClient fields `registryDiscoveryType` value, indicating the use of etcd as service registration and discovery. 
- -```yaml -app: - registryDiscoveryType: "etcd" # registry and discovery type: consul, etcd, nacos; if empty, registration and discovery are not used - -grpcClient: - - name: "dtmservice" # dtm service name, used for service discovery - registryDiscoveryType: "etcd" # registry and discovery type: consul, etcd, nacos; if empty, connect to the server using host and port - host: "127.0.0.1" # dtm service address, used for direct connection - port: 36790 # dtm service port - -etcd: - addrs: ["127.0.0.1:2379"] -``` - -5. Start the transfer service. - -```bash -# Compile and run the service -make run -``` - -6. Test. - -Open the generated grpc client test code `internal/service/transfer_client_test.go`, fill in the request parameters, and then run the test, for example: - -```go - { - name: "Transfer", - fn: func() (interface{}, error) { - // fill in the parameters to test - req := &transferV1.TransferRequest{ - Amount: 100, - FromUserId: 1, - ToUserId: 2, - } - return cli.Transfer(ctx, req) - }, - wantErr: false, - }, -``` - -Open a new terminal, switch to the `internal/service` directory, and run the test command: - -```bash -go test -run Test_service_transfer_methods/Transfer -``` - -> Note: If you open `internal/service/transfer_client_test.go` in the `Goland IDE`, you can click the green run button on the left to execute the test. 
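To make the end-to-end flow above more concrete, here is a minimal sketch of how a transfer method could submit its two branch calls (`TransOut` on the paying account, `TransIn` on the receiving account) to dtm as a reliable message. This is not the code generated by sponge, only an illustration: the dtm Go client import path (`github.com/dtm-labs/client/dtmgrpc`), the `dtmServer`/`transferServer` target strings, and the helper name `submitTransferMsg` are assumptions, and with etcd discovery enabled the targets would normally be resolved through registered service names rather than fixed `host:port` values.

```go
package service

import (
	"github.com/dtm-labs/client/dtmgrpc" // assumed dtm Go client module path (older releases use github.com/dtm-labs/dtmgrpc)

	transferV1 "transfer/api/transfer/v1"
)

// submitTransferMsg is a hypothetical helper that submits a dtm "msg" transaction
// with two branches: TransOut debits the sender, TransIn credits the receiver.
// dtmServer and transferServer are gRPC targets (e.g. "127.0.0.1:36790" for dtm);
// with service discovery enabled they would be looked up in the registry instead.
func submitTransferMsg(dtmServer, transferServer string, req *transferV1.TransferRequest) error {
	gid := dtmgrpc.MustGenGid(dtmServer) // ask dtm for a global transaction id

	msg := dtmgrpc.NewMsgGrpc(dtmServer, gid).
		Add(transferServer+"/api.transfer.v1.transfer/TransOut",
			&transferV1.TransOutRequest{Amount: req.Amount, UserId: req.FromUserId}).
		Add(transferServer+"/api.transfer.v1.transfer/TransIn",
			&transferV1.TransInRequest{Amount: req.Amount, UserId: req.ToUserId})

	// dtm persists the message on Submit and then drives both branches,
	// retrying each until it succeeds (the "msg" pattern gives eventual consistency).
	return msg.Submit()
}
```

The `TransOutRequest` and `TransInRequest` messages come from the generated `api/transfer/v1` package, and the branch actions use the service's full gRPC method names (`/api.transfer.v1.transfer/TransOut`, `/api.transfer.v1.transfer/TransIn`).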
- -### Using consul as service discovery - -Same as the 6 steps above, replace `etcd` with `consul`, restart dtm and transfer services. - -### Using nacos as service discovery - -Same as the 6 steps above, replace `etcd` with `nacos`, restart dtm and transfer services. - -Note: The namespaceID in the dtm and transfer configuration files must be the same, the default namespaceID is `public`, if specifying other values, must modify both. diff --git a/b_sponge-dtm-msg/api/transfer/v1/transfer.pb.go b/b_sponge-dtm-msg/api/transfer/v1/transfer.pb.go deleted file mode 100644 index 954eb29..0000000 --- a/b_sponge-dtm-msg/api/transfer/v1/transfer.pb.go +++ /dev/null @@ -1,493 +0,0 @@ -// Code generated by protoc-gen-go. DO NOT EDIT. -// versions: -// protoc-gen-go v1.28.0 -// protoc v3.20.1 -// source: api/transfer/v1/transfer.proto - -package v1 - -import ( - - protoreflect "google.golang.org/protobuf/reflect/protoreflect" - protoimpl "google.golang.org/protobuf/runtime/protoimpl" - reflect "reflect" - sync "sync" -) - -const ( - // Verify that this generated code is sufficiently up-to-date. - _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) - // Verify that runtime/protoimpl is sufficiently up-to-date. - _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) -) - -type TransferRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - Amount uint32 `protobuf:"varint,1,opt,name=Amount,proto3" json:"Amount"` - FromUserId uint64 `protobuf:"varint,2,opt,name=FromUserId,proto3" json:"FromUserId"` - ToUserId uint64 `protobuf:"varint,3,opt,name=ToUserId,proto3" json:"ToUserId"` -} - -func (x *TransferRequest) Reset() { - *x = TransferRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_api_transfer_v1_transfer_proto_msgTypes[0] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *TransferRequest) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*TransferRequest) ProtoMessage() {} - -func (x *TransferRequest) ProtoReflect() protoreflect.Message { - mi := &file_api_transfer_v1_transfer_proto_msgTypes[0] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use TransferRequest.ProtoReflect.Descriptor instead. 
-func (*TransferRequest) Descriptor() ([]byte, []int) { - return file_api_transfer_v1_transfer_proto_rawDescGZIP(), []int{0} -} - -func (x *TransferRequest) GetAmount() uint32 { - if x != nil { - return x.Amount - } - return 0 -} - -func (x *TransferRequest) GetFromUserId() uint64 { - if x != nil { - return x.FromUserId - } - return 0 -} - -func (x *TransferRequest) GetToUserId() uint64 { - if x != nil { - return x.ToUserId - } - return 0 -} - -type TransferReply struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields -} - -func (x *TransferReply) Reset() { - *x = TransferReply{} - if protoimpl.UnsafeEnabled { - mi := &file_api_transfer_v1_transfer_proto_msgTypes[1] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *TransferReply) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*TransferReply) ProtoMessage() {} - -func (x *TransferReply) ProtoReflect() protoreflect.Message { - mi := &file_api_transfer_v1_transfer_proto_msgTypes[1] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use TransferReply.ProtoReflect.Descriptor instead. -func (*TransferReply) Descriptor() ([]byte, []int) { - return file_api_transfer_v1_transfer_proto_rawDescGZIP(), []int{1} -} - -type TransOutRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - Amount uint32 `protobuf:"varint,1,opt,name=Amount,proto3" json:"Amount"` - UserId uint64 `protobuf:"varint,2,opt,name=UserId,proto3" json:"UserId"` -} - -func (x *TransOutRequest) Reset() { - *x = TransOutRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_api_transfer_v1_transfer_proto_msgTypes[2] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *TransOutRequest) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*TransOutRequest) ProtoMessage() {} - -func (x *TransOutRequest) ProtoReflect() protoreflect.Message { - mi := &file_api_transfer_v1_transfer_proto_msgTypes[2] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use TransOutRequest.ProtoReflect.Descriptor instead. 
-func (*TransOutRequest) Descriptor() ([]byte, []int) { - return file_api_transfer_v1_transfer_proto_rawDescGZIP(), []int{2} -} - -func (x *TransOutRequest) GetAmount() uint32 { - if x != nil { - return x.Amount - } - return 0 -} - -func (x *TransOutRequest) GetUserId() uint64 { - if x != nil { - return x.UserId - } - return 0 -} - -type TransOutReply struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields -} - -func (x *TransOutReply) Reset() { - *x = TransOutReply{} - if protoimpl.UnsafeEnabled { - mi := &file_api_transfer_v1_transfer_proto_msgTypes[3] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *TransOutReply) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*TransOutReply) ProtoMessage() {} - -func (x *TransOutReply) ProtoReflect() protoreflect.Message { - mi := &file_api_transfer_v1_transfer_proto_msgTypes[3] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use TransOutReply.ProtoReflect.Descriptor instead. -func (*TransOutReply) Descriptor() ([]byte, []int) { - return file_api_transfer_v1_transfer_proto_rawDescGZIP(), []int{3} -} - -type TransInRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - Amount uint32 `protobuf:"varint,1,opt,name=Amount,proto3" json:"Amount"` - UserId uint64 `protobuf:"varint,2,opt,name=UserId,proto3" json:"UserId"` -} - -func (x *TransInRequest) Reset() { - *x = TransInRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_api_transfer_v1_transfer_proto_msgTypes[4] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *TransInRequest) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*TransInRequest) ProtoMessage() {} - -func (x *TransInRequest) ProtoReflect() protoreflect.Message { - mi := &file_api_transfer_v1_transfer_proto_msgTypes[4] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use TransInRequest.ProtoReflect.Descriptor instead. 
-func (*TransInRequest) Descriptor() ([]byte, []int) { - return file_api_transfer_v1_transfer_proto_rawDescGZIP(), []int{4} -} - -func (x *TransInRequest) GetAmount() uint32 { - if x != nil { - return x.Amount - } - return 0 -} - -func (x *TransInRequest) GetUserId() uint64 { - if x != nil { - return x.UserId - } - return 0 -} - -type TransInReply struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields -} - -func (x *TransInReply) Reset() { - *x = TransInReply{} - if protoimpl.UnsafeEnabled { - mi := &file_api_transfer_v1_transfer_proto_msgTypes[5] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *TransInReply) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*TransInReply) ProtoMessage() {} - -func (x *TransInReply) ProtoReflect() protoreflect.Message { - mi := &file_api_transfer_v1_transfer_proto_msgTypes[5] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use TransInReply.ProtoReflect.Descriptor instead. -func (*TransInReply) Descriptor() ([]byte, []int) { - return file_api_transfer_v1_transfer_proto_rawDescGZIP(), []int{5} -} - -var File_api_transfer_v1_transfer_proto protoreflect.FileDescriptor - -var file_api_transfer_v1_transfer_proto_rawDesc = []byte{ - 0x0a, 0x1e, 0x61, 0x70, 0x69, 0x2f, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x66, 0x65, 0x72, 0x2f, 0x76, - 0x31, 0x2f, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x66, 0x65, 0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, - 0x12, 0x0f, 0x61, 0x70, 0x69, 0x2e, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x66, 0x65, 0x72, 0x2e, 0x76, - 0x31, 0x1a, 0x17, 0x76, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x65, 0x2f, 0x76, 0x61, 0x6c, 0x69, - 0x64, 0x61, 0x74, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, 0x80, 0x01, 0x0a, 0x0f, 0x54, - 0x72, 0x61, 0x6e, 0x73, 0x66, 0x65, 0x72, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1f, - 0x0a, 0x06, 0x41, 0x6d, 0x6f, 0x75, 0x6e, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0d, 0x42, 0x07, - 0xfa, 0x42, 0x04, 0x2a, 0x02, 0x20, 0x00, 0x52, 0x06, 0x41, 0x6d, 0x6f, 0x75, 0x6e, 0x74, 0x12, - 0x27, 0x0a, 0x0a, 0x46, 0x72, 0x6f, 0x6d, 0x55, 0x73, 0x65, 0x72, 0x49, 0x64, 0x18, 0x02, 0x20, - 0x01, 0x28, 0x04, 0x42, 0x07, 0xfa, 0x42, 0x04, 0x32, 0x02, 0x20, 0x00, 0x52, 0x0a, 0x46, 0x72, - 0x6f, 0x6d, 0x55, 0x73, 0x65, 0x72, 0x49, 0x64, 0x12, 0x23, 0x0a, 0x08, 0x54, 0x6f, 0x55, 0x73, - 0x65, 0x72, 0x49, 0x64, 0x18, 0x03, 0x20, 0x01, 0x28, 0x04, 0x42, 0x07, 0xfa, 0x42, 0x04, 0x32, - 0x02, 0x20, 0x00, 0x52, 0x08, 0x54, 0x6f, 0x55, 0x73, 0x65, 0x72, 0x49, 0x64, 0x22, 0x0f, 0x0a, - 0x0d, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x66, 0x65, 0x72, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0x53, - 0x0a, 0x0f, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x4f, 0x75, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, - 0x74, 0x12, 0x1f, 0x0a, 0x06, 0x41, 0x6d, 0x6f, 0x75, 0x6e, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, - 0x0d, 0x42, 0x07, 0xfa, 0x42, 0x04, 0x2a, 0x02, 0x20, 0x00, 0x52, 0x06, 0x41, 0x6d, 0x6f, 0x75, - 0x6e, 0x74, 0x12, 0x1f, 0x0a, 0x06, 0x55, 0x73, 0x65, 0x72, 0x49, 0x64, 0x18, 0x02, 0x20, 0x01, - 0x28, 0x04, 0x42, 0x07, 0xfa, 0x42, 0x04, 0x32, 0x02, 0x20, 0x00, 0x52, 0x06, 0x55, 0x73, 0x65, - 0x72, 0x49, 0x64, 0x22, 0x0f, 0x0a, 0x0d, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x4f, 0x75, 0x74, 0x52, - 0x65, 0x70, 0x6c, 0x79, 0x22, 0x52, 0x0a, 0x0e, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x49, 0x6e, 0x52, - 0x65, 
0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1f, 0x0a, 0x06, 0x41, 0x6d, 0x6f, 0x75, 0x6e, 0x74, - 0x18, 0x01, 0x20, 0x01, 0x28, 0x0d, 0x42, 0x07, 0xfa, 0x42, 0x04, 0x2a, 0x02, 0x20, 0x00, 0x52, - 0x06, 0x41, 0x6d, 0x6f, 0x75, 0x6e, 0x74, 0x12, 0x1f, 0x0a, 0x06, 0x55, 0x73, 0x65, 0x72, 0x49, - 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x04, 0x42, 0x07, 0xfa, 0x42, 0x04, 0x32, 0x02, 0x20, 0x00, - 0x52, 0x06, 0x55, 0x73, 0x65, 0x72, 0x49, 0x64, 0x22, 0x0e, 0x0a, 0x0c, 0x54, 0x72, 0x61, 0x6e, - 0x73, 0x49, 0x6e, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x32, 0xf7, 0x01, 0x0a, 0x08, 0x74, 0x72, 0x61, - 0x6e, 0x73, 0x66, 0x65, 0x72, 0x12, 0x4e, 0x0a, 0x08, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x66, 0x65, - 0x72, 0x12, 0x20, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x66, 0x65, 0x72, - 0x2e, 0x76, 0x31, 0x2e, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x66, 0x65, 0x72, 0x52, 0x65, 0x71, 0x75, - 0x65, 0x73, 0x74, 0x1a, 0x1e, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x66, - 0x65, 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x66, 0x65, 0x72, 0x52, 0x65, - 0x70, 0x6c, 0x79, 0x22, 0x00, 0x12, 0x4e, 0x0a, 0x08, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x4f, 0x75, - 0x74, 0x12, 0x20, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x66, 0x65, 0x72, - 0x2e, 0x76, 0x31, 0x2e, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x4f, 0x75, 0x74, 0x52, 0x65, 0x71, 0x75, - 0x65, 0x73, 0x74, 0x1a, 0x1e, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x66, - 0x65, 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x4f, 0x75, 0x74, 0x52, 0x65, - 0x70, 0x6c, 0x79, 0x22, 0x00, 0x12, 0x4b, 0x0a, 0x07, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x49, 0x6e, - 0x12, 0x1f, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x66, 0x65, 0x72, 0x2e, - 0x76, 0x31, 0x2e, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x49, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, - 0x74, 0x1a, 0x1d, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x66, 0x65, 0x72, - 0x2e, 0x76, 0x31, 0x2e, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x49, 0x6e, 0x52, 0x65, 0x70, 0x6c, 0x79, - 0x22, 0x00, 0x42, 0x1d, 0x5a, 0x1b, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x66, 0x65, 0x72, 0x2f, 0x61, - 0x70, 0x69, 0x2f, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x66, 0x65, 0x72, 0x2f, 0x76, 0x31, 0x3b, 0x76, - 0x31, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, -} - -var ( - file_api_transfer_v1_transfer_proto_rawDescOnce sync.Once - file_api_transfer_v1_transfer_proto_rawDescData = file_api_transfer_v1_transfer_proto_rawDesc -) - -func file_api_transfer_v1_transfer_proto_rawDescGZIP() []byte { - file_api_transfer_v1_transfer_proto_rawDescOnce.Do(func() { - file_api_transfer_v1_transfer_proto_rawDescData = protoimpl.X.CompressGZIP(file_api_transfer_v1_transfer_proto_rawDescData) - }) - return file_api_transfer_v1_transfer_proto_rawDescData -} - -var file_api_transfer_v1_transfer_proto_msgTypes = make([]protoimpl.MessageInfo, 6) -var file_api_transfer_v1_transfer_proto_goTypes = []interface{}{ - (*TransferRequest)(nil), // 0: api.transfer.v1.TransferRequest - (*TransferReply)(nil), // 1: api.transfer.v1.TransferReply - (*TransOutRequest)(nil), // 2: api.transfer.v1.TransOutRequest - (*TransOutReply)(nil), // 3: api.transfer.v1.TransOutReply - (*TransInRequest)(nil), // 4: api.transfer.v1.TransInRequest - (*TransInReply)(nil), // 5: api.transfer.v1.TransInReply -} -var file_api_transfer_v1_transfer_proto_depIdxs = []int32{ - 0, // 0: api.transfer.v1.transfer.Transfer:input_type -> api.transfer.v1.TransferRequest - 2, // 1: api.transfer.v1.transfer.TransOut:input_type -> 
api.transfer.v1.TransOutRequest - 4, // 2: api.transfer.v1.transfer.TransIn:input_type -> api.transfer.v1.TransInRequest - 1, // 3: api.transfer.v1.transfer.Transfer:output_type -> api.transfer.v1.TransferReply - 3, // 4: api.transfer.v1.transfer.TransOut:output_type -> api.transfer.v1.TransOutReply - 5, // 5: api.transfer.v1.transfer.TransIn:output_type -> api.transfer.v1.TransInReply - 3, // [3:6] is the sub-list for method output_type - 0, // [0:3] is the sub-list for method input_type - 0, // [0:0] is the sub-list for extension type_name - 0, // [0:0] is the sub-list for extension extendee - 0, // [0:0] is the sub-list for field type_name -} - -func init() { file_api_transfer_v1_transfer_proto_init() } -func file_api_transfer_v1_transfer_proto_init() { - if File_api_transfer_v1_transfer_proto != nil { - return - } - if !protoimpl.UnsafeEnabled { - file_api_transfer_v1_transfer_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*TransferRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_api_transfer_v1_transfer_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*TransferReply); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_api_transfer_v1_transfer_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*TransOutRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_api_transfer_v1_transfer_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*TransOutReply); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_api_transfer_v1_transfer_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*TransInRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_api_transfer_v1_transfer_proto_msgTypes[5].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*TransInReply); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - } - type x struct{} - out := protoimpl.TypeBuilder{ - File: protoimpl.DescBuilder{ - GoPackagePath: reflect.TypeOf(x{}).PkgPath(), - RawDescriptor: file_api_transfer_v1_transfer_proto_rawDesc, - NumEnums: 0, - NumMessages: 6, - NumExtensions: 0, - NumServices: 1, - }, - GoTypes: file_api_transfer_v1_transfer_proto_goTypes, - DependencyIndexes: file_api_transfer_v1_transfer_proto_depIdxs, - MessageInfos: file_api_transfer_v1_transfer_proto_msgTypes, - }.Build() - File_api_transfer_v1_transfer_proto = out.File - file_api_transfer_v1_transfer_proto_rawDesc = nil - file_api_transfer_v1_transfer_proto_goTypes = nil - file_api_transfer_v1_transfer_proto_depIdxs = nil -} diff --git a/b_sponge-dtm-msg/api/transfer/v1/transfer.pb.validate.go b/b_sponge-dtm-msg/api/transfer/v1/transfer.pb.validate.go deleted file mode 100644 index defcddd..0000000 --- a/b_sponge-dtm-msg/api/transfer/v1/transfer.pb.validate.go +++ /dev/null @@ -1,712 +0,0 @@ -// Code generated by protoc-gen-validate. DO NOT EDIT. 
-// source: api/transfer/v1/transfer.proto - -package v1 - -import ( - "bytes" - "errors" - "fmt" - "net" - "net/mail" - "net/url" - "regexp" - "sort" - "strings" - "time" - "unicode/utf8" - - "google.golang.org/protobuf/types/known/anypb" -) - -// ensure the imports are used -var ( - _ = bytes.MinRead - _ = errors.New("") - _ = fmt.Print - _ = utf8.UTFMax - _ = (*regexp.Regexp)(nil) - _ = (*strings.Reader)(nil) - _ = net.IPv4len - _ = time.Duration(0) - _ = (*url.URL)(nil) - _ = (*mail.Address)(nil) - _ = anypb.Any{} - _ = sort.Sort -) - -// Validate checks the field values on TransferRequest with the rules defined -// in the proto definition for this message. If any rules are violated, the -// first error encountered is returned, or nil if there are no violations. -func (m *TransferRequest) Validate() error { - return m.validate(false) -} - -// ValidateAll checks the field values on TransferRequest with the rules -// defined in the proto definition for this message. If any rules are -// violated, the result is a list of violation errors wrapped in -// TransferRequestMultiError, or nil if none found. -func (m *TransferRequest) ValidateAll() error { - return m.validate(true) -} - -func (m *TransferRequest) validate(all bool) error { - if m == nil { - return nil - } - - var errors []error - - if m.GetAmount() <= 0 { - err := TransferRequestValidationError{ - field: "Amount", - reason: "value must be greater than 0", - } - if !all { - return err - } - errors = append(errors, err) - } - - if m.GetFromUserId() <= 0 { - err := TransferRequestValidationError{ - field: "FromUserId", - reason: "value must be greater than 0", - } - if !all { - return err - } - errors = append(errors, err) - } - - if m.GetToUserId() <= 0 { - err := TransferRequestValidationError{ - field: "ToUserId", - reason: "value must be greater than 0", - } - if !all { - return err - } - errors = append(errors, err) - } - - if len(errors) > 0 { - return TransferRequestMultiError(errors) - } - - return nil -} - -// TransferRequestMultiError is an error wrapping multiple validation errors -// returned by TransferRequest.ValidateAll() if the designated constraints -// aren't met. -type TransferRequestMultiError []error - -// Error returns a concatenation of all the error messages it wraps. -func (m TransferRequestMultiError) Error() string { - var msgs []string - for _, err := range m { - msgs = append(msgs, err.Error()) - } - return strings.Join(msgs, "; ") -} - -// AllErrors returns a list of validation violation errors. -func (m TransferRequestMultiError) AllErrors() []error { return m } - -// TransferRequestValidationError is the validation error returned by -// TransferRequest.Validate if the designated constraints aren't met. -type TransferRequestValidationError struct { - field string - reason string - cause error - key bool -} - -// Field function returns field value. -func (e TransferRequestValidationError) Field() string { return e.field } - -// Reason function returns reason value. -func (e TransferRequestValidationError) Reason() string { return e.reason } - -// Cause function returns cause value. -func (e TransferRequestValidationError) Cause() error { return e.cause } - -// Key function returns key value. -func (e TransferRequestValidationError) Key() bool { return e.key } - -// ErrorName returns error name. 
-func (e TransferRequestValidationError) ErrorName() string { return "TransferRequestValidationError" } - -// Error satisfies the builtin error interface -func (e TransferRequestValidationError) Error() string { - cause := "" - if e.cause != nil { - cause = fmt.Sprintf(" | caused by: %v", e.cause) - } - - key := "" - if e.key { - key = "key for " - } - - return fmt.Sprintf( - "invalid %sTransferRequest.%s: %s%s", - key, - e.field, - e.reason, - cause) -} - -var _ error = TransferRequestValidationError{} - -var _ interface { - Field() string - Reason() string - Key() bool - Cause() error - ErrorName() string -} = TransferRequestValidationError{} - -// Validate checks the field values on TransferReply with the rules defined in -// the proto definition for this message. If any rules are violated, the first -// error encountered is returned, or nil if there are no violations. -func (m *TransferReply) Validate() error { - return m.validate(false) -} - -// ValidateAll checks the field values on TransferReply with the rules defined -// in the proto definition for this message. If any rules are violated, the -// result is a list of violation errors wrapped in TransferReplyMultiError, or -// nil if none found. -func (m *TransferReply) ValidateAll() error { - return m.validate(true) -} - -func (m *TransferReply) validate(all bool) error { - if m == nil { - return nil - } - - var errors []error - - if len(errors) > 0 { - return TransferReplyMultiError(errors) - } - - return nil -} - -// TransferReplyMultiError is an error wrapping multiple validation errors -// returned by TransferReply.ValidateAll() if the designated constraints -// aren't met. -type TransferReplyMultiError []error - -// Error returns a concatenation of all the error messages it wraps. -func (m TransferReplyMultiError) Error() string { - var msgs []string - for _, err := range m { - msgs = append(msgs, err.Error()) - } - return strings.Join(msgs, "; ") -} - -// AllErrors returns a list of validation violation errors. -func (m TransferReplyMultiError) AllErrors() []error { return m } - -// TransferReplyValidationError is the validation error returned by -// TransferReply.Validate if the designated constraints aren't met. -type TransferReplyValidationError struct { - field string - reason string - cause error - key bool -} - -// Field function returns field value. -func (e TransferReplyValidationError) Field() string { return e.field } - -// Reason function returns reason value. -func (e TransferReplyValidationError) Reason() string { return e.reason } - -// Cause function returns cause value. -func (e TransferReplyValidationError) Cause() error { return e.cause } - -// Key function returns key value. -func (e TransferReplyValidationError) Key() bool { return e.key } - -// ErrorName returns error name. 
-func (e TransferReplyValidationError) ErrorName() string { return "TransferReplyValidationError" } - -// Error satisfies the builtin error interface -func (e TransferReplyValidationError) Error() string { - cause := "" - if e.cause != nil { - cause = fmt.Sprintf(" | caused by: %v", e.cause) - } - - key := "" - if e.key { - key = "key for " - } - - return fmt.Sprintf( - "invalid %sTransferReply.%s: %s%s", - key, - e.field, - e.reason, - cause) -} - -var _ error = TransferReplyValidationError{} - -var _ interface { - Field() string - Reason() string - Key() bool - Cause() error - ErrorName() string -} = TransferReplyValidationError{} - -// Validate checks the field values on TransOutRequest with the rules defined -// in the proto definition for this message. If any rules are violated, the -// first error encountered is returned, or nil if there are no violations. -func (m *TransOutRequest) Validate() error { - return m.validate(false) -} - -// ValidateAll checks the field values on TransOutRequest with the rules -// defined in the proto definition for this message. If any rules are -// violated, the result is a list of violation errors wrapped in -// TransOutRequestMultiError, or nil if none found. -func (m *TransOutRequest) ValidateAll() error { - return m.validate(true) -} - -func (m *TransOutRequest) validate(all bool) error { - if m == nil { - return nil - } - - var errors []error - - if m.GetAmount() <= 0 { - err := TransOutRequestValidationError{ - field: "Amount", - reason: "value must be greater than 0", - } - if !all { - return err - } - errors = append(errors, err) - } - - if m.GetUserId() <= 0 { - err := TransOutRequestValidationError{ - field: "UserId", - reason: "value must be greater than 0", - } - if !all { - return err - } - errors = append(errors, err) - } - - if len(errors) > 0 { - return TransOutRequestMultiError(errors) - } - - return nil -} - -// TransOutRequestMultiError is an error wrapping multiple validation errors -// returned by TransOutRequest.ValidateAll() if the designated constraints -// aren't met. -type TransOutRequestMultiError []error - -// Error returns a concatenation of all the error messages it wraps. -func (m TransOutRequestMultiError) Error() string { - var msgs []string - for _, err := range m { - msgs = append(msgs, err.Error()) - } - return strings.Join(msgs, "; ") -} - -// AllErrors returns a list of validation violation errors. -func (m TransOutRequestMultiError) AllErrors() []error { return m } - -// TransOutRequestValidationError is the validation error returned by -// TransOutRequest.Validate if the designated constraints aren't met. -type TransOutRequestValidationError struct { - field string - reason string - cause error - key bool -} - -// Field function returns field value. -func (e TransOutRequestValidationError) Field() string { return e.field } - -// Reason function returns reason value. -func (e TransOutRequestValidationError) Reason() string { return e.reason } - -// Cause function returns cause value. -func (e TransOutRequestValidationError) Cause() error { return e.cause } - -// Key function returns key value. -func (e TransOutRequestValidationError) Key() bool { return e.key } - -// ErrorName returns error name. 
-func (e TransOutRequestValidationError) ErrorName() string { return "TransOutRequestValidationError" } - -// Error satisfies the builtin error interface -func (e TransOutRequestValidationError) Error() string { - cause := "" - if e.cause != nil { - cause = fmt.Sprintf(" | caused by: %v", e.cause) - } - - key := "" - if e.key { - key = "key for " - } - - return fmt.Sprintf( - "invalid %sTransOutRequest.%s: %s%s", - key, - e.field, - e.reason, - cause) -} - -var _ error = TransOutRequestValidationError{} - -var _ interface { - Field() string - Reason() string - Key() bool - Cause() error - ErrorName() string -} = TransOutRequestValidationError{} - -// Validate checks the field values on TransOutReply with the rules defined in -// the proto definition for this message. If any rules are violated, the first -// error encountered is returned, or nil if there are no violations. -func (m *TransOutReply) Validate() error { - return m.validate(false) -} - -// ValidateAll checks the field values on TransOutReply with the rules defined -// in the proto definition for this message. If any rules are violated, the -// result is a list of violation errors wrapped in TransOutReplyMultiError, or -// nil if none found. -func (m *TransOutReply) ValidateAll() error { - return m.validate(true) -} - -func (m *TransOutReply) validate(all bool) error { - if m == nil { - return nil - } - - var errors []error - - if len(errors) > 0 { - return TransOutReplyMultiError(errors) - } - - return nil -} - -// TransOutReplyMultiError is an error wrapping multiple validation errors -// returned by TransOutReply.ValidateAll() if the designated constraints -// aren't met. -type TransOutReplyMultiError []error - -// Error returns a concatenation of all the error messages it wraps. -func (m TransOutReplyMultiError) Error() string { - var msgs []string - for _, err := range m { - msgs = append(msgs, err.Error()) - } - return strings.Join(msgs, "; ") -} - -// AllErrors returns a list of validation violation errors. -func (m TransOutReplyMultiError) AllErrors() []error { return m } - -// TransOutReplyValidationError is the validation error returned by -// TransOutReply.Validate if the designated constraints aren't met. -type TransOutReplyValidationError struct { - field string - reason string - cause error - key bool -} - -// Field function returns field value. -func (e TransOutReplyValidationError) Field() string { return e.field } - -// Reason function returns reason value. -func (e TransOutReplyValidationError) Reason() string { return e.reason } - -// Cause function returns cause value. -func (e TransOutReplyValidationError) Cause() error { return e.cause } - -// Key function returns key value. -func (e TransOutReplyValidationError) Key() bool { return e.key } - -// ErrorName returns error name. 
-func (e TransOutReplyValidationError) ErrorName() string { return "TransOutReplyValidationError" } - -// Error satisfies the builtin error interface -func (e TransOutReplyValidationError) Error() string { - cause := "" - if e.cause != nil { - cause = fmt.Sprintf(" | caused by: %v", e.cause) - } - - key := "" - if e.key { - key = "key for " - } - - return fmt.Sprintf( - "invalid %sTransOutReply.%s: %s%s", - key, - e.field, - e.reason, - cause) -} - -var _ error = TransOutReplyValidationError{} - -var _ interface { - Field() string - Reason() string - Key() bool - Cause() error - ErrorName() string -} = TransOutReplyValidationError{} - -// Validate checks the field values on TransInRequest with the rules defined in -// the proto definition for this message. If any rules are violated, the first -// error encountered is returned, or nil if there are no violations. -func (m *TransInRequest) Validate() error { - return m.validate(false) -} - -// ValidateAll checks the field values on TransInRequest with the rules defined -// in the proto definition for this message. If any rules are violated, the -// result is a list of violation errors wrapped in TransInRequestMultiError, -// or nil if none found. -func (m *TransInRequest) ValidateAll() error { - return m.validate(true) -} - -func (m *TransInRequest) validate(all bool) error { - if m == nil { - return nil - } - - var errors []error - - if m.GetAmount() <= 0 { - err := TransInRequestValidationError{ - field: "Amount", - reason: "value must be greater than 0", - } - if !all { - return err - } - errors = append(errors, err) - } - - if m.GetUserId() <= 0 { - err := TransInRequestValidationError{ - field: "UserId", - reason: "value must be greater than 0", - } - if !all { - return err - } - errors = append(errors, err) - } - - if len(errors) > 0 { - return TransInRequestMultiError(errors) - } - - return nil -} - -// TransInRequestMultiError is an error wrapping multiple validation errors -// returned by TransInRequest.ValidateAll() if the designated constraints -// aren't met. -type TransInRequestMultiError []error - -// Error returns a concatenation of all the error messages it wraps. -func (m TransInRequestMultiError) Error() string { - var msgs []string - for _, err := range m { - msgs = append(msgs, err.Error()) - } - return strings.Join(msgs, "; ") -} - -// AllErrors returns a list of validation violation errors. -func (m TransInRequestMultiError) AllErrors() []error { return m } - -// TransInRequestValidationError is the validation error returned by -// TransInRequest.Validate if the designated constraints aren't met. -type TransInRequestValidationError struct { - field string - reason string - cause error - key bool -} - -// Field function returns field value. -func (e TransInRequestValidationError) Field() string { return e.field } - -// Reason function returns reason value. -func (e TransInRequestValidationError) Reason() string { return e.reason } - -// Cause function returns cause value. -func (e TransInRequestValidationError) Cause() error { return e.cause } - -// Key function returns key value. -func (e TransInRequestValidationError) Key() bool { return e.key } - -// ErrorName returns error name. 
-func (e TransInRequestValidationError) ErrorName() string { return "TransInRequestValidationError" } - -// Error satisfies the builtin error interface -func (e TransInRequestValidationError) Error() string { - cause := "" - if e.cause != nil { - cause = fmt.Sprintf(" | caused by: %v", e.cause) - } - - key := "" - if e.key { - key = "key for " - } - - return fmt.Sprintf( - "invalid %sTransInRequest.%s: %s%s", - key, - e.field, - e.reason, - cause) -} - -var _ error = TransInRequestValidationError{} - -var _ interface { - Field() string - Reason() string - Key() bool - Cause() error - ErrorName() string -} = TransInRequestValidationError{} - -// Validate checks the field values on TransInReply with the rules defined in -// the proto definition for this message. If any rules are violated, the first -// error encountered is returned, or nil if there are no violations. -func (m *TransInReply) Validate() error { - return m.validate(false) -} - -// ValidateAll checks the field values on TransInReply with the rules defined -// in the proto definition for this message. If any rules are violated, the -// result is a list of violation errors wrapped in TransInReplyMultiError, or -// nil if none found. -func (m *TransInReply) ValidateAll() error { - return m.validate(true) -} - -func (m *TransInReply) validate(all bool) error { - if m == nil { - return nil - } - - var errors []error - - if len(errors) > 0 { - return TransInReplyMultiError(errors) - } - - return nil -} - -// TransInReplyMultiError is an error wrapping multiple validation errors -// returned by TransInReply.ValidateAll() if the designated constraints aren't met. -type TransInReplyMultiError []error - -// Error returns a concatenation of all the error messages it wraps. -func (m TransInReplyMultiError) Error() string { - var msgs []string - for _, err := range m { - msgs = append(msgs, err.Error()) - } - return strings.Join(msgs, "; ") -} - -// AllErrors returns a list of validation violation errors. -func (m TransInReplyMultiError) AllErrors() []error { return m } - -// TransInReplyValidationError is the validation error returned by -// TransInReply.Validate if the designated constraints aren't met. -type TransInReplyValidationError struct { - field string - reason string - cause error - key bool -} - -// Field function returns field value. -func (e TransInReplyValidationError) Field() string { return e.field } - -// Reason function returns reason value. -func (e TransInReplyValidationError) Reason() string { return e.reason } - -// Cause function returns cause value. -func (e TransInReplyValidationError) Cause() error { return e.cause } - -// Key function returns key value. -func (e TransInReplyValidationError) Key() bool { return e.key } - -// ErrorName returns error name. 
-func (e TransInReplyValidationError) ErrorName() string { return "TransInReplyValidationError" } - -// Error satisfies the builtin error interface -func (e TransInReplyValidationError) Error() string { - cause := "" - if e.cause != nil { - cause = fmt.Sprintf(" | caused by: %v", e.cause) - } - - key := "" - if e.key { - key = "key for " - } - - return fmt.Sprintf( - "invalid %sTransInReply.%s: %s%s", - key, - e.field, - e.reason, - cause) -} - -var _ error = TransInReplyValidationError{} - -var _ interface { - Field() string - Reason() string - Key() bool - Cause() error - ErrorName() string -} = TransInReplyValidationError{} diff --git a/b_sponge-dtm-msg/api/transfer/v1/transfer.proto b/b_sponge-dtm-msg/api/transfer/v1/transfer.proto deleted file mode 100644 index 97a4cd3..0000000 --- a/b_sponge-dtm-msg/api/transfer/v1/transfer.proto +++ /dev/null @@ -1,42 +0,0 @@ -syntax = "proto3"; - -package api.transfer.v1; - -import "validate/validate.proto"; - -option go_package = "transfer/api/transfer/v1;v1"; - -service transfer { - // 转账 - rpc Transfer(TransferRequest) returns (TransferReply) {} - - // 转出 - rpc TransOut(TransOutRequest) returns (TransOutReply) {} - // 转入 - rpc TransIn(TransInRequest) returns (TransInReply) {} -} - -message TransferRequest { - uint32 Amount = 1 [(validate.rules).uint32.gt = 0]; - uint64 FromUserId = 2 [(validate.rules).uint64.gt = 0]; - uint64 ToUserId = 3 [(validate.rules).uint64.gt = 0]; -} - -message TransferReply { -} - -message TransOutRequest { - uint32 Amount = 1 [(validate.rules).uint32.gt = 0]; - uint64 UserId = 2 [(validate.rules).uint64.gt = 0]; -} - -message TransOutReply { -} - -message TransInRequest { - uint32 Amount = 1 [(validate.rules).uint32.gt = 0]; - uint64 UserId = 2 [(validate.rules).uint64.gt = 0]; -} - -message TransInReply { -} \ No newline at end of file diff --git a/b_sponge-dtm-msg/api/transfer/v1/transfer_grpc.pb.go b/b_sponge-dtm-msg/api/transfer/v1/transfer_grpc.pb.go deleted file mode 100644 index de3e52d..0000000 --- a/b_sponge-dtm-msg/api/transfer/v1/transfer_grpc.pb.go +++ /dev/null @@ -1,183 +0,0 @@ -// Code generated by protoc-gen-go-grpc. DO NOT EDIT. -// versions: -// - protoc-gen-go-grpc v1.2.0 -// - protoc v3.20.1 -// source: api/transfer/v1/transfer.proto - -package v1 - -import ( - context "context" - grpc "google.golang.org/grpc" - codes "google.golang.org/grpc/codes" - status "google.golang.org/grpc/status" -) - -// This is a compile-time assertion to ensure that this generated file -// is compatible with the grpc package it is being compiled against. -// Requires gRPC-Go v1.32.0 or later. -const _ = grpc.SupportPackageIsVersion7 - -// TransferClient is the client API for Transfer service. -// -// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://pkg.go.dev/google.golang.org/grpc/?tab=doc#ClientConn.NewStream. 
-type TransferClient interface { - // 转账 - Transfer(ctx context.Context, in *TransferRequest, opts ...grpc.CallOption) (*TransferReply, error) - // 转出 - TransOut(ctx context.Context, in *TransOutRequest, opts ...grpc.CallOption) (*TransOutReply, error) - // 转入 - TransIn(ctx context.Context, in *TransInRequest, opts ...grpc.CallOption) (*TransInReply, error) -} - -type transferClient struct { - cc grpc.ClientConnInterface -} - -func NewTransferClient(cc grpc.ClientConnInterface) TransferClient { - return &transferClient{cc} -} - -func (c *transferClient) Transfer(ctx context.Context, in *TransferRequest, opts ...grpc.CallOption) (*TransferReply, error) { - out := new(TransferReply) - err := c.cc.Invoke(ctx, "/api.transfer.v1.transfer/Transfer", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *transferClient) TransOut(ctx context.Context, in *TransOutRequest, opts ...grpc.CallOption) (*TransOutReply, error) { - out := new(TransOutReply) - err := c.cc.Invoke(ctx, "/api.transfer.v1.transfer/TransOut", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *transferClient) TransIn(ctx context.Context, in *TransInRequest, opts ...grpc.CallOption) (*TransInReply, error) { - out := new(TransInReply) - err := c.cc.Invoke(ctx, "/api.transfer.v1.transfer/TransIn", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -// TransferServer is the server API for Transfer service. -// All implementations must embed UnimplementedTransferServer -// for forward compatibility -type TransferServer interface { - // 转账 - Transfer(context.Context, *TransferRequest) (*TransferReply, error) - // 转出 - TransOut(context.Context, *TransOutRequest) (*TransOutReply, error) - // 转入 - TransIn(context.Context, *TransInRequest) (*TransInReply, error) - mustEmbedUnimplementedTransferServer() -} - -// UnimplementedTransferServer must be embedded to have forward compatible implementations. -type UnimplementedTransferServer struct { -} - -func (UnimplementedTransferServer) Transfer(context.Context, *TransferRequest) (*TransferReply, error) { - return nil, status.Errorf(codes.Unimplemented, "method Transfer not implemented") -} -func (UnimplementedTransferServer) TransOut(context.Context, *TransOutRequest) (*TransOutReply, error) { - return nil, status.Errorf(codes.Unimplemented, "method TransOut not implemented") -} -func (UnimplementedTransferServer) TransIn(context.Context, *TransInRequest) (*TransInReply, error) { - return nil, status.Errorf(codes.Unimplemented, "method TransIn not implemented") -} -func (UnimplementedTransferServer) mustEmbedUnimplementedTransferServer() {} - -// UnsafeTransferServer may be embedded to opt out of forward compatibility for this service. -// Use of this interface is not recommended, as added methods to TransferServer will -// result in compilation errors. 
-type UnsafeTransferServer interface { - mustEmbedUnimplementedTransferServer() -} - -func RegisterTransferServer(s grpc.ServiceRegistrar, srv TransferServer) { - s.RegisterService(&Transfer_ServiceDesc, srv) -} - -func _Transfer_Transfer_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(TransferRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(TransferServer).Transfer(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/api.transfer.v1.transfer/Transfer", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(TransferServer).Transfer(ctx, req.(*TransferRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _Transfer_TransOut_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(TransOutRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(TransferServer).TransOut(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/api.transfer.v1.transfer/TransOut", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(TransferServer).TransOut(ctx, req.(*TransOutRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _Transfer_TransIn_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(TransInRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(TransferServer).TransIn(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/api.transfer.v1.transfer/TransIn", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(TransferServer).TransIn(ctx, req.(*TransInRequest)) - } - return interceptor(ctx, in, info, handler) -} - -// Transfer_ServiceDesc is the grpc.ServiceDesc for Transfer service. -// It's only intended for direct use with grpc.RegisterService, -// and not to be introspected or modified (even as a copy) -var Transfer_ServiceDesc = grpc.ServiceDesc{ - ServiceName: "api.transfer.v1.transfer", - HandlerType: (*TransferServer)(nil), - Methods: []grpc.MethodDesc{ - { - MethodName: "Transfer", - Handler: _Transfer_Transfer_Handler, - }, - { - MethodName: "TransOut", - Handler: _Transfer_TransOut_Handler, - }, - { - MethodName: "TransIn", - Handler: _Transfer_TransIn_Handler, - }, - }, - Streams: []grpc.StreamDesc{}, - Metadata: "api/transfer/v1/transfer.proto", -} diff --git a/b_sponge-dtm-msg/cmd/transfer/initial/initApp.go b/b_sponge-dtm-msg/cmd/transfer/initial/initApp.go deleted file mode 100644 index 6b0b308..0000000 --- a/b_sponge-dtm-msg/cmd/transfer/initial/initApp.go +++ /dev/null @@ -1,87 +0,0 @@ -// Package initial is the package that starts the service to initialize the service, including -// the initialization configuration, service configuration, connecting to the database, and -// resource release needed when shutting down the service. 
-package initial - -import ( - "flag" - "strconv" - - "github.com/zhufuyi/sponge/pkg/logger" - "github.com/zhufuyi/sponge/pkg/stat" - "github.com/zhufuyi/sponge/pkg/tracer" - - "transfer/configs" - "transfer/internal/config" - //"transfer/internal/model" -) - -var ( - version string - configFile string -) - -// InitApp initial app configuration -func InitApp() { - initConfig() - cfg := config.Get() - - // initializing log - _, err := logger.Init( - logger.WithLevel(cfg.Logger.Level), - logger.WithFormat(cfg.Logger.Format), - logger.WithSave(cfg.Logger.IsSave), - ) - if err != nil { - panic(err) - } - logger.Debug(config.Show()) - logger.Info("init logger succeeded") - - // initializing database - //model.InitMysql() - //logger.Info("init mysql succeeded") - //model.InitCache(cfg.App.CacheType) - - // initializing tracing - if cfg.App.EnableTrace { - tracer.InitWithConfig( - cfg.App.Name, - cfg.App.Env, - cfg.App.Version, - cfg.Jaeger.AgentHost, - strconv.Itoa(cfg.Jaeger.AgentPort), - cfg.App.TracingSamplingRate, - ) - logger.Info("init tracer succeeded") - } - - // initializing the print system and process resources - if cfg.App.EnableStat { - stat.Init( - stat.WithLog(logger.Get()), - stat.WithAlarm(), // invalid if it is windows, the default threshold for cpu and memory is 0.8, you can modify them - ) - logger.Info("init statistics succeeded") - } -} - -func initConfig() { - flag.StringVar(&version, "version", "", "service Version Number") - flag.StringVar(&configFile, "c", "", "configuration file") - flag.Parse() - - // get configuration from local configuration file - if configFile == "" { - configFile = configs.Path("transfer.yml") - } - err := config.Init(configFile) - if err != nil { - panic("init config error: " + err.Error()) - } - - if version != "" { - config.Get().App.Version = version - } - //fmt.Println(config.Show()) -} diff --git a/b_sponge-dtm-msg/configs/transfer.yml b/b_sponge-dtm-msg/configs/transfer.yml deleted file mode 100644 index fccda80..0000000 --- a/b_sponge-dtm-msg/configs/transfer.yml +++ /dev/null @@ -1,68 +0,0 @@ -# Generate the go struct command: sponge config --server-dir=./serverDir - -# app settings -app: - name: "transfer" # server name - env: "dev" # runtime environment, dev: development environment, prod: production environment, test: test environment - version: "v0.0.0" - host: "127.0.0.1" # domain or ip, for service registration - enableHTTPProfile: false # whether to turn on performance analysis, true:enable, false:disable - enableStat: false # whether to turn on printing statistics, true:enable, false:disable - enableMetrics: true # whether to turn on indicator collection, true:enable, false:disable - enableLimit: false # whether to turn on rate limiting (adaptive), true:on, false:off - enableCircuitBreaker: false # whether to turn on circuit breaker(adaptive), true:on, false:off - enableTrace: false # whether to turn on trace, true:enable, false:disable, if true jaeger configuration must be set - tracingSamplingRate: 1.0 # tracing sampling rate, between 0 and 1, 0 means no sampling, 1 means sampling all links - registryDiscoveryType: "" # registry and discovery types: consul, etcd, nacos, if empty, registration and discovery are not used - cacheType: "" # cache type, "memory" or "redis", if set to redis, must set redis configuration - - -# grpc service settings -grpc: - port: 8282 # listen port - httpPort: 8283 # profile and metrics ports - readTimeout: 5 # read timeout, unit(second) - writeTimeout: 5 # write timeout, unit(second) - enableToken: 
false # whether to enable server-side token authentication, default appID=grpc, appKey=123456 - # serverSecure parameter setting - # if type="", it means no secure connection, no need to fill in any parameters - # if type="one-way", it means server-side certification, only the fields "certFile" and "keyFile" should be filled in - # if type="two-way", it means both client and server side certification, fill in all fields - serverSecure: - type: "" # secures type, "", "one-way", "two-way" - certFile: "" # server side cert file, absolute path - keyFile: "" # server side key file, absolute path - caFile: "" # ca certificate file, valid only in "two-way", absolute path - - -# grpc client settings, support for setting up multiple grpc clients -grpcClient: - - name: "dtmservice" # grpc service name, used for service discovery - host: "127.0.0.1" # grpc service address, used for direct connection - port: 36790 # grpc service port - registryDiscoveryType: "" # registration and discovery types: consul, etcd, nacos, if empty, connecting to server using host and port - enableLoadBalance: true - - -# logger settings -logger: - level: "debug" # output log levels debug, info, warn, error, default is debug - format: "console" # output format, console or json, default is console - isSave: false # false:output to terminal, true:output to file, default is false - - -# etcd settings -etcd: - addrs: ["127.0.0.1:2379"] - - -# consul settings -consul: - addr: "127.0.0.1:8500" - - -# nacos settings, used in service registration discovery -nacosRd: - ipAddr: "127.0.0.1" - port: 8848 - namespaceID: "" # namespace id, default is public diff --git a/b_sponge-dtm-msg/docs/gen.info b/b_sponge-dtm-msg/docs/gen.info deleted file mode 100644 index ac2995c..0000000 --- a/b_sponge-dtm-msg/docs/gen.info +++ /dev/null @@ -1 +0,0 @@ -transfer,transfer \ No newline at end of file diff --git a/b_sponge-dtm-msg/internal/ecode/transfer_rpc.go b/b_sponge-dtm-msg/internal/ecode/transfer_rpc.go deleted file mode 100644 index 269dd2d..0000000 --- a/b_sponge-dtm-msg/internal/ecode/transfer_rpc.go +++ /dev/null @@ -1,20 +0,0 @@ -// Code generated by https://github.com/zhufuyi/sponge - -package ecode - -import ( - "github.com/zhufuyi/sponge/pkg/errcode" -) - -// transfer business-level rpc error codes. -// the _transferNO value range is 1~100, if the same number appears, it will cause a failure to start the service. 
-var ( - _transferNO = 31 - _transferName = "transfer" - _transferBaseCode = errcode.RCode(_transferNO) - - StatusTransOutTransfer = errcode.NewRPCStatus(_transferBaseCode+1, "failed to TransOut "+_transferName) - StatusTransInTransfer = errcode.NewRPCStatus(_transferBaseCode+2, "failed to TransIn "+_transferName) - StatusTransferTransfer = errcode.NewRPCStatus(_transferBaseCode+3, "failed to Transfer "+_transferName) - // error codes are globally unique, adding 1 to the previous error code -) diff --git a/b_sponge-dtm-msg/internal/rpcclient/transfer.go b/b_sponge-dtm-msg/internal/rpcclient/transfer.go deleted file mode 100644 index ff6d7d8..0000000 --- a/b_sponge-dtm-msg/internal/rpcclient/transfer.go +++ /dev/null @@ -1,36 +0,0 @@ -package rpcclient - -import ( - "fmt" - "sync" - - "transfer/internal/config" -) - -var ( - transferEndPoint string - transferOnce sync.Once -) - -// InitTransferEndpoint init transfer endpoint -func InitTransferEndpoint() { - switch config.Get().App.RegistryDiscoveryType { - case "consul", "etcd": - transferEndPoint = "discovery:///" + config.Get().App.Name - case "nacos": - transferEndPoint = "discovery:///" + config.Get().App.Name + ".grpc" - default: - transferEndPoint = fmt.Sprintf("%s:%d", config.Get().App.Host, config.Get().Grpc.Port) - } -} - -// GetTransferEndpoint get transfer endpoint -func GetTransferEndpoint() string { - if transferEndPoint == "" { - transferOnce.Do(func() { - InitTransferEndpoint() - }) - } - - return transferEndPoint -} diff --git a/b_sponge-dtm-msg/internal/server/grpc_option.go b/b_sponge-dtm-msg/internal/server/grpc_option.go deleted file mode 100644 index bf11ba1..0000000 --- a/b_sponge-dtm-msg/internal/server/grpc_option.go +++ /dev/null @@ -1,54 +0,0 @@ -package server - -import ( - "time" - - "github.com/zhufuyi/sponge/pkg/servicerd/registry" -) - -// GrpcOption grpc settings -type GrpcOption func(*grpcOptions) - -type grpcOptions struct { - readTimeout time.Duration - writeTimeout time.Duration - instance *registry.ServiceInstance - iRegistry registry.Registry -} - -func defaultGrpcOptions() *grpcOptions { - return &grpcOptions{ - readTimeout: time.Second * 3, - writeTimeout: time.Second * 3, - instance: nil, - iRegistry: nil, - } -} - -func (o *grpcOptions) apply(opts ...GrpcOption) { - for _, opt := range opts { - opt(o) - } -} - -// WithGrpcReadTimeout setting up read timeout -func WithGrpcReadTimeout(timeout time.Duration) GrpcOption { - return func(o *grpcOptions) { - o.readTimeout = timeout - } -} - -// WithGrpcWriteTimeout setting up writer timeout -func WithGrpcWriteTimeout(timeout time.Duration) GrpcOption { - return func(o *grpcOptions) { - o.writeTimeout = timeout - } -} - -// WithGrpcRegistry registration services -func WithGrpcRegistry(iRegistry registry.Registry, instance *registry.ServiceInstance) GrpcOption { - return func(o *grpcOptions) { - o.iRegistry = iRegistry - o.instance = instance - } -} diff --git a/b_sponge-dtm-msg/internal/service/transfer.go b/b_sponge-dtm-msg/internal/service/transfer.go deleted file mode 100644 index 2f32542..0000000 --- a/b_sponge-dtm-msg/internal/service/transfer.go +++ /dev/null @@ -1,93 +0,0 @@ -// Code generated by https://github.com/zhufuyi/sponge - -package service - -import ( - "context" - - "github.com/dtm-labs/client/dtmgrpc" - "github.com/zhufuyi/sponge/pkg/grpc/interceptor" - "github.com/zhufuyi/sponge/pkg/logger" - "google.golang.org/grpc" - - transferV1 "transfer/api/transfer/v1" - "transfer/internal/ecode" - "transfer/internal/rpcclient" -) - -func init() { - 
registerFns = append(registerFns, func(server *grpc.Server) { - transferV1.RegisterTransferServer(server, NewTransferServer()) - }) -} - -var _ transferV1.TransferServer = (*transfer)(nil) - -type transfer struct { - transferV1.UnimplementedTransferServer -} - -// NewTransferServer create a server -func NewTransferServer() transferV1.TransferServer { - rpcclient.InitDtmServerResolver() - return &transfer{} -} - -// Transfer 转账 -func (s *transfer) Transfer(ctx context.Context, req *transferV1.TransferRequest) (*transferV1.TransferReply, error) { - err := req.Validate() - if err != nil { - logger.Warn("req.Validate error", logger.Err(err), logger.Any("req", req), interceptor.ServerCtxRequestIDField(ctx)) - return nil, ecode.StatusInvalidParams.Err() - } - - var ( - // 直连ip方式 或 服务发现方式,由配置文件决定 - dtmServer = rpcclient.GetDtmEndpoint() - transferServer = rpcclient.GetTransferEndpoint() - ) - - logger.Debug("server endpoint", logger.String("dtm", dtmServer), logger.String("transfer", transferServer)) - - transOutData := &transferV1.TransOutRequest{ - Amount: req.Amount, - UserId: req.FromUserId, - } - transInData := &transferV1.TransInRequest{ - Amount: req.Amount, - UserId: req.ToUserId, - } - gid := dtmgrpc.MustGenGid(dtmServer) - m := dtmgrpc.NewMsgGrpc(dtmServer, gid). - Add(transferServer+"/api.transfer.v1.transfer/TransOut", transOutData). - Add(transferServer+"/api.transfer.v1.transfer/TransIn", transInData) - m.WaitResult = true - err = m.Submit() - if err != nil { - logger.Error("Transfer error", logger.Err(err), interceptor.ServerCtxRequestIDField(ctx)) - return nil, ecode.StatusInternalServerError.Err() - } - return &transferV1.TransferReply{}, nil -} - -// TransOut 转出 -func (s *transfer) TransOut(ctx context.Context, req *transferV1.TransOutRequest) (*transferV1.TransOutReply, error) { - err := req.Validate() - if err != nil { - logger.Warn("req.Validate error", logger.Err(err), logger.Any("req", req), interceptor.ServerCtxRequestIDField(ctx)) - return nil, ecode.StatusInvalidParams.Err() - } - logger.Info("转出成功", logger.Any("req", req), interceptor.ServerCtxRequestIDField(ctx)) - return &transferV1.TransOutReply{}, nil -} - -// TransIn 转入 -func (s *transfer) TransIn(ctx context.Context, req *transferV1.TransInRequest) (*transferV1.TransInReply, error) { - err := req.Validate() - if err != nil { - logger.Warn("req.Validate error", logger.Err(err), logger.Any("req", req), interceptor.ServerCtxRequestIDField(ctx)) - return nil, ecode.StatusInvalidParams.Err() - } - logger.Info("转入成功", logger.Any("req", req), interceptor.ServerCtxRequestIDField(ctx)) - return &transferV1.TransInReply{}, nil -} diff --git a/b_sponge-dtm-msg/internal/service/transfer_client_test.go b/b_sponge-dtm-msg/internal/service/transfer_client_test.go deleted file mode 100644 index 5316c7c..0000000 --- a/b_sponge-dtm-msg/internal/service/transfer_client_test.go +++ /dev/null @@ -1,166 +0,0 @@ -// Code generated by https://github.com/zhufuyi/sponge - -package service - -import ( - "context" - "encoding/json" - "fmt" - "testing" - "time" - - "github.com/zhufuyi/sponge/pkg/grpc/benchmark" - - transferV1 "transfer/api/transfer/v1" - "transfer/configs" - "transfer/internal/config" -) - -// Test each method of transfer via the rpc client -func Test_service_transfer_methods(t *testing.T) { - conn := getRPCClientConnForTest() - cli := transferV1.NewTransferClient(conn) - ctx, _ := context.WithTimeout(context.Background(), time.Second*5) - - tests := []struct { - name string - fn func() (interface{}, error) - wantErr bool - 
}{ - { - name: "Transfer", - fn: func() (interface{}, error) { - // todo type in the parameters to test - req := &transferV1.TransferRequest{ - Amount: 100, - FromUserId: 1, - ToUserId: 2, - } - return cli.Transfer(ctx, req) - }, - wantErr: false, - }, - { - name: "TransOut", - fn: func() (interface{}, error) { - // todo type in the parameters to test - req := &transferV1.TransOutRequest{ - Amount: 0, - UserId: 0, - } - return cli.TransOut(ctx, req) - }, - wantErr: false, - }, - { - name: "TransIn", - fn: func() (interface{}, error) { - // todo type in the parameters to test - req := &transferV1.TransInRequest{ - Amount: 0, - UserId: 0, - } - return cli.TransIn(ctx, req) - }, - wantErr: false, - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - got, err := tt.fn() - if (err != nil) != tt.wantErr { - t.Errorf("test '%s' error = %v, wantErr %v", tt.name, err, tt.wantErr) - return - } - data, _ := json.MarshalIndent(got, "", " ") - fmt.Println(string(data)) - }) - } -} - -// Perform a stress test on transfer's method and -// copy the press test report to your browser when you are finished. -func Test_service_transfer_benchmark(t *testing.T) { - err := config.Init(configs.Path("transfer.yml")) - if err != nil { - panic(err) - } - host := fmt.Sprintf("127.0.0.1:%d", config.Get().Grpc.Port) - protoFile := configs.Path("../api/transfer/v1/transfer.proto") - // If third-party dependencies are missing during the press test, - // copy them to the project's third_party directory. - dependentProtoFilePath := []string{ - configs.Path("../third_party"), // third_party directory - configs.Path(".."), // Previous level of third_party - } - - tests := []struct { - name string - fn func() error - wantErr bool - }{ - { - name: "Transfer", - fn: func() error { - // todo type in the parameters to test - message := &transferV1.TransferRequest{ - Amount: 0, - FromUserId: 0, - ToUserId: 0, - } - var total = 1000 // total number of requests - b, err := benchmark.New(host, protoFile, "Transfer", message, dependentProtoFilePath, total) - if err != nil { - return err - } - return b.Run() - }, - wantErr: false, - }, - { - name: "TransOut", - fn: func() error { - // todo type in the parameters to test - message := &transferV1.TransOutRequest{ - Amount: 0, - UserId: 0, - } - var total = 1000 // total number of requests - b, err := benchmark.New(host, protoFile, "TransOut", message, dependentProtoFilePath, total) - if err != nil { - return err - } - return b.Run() - }, - wantErr: false, - }, - { - name: "TransIn", - fn: func() error { - // todo type in the parameters to test - message := &transferV1.TransInRequest{ - Amount: 0, - UserId: 0, - } - var total = 1000 // total number of requests - b, err := benchmark.New(host, protoFile, "TransIn", message, dependentProtoFilePath, total) - if err != nil { - return err - } - return b.Run() - }, - wantErr: false, - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - err := tt.fn() - if (err != nil) != tt.wantErr { - t.Errorf("test '%s' error = %v, wantErr %v", tt.name, err, tt.wantErr) - return - } - }) - } -} diff --git a/b_sponge-dtm-msg/readme-cn.md b/b_sponge-dtm-msg/readme-cn.md deleted file mode 100644 index 07b32ea..0000000 --- a/b_sponge-dtm-msg/readme-cn.md +++ /dev/null @@ -1,95 +0,0 @@ -## Transfer service - -The transfer service is an example service that demonstrates how to use dtm's service registration and discovery and dtm's two-phase message transactions. -
-This example uses service registration and discovery after the [dtmdriver-sponge](https://github.com/zhufuyi/dtmdriver-sponge) driver has been added to dtm. All services involved in the example run on the same machine, so every IP address is 127.0.0.1; if a service runs on a different machine, replace 127.0.0.1 in the transfer and dtm configuration files with the corresponding host IP or domain name. - -Key code: - -- The driver is imported at line 9 of [internal/rpcclient/dtmservice.go](internal/rpcclient/dtmservice.go). - -- The two-phase message transaction is submitted at line 65 of [internal/service/transfer.go](internal/service/transfer.go); a condensed sketch of that step follows. -
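For quick reference, the sketch below condenses the submission step from the deleted `internal/service/transfer.go` in this patch. The wrapper function name `submitTransfer` is only illustrative; the dtmgrpc calls, imports, and gRPC method URLs are taken from that file.

```go
package service

import (
	"github.com/dtm-labs/client/dtmgrpc"

	transferV1 "transfer/api/transfer/v1"
)

// submitTransfer (illustrative name) condenses the two-phase message submission
// from internal/service/transfer.go: both branches are registered with dtm as a
// single msg transaction and then submitted.
func submitTransfer(dtmServer, transferServer string, req *transferV1.TransferRequest) error {
	transOutData := &transferV1.TransOutRequest{Amount: req.Amount, UserId: req.FromUserId}
	transInData := &transferV1.TransInRequest{Amount: req.Amount, UserId: req.ToUserId}

	gid := dtmgrpc.MustGenGid(dtmServer) // global transaction id issued by the dtm server
	m := dtmgrpc.NewMsgGrpc(dtmServer, gid).
		Add(transferServer+"/api.transfer.v1.transfer/TransOut", transOutData).
		Add(transferServer+"/api.transfer.v1.transfer/TransIn", transInData)
	m.WaitResult = true // wait for the branch calls before returning to the caller
	return m.Submit()   // dtm keeps retrying the registered branches until they succeed
}
```

In this example the TransOut and TransIn handlers only validate the request and log success, so the message transaction always completes; in a real service they would perform the actual balance updates.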
- -### Example: using etcd for service discovery - -1. Start the etcd service. - -2. Modify the dtm configuration file. - -```yaml -MicroService: - Driver: 'dtm-driver-sponge' - Target: 'etcd://127.0.0.1:2379/dtmservice' - EndPoint: 'grpc://127.0.0.1:36790' -``` - -3. Start the dtm service. - -```bash -dtm -c conf.yml -``` - -4. Modify the transfer configuration. - -Open the configuration file `configs/transfer.yml` and change the value of the `registryDiscoveryType` field under both app and grpcClient, which indicates that etcd is used for service registration and discovery. - -```yaml -app: - registryDiscoveryType: "etcd" # registry and discovery types: consul, etcd, nacos, if empty, registration and discovery are not used - -grpcClient: - - name: "dtmservice" # dtm service name, used for service discovery - registryDiscoveryType: "etcd" # registration and discovery types: consul, etcd, nacos, if empty, connecting to server using host and port - host: "127.0.0.1" # dtm service address, used for direct connection - port: 36790 # dtm service port - -etcd: - addrs: ["127.0.0.1:2379"] -``` - -5. Start the transfer service. - -```bash -# compile and run the service -make run -``` - -6. Test. - -Open the generated gRPC client test code `internal/service/transfer_client_test.go` and fill in the request parameters before testing, for example: - -```go - { - name: "Transfer", - fn: func() (interface{}, error) { - // todo type in the parameters to test - req := &transferV1.TransferRequest{ - Amount: 100, - FromUserId: 1, - ToUserId: 2, - } - return cli.Transfer(ctx, req) - }, - wantErr: false, - }, -``` - -Open a new terminal, switch to the `internal/service` directory, and run the test command: - -```bash -go test -run Test_service_transfer_methods/Transfer -``` - -> Note: if you open `internal/service/transfer_client_test.go` in the `Goland IDE`, you can simply click the green button on the left to run the test. - - -### Using consul for service discovery - -Follow the same 6 steps as in **Using etcd for service discovery** above, replace `etcd` with `consul`, and restart the dtm and transfer services. - -### Using nacos for service discovery - -Follow the same 6 steps as in **Using etcd for service discovery** above, replace `etcd` with `nacos`, and restart the dtm and transfer services. - -Note: the namespaceID in the dtm and transfer configuration files must be identical; the default namespaceID is `public`, and if any other value is specified it must be changed in both files.
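To illustrate that last note, the relevant part of the transfer side is sketched below; the values are the defaults from the deleted `configs/transfer.yml` earlier in this patch, and the dtm side (whose nacos key names are not shown in this patch) must point at the same namespace.

```yaml
# configs/transfer.yml - nacos used for registration and discovery
app:
  registryDiscoveryType: "nacos"

nacosRd:
  ipAddr: "127.0.0.1"
  port: 8848
  namespaceID: ""   # empty means the default namespace "public"; dtm must use the same namespace
```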