diff --git a/server/e2e/integration_schema_export_test.go b/server/e2e/integration_schema_export_test.go new file mode 100644 index 0000000000..3eae4d0a88 --- /dev/null +++ b/server/e2e/integration_schema_export_test.go @@ -0,0 +1,199 @@ +package e2e + +import ( + "net/http" + "testing" + + "github.com/reearth/reearth-cms/server/internal/app" + "github.com/reearth/reearth-cms/server/pkg/id" +) + +func TestIntegrationSchemaJSONExportAPI(t *testing.T) { + e := StartServer(t, &app.Config{}, true, baseSeeder) + + // /api/schemata/{schemaId}/schema.json + e.GET("/api/schemata/{schemaId}/schema.json", sid1). + WithHeader("authorization", "Bearer abcd"). + Expect(). + Status(http.StatusUnauthorized) + + e.GET("/api/schemata/{schemaId}/schema.json", id.NewSchemaID()). + WithHeader("authorization", "Bearer "+secret). + Expect(). + Status(http.StatusNotFound) + + e.GET("/api/schemata/{schemaId}/schema.json", sid1). + WithHeader("authorization", "Bearer "+secret). + Expect(). + Status(http.StatusOK). + JSON(). + IsEqual(map[string]any{ + "$id": sid1, + "$schema": "https://json-schema.org/draft/2020-12/schema", + "properties": map[string]any{ + "asset": map[string]any{ + "type": "string", + "format": "binary", + }, + sfKey1.String(): map[string]any{ + "type": "string", + }, + }, + "type": "object", + }) + + // /api/projects/{projectIdOrKey}/schemata/{schemaId}/schema.json + e.GET("/api/projects/{projectIdOrKey}/schemata/{schemaId}/schema.json", pid, sid1). + WithHeader("authorization", "Bearer abcd"). + Expect(). + Status(http.StatusUnauthorized) + + e.GET("/api/projects/{projectIdOrKey}/schemata/{schemaId}/schema.json", pid, id.NewSchemaID()). + WithHeader("authorization", "Bearer "+secret). + Expect(). + Status(http.StatusNotFound) + + e.GET("/api/projects/{projectIdOrKey}/schemata/{schemaId}/schema.json", pid, sid1). + WithHeader("authorization", "Bearer "+secret). + Expect(). + Status(http.StatusOK). + JSON(). + IsEqual(map[string]any{ + "$id": sid1, + "$schema": "https://json-schema.org/draft/2020-12/schema", + "properties": map[string]any{ + "asset": map[string]any{ + "type": "string", + "format": "binary", + }, + sfKey1.String(): map[string]any{ + "type": "string", + }, + }, + "type": "object", + }) + + // /api/projects/{projectIdOrKey}/models/{modelId}/schema.json + e.GET("/api/projects/{projectIdOrKey}/models/{modelId}/schema.json", pid, mId1). + WithHeader("authorization", "Bearer abcd"). + Expect(). + Status(http.StatusUnauthorized) + + e.GET("/api/projects/{projectIdOrKey}/models/{modelId}/schema.json", pid, id.NewModelID()). + WithHeader("authorization", "Bearer "+secret). + Expect(). + Status(http.StatusNotFound) + + e.GET("/api/projects/{projectIdOrKey}/models/{modelId}/schema.json", pid, mId1). + WithHeader("authorization", "Bearer "+secret). + Expect(). + Status(http.StatusOK). + JSON(). + IsEqual(map[string]any{ + "$id": mId1, + "$schema": "https://json-schema.org/draft/2020-12/schema", + "properties": map[string]any{ + "asset": map[string]any{ + "type": "string", + "format": "binary", + }, + sfKey1.String(): map[string]any{ + "type": "string", + }, + }, + "type": "object", + "description": "m1 desc", + "title": "m1", + }) + + // /api/projects/{projectIdOrKey}/models/{modelId}/metadata_schema.json + e.GET("/api/projects/{projectIdOrKey}/models/{modelId}/metadata_schema.json", pid, mId1). + WithHeader("authorization", "Bearer abcd"). + Expect(). + Status(http.StatusUnauthorized) + + e.GET("/api/projects/{projectIdOrKey}/models/{modelId}/metadata_schema.json", pid, id.NewModelID()). 
+ WithHeader("authorization", "Bearer "+secret). + Expect(). + Status(http.StatusNotFound) + + e.GET("/api/projects/{projectIdOrKey}/models/{modelId}/metadata_schema.json", pid, mId1). + WithHeader("authorization", "Bearer "+secret). + Expect(). + Status(http.StatusOK). + JSON(). + IsEqual(map[string]any{ + "$id": mId1, + "$schema": "https://json-schema.org/draft/2020-12/schema", + "properties": map[string]any{ + sfKey4.String(): map[string]any{ + "type": "boolean", + }, + }, + "type": "object", + "description": "m1 desc", + "title": "m1", + }) + + // /api/models/{modelId}/schema.json + e.GET("/api/models/{modelId}/schema.json", mId1). + WithHeader("authorization", "Bearer abcd"). + Expect(). + Status(http.StatusUnauthorized) + + e.GET("/api/models/{modelId}/schema.json", id.NewModelID()). + WithHeader("authorization", "Bearer "+secret). + Expect(). + Status(http.StatusNotFound) + + e.GET("/api/models/{modelId}/schema.json", mId1). + WithHeader("authorization", "Bearer "+secret). + Expect(). + Status(http.StatusOK). + JSON(). + IsEqual(map[string]any{ + "$id": mId1, + "$schema": "https://json-schema.org/draft/2020-12/schema", + "properties": map[string]any{ + "asset": map[string]any{ + "type": "string", + "format": "binary", + }, + sfKey1.String(): map[string]any{ + "type": "string", + }, + }, + "type": "object", + "description": "m1 desc", + "title": "m1", + }) + + // /api/models/{modelId}/metadata_schema.json + e.GET("/api/models/{modelId}/metadata_schema.json", mId1). + WithHeader("authorization", "Bearer abcd"). + Expect(). + Status(http.StatusUnauthorized) + + e.GET("/api/models/{modelId}/metadata_schema.json", id.NewModelID()). + WithHeader("authorization", "Bearer "+secret). + Expect(). + Status(http.StatusNotFound) + + e.GET("/api/models/{modelId}/metadata_schema.json", mId1). + WithHeader("authorization", "Bearer "+secret). + Expect(). + Status(http.StatusOK). + JSON(). + IsEqual(map[string]any{ + "$id": mId1, + "$schema": "https://json-schema.org/draft/2020-12/schema", + "properties": map[string]any{ + sfKey4.String(): map[string]any{ + "type": "boolean", + }, + }, + "type": "object", + "description": "m1 desc", + "title": "m1", + }) +} diff --git a/server/e2e/publicapi_test.go b/server/e2e/publicapi_test.go index bca973a4a6..f943fdd459 100644 --- a/server/e2e/publicapi_test.go +++ b/server/e2e/publicapi_test.go @@ -355,6 +355,49 @@ func TestPublicAPI(t *testing.T) { // publicAPIField2Key should be removed }) + // schema export json + e.GET("/api/p/{project}/{model}/schema.json", publicAPIProjectAlias, id.RandomKey()). + Expect(). + Status(http.StatusNotFound) + + e.GET("/api/p/{project}/{model}/schema.json", publicAPIProjectAlias, publicAPIModelKey). + Expect(). + Status(http.StatusOK). + JSON(). 
+ IsEqual(map[string]any{ + "$id": publicAPIModelID, + "properties": map[string]any{ + "asset": map[string]any{ + "title": "asset", + "type": "string", + "format": "binary", + }, + "asset2": map[string]any{ + "title": "asset2", + "type": "string", + "format": "binary", + }, + "geometry-editor": map[string]any{ + "title": "geometry-editor", + "type": "object", + }, + "geometry-object": map[string]any{ + "title": "geometry-object", + "type": "object", + }, + "test-field-1": map[string]any{ + "title": "test-field-1", + "type": "string", + }, + "test-field-2": map[string]any{ + "title": "test-field-2", + "type": "string", + }, + }, + "$schema": "https://json-schema.org/draft/2020-12/schema", + "type": "object", + }) + // make the project private prj.Publication().SetScope(project.PublicationScopePrivate) lo.Must0(repos.Project.Save(ctx, prj)) diff --git a/server/internal/adapter/integration/schema_export.go b/server/internal/adapter/integration/schema_export.go new file mode 100644 index 0000000000..b27c509b40 --- /dev/null +++ b/server/internal/adapter/integration/schema_export.go @@ -0,0 +1,243 @@ +package integration + +import ( + "context" + "errors" + + "github.com/reearth/reearth-cms/server/internal/adapter" + "github.com/reearth/reearth-cms/server/pkg/exporters" + "github.com/reearth/reearth-cms/server/pkg/integrationapi" + "github.com/reearth/reearthx/rerror" + "github.com/samber/lo" +) + +func (s *Server) SchemaByModelAsJSON(ctx context.Context, request SchemaByModelAsJSONRequestObject) (SchemaByModelAsJSONResponseObject, error) { + op := adapter.Operator(ctx) + uc := adapter.Usecases(ctx) + + m, err := uc.Model.FindByID(ctx, request.ModelId, op) + if err != nil { + if errors.Is(err, rerror.ErrNotFound) { + return SchemaByModelAsJSON404Response{}, err + } + return SchemaByModelAsJSON400Response{}, err + } + + sp, err := uc.Schema.FindByModel(ctx, m.ID(), op) + if err != nil { + return SchemaByModelAsJSON404Response{}, err + } + + gsMap := exporters.BuildGroupSchemaMap(sp) + res := exporters.NewSchemaJSON(m.ID().Ref().StringRef(), lo.ToPtr(m.Name()), lo.ToPtr(m.Description()), exporters.BuildProperties(sp.Schema().Fields(), gsMap)) + return SchemaByModelAsJSON200JSONResponse{ + Schema: res.Schema, + Id: res.Id, + Title: res.Title, + Description: res.Description, + Type: res.Type, + Properties: toSchemaJSONProperties(res.Properties), + }, nil +} + +func (s *Server) MetadataSchemaByModelAsJSON(ctx context.Context, request MetadataSchemaByModelAsJSONRequestObject) (MetadataSchemaByModelAsJSONResponseObject, error) { + op := adapter.Operator(ctx) + uc := adapter.Usecases(ctx) + + m, err := uc.Model.FindByID(ctx, request.ModelId, op) + if err != nil { + if errors.Is(err, rerror.ErrNotFound) { + return MetadataSchemaByModelAsJSON404Response{}, err + } + return MetadataSchemaByModelAsJSON400Response{}, err + } + + sp, err := uc.Schema.FindByModel(ctx, request.ModelId, op) + if err != nil { + return MetadataSchemaByModelAsJSON404Response{}, err + } + + res := exporters.NewSchemaJSON(m.ID().Ref().StringRef(), lo.ToPtr(m.Name()), lo.ToPtr(m.Description()), exporters.BuildProperties(sp.MetaSchema().Fields(), nil)) + return MetadataSchemaByModelAsJSON200JSONResponse{ + Schema: res.Schema, + Id: res.Id, + Title: res.Title, + Description: res.Description, + Type: res.Type, + Properties: toSchemaJSONProperties(res.Properties), + }, nil +} + +func (s *Server) SchemaByModelWithProjectAsJSON(ctx context.Context, request SchemaByModelWithProjectAsJSONRequestObject) 
(SchemaByModelWithProjectAsJSONResponseObject, error) { + uc := adapter.Usecases(ctx) + op := adapter.Operator(ctx) + + p, err := uc.Project.FindByIDOrAlias(ctx, request.ProjectIdOrAlias, op) + if err != nil { + if errors.Is(err, rerror.ErrNotFound) { + return SchemaByModelWithProjectAsJSON404Response{}, err + } + return SchemaByModelWithProjectAsJSON400Response{}, err + } + + m, err := uc.Model.FindByIDOrKey(ctx, p.ID(), request.ModelIdOrKey, op) + if err != nil { + if errors.Is(err, rerror.ErrNotFound) { + return SchemaByModelWithProjectAsJSON404Response{}, err + } + return SchemaByModelWithProjectAsJSON400Response{}, err + } + + sp, err := uc.Schema.FindByModel(ctx, m.ID(), op) + if err != nil { + if errors.Is(err, rerror.ErrNotFound) { + return SchemaByModelWithProjectAsJSON404Response{}, err + } + return SchemaByModelWithProjectAsJSON400Response{}, err + } + + gsMap := exporters.BuildGroupSchemaMap(sp) + res := exporters.NewSchemaJSON(m.ID().Ref().StringRef(), lo.ToPtr(m.Name()), lo.ToPtr(m.Description()), exporters.BuildProperties(sp.Schema().Fields(), gsMap)) + return SchemaByModelWithProjectAsJSON200JSONResponse{ + Schema: res.Schema, + Id: res.Id, + Title: res.Title, + Description: res.Description, + Type: res.Type, + Properties: toSchemaJSONProperties(res.Properties), + }, nil +} + +func (s *Server) MetadataSchemaByModelWithProjectAsJSON(ctx context.Context, request MetadataSchemaByModelWithProjectAsJSONRequestObject) (MetadataSchemaByModelWithProjectAsJSONResponseObject, error) { + uc := adapter.Usecases(ctx) + op := adapter.Operator(ctx) + + p, err := uc.Project.FindByIDOrAlias(ctx, request.ProjectIdOrAlias, op) + if err != nil { + if errors.Is(err, rerror.ErrNotFound) { + return MetadataSchemaByModelWithProjectAsJSON404Response{}, err + } + return MetadataSchemaByModelWithProjectAsJSON400Response{}, err + } + + m, err := uc.Model.FindByIDOrKey(ctx, p.ID(), request.ModelIdOrKey, op) + if err != nil { + if errors.Is(err, rerror.ErrNotFound) { + return MetadataSchemaByModelWithProjectAsJSON404Response{}, err + } + return MetadataSchemaByModelWithProjectAsJSON400Response{}, err + } + + sch, err := uc.Schema.FindByModel(ctx, m.ID(), op) + if err != nil { + if errors.Is(err, rerror.ErrNotFound) { + return MetadataSchemaByModelWithProjectAsJSON404Response{}, err + } + return MetadataSchemaByModelWithProjectAsJSON400Response{}, err + } + + res := exporters.NewSchemaJSON(m.ID().Ref().StringRef(), lo.ToPtr(m.Name()), lo.ToPtr(m.Description()), exporters.BuildProperties(sch.MetaSchema().Fields(), nil)) + return MetadataSchemaByModelWithProjectAsJSON200JSONResponse{ + Schema: res.Schema, + Id: res.Id, + Title: res.Title, + Description: res.Description, + Type: res.Type, + Properties: toSchemaJSONProperties(res.Properties), + }, nil +} + +func (s *Server) SchemaByIDAsJSON(ctx context.Context, request SchemaByIDAsJSONRequestObject) (SchemaByIDAsJSONResponseObject, error) { + uc := adapter.Usecases(ctx) + op := adapter.Operator(ctx) + + m, err := uc.Model.FindBySchema(ctx, request.SchemaId, op) + if err != nil { + if errors.Is(err, rerror.ErrNotFound) { + return SchemaByIDAsJSON404Response{}, err + } + return SchemaByIDAsJSON400Response{}, err + } + + sp, err := uc.Schema.FindByModel(ctx, m.ID(), op) + if err != nil { + if errors.Is(err, rerror.ErrNotFound) { + return SchemaByIDAsJSON404Response{}, err + } + return SchemaByIDAsJSON400Response{}, err + } + + gsMap := exporters.BuildGroupSchemaMap(sp) + res := exporters.NewSchemaJSON(sp.Schema().ID().Ref().StringRef(), nil, nil, 
exporters.BuildProperties(sp.Schema().Fields(), gsMap)) + return SchemaByIDAsJSON200JSONResponse{ + Schema: res.Schema, + Id: res.Id, + Type: res.Type, + Properties: toSchemaJSONProperties(res.Properties), + }, nil +} + +func (s *Server) SchemaByIDWithProjectAsJSON(ctx context.Context, request SchemaByIDWithProjectAsJSONRequestObject) (SchemaByIDWithProjectAsJSONResponseObject, error) { + uc := adapter.Usecases(ctx) + op := adapter.Operator(ctx) + + _, err := uc.Project.FindByIDOrAlias(ctx, request.ProjectIdOrAlias, op) + if err != nil { + if errors.Is(err, rerror.ErrNotFound) { + return SchemaByIDWithProjectAsJSON404Response{}, err + } + return SchemaByIDWithProjectAsJSON400Response{}, err + } + + m, err := uc.Model.FindBySchema(ctx, request.SchemaId, op) + if err != nil { + if errors.Is(err, rerror.ErrNotFound) { + return SchemaByIDWithProjectAsJSON404Response{}, err + } + return SchemaByIDWithProjectAsJSON400Response{}, err + } + + sp, err := uc.Schema.FindByModel(ctx, m.ID(), op) + if err != nil { + if errors.Is(err, rerror.ErrNotFound) { + return SchemaByIDWithProjectAsJSON404Response{}, err + } + return SchemaByIDWithProjectAsJSON400Response{}, err + } + + gsMap := exporters.BuildGroupSchemaMap(sp) + res := exporters.NewSchemaJSON(sp.Schema().ID().Ref().StringRef(), nil, nil, exporters.BuildProperties(sp.Schema().Fields(), gsMap)) + return SchemaByIDWithProjectAsJSON200JSONResponse{ + Schema: res.Schema, + Id: res.Id, + Type: res.Type, + Properties: toSchemaJSONProperties(res.Properties), + }, nil +} + +func toSchemaJSONProperties(pp map[string]exporters.SchemaJSONProperties) map[string]integrationapi.SchemaJSONProperties { + res := map[string]integrationapi.SchemaJSONProperties{} + for k, v := range pp { + res[k] = integrationapi.SchemaJSONProperties{ + Type: v.Type, + Title: v.Title, + Description: v.Description, + Format: v.Format, + Minimum: v.Minimum, + Maximum: v.Maximum, + MaxLength: v.MaxLength, + Items: toSchemaJSONItems(v.Items), + } + } + return res +} + +func toSchemaJSONItems(pp *exporters.SchemaJSON) *integrationapi.SchemaJSON { + if pp == nil { + return nil + } + return &integrationapi.SchemaJSON{ + Type: pp.Type, + Properties: toSchemaJSONProperties(pp.Properties), + } +} diff --git a/server/internal/adapter/integration/server.gen.go b/server/internal/adapter/integration/server.gen.go index 78d98f6d97..3f010f7d53 100644 --- a/server/internal/adapter/integration/server.gen.go +++ b/server/internal/adapter/integration/server.gen.go @@ -90,6 +90,12 @@ type ServerInterface interface { // Returns a GeoJSON that has a list of items as features. // (GET /models/{modelId}/items.geojson) ItemsAsGeoJSON(ctx echo.Context, modelId ModelIdParam, params ItemsAsGeoJSONParams) error + // Returns a metadata schema as json by model ID + // (GET /models/{modelId}/metadata_schema.json) + MetadataSchemaByModelAsJSON(ctx echo.Context, modelId ModelIdParam) error + // Returns a schema as json by model ID + // (GET /models/{modelId}/schema.json) + SchemaByModelAsJSON(ctx echo.Context, modelId ModelIdParam) error // Returns a list of models. // (GET /projects/{projectIdOrAlias}/models) ModelFilter(ctx echo.Context, projectIdOrAlias ProjectIdOrAliasParam, params ModelFilterParams) error @@ -126,9 +132,18 @@ type ServerInterface interface { // Returns a GeoJSON that has a list of items as features. 
// (GET /projects/{projectIdOrAlias}/models/{modelIdOrKey}/items.geojson) ItemsWithProjectAsGeoJSON(ctx echo.Context, projectIdOrAlias ProjectIdOrAliasParam, modelIdOrKey ModelIdOrKeyParam, params ItemsWithProjectAsGeoJSONParams) error + // Returns a metadata schema as json by project and model ID + // (GET /projects/{projectIdOrAlias}/models/{modelIdOrKey}/metadata_schema.json) + MetadataSchemaByModelWithProjectAsJSON(ctx echo.Context, projectIdOrAlias ProjectIdOrAliasParam, modelIdOrKey ModelIdOrKeyParam) error + // Returns a schema as json by project and model ID + // (GET /projects/{projectIdOrAlias}/models/{modelIdOrKey}/schema.json) + SchemaByModelWithProjectAsJSON(ctx echo.Context, projectIdOrAlias ProjectIdOrAliasParam, modelIdOrKey ModelIdOrKeyParam) error // Returns a schema. // (GET /projects/{projectIdOrAlias}/schemata) SchemaFilter(ctx echo.Context, projectIdOrAlias ProjectIdOrAliasParam, params SchemaFilterParams) error + // Returns a schema as json by project and schema ID + // (GET /projects/{projectIdOrAlias}/schemata/{schemaId}/schema.json) + SchemaByIDWithProjectAsJSON(ctx echo.Context, projectIdOrAlias ProjectIdOrAliasParam, schemaId SchemaIdParam) error // Returns a list of assets. // (GET /projects/{projectId}/assets) AssetFilter(ctx echo.Context, projectId ProjectIdParam, params AssetFilterParams) error @@ -147,6 +162,9 @@ type ServerInterface interface { // update a field // (PATCH /schemata/{schemaId}/fields/{fieldIdOrKey}) FieldUpdate(ctx echo.Context, schemaId SchemaIdParam, fieldIdOrKey FieldIdOrKeyParam) error + // Returns a schema as json by schema ID + // (GET /schemata/{schemaId}/schema.json) + SchemaByIDAsJSON(ctx echo.Context, schemaId SchemaIdParam) error // Returns a list of projects. // (GET /{workspaceId}/projects) ProjectFilter(ctx echo.Context, workspaceId WorkspaceIdParam, params ProjectFilterParams) error @@ -676,6 +694,42 @@ func (w *ServerInterfaceWrapper) ItemsAsGeoJSON(ctx echo.Context) error { return err } +// MetadataSchemaByModelAsJSON converts echo context to params. +func (w *ServerInterfaceWrapper) MetadataSchemaByModelAsJSON(ctx echo.Context) error { + var err error + // ------------- Path parameter "modelId" ------------- + var modelId ModelIdParam + + err = runtime.BindStyledParameterWithOptions("simple", "modelId", ctx.Param("modelId"), &modelId, runtime.BindStyledParameterOptions{ParamLocation: runtime.ParamLocationPath, Explode: false, Required: true}) + if err != nil { + return echo.NewHTTPError(http.StatusBadRequest, fmt.Sprintf("Invalid format for parameter modelId: %s", err)) + } + + ctx.Set(BearerAuthScopes, []string{}) + + // Invoke the callback with all the unmarshaled arguments + err = w.Handler.MetadataSchemaByModelAsJSON(ctx, modelId) + return err +} + +// SchemaByModelAsJSON converts echo context to params. 
+func (w *ServerInterfaceWrapper) SchemaByModelAsJSON(ctx echo.Context) error { + var err error + // ------------- Path parameter "modelId" ------------- + var modelId ModelIdParam + + err = runtime.BindStyledParameterWithOptions("simple", "modelId", ctx.Param("modelId"), &modelId, runtime.BindStyledParameterOptions{ParamLocation: runtime.ParamLocationPath, Explode: false, Required: true}) + if err != nil { + return echo.NewHTTPError(http.StatusBadRequest, fmt.Sprintf("Invalid format for parameter modelId: %s", err)) + } + + ctx.Set(BearerAuthScopes, []string{}) + + // Invoke the callback with all the unmarshaled arguments + err = w.Handler.SchemaByModelAsJSON(ctx, modelId) + return err +} + // ModelFilter converts echo context to params. func (w *ServerInterfaceWrapper) ModelFilter(ctx echo.Context) error { var err error @@ -1110,6 +1164,58 @@ func (w *ServerInterfaceWrapper) ItemsWithProjectAsGeoJSON(ctx echo.Context) err return err } +// MetadataSchemaByModelWithProjectAsJSON converts echo context to params. +func (w *ServerInterfaceWrapper) MetadataSchemaByModelWithProjectAsJSON(ctx echo.Context) error { + var err error + // ------------- Path parameter "projectIdOrAlias" ------------- + var projectIdOrAlias ProjectIdOrAliasParam + + err = runtime.BindStyledParameterWithOptions("simple", "projectIdOrAlias", ctx.Param("projectIdOrAlias"), &projectIdOrAlias, runtime.BindStyledParameterOptions{ParamLocation: runtime.ParamLocationPath, Explode: false, Required: true}) + if err != nil { + return echo.NewHTTPError(http.StatusBadRequest, fmt.Sprintf("Invalid format for parameter projectIdOrAlias: %s", err)) + } + + // ------------- Path parameter "modelIdOrKey" ------------- + var modelIdOrKey ModelIdOrKeyParam + + err = runtime.BindStyledParameterWithOptions("simple", "modelIdOrKey", ctx.Param("modelIdOrKey"), &modelIdOrKey, runtime.BindStyledParameterOptions{ParamLocation: runtime.ParamLocationPath, Explode: false, Required: true}) + if err != nil { + return echo.NewHTTPError(http.StatusBadRequest, fmt.Sprintf("Invalid format for parameter modelIdOrKey: %s", err)) + } + + ctx.Set(BearerAuthScopes, []string{}) + + // Invoke the callback with all the unmarshaled arguments + err = w.Handler.MetadataSchemaByModelWithProjectAsJSON(ctx, projectIdOrAlias, modelIdOrKey) + return err +} + +// SchemaByModelWithProjectAsJSON converts echo context to params. 
+func (w *ServerInterfaceWrapper) SchemaByModelWithProjectAsJSON(ctx echo.Context) error { + var err error + // ------------- Path parameter "projectIdOrAlias" ------------- + var projectIdOrAlias ProjectIdOrAliasParam + + err = runtime.BindStyledParameterWithOptions("simple", "projectIdOrAlias", ctx.Param("projectIdOrAlias"), &projectIdOrAlias, runtime.BindStyledParameterOptions{ParamLocation: runtime.ParamLocationPath, Explode: false, Required: true}) + if err != nil { + return echo.NewHTTPError(http.StatusBadRequest, fmt.Sprintf("Invalid format for parameter projectIdOrAlias: %s", err)) + } + + // ------------- Path parameter "modelIdOrKey" ------------- + var modelIdOrKey ModelIdOrKeyParam + + err = runtime.BindStyledParameterWithOptions("simple", "modelIdOrKey", ctx.Param("modelIdOrKey"), &modelIdOrKey, runtime.BindStyledParameterOptions{ParamLocation: runtime.ParamLocationPath, Explode: false, Required: true}) + if err != nil { + return echo.NewHTTPError(http.StatusBadRequest, fmt.Sprintf("Invalid format for parameter modelIdOrKey: %s", err)) + } + + ctx.Set(BearerAuthScopes, []string{}) + + // Invoke the callback with all the unmarshaled arguments + err = w.Handler.SchemaByModelWithProjectAsJSON(ctx, projectIdOrAlias, modelIdOrKey) + return err +} + // SchemaFilter converts echo context to params. func (w *ServerInterfaceWrapper) SchemaFilter(ctx echo.Context) error { var err error @@ -1151,6 +1257,32 @@ func (w *ServerInterfaceWrapper) SchemaFilter(ctx echo.Context) error { return err } +// SchemaByIDWithProjectAsJSON converts echo context to params. +func (w *ServerInterfaceWrapper) SchemaByIDWithProjectAsJSON(ctx echo.Context) error { + var err error + // ------------- Path parameter "projectIdOrAlias" ------------- + var projectIdOrAlias ProjectIdOrAliasParam + + err = runtime.BindStyledParameterWithOptions("simple", "projectIdOrAlias", ctx.Param("projectIdOrAlias"), &projectIdOrAlias, runtime.BindStyledParameterOptions{ParamLocation: runtime.ParamLocationPath, Explode: false, Required: true}) + if err != nil { + return echo.NewHTTPError(http.StatusBadRequest, fmt.Sprintf("Invalid format for parameter projectIdOrAlias: %s", err)) + } + + // ------------- Path parameter "schemaId" ------------- + var schemaId SchemaIdParam + + err = runtime.BindStyledParameterWithOptions("simple", "schemaId", ctx.Param("schemaId"), &schemaId, runtime.BindStyledParameterOptions{ParamLocation: runtime.ParamLocationPath, Explode: false, Required: true}) + if err != nil { + return echo.NewHTTPError(http.StatusBadRequest, fmt.Sprintf("Invalid format for parameter schemaId: %s", err)) + } + + ctx.Set(BearerAuthScopes, []string{}) + + // Invoke the callback with all the unmarshaled arguments + err = w.Handler.SchemaByIDWithProjectAsJSON(ctx, projectIdOrAlias, schemaId) + return err +} + // AssetFilter converts echo context to params. func (w *ServerInterfaceWrapper) AssetFilter(ctx echo.Context) error { var err error @@ -1312,6 +1444,24 @@ func (w *ServerInterfaceWrapper) FieldUpdate(ctx echo.Context) error { return err } +// SchemaByIDAsJSON converts echo context to params. 
+func (w *ServerInterfaceWrapper) SchemaByIDAsJSON(ctx echo.Context) error { + var err error + // ------------- Path parameter "schemaId" ------------- + var schemaId SchemaIdParam + + err = runtime.BindStyledParameterWithOptions("simple", "schemaId", ctx.Param("schemaId"), &schemaId, runtime.BindStyledParameterOptions{ParamLocation: runtime.ParamLocationPath, Explode: false, Required: true}) + if err != nil { + return echo.NewHTTPError(http.StatusBadRequest, fmt.Sprintf("Invalid format for parameter schemaId: %s", err)) + } + + ctx.Set(BearerAuthScopes, []string{}) + + // Invoke the callback with all the unmarshaled arguments + err = w.Handler.SchemaByIDAsJSON(ctx, schemaId) + return err +} + // ProjectFilter converts echo context to params. func (w *ServerInterfaceWrapper) ProjectFilter(ctx echo.Context) error { var err error @@ -1395,6 +1545,8 @@ func RegisterHandlersWithBaseURL(router EchoRouter, si ServerInterface, baseURL router.POST(baseURL+"/models/:modelId/items", wrapper.ItemCreate) router.GET(baseURL+"/models/:modelId/items.csv", wrapper.ItemsAsCSV) router.GET(baseURL+"/models/:modelId/items.geojson", wrapper.ItemsAsGeoJSON) + router.GET(baseURL+"/models/:modelId/metadata_schema.json", wrapper.MetadataSchemaByModelAsJSON) + router.GET(baseURL+"/models/:modelId/schema.json", wrapper.SchemaByModelAsJSON) router.GET(baseURL+"/projects/:projectIdOrAlias/models", wrapper.ModelFilter) router.POST(baseURL+"/projects/:projectIdOrAlias/models", wrapper.ModelCreate) router.DELETE(baseURL+"/projects/:projectIdOrAlias/models/:modelIdOrKey", wrapper.ModelDeleteWithProject) @@ -1407,13 +1559,17 @@ func RegisterHandlersWithBaseURL(router EchoRouter, si ServerInterface, baseURL router.POST(baseURL+"/projects/:projectIdOrAlias/models/:modelIdOrKey/items", wrapper.ItemCreateWithProject) router.GET(baseURL+"/projects/:projectIdOrAlias/models/:modelIdOrKey/items.csv", wrapper.ItemsWithProjectAsCSV) router.GET(baseURL+"/projects/:projectIdOrAlias/models/:modelIdOrKey/items.geojson", wrapper.ItemsWithProjectAsGeoJSON) + router.GET(baseURL+"/projects/:projectIdOrAlias/models/:modelIdOrKey/metadata_schema.json", wrapper.MetadataSchemaByModelWithProjectAsJSON) + router.GET(baseURL+"/projects/:projectIdOrAlias/models/:modelIdOrKey/schema.json", wrapper.SchemaByModelWithProjectAsJSON) router.GET(baseURL+"/projects/:projectIdOrAlias/schemata", wrapper.SchemaFilter) + router.GET(baseURL+"/projects/:projectIdOrAlias/schemata/:schemaId/schema.json", wrapper.SchemaByIDWithProjectAsJSON) router.GET(baseURL+"/projects/:projectId/assets", wrapper.AssetFilter) router.POST(baseURL+"/projects/:projectId/assets", wrapper.AssetCreate) router.POST(baseURL+"/projects/:projectId/assets/uploads", wrapper.AssetUploadCreate) router.POST(baseURL+"/schemata/:schemaId/fields", wrapper.FieldCreate) router.DELETE(baseURL+"/schemata/:schemaId/fields/:fieldIdOrKey", wrapper.FieldDelete) router.PATCH(baseURL+"/schemata/:schemaId/fields/:fieldIdOrKey", wrapper.FieldUpdate) + router.GET(baseURL+"/schemata/:schemaId/schema.json", wrapper.SchemaByIDAsJSON) router.GET(baseURL+"/:workspaceId/projects", wrapper.ProjectFilter) } @@ -2342,6 +2498,102 @@ func (response ItemsAsGeoJSON500Response) VisitItemsAsGeoJSONResponse(w http.Res return nil } +type MetadataSchemaByModelAsJSONRequestObject struct { + ModelId ModelIdParam `json:"modelId"` +} + +type MetadataSchemaByModelAsJSONResponseObject interface { + VisitMetadataSchemaByModelAsJSONResponse(w http.ResponseWriter) error +} + +type MetadataSchemaByModelAsJSON200JSONResponse SchemaJSON + 
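As an illustrative aside (not part of this patch or the generated file): the handlers and generated response types around here serve a standard JSON Schema document, with the exact payload shape asserted in the new e2e tests at the top of this diff. The sketch below shows how a client might consume the new GET /api/models/{modelId}/schema.json endpoint; baseURL, token, and modelID are placeholders, and the schemaJSON struct simply mirrors the SchemaJSON shape added in this change rather than importing it.

// Illustrative sketch only: a minimal client for the new schema export endpoint.
// baseURL, token, and modelID are placeholders; the struct mirrors the SchemaJSON
// shape added in this change.
package main

import (
	"encoding/json"
	"fmt"
	"net/http"
)

type schemaJSON struct {
	ID         *string                   `json:"$id,omitempty"`
	Schema     *string                   `json:"$schema,omitempty"`
	Title      *string                   `json:"title,omitempty"`
	Type       string                    `json:"type"`
	Properties map[string]map[string]any `json:"properties"`
}

func main() {
	baseURL := "http://localhost:8080"      // placeholder server address
	token := "xxxxxxxx"                     // placeholder integration token
	modelID := "xxxxxxxxxxxxxxxxxxxxxxxxxx" // placeholder model ID

	// Path and Bearer auth follow the new e2e tests in this diff.
	req, err := http.NewRequest(http.MethodGet, baseURL+"/api/models/"+modelID+"/schema.json", nil)
	if err != nil {
		panic(err)
	}
	req.Header.Set("Authorization", "Bearer "+token)

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	var s schemaJSON
	if err := json.NewDecoder(resp.Body).Decode(&s); err != nil {
		panic(err)
	}

	// Print each exported property and its JSON Schema type.
	for name, prop := range s.Properties {
		fmt.Printf("%s: %v\n", name, prop["type"])
	}
}

Per the e2e expectations above, a missing or invalid token yields 401 and an unknown model or schema ID yields 404, so a real client would check resp.StatusCode before decoding.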
+func (response MetadataSchemaByModelAsJSON200JSONResponse) VisitMetadataSchemaByModelAsJSONResponse(w http.ResponseWriter) error { + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(200) + + return json.NewEncoder(w).Encode(response) +} + +type MetadataSchemaByModelAsJSON400Response struct { +} + +func (response MetadataSchemaByModelAsJSON400Response) VisitMetadataSchemaByModelAsJSONResponse(w http.ResponseWriter) error { + w.WriteHeader(400) + return nil +} + +type MetadataSchemaByModelAsJSON401Response = UnauthorizedErrorResponse + +func (response MetadataSchemaByModelAsJSON401Response) VisitMetadataSchemaByModelAsJSONResponse(w http.ResponseWriter) error { + w.WriteHeader(401) + return nil +} + +type MetadataSchemaByModelAsJSON404Response struct { +} + +func (response MetadataSchemaByModelAsJSON404Response) VisitMetadataSchemaByModelAsJSONResponse(w http.ResponseWriter) error { + w.WriteHeader(404) + return nil +} + +type MetadataSchemaByModelAsJSON500Response struct { +} + +func (response MetadataSchemaByModelAsJSON500Response) VisitMetadataSchemaByModelAsJSONResponse(w http.ResponseWriter) error { + w.WriteHeader(500) + return nil +} + +type SchemaByModelAsJSONRequestObject struct { + ModelId ModelIdParam `json:"modelId"` +} + +type SchemaByModelAsJSONResponseObject interface { + VisitSchemaByModelAsJSONResponse(w http.ResponseWriter) error +} + +type SchemaByModelAsJSON200JSONResponse SchemaJSON + +func (response SchemaByModelAsJSON200JSONResponse) VisitSchemaByModelAsJSONResponse(w http.ResponseWriter) error { + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(200) + + return json.NewEncoder(w).Encode(response) +} + +type SchemaByModelAsJSON400Response struct { +} + +func (response SchemaByModelAsJSON400Response) VisitSchemaByModelAsJSONResponse(w http.ResponseWriter) error { + w.WriteHeader(400) + return nil +} + +type SchemaByModelAsJSON401Response = UnauthorizedErrorResponse + +func (response SchemaByModelAsJSON401Response) VisitSchemaByModelAsJSONResponse(w http.ResponseWriter) error { + w.WriteHeader(401) + return nil +} + +type SchemaByModelAsJSON404Response struct { +} + +func (response SchemaByModelAsJSON404Response) VisitSchemaByModelAsJSONResponse(w http.ResponseWriter) error { + w.WriteHeader(404) + return nil +} + +type SchemaByModelAsJSON500Response struct { +} + +func (response SchemaByModelAsJSON500Response) VisitSchemaByModelAsJSONResponse(w http.ResponseWriter) error { + w.WriteHeader(500) + return nil +} + type ModelFilterRequestObject struct { ProjectIdOrAlias ProjectIdOrAliasParam `json:"projectIdOrAlias"` Params ModelFilterParams @@ -2900,6 +3152,104 @@ func (response ItemsWithProjectAsGeoJSON500Response) VisitItemsWithProjectAsGeoJ return nil } +type MetadataSchemaByModelWithProjectAsJSONRequestObject struct { + ProjectIdOrAlias ProjectIdOrAliasParam `json:"projectIdOrAlias"` + ModelIdOrKey ModelIdOrKeyParam `json:"modelIdOrKey"` +} + +type MetadataSchemaByModelWithProjectAsJSONResponseObject interface { + VisitMetadataSchemaByModelWithProjectAsJSONResponse(w http.ResponseWriter) error +} + +type MetadataSchemaByModelWithProjectAsJSON200JSONResponse SchemaJSON + +func (response MetadataSchemaByModelWithProjectAsJSON200JSONResponse) VisitMetadataSchemaByModelWithProjectAsJSONResponse(w http.ResponseWriter) error { + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(200) + + return json.NewEncoder(w).Encode(response) +} + +type MetadataSchemaByModelWithProjectAsJSON400Response struct { +} + +func (response 
MetadataSchemaByModelWithProjectAsJSON400Response) VisitMetadataSchemaByModelWithProjectAsJSONResponse(w http.ResponseWriter) error { + w.WriteHeader(400) + return nil +} + +type MetadataSchemaByModelWithProjectAsJSON401Response = UnauthorizedErrorResponse + +func (response MetadataSchemaByModelWithProjectAsJSON401Response) VisitMetadataSchemaByModelWithProjectAsJSONResponse(w http.ResponseWriter) error { + w.WriteHeader(401) + return nil +} + +type MetadataSchemaByModelWithProjectAsJSON404Response struct { +} + +func (response MetadataSchemaByModelWithProjectAsJSON404Response) VisitMetadataSchemaByModelWithProjectAsJSONResponse(w http.ResponseWriter) error { + w.WriteHeader(404) + return nil +} + +type MetadataSchemaByModelWithProjectAsJSON500Response struct { +} + +func (response MetadataSchemaByModelWithProjectAsJSON500Response) VisitMetadataSchemaByModelWithProjectAsJSONResponse(w http.ResponseWriter) error { + w.WriteHeader(500) + return nil +} + +type SchemaByModelWithProjectAsJSONRequestObject struct { + ProjectIdOrAlias ProjectIdOrAliasParam `json:"projectIdOrAlias"` + ModelIdOrKey ModelIdOrKeyParam `json:"modelIdOrKey"` +} + +type SchemaByModelWithProjectAsJSONResponseObject interface { + VisitSchemaByModelWithProjectAsJSONResponse(w http.ResponseWriter) error +} + +type SchemaByModelWithProjectAsJSON200JSONResponse SchemaJSON + +func (response SchemaByModelWithProjectAsJSON200JSONResponse) VisitSchemaByModelWithProjectAsJSONResponse(w http.ResponseWriter) error { + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(200) + + return json.NewEncoder(w).Encode(response) +} + +type SchemaByModelWithProjectAsJSON400Response struct { +} + +func (response SchemaByModelWithProjectAsJSON400Response) VisitSchemaByModelWithProjectAsJSONResponse(w http.ResponseWriter) error { + w.WriteHeader(400) + return nil +} + +type SchemaByModelWithProjectAsJSON401Response = UnauthorizedErrorResponse + +func (response SchemaByModelWithProjectAsJSON401Response) VisitSchemaByModelWithProjectAsJSONResponse(w http.ResponseWriter) error { + w.WriteHeader(401) + return nil +} + +type SchemaByModelWithProjectAsJSON404Response struct { +} + +func (response SchemaByModelWithProjectAsJSON404Response) VisitSchemaByModelWithProjectAsJSONResponse(w http.ResponseWriter) error { + w.WriteHeader(404) + return nil +} + +type SchemaByModelWithProjectAsJSON500Response struct { +} + +func (response SchemaByModelWithProjectAsJSON500Response) VisitSchemaByModelWithProjectAsJSONResponse(w http.ResponseWriter) error { + w.WriteHeader(500) + return nil +} + type SchemaFilterRequestObject struct { ProjectIdOrAlias ProjectIdOrAliasParam `json:"projectIdOrAlias"` Params SchemaFilterParams @@ -2954,6 +3304,55 @@ func (response SchemaFilter500Response) VisitSchemaFilterResponse(w http.Respons return nil } +type SchemaByIDWithProjectAsJSONRequestObject struct { + ProjectIdOrAlias ProjectIdOrAliasParam `json:"projectIdOrAlias"` + SchemaId SchemaIdParam `json:"schemaId"` +} + +type SchemaByIDWithProjectAsJSONResponseObject interface { + VisitSchemaByIDWithProjectAsJSONResponse(w http.ResponseWriter) error +} + +type SchemaByIDWithProjectAsJSON200JSONResponse SchemaJSON + +func (response SchemaByIDWithProjectAsJSON200JSONResponse) VisitSchemaByIDWithProjectAsJSONResponse(w http.ResponseWriter) error { + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(200) + + return json.NewEncoder(w).Encode(response) +} + +type SchemaByIDWithProjectAsJSON400Response struct { +} + +func (response 
SchemaByIDWithProjectAsJSON400Response) VisitSchemaByIDWithProjectAsJSONResponse(w http.ResponseWriter) error { + w.WriteHeader(400) + return nil +} + +type SchemaByIDWithProjectAsJSON401Response = UnauthorizedErrorResponse + +func (response SchemaByIDWithProjectAsJSON401Response) VisitSchemaByIDWithProjectAsJSONResponse(w http.ResponseWriter) error { + w.WriteHeader(401) + return nil +} + +type SchemaByIDWithProjectAsJSON404Response struct { +} + +func (response SchemaByIDWithProjectAsJSON404Response) VisitSchemaByIDWithProjectAsJSONResponse(w http.ResponseWriter) error { + w.WriteHeader(404) + return nil +} + +type SchemaByIDWithProjectAsJSON500Response struct { +} + +func (response SchemaByIDWithProjectAsJSON500Response) VisitSchemaByIDWithProjectAsJSONResponse(w http.ResponseWriter) error { + w.WriteHeader(500) + return nil +} + type AssetFilterRequestObject struct { ProjectId ProjectIdParam `json:"projectId"` Params AssetFilterParams @@ -3191,6 +3590,54 @@ func (response FieldUpdate401Response) VisitFieldUpdateResponse(w http.ResponseW return nil } +type SchemaByIDAsJSONRequestObject struct { + SchemaId SchemaIdParam `json:"schemaId"` +} + +type SchemaByIDAsJSONResponseObject interface { + VisitSchemaByIDAsJSONResponse(w http.ResponseWriter) error +} + +type SchemaByIDAsJSON200JSONResponse SchemaJSON + +func (response SchemaByIDAsJSON200JSONResponse) VisitSchemaByIDAsJSONResponse(w http.ResponseWriter) error { + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(200) + + return json.NewEncoder(w).Encode(response) +} + +type SchemaByIDAsJSON400Response struct { +} + +func (response SchemaByIDAsJSON400Response) VisitSchemaByIDAsJSONResponse(w http.ResponseWriter) error { + w.WriteHeader(400) + return nil +} + +type SchemaByIDAsJSON401Response = UnauthorizedErrorResponse + +func (response SchemaByIDAsJSON401Response) VisitSchemaByIDAsJSONResponse(w http.ResponseWriter) error { + w.WriteHeader(401) + return nil +} + +type SchemaByIDAsJSON404Response struct { +} + +func (response SchemaByIDAsJSON404Response) VisitSchemaByIDAsJSONResponse(w http.ResponseWriter) error { + w.WriteHeader(404) + return nil +} + +type SchemaByIDAsJSON500Response struct { +} + +func (response SchemaByIDAsJSON500Response) VisitSchemaByIDAsJSONResponse(w http.ResponseWriter) error { + w.WriteHeader(500) + return nil +} + type ProjectFilterRequestObject struct { WorkspaceId WorkspaceIdParam `json:"workspaceId"` Params ProjectFilterParams @@ -3309,6 +3756,12 @@ type StrictServerInterface interface { // Returns a GeoJSON that has a list of items as features. // (GET /models/{modelId}/items.geojson) ItemsAsGeoJSON(ctx context.Context, request ItemsAsGeoJSONRequestObject) (ItemsAsGeoJSONResponseObject, error) + // Returns a metadata schema as json by model ID + // (GET /models/{modelId}/metadata_schema.json) + MetadataSchemaByModelAsJSON(ctx context.Context, request MetadataSchemaByModelAsJSONRequestObject) (MetadataSchemaByModelAsJSONResponseObject, error) + // Returns a schema as json by model ID + // (GET /models/{modelId}/schema.json) + SchemaByModelAsJSON(ctx context.Context, request SchemaByModelAsJSONRequestObject) (SchemaByModelAsJSONResponseObject, error) // Returns a list of models. // (GET /projects/{projectIdOrAlias}/models) ModelFilter(ctx context.Context, request ModelFilterRequestObject) (ModelFilterResponseObject, error) @@ -3345,9 +3798,18 @@ type StrictServerInterface interface { // Returns a GeoJSON that has a list of items as features. 
// (GET /projects/{projectIdOrAlias}/models/{modelIdOrKey}/items.geojson) ItemsWithProjectAsGeoJSON(ctx context.Context, request ItemsWithProjectAsGeoJSONRequestObject) (ItemsWithProjectAsGeoJSONResponseObject, error) + // Returns a metadata schema as json by project and model ID + // (GET /projects/{projectIdOrAlias}/models/{modelIdOrKey}/metadata_schema.json) + MetadataSchemaByModelWithProjectAsJSON(ctx context.Context, request MetadataSchemaByModelWithProjectAsJSONRequestObject) (MetadataSchemaByModelWithProjectAsJSONResponseObject, error) + // Returns a schema as json by project and model ID + // (GET /projects/{projectIdOrAlias}/models/{modelIdOrKey}/schema.json) + SchemaByModelWithProjectAsJSON(ctx context.Context, request SchemaByModelWithProjectAsJSONRequestObject) (SchemaByModelWithProjectAsJSONResponseObject, error) // Returns a schema. // (GET /projects/{projectIdOrAlias}/schemata) SchemaFilter(ctx context.Context, request SchemaFilterRequestObject) (SchemaFilterResponseObject, error) + // Returns a schema as json by project and schema ID + // (GET /projects/{projectIdOrAlias}/schemata/{schemaId}/schema.json) + SchemaByIDWithProjectAsJSON(ctx context.Context, request SchemaByIDWithProjectAsJSONRequestObject) (SchemaByIDWithProjectAsJSONResponseObject, error) // Returns a list of assets. // (GET /projects/{projectId}/assets) AssetFilter(ctx context.Context, request AssetFilterRequestObject) (AssetFilterResponseObject, error) @@ -3366,6 +3828,9 @@ type StrictServerInterface interface { // update a field // (PATCH /schemata/{schemaId}/fields/{fieldIdOrKey}) FieldUpdate(ctx context.Context, request FieldUpdateRequestObject) (FieldUpdateResponseObject, error) + // Returns a schema as json by schema ID + // (GET /schemata/{schemaId}/schema.json) + SchemaByIDAsJSON(ctx context.Context, request SchemaByIDAsJSONRequestObject) (SchemaByIDAsJSONResponseObject, error) // Returns a list of projects. 
// (GET /{workspaceId}/projects) ProjectFilter(ctx context.Context, request ProjectFilterRequestObject) (ProjectFilterResponseObject, error) @@ -3978,6 +4443,56 @@ func (sh *strictHandler) ItemsAsGeoJSON(ctx echo.Context, modelId ModelIdParam, return nil } +// MetadataSchemaByModelAsJSON operation middleware +func (sh *strictHandler) MetadataSchemaByModelAsJSON(ctx echo.Context, modelId ModelIdParam) error { + var request MetadataSchemaByModelAsJSONRequestObject + + request.ModelId = modelId + + handler := func(ctx echo.Context, request interface{}) (interface{}, error) { + return sh.ssi.MetadataSchemaByModelAsJSON(ctx.Request().Context(), request.(MetadataSchemaByModelAsJSONRequestObject)) + } + for _, middleware := range sh.middlewares { + handler = middleware(handler, "MetadataSchemaByModelAsJSON") + } + + response, err := handler(ctx, request) + + if err != nil { + return err + } else if validResponse, ok := response.(MetadataSchemaByModelAsJSONResponseObject); ok { + return validResponse.VisitMetadataSchemaByModelAsJSONResponse(ctx.Response()) + } else if response != nil { + return fmt.Errorf("unexpected response type: %T", response) + } + return nil +} + +// SchemaByModelAsJSON operation middleware +func (sh *strictHandler) SchemaByModelAsJSON(ctx echo.Context, modelId ModelIdParam) error { + var request SchemaByModelAsJSONRequestObject + + request.ModelId = modelId + + handler := func(ctx echo.Context, request interface{}) (interface{}, error) { + return sh.ssi.SchemaByModelAsJSON(ctx.Request().Context(), request.(SchemaByModelAsJSONRequestObject)) + } + for _, middleware := range sh.middlewares { + handler = middleware(handler, "SchemaByModelAsJSON") + } + + response, err := handler(ctx, request) + + if err != nil { + return err + } else if validResponse, ok := response.(SchemaByModelAsJSONResponseObject); ok { + return validResponse.VisitSchemaByModelAsJSONResponse(ctx.Response()) + } else if response != nil { + return fmt.Errorf("unexpected response type: %T", response) + } + return nil +} + // ModelFilter operation middleware func (sh *strictHandler) ModelFilter(ctx echo.Context, projectIdOrAlias ProjectIdOrAliasParam, params ModelFilterParams) error { var request ModelFilterRequestObject @@ -4325,6 +4840,58 @@ func (sh *strictHandler) ItemsWithProjectAsGeoJSON(ctx echo.Context, projectIdOr return nil } +// MetadataSchemaByModelWithProjectAsJSON operation middleware +func (sh *strictHandler) MetadataSchemaByModelWithProjectAsJSON(ctx echo.Context, projectIdOrAlias ProjectIdOrAliasParam, modelIdOrKey ModelIdOrKeyParam) error { + var request MetadataSchemaByModelWithProjectAsJSONRequestObject + + request.ProjectIdOrAlias = projectIdOrAlias + request.ModelIdOrKey = modelIdOrKey + + handler := func(ctx echo.Context, request interface{}) (interface{}, error) { + return sh.ssi.MetadataSchemaByModelWithProjectAsJSON(ctx.Request().Context(), request.(MetadataSchemaByModelWithProjectAsJSONRequestObject)) + } + for _, middleware := range sh.middlewares { + handler = middleware(handler, "MetadataSchemaByModelWithProjectAsJSON") + } + + response, err := handler(ctx, request) + + if err != nil { + return err + } else if validResponse, ok := response.(MetadataSchemaByModelWithProjectAsJSONResponseObject); ok { + return validResponse.VisitMetadataSchemaByModelWithProjectAsJSONResponse(ctx.Response()) + } else if response != nil { + return fmt.Errorf("unexpected response type: %T", response) + } + return nil +} + +// SchemaByModelWithProjectAsJSON operation middleware +func (sh *strictHandler) 
SchemaByModelWithProjectAsJSON(ctx echo.Context, projectIdOrAlias ProjectIdOrAliasParam, modelIdOrKey ModelIdOrKeyParam) error { + var request SchemaByModelWithProjectAsJSONRequestObject + + request.ProjectIdOrAlias = projectIdOrAlias + request.ModelIdOrKey = modelIdOrKey + + handler := func(ctx echo.Context, request interface{}) (interface{}, error) { + return sh.ssi.SchemaByModelWithProjectAsJSON(ctx.Request().Context(), request.(SchemaByModelWithProjectAsJSONRequestObject)) + } + for _, middleware := range sh.middlewares { + handler = middleware(handler, "SchemaByModelWithProjectAsJSON") + } + + response, err := handler(ctx, request) + + if err != nil { + return err + } else if validResponse, ok := response.(SchemaByModelWithProjectAsJSONResponseObject); ok { + return validResponse.VisitSchemaByModelWithProjectAsJSONResponse(ctx.Response()) + } else if response != nil { + return fmt.Errorf("unexpected response type: %T", response) + } + return nil +} + // SchemaFilter operation middleware func (sh *strictHandler) SchemaFilter(ctx echo.Context, projectIdOrAlias ProjectIdOrAliasParam, params SchemaFilterParams) error { var request SchemaFilterRequestObject @@ -4351,6 +4918,32 @@ func (sh *strictHandler) SchemaFilter(ctx echo.Context, projectIdOrAlias Project return nil } +// SchemaByIDWithProjectAsJSON operation middleware +func (sh *strictHandler) SchemaByIDWithProjectAsJSON(ctx echo.Context, projectIdOrAlias ProjectIdOrAliasParam, schemaId SchemaIdParam) error { + var request SchemaByIDWithProjectAsJSONRequestObject + + request.ProjectIdOrAlias = projectIdOrAlias + request.SchemaId = schemaId + + handler := func(ctx echo.Context, request interface{}) (interface{}, error) { + return sh.ssi.SchemaByIDWithProjectAsJSON(ctx.Request().Context(), request.(SchemaByIDWithProjectAsJSONRequestObject)) + } + for _, middleware := range sh.middlewares { + handler = middleware(handler, "SchemaByIDWithProjectAsJSON") + } + + response, err := handler(ctx, request) + + if err != nil { + return err + } else if validResponse, ok := response.(SchemaByIDWithProjectAsJSONResponseObject); ok { + return validResponse.VisitSchemaByIDWithProjectAsJSONResponse(ctx.Response()) + } else if response != nil { + return fmt.Errorf("unexpected response type: %T", response) + } + return nil +} + // AssetFilter operation middleware func (sh *strictHandler) AssetFilter(ctx echo.Context, projectId ProjectIdParam, params AssetFilterParams) error { var request AssetFilterRequestObject @@ -4536,6 +5129,31 @@ func (sh *strictHandler) FieldUpdate(ctx echo.Context, schemaId SchemaIdParam, f return nil } +// SchemaByIDAsJSON operation middleware +func (sh *strictHandler) SchemaByIDAsJSON(ctx echo.Context, schemaId SchemaIdParam) error { + var request SchemaByIDAsJSONRequestObject + + request.SchemaId = schemaId + + handler := func(ctx echo.Context, request interface{}) (interface{}, error) { + return sh.ssi.SchemaByIDAsJSON(ctx.Request().Context(), request.(SchemaByIDAsJSONRequestObject)) + } + for _, middleware := range sh.middlewares { + handler = middleware(handler, "SchemaByIDAsJSON") + } + + response, err := handler(ctx, request) + + if err != nil { + return err + } else if validResponse, ok := response.(SchemaByIDAsJSONResponseObject); ok { + return validResponse.VisitSchemaByIDAsJSONResponse(ctx.Response()) + } else if response != nil { + return fmt.Errorf("unexpected response type: %T", response) + } + return nil +} + // ProjectFilter operation middleware func (sh *strictHandler) ProjectFilter(ctx echo.Context, workspaceId 
WorkspaceIdParam, params ProjectFilterParams) error { var request ProjectFilterRequestObject @@ -4565,74 +5183,79 @@ func (sh *strictHandler) ProjectFilter(ctx echo.Context, workspaceId WorkspaceId // Base64 encoded, gzipped, json marshaled Swagger object var swaggerSpec = []string{ - "H4sIAAAAAAAC/+w9WW/buLp/RdC9j2qcOZ3zkrfcpCkyp22CcTrFRVEUjPTZ5kQmXZLKMoH/+wE3ibKo", - "zZaTOPFLG0sU9fHbN1KPYUznC0qACB4ePYYLxNAcBDD1C3EO4jy5lBfl7wR4zPBCYErCo/D8NKCTQMwg", - "4JBCLCAJ1ANhFGJ5f4HELIxCguYQHtm5wihk8CvDDJLwSLAMopDHM5gjOb94WMihXDBMpmEU3r+b0nfm", - "Ik4OjtUUp+FyGenpagAbLyDGEww8uJuBmAHTcAUJEihADAKYX0OSQBJgouBnwLNUcAv4rwzYwwrkoQvn", - "/zKYhEfh/4wK5I30XT5Soz+oF8hFSFhjOp8D6YVI84gflfl8myDzxEyi0TnBkCbnyQX7Dzw0QMmCG3iw", - "wKpnLArnNIGUB+b1XrDdd6wNuR51cKbmOtVzyQVgAfM+CJbj/WDqmTZB7bmcQeP1Bh7uKKuDy9wN8ol8", - "7GcGhfUAyBcp/PckoHrGEnDB6N8Q13CcO/vamFGTHLhEM9O2Uq03oJtQ77OaQpNvgaZQA91XDkkgqOEo", - "DRmaQg0Rza0CiAQmKEtFePRbFM4xwfNsrv62cBABU2AaCGCXg8Gh5/KD8u/DKJyjewPL4WE7ZJoUkjGO", - "U4x4I+MhOcJStJGIq9OuTU0zkeI5PVMJ6u7aohu4jXCucNmleUjzGYNJN/KigMFEYvMWWA2JpW3ykjdM", - "kQAuFwFE0vR7cWGRXac4Dn9EHs2iZ+qCLTWwZBD8CLMzbiKlYz2HRh+nTJxi1oLCBCaYgAKOsgRYkGAG", - "sRxkV8CALyjhEKSYiyi4w2kaXEOAp4QyaTMmzsOYB4SKYMGAAxGQ1FAjwayGGhJIhxZI/VIX/WSgTPRd", - "oG9ZNXDK6WsAjRkgAcmxyznutWyRmL+9gN9RdsMXKIY+Apc/5OcgZ87OQofimGZEJHSOMDn4ls8gWUiJ", - "oEaScny/UHFGM5J8YIyyKsBXCqm/MuASVgacZiyG4A5pnpjIR8NlFH4lKBMzyvA/UDfVcRwD54GgN0Ak", - "T80x55hMpYhjcotSnDhCqGA7AyQyBspbZ3QBTGAN9BToHAR7aPNQP9px0m1Kevgz0coLzQh6rXTj0hL/", - "MecSC6qXLypPm9EnNE21WFaXONFD1N/ST+Nta7UQFO9DjKGHBmCd13cD+yPQP8YXX3YG2JxHytDGlLIE", - "E2kR5E9K4GISHn1vhviSYiLnbR71OUsF7jb0EyYwNvB3mbXH+EuaPkwp6QqtGfxjGVnBwj1I6cpYGy01", - "ZqLQQVMUOgszd0pXLHz5U/anfXFvznCm77pIS1LpJp7rB/5VXe4q8F1nL5HWP6sGoDe4NXNpFHafzbJT", - "Zb4qWBPK5kgZfZpdp9KomWdINr+WzrRyvA0O37cg1AfpZggoXvd79aZOf1T0BWLxDN/Ch3vBkOKzsUAi", - "4y5jL4AkNq79uWB0yoBLZz6hRKJggnAKiYc9ozCmRAARV0ZSqvdz96OEXCTgncBzB7/FIxOcQhuC1Jiu", - "VjHPRlmvxAPngsEthrurFYnHcxOhyf9/8ls5+xSo/vfn++TnFU6Bm5/zW6kPlDv98710d2J+K70uckPo", - "HfGir4hI2pfhBCJRKKhA6Rj/466mYNHC0euM9Yyl/nxF4bN9l+iOSlGUfCpq9TEL3bWSc3MwjVI5k3QL", - "FcOlHGr4Tafbqlyu/Ld2RCKiZUUNXyV3xk2wJmDKBPLr5Jzph2L4TkxczgJWEBtTkmC/K4ZI0lnxFNN4", - "lM814jj2eE86W9guspAmYxU2UMWkcg4ktKttCQC/MpRKeSJUfNB/+whwi9JMEs6LimtK0xcFpb0jAQNE", - "KlJlQXNeZh/2ydBcGsFFCttdIyZxmiXAj8mDXuh56UJ+W4mteztNm5Fh+bDCYJthhWRpiq63jRWYL4TB", - "xwf1ZzeXzWjmrYI2VYqHXc2Q9C5T4Nz86dy4YIpdr6gzorjWhYetjdmMWBryzTUSzx3V7eFVKnuEiRH3", - "k+IXF4gJ/g2rdAeQxP5JqBi7tySv2LtdUFxje3uiWBmbrSLmGiaUSYOGJkKZTX3hgl0Qe9H8TSdXM8y/", - "AdzkPz5TopCjf/0/INaMmy6WdBOE+aRWTeDJ3jCaLTomYz7Ksdpj62TlTbUs1AUpr4Nhg9Im+qlVKuem", - "zViWKd3EL90hXw2bldeo/CJMySkS4Pz8qj2uOU3wBMfuCPeSGcV14GIpE4VzEEi9uKMetqHFSkJlhtOE", - "QfeI0kYfq+qoLRiqjz6QmHlvcL+H71ubTuNXF9ffHy3lPR/X9Ffz4lw9J6eIi8+KypB0h07SPEECjTsV", - "+U2GufJcJ54uSheNkeOaIZwp5Xj8w64dDMXi+HqL6h0n+hjPltyqgYcq5A0UJA3ClCX811J0jejZLXO0", - "BaAN1Q0PdgtOKCP3CosUzqyF6q6c10rIQJp0T17p/zVoHiWJ+zLpmgJWj8wzv10fykC78VlVsgsPxXe3", - "p3H3rbG47ZhgAfdCkhbuxTEDFEYhw/HsSl+dI3aT0DvpqcUziG+u6X0Y5c1UiTbHKqqOQl33szkSZZWN", - "UVK1cWBAVClQJ4a0qxSFApnEmcp4X1ybEr298CHB0vvwOoDAOKYEEulNDWLdenLzZCM+LgpymH82BshP", - "eWuezgYCz/bX+N0NZtsIq4nvLFP+Wo3fWbwgp3Zy3iu3Xaaof+LybK2grKGwDRQdVr70xQoc4oxh8aD0", - "lGbFa0AM2HGmPTm1WkVidbmYdibEQtexMZnQapn5T/iAmJi9O/k8Ds5VHlJ5wMHx5bmcRGr91lH54sLf", - "Dg4PDk0AR9ACh0fh+4PDg/eh9jkV4LoRko8eTePnUgOVglAqRAdPmBLJTKHKpZ/qmyul+H8dHuryZJ4c", - "RYtFajz40d9cY7vOnvXL5HuIsuojaAXGdSvFMgp/1+CtNDToyr3tEQjyrtpAR03qud/qWDpf/qjaP6Ce", - "/L36xi9F24Hko2w+R+xBNZlInOZ9uQJNuVTcasU81MVNUUOPj+qRjYjR2ii7+xiegmhCr9tQXVNML4aM", - 
"Sg3XqvZcEaORqVWYlo864pnE/ifd8TOgRLmv75hq07UVX2a2m7TZbuiXzhNGeytCu3r7+4/lDy/L5Aur", - "8E5xZ2MmisIF5S1scqIcHtNKBVz8H00eNuKRukqWn+blBq7lFpVOzoxVVtt9vtJuax/WalQwo8d8o0G7", - "8TaM9Gw2vLGQWSW2tYukjK3d5wHX4D+Feolax6/sflEKCYl41sxIXxfJm9dIGgfB8atjUrswh96takr5", - "GqNHvTunUR/JCPDZ9JCz96eHEsImaH0duofY3VWWojqUd6KN1eBUZIxw++BBXltzKaqjkX6qKt9W0UFN", - "ORsK5aq2Ju8riYoqVxy/ZHaIwn/7YRLACEoDDuwWWAB6vj7M42GCKvv0I7+7J7BkdrxqtpH7BjZHeU9l", - "n52kwyUZh80KPrcF3UtUo5VtEKiqXW3PLshnX0lyQb7hVeYWNMWdTeQlwm/i/FdUqje14PDIPrPwmjIL", - "3RmrQbV0zSs4XLR7aYUSnl6LZ/8kWmXIjILDQvuEgptQeF3sadYlqR2cdNNN+ryS0aMpaDcqItV09mwq", - "yD2PorMCMtvvn4Gy6yQM8sMCLMnUmrtkDPST1ZhNTbDlAqZBsSfeCP4YX3wJlCca0EmQcWABQXPgbzao", - "L+jkIXE/Y1E6NKZDWN/IIgNbhbZ2wrrmrpqmwec2JW0crqGSLL4T6qbKERVm9JmGEZ4vKBOrx6Otw6mZ", - "qLEu5/oVg+aXznvuYrUdS87mH6B/cLWtXL3Q10lnm+1UKuk/tb2LAgkYr57t4nYmC4YETB/Ke8U4sGLz", - "p/pDXfnRtkejOGvOrMl5gXeThm6uREyM5APvbD9dHXptw3/e43WNCVKnqVT7At8qUgdXVCuukz6T54Rm", - "JSc8P50qMittHiI9w4b7td2OTfsSCNydDdrabPoQa+Hs4hdWFLbWaYHZ+/Isbsm2nA6tS/VpjxlJgJWP", - "Fqr1Nf263xKwxQ9NMdd4VcHGHRazYIJTAZJdAkQSfSITJlN/feFMje1d4CoOheoQNpdOyeowvjj8rstg", - "95C6DuPXLc21jy4duahLeUOYVE3NHvtKfWI5sA4ctkNaHRt49Nh46F9+GmH7QHWWRK608rGH0SYKbB9T", - "1emcIQqmVYe1Pr0/bF5/X9lcr7L5oqRi/YpCTW3Sb40PYn7bwSKfjP8KxAyJYIaqBhrxwB7P5jfI/Jif", - "jP/qbZCfyGa2N6gIuBcjg6iC2VoDFh+L6XsBJgqlZoo3q3T7sFU5A10cIis5a2Pt3CAgU6BW0bQIiTm8", - "cDNBsScg7qywrK+h7dK9gmORW+yifJsi05fJyqagOGFzEJExUshHj6vHTC+NOPWI9vQDNbnlPKYb0OUv", - "IOzk1+Rp2b2v/0p8/YLjNi6k+E9v325ioDamUKsYOKjYV2N2IQJoTsi1q+vc+1HflVgp5JdXflqqN9eo", - "bT3oGxazy9xhe9E1/6viCxnJW1OQVYoO3kIwICfsuwleXDfB2kaw+sGdYXoRVtltbwh30RC+hL7/ljaH", - "3pZ1VCRpn1fGvA6kyvhqB3IbIvQij2V6WrErlak7CF+eVN8JL9Qe/mhlxVQQNpOV0aP70btG39QaN+fL", - "eknVUCioXoKHmp9Y1i2gtyt6s/6pQsCBj7+e02Npf6j6YciGdnu1pu25MXsd/Dp1cGYdloF18JP27pQZ", - "ft/Gs90d9vtGmH06oGsjTF54ff7sgIlcyqg4yVshzmvPH9heYLPvvHljnTdVdquTlg2s7tP06DjysG/X", - "2bfrvKx2nUFtxyai+KTdQCWR3DcG7RuDttYY5Ajo+g1CTyqkmjf01rIWSdRDqzI2NvH/vrNokODJfLJe", - "P/CGA6iC3ayIjS2v7kZfUe9tQTWyujTH4fbIGpnjynuljdSu2ze358uzM+uZUkDm7Pkd0147cTD++tkb", - "vbyD6kmwA+RvuuZgAgJ35rRdKcFMAalqcuabMPpmjUQP3L/Ib/DiFOTKGHD7XZEEJihLRXikvmbsfqFU", - "sAwiX4mD3oC/76P268xPuUO+0yo937odfHPlBh+r2H1xLLJCuQQ0SmKL/Rxli5Qi0yzilbhzzjMpcF//", - "/KREDQWKTwNBA/1sfjh7jbB9VaNykdtYMQx6LtgnINPSxxcdoxBnjOuvYm7ScLUc+ODNdrDbPkMJ9/5v", - "lw+gf2q+C6IZ5TWcX1Zh+EbBs4Hk6NF+oXH95iw7Q4/+qn1Bf1/QH6Cpqp6LG9umahuiXn4X1C7SMil1", - "MA3RwLSicbbXg7TXU3s9NUDj0aPzGd5l7vT2yBDlj6z6saZQso107sCJDHfVnTIs1mHx5Fi2m9LNIX1O", - "r7D5yS9UnEkncesnYdWzoutiXlqM9VfljmQ8ye7R5XL53wAAAP//H0uKi7KeAAA=", + "H4sIAAAAAAAC/+w9W2/bONZ/RdA3j2qc2c6+5C2bNIPMTptgk+7gQ1EUtHRscyOTLknlsoH/+4I3XSxK", + "omw5sRO/tLFEUofnfhP1HMZ0vqAEiODhyXO4QAzNQQBTvxDnIC6Ta3lR/k6AxwwvBKYkPAkvzwM6CcQM", + "Ag4pxAKSQE0IoxDL+wskZmEUEjSH8MSuFUYhg58ZZpCEJ4JlEIU8nsEcyfXF00IO5YJhMg2j8PHDlH4w", + "F3FydKqWOA+Xy0gv1wDYzQJiPMHAg4cZiBkwDVeQIIECxCCA+RiSBJIAEwU/A56lglvAf2bAnlYgD8tw", + "/sJgEp6E/zcqkDfSd/lIjf6kHiA3IWGN6XwOpBcizRQ3KvP1NkHmmVlEo3OCIU0ukyv2T3hqgZIFd/Bk", + "gVVzLArnNIGUB+bxTrDLz1gbcj3q6EKtda7XkhvAAuZ9ECzHu8HUK22C2ku5gsbrHTw9UNYEl7kb5Au5", + "2M8MCpsBkA9S+O9JQDXHEnDB6H8gbuC48uprY0YtclQmmlm2k2q9Ad2Eep/VEpp8CzSFBui+ckgCQQ1H", + "acjQFBqIaG4VQCQwQVkqwpNfo3COCZ5nc/W3hYMImALTQAC7HgwOvZYblL8fR+EcPRpYjo+7IdOkkIxx", + "mmLEWxkPyRGWoq1EXF12bWqahRTP6ZUqUPtrCz9wW+Fc4bJrM0nzGYOJH3lRwGAisXkPrIHE0jY5yRum", + "SACXmwAiafqtuLDIximOw++RQ7PolXywpQZWDIIbYXbFTaT0Rq+h0ccpE+eYdaAwgQkmoICjLAEWJJhB", + "LAfZHTDgC0o4BCnmIgoecJoGYwjwlFAmbcakNBnzgFARLBhwIAKSBmokmDVQQwJZogVSv9RFNxkoE303", + 
"6NpWA5xy+QZAYwZIQHJa5pzytWyRmL+dgD9QdscXKIY+ApdPcnNQaU1voUNxTDMiEjpHmBz9la8gWUiJ", + "oEaScny/UHFBM5J8YoyyOsC3Cqk/M+ASVgacZiyG4AFpnpjIqeEyCr8SlIkZZfi/0LTUaRwD54Ggd0Ak", + "T80x55hMpYhjco9SnJSEUMF2AUhkDJS3zugCmMAa6CnQOQj21OWh/m7HSbcp6eHPRCsPNCPoWOnGpSX+", + "c84lFlQnX9Rmm9FnNE21WNa3ONFD1N/ST+Nde7UQFM9DjKGnFmBLj/cD+3egf9xcfdkbYHMeqUIbU8oS", + "TKRFkD8pgatJePKtHeJriolct33U5ywV2G/on5jAjYHfZ9Ue469p+jSlxBdaM/j7MrKChXuQsixjXbTU", + "mInCEpqisLQxc6dyxcKXz7I/7YN7c0Zped9NWpJKN/FST/hbfburwPuuXiGte1UNQG9wG9bSKPRfzbJT", + "bb06WBPK5kgZfZqNU2nUzBySzcfSmVaOt8Hhxw6EuiDdDAHF436r39Tpj5q+QCye4Xv49CgYUnx2I5DI", + "eJmxF0ASG9f+WDA6ZcClM59QIlEwQTiFxMGeURhTIoCIWyMp9fu5+1FBLhLwQeB5Cb/FlAlOoQtBaoyv", + "VcyzUdYrccC5YHCP4eF2ReLx3ERo8v8f/F6uPgWq//3xMflxi1Pg5uf8XuoD5U7/+CjdnZjfS6+L3BH6", + "QJzoKyKS7m2UApEoFFSg9Ab/t7ybgkULR88b6xlL3fmKwmf7JtEdVaIoOSvq9DEL3bWScythGqVyJekW", + "KoZLOTTwm0631blc+W/diEREy4oavkrujJtgTcCUCeTWyTnTD8XwXkxczQLWEBtTkmC3K4ZI4q14imUc", + "ymeMOI4d3pPOFnaLLKTJjQobqGJSuQYS2tW2BICfGUqlPBEqPum/XQS4R2kmCedExZjSdKegtHckYIBI", + "TaosaKWH2ckuGZpLI7hIYbt7xCROswT4KXnSG72sXMhvK7Et307TdmRYPqwx2GZYIVmaovG2sQLzhTD4", + "+KT+9HPZjGbeKmhTpXjY7QxJ7zIFzs2fpRtXTLHrLS2NKK758LC1MZsRS0O+uUbiuaO6PbxKZY8wMeJ+", + "VvziAjHB/8Iq3QEksX8SKm7KtySv2Ls+KG6wvT1RrIzNVhEzhgll0qChiVBmU1+4YlfEXjR/08ntDPO/", + "AO7yH58pUcjRv/4fEGvHjY8l3QRhLqlVCziyN4xmC89kzO9yrPbYvKy8qZaFuiDldDBsUNpGP7VL5dx0", + "Gcsqpdv4xR/y1bBZeY3KL8KUnCMBpZ9ftcc1pwme4Lg8onzJjOI6cLGUicI5CKQe7KmHbWixklCZ4TRh", + "4B9R2uhjVR11BUPN0QcSM+cN7vbwXXvTafz65vr7o5W85/Oa/mpenGvm5BRx8VlRGRJ/6CTNEyTQjVeR", + "32SYa/O8eLooXbRGjmuGcKaU4/APfTsYis3x9TbVO050MZ4tudUDD1XIGyhIGoQpK/hvpOga0XO5zNEV", + "gLZUNxzYLTihitxbLFK4sBbKXzmvlZCBNPFPXun/NWgOJYn7MumaAtaMzAu3XR/KQJfjs7pkFx6K625P", + "4968R3eh4xfXHpdR+Etjw0q34K3IfKK9dZReV5/cyTAS4tKcpdOnFSm0OkXtHqG6W4H4eysCq1uobrQL", + "LVa0XKrKX4oUGXXO+U8g04qXkPd3lHpB/BLYtlfEa/QgSHfhueDjkq8o4FFIKOBRnDJAYRQyHM9u9dU5", + "YncJfZAhRTyD+G5MH8Mo7/pLtN+o0j9RqAvUNpmn3EezJ9XEAQyIqlnrDKb26aNQIJPhVaWZq7HpJbEX", + "PiVYusnOSAUYx5RAIt3+Qdywnmp3spHCLSrHmH82npJbRVk/6mIg8GwjmNsvZrbftV6hyTIVWDSwZfGA", + "nNrJZa8iTJWi7oWrq3WCsoZnYaDw2PnSFdRyiDOGxZMyqJoVx4AYsNNMKxO1W0VidblYdibEQjdcYDKh", + "9X6If8EnxMTsw9nnm+BSJcxVqBacXl+GudboGJVvLvz16Pjo2GQaCFrg8CT8eHR89DHUwZECXHfs8tGz", + "6VBeaqBSEEqF6CgfUyKZKVRFn3N9c6Vn5G/Hx7qOnmfx0WKRmlBz9B+usd3kePUrOTmIsmpTtQLjuudn", + "GYW/afBWOm90i4ltZgny9u9Ah/dq3q9NLJ1vf1RvdFEzf6s/8UvRHyP5KJvPEXtS3VASp3kDuUBTLhW3", + "2jEPdRVeNNDjdzVlI2J0dnTvP4anINrQW+78b+j6KIaMKm8GqCaJmhiNTFHN9CY1Ec9UoP7UrWkDSlT5", + "8Z45YV0EdJUQ/KTNtu3vOk8Y7a0IXdbb374vvztZJt9YjXeKOxszURQuKO9gkzPl8JieP+DiHzR52ohH", + "mkqubppXOw2XW1Q6OTPWWW3/+Uq7rX1Yq1XBjJ7zN2K6jbdhpFez4a0V9zqxrV0kVWztPw+UDf5LqJeo", + "c/zKa1pKISERz9oZ6esiefcaSeMgOH1zTGo3VqJ3p5pSvsboWb9G1qqPZAT4anqo9JJaDyWETdD6NnQP", + "sa8BWorqUL4UbawGpyJjhNuJR3kRuExRHY30U1X5+z8eaqr05qvc1dbkfSVRUeeK011mhyj8uxsmAYyg", + "NODA7oEFoNfrwzwOJqizTz/yl19erZgdp5pt5b6BzVHe/NvnlefhkozDZgVf24IeJKrVyrYIVN2udmcX", + "5Nw3klyQT3iTuQVN8dJpBxXCb+L811SqM7VQ4pFDZuEtZRb8GatFtfjmFUpctH9phQqe3opn/yJaZciM", + "QomFDgmFckLhbbGn2ZekdnDmp5v0wTqjZ1PQblVEqjvy1VRQ+eAUbwVkzol4BcqukzDIT7WwJFN79skY", + "6Jn1mE0tsOUCpkGxI94I/ri5+hIoTzSgkyDjwAKC5rpR6l0G9QWdHCTuZywqpxt5hPWtLDKwVejqM2vq", + "Qmzobn1tU9LF4RoqyeJ7oW7qHFFjRpdpGOH5gjKxeo7fOpyaiQbrcqkfMWh+6bLn69ZFE2T+lhrQP7g6", + "/0A90NVJZ5vtVCrpn41NtgIJuFltWy230AuGBEyfqi81cmDFW8rqD3Xle9fLRMWhiGZPpQc43ybSXcCI", + "iZGc8MH20zWh176Zkvd4jTFB6tifls7Sd4bUwRXViuukD486o1nFCS+12eqdtg+RnmHL/cZux7YXaAg8", + "XAzag2/6EBvh9PELawpb67TAvKT1Km7JtpwOrUv1saQZSYBVz8Bq9DXdut8SsMMPTTHXeFXBxgMWs2CC", + 
"UwGSXQJEEn10GCZTd33hQo3tXeAqTi/zCJsrx7l5jC9OafQZXD5N0WP8uqW57tGVs0F1KW8Ik6qp2eMF", + "aJdYDqwDh+2QVudbnjy3nk6ZH5vZPVAdepIrrXzscbSJAjvEVE06Z4iCad1hbU7vD5vXP1Q216ts7pRU", + "rF9RaKhNuq3xUczvPSzy2c2/AzFDIpihuoFGPLDnCLoNMj/lZzf/7m2QX8hmdjeoCHgUI4Oogtk6AxYX", + "i+l7ASYKpWaJd6t0+7BVNQNdnHYsOWtj7dwiIFOgVtF0CIk5ZXMzQbFHde6tsKyvoe3WnYJjkVu8Rfk+", + "RaYvk1VNQXEU7HZExroMP8w3EDwlx06z53MjHsiZwfjJnKt/eV5POVdOtfjHkwo9T7kRnq0xafnt5I7U", + "wDsuUHjRM08aVM81icItMmg/vuzBjgc23D029OK+LXCdcU746Hn1MxFLw5E9kmB6QkPJLU91DZgJKSD0", + "CvfyatUhBfJGUiAFx21cX3Z/fWW7+dLGVIvaxcC5lkOReh8SI+11im51nTsQ6rtQK/1N1Z2fV9pwGtS2", + "HvQXFrPrPI7d6Vao2+ILV8l7U5B1ig7eWTUgJxyarHauyWptI1j/YN4wLVqr7HYwhPtoCHfhdaiO7q/e", + "lnVU1K5eV8acDqQqhGkHchsitJPHKr6s2FW6dzyEL6817oUXag9vtrJiCqubycroufzR2lbf1Bq30pdx", + "k7qhUFDtgoeanzjqF9DbHb1b/1Qh4MjFX6/psXRPqn/YueUtJLWn7bkxBx38NnVwZh2WgXXwi7Y0Vhn+", + "0N243YNHDv2Bh3SAb39g3o/y+tkBE7lUUXGWd4hdNh7Lsr3A5tCQ+M4aEuvs1iQtG1jdl2ldLMnDoYvx", + "0MW4W12Mg9qOTUTxRZskKyJ56Jc89EturV+yJKDr903ugJAO35ZpnqxC134tmhXpPbTJ7Xa3ZgOZh+7c", + "3AER2bQx1EsgDoKwZ/2iHfy/V3yvd6cPY/Bi76MG/j00nQ6UVzP8pifw9y53RzXBEmhPWk57v0jvK6uj", + "Z/t902EtlLnbYqIuzw/2ab/sU5mmr26gLNt2MPzSfDGlRwXNfNGqVwlNHcz07o4FcRze8UrlMPN5sj0z", + "13vx7bT1K1l6e0f1j4UMUMvyrUcFBB7MB1mkBDMFpOpPMp8N1TcbJHrgdzn4HV6cg9wZA24/PZnABGWp", + "CE8mKOUQhSRLUzROQdeHIle7B70Ddw9sxlK/VtdtHqLmtcvVXW3j/J0Nvme4/+JYVMhyCWiVxA77OcoW", + "KUWmcdYpcZecZ1Lgvv7rTyVqKFB8Ggga6Ln597sahO2rGpWL3MaKYdCjo9s+ER1njFO2afP5cuBvM3SD", + "rYfcuj81HYUEHt0f2R5A/zR8OlIzyls44rrG8K2C54rG1m1UX3WMPXrND82Nh+bGARrMm7m4tYW8sTl8", + "9zvC95GWSaWbe4hm7hWNs71+7IOeOuipAZqwt5H79Ml3HpKcO5rk3EZi05WffH6g7I4vUAyS5Wy01SM1", + "mU9ZZTKTRt9G4WzgDFp5116pPespO5J72y2e5ZC+pqS0z/xCxYUUnK2f0t3MiuXY5tpirL+4lCTjRY5w", + "WS6X/wsAAP//hHQYUfexAAA=", } // GetSwagger returns the content of the embedded swagger specification file diff --git a/server/internal/adapter/publicapi/api.go b/server/internal/adapter/publicapi/api.go index 4385fa8242..6cee233250 100644 --- a/server/internal/adapter/publicapi/api.go +++ b/server/internal/adapter/publicapi/api.go @@ -47,6 +47,8 @@ func PublicApiItemOrAsset() echo.HandlerFunc { var err error if m == "assets" { res, err = ctrl.GetAsset(ctx, p, i) + } else if i == "schema.json" { + res, err = ctrl.GetSchemaJSON(ctx, p, m) } else { res, err = ctrl.GetItem(ctx, p, m, i) } diff --git a/server/internal/adapter/publicapi/schema_export.go b/server/internal/adapter/publicapi/schema_export.go new file mode 100644 index 0000000000..fd58d04df9 --- /dev/null +++ b/server/internal/adapter/publicapi/schema_export.go @@ -0,0 +1,69 @@ +package publicapi + +import ( + "context" + + "github.com/reearth/reearth-cms/server/pkg/exporters" + "github.com/reearth/reearth-cms/server/pkg/model" + "github.com/reearth/reearthx/rerror" + "github.com/samber/lo" +) + +func (c *Controller) GetSchemaJSON(ctx context.Context, pKey, mKey string) (SchemaJSON, error) { + pr, err := c.checkProject(ctx, pKey) + if err != nil { + return SchemaJSON{}, err + } + + m, err := c.usecases.Model.FindByIDOrKey(ctx, pr.ID(), model.IDOrKey(mKey), nil) + if err != nil || !m.Public() { + return SchemaJSON{}, rerror.ErrNotFound + } + + sp, err := c.usecases.Schema.FindByModel(ctx, m.ID(), nil) + if err != nil { + return SchemaJSON{}, rerror.ErrNotFound + } + + gsMap := exporters.BuildGroupSchemaMap(sp) + res := exporters.NewSchemaJSON(m.ID().Ref().StringRef(), lo.ToPtr(m.Name()), lo.ToPtr(m.Description()), exporters.BuildProperties(sp.Schema().Fields(), gsMap)) + return toSchemaJSON(res), nil +} + +func toSchemaJSON(s 
exporters.SchemaJSON) SchemaJSON { + return SchemaJSON{ + Schema: s.Schema, + Id: s.Id, + Title: s.Title, + Description: s.Description, + Type: s.Type, + Properties: toSchemaJSONProperties(s.Properties), + } +} + +func toSchemaJSONProperties(pp map[string]exporters.SchemaJSONProperties) map[string]SchemaJSONProperties { + res := map[string]SchemaJSONProperties{} + for k, v := range pp { + res[k] = SchemaJSONProperties{ + Type: v.Type, + Title: v.Title, + Description: v.Description, + Format: v.Format, + Minimum: v.Minimum, + Maximum: v.Maximum, + MaxLength: v.MaxLength, + Items: toSchemaJSONItems(v.Items), + } + } + return res +} + +func toSchemaJSONItems(pp *exporters.SchemaJSON) *SchemaJSON { + if pp == nil { + return nil + } + return &SchemaJSON{ + Type: pp.Type, + Properties: toSchemaJSONProperties(pp.Properties), + } +} diff --git a/server/internal/adapter/publicapi/types.go b/server/internal/adapter/publicapi/types.go index 4e1d495e91..664127d7e9 100644 --- a/server/internal/adapter/publicapi/types.go +++ b/server/internal/adapter/publicapi/types.go @@ -212,6 +212,26 @@ func NewItemAsset(a *asset.Asset, urlResolver asset.URLResolver) ItemAsset { } } +type SchemaJSON struct { + Id *string `json:"$id,omitempty"` + Schema *string `json:"$schema,omitempty"` + Description *string `json:"description,omitempty"` + Properties map[string]SchemaJSONProperties `json:"properties"` + Title *string `json:"title,omitempty"` + Type string `json:"type"` +} + +type SchemaJSONProperties struct { + Description *string `json:"description,omitempty"` + Format *string `json:"format,omitempty"` + Items *SchemaJSON `json:"items,omitempty"` + MaxLength *int `json:"maxLength,omitempty"` + Maximum *float64 `json:"maximum,omitempty"` + Minimum *float64 `json:"minimum,omitempty"` + Title *string `json:"title,omitempty"` + Type string `json:"type"` +} + // GeoJSON type GeoJSON = FeatureCollection diff --git a/server/pkg/asset/asset_test.go b/server/pkg/asset/asset_test.go index 2897a71a03..27a5955ba7 100644 --- a/server/pkg/asset/asset_test.go +++ b/server/pkg/asset/asset_test.go @@ -34,6 +34,7 @@ func TestAsset_Type(t *testing.T) { uuid: "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx", thread: thid, archiveExtractionStatus: &gotStatus, + flatFiles: false, } assert.Equal(t, aid, got.ID()) @@ -46,6 +47,7 @@ func TestAsset_Type(t *testing.T) { assert.Equal(t, &wantPreviewType, got.PreviewType()) assert.Equal(t, "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx", got.UUID()) assert.Equal(t, thid, got.Thread()) + assert.Equal(t, false, got.FlatFiles()) assert.Equal(t, &wantStatus, got.ArchiveExtractionStatus()) } diff --git a/server/pkg/asset/builder_test.go b/server/pkg/asset/builder_test.go index e233921d57..118aef9e7e 100644 --- a/server/pkg/asset/builder_test.go +++ b/server/pkg/asset/builder_test.go @@ -28,6 +28,7 @@ type Input struct { uuid string thread ThreadID archiveExtractionStatus *ArchiveExtractionStatus + flatFiles bool } func TestBuilder_Build(t *testing.T) { @@ -53,6 +54,7 @@ func TestBuilder_Build(t *testing.T) { uuid: "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx", thread: thid, archiveExtractionStatus: lo.ToPtr(ArchiveExtractionStatusPending), + flatFiles: false, }, want: &Asset{ id: aid, @@ -65,6 +67,7 @@ func TestBuilder_Build(t *testing.T) { uuid: "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx", thread: thid, archiveExtractionStatus: lo.ToPtr(ArchiveExtractionStatusPending), + flatFiles: false, }, }, { @@ -217,7 +220,8 @@ func TestBuilder_Build(t *testing.T) { Type(tt.input.previewType). UUID(tt.input.uuid). Thread(tt.input.thread). 
- ArchiveExtractionStatus(tt.input.archiveExtractionStatus) + ArchiveExtractionStatus(tt.input.archiveExtractionStatus). + FlatFiles(tt.input.flatFiles) if !tt.input.createdByUser.IsNil() { ab.CreatedByUser(tt.input.createdByUser) } diff --git a/server/pkg/asset/file_test.go b/server/pkg/asset/file_test.go index 414f3f71c6..664035b73b 100644 --- a/server/pkg/asset/file_test.go +++ b/server/pkg/asset/file_test.go @@ -27,6 +27,13 @@ func TestFile_FileType(t *testing.T) { dir := NewFile().Name("dir").Path("/aaa").Children([]*File{c}).Build() assert.True(t, dir.IsDir()) + + // object is nil test + f = nil + assert.Equal(t, "", f.Name()) + assert.Equal(t, uint64(0), f.Size()) + assert.Equal(t, "", f.ContentType()) + assert.Equal(t, "", f.Path()) } func TestFile_Children(t *testing.T) { @@ -60,14 +67,36 @@ func TestFile_Files(t *testing.T) { }, } - assert.Equal(t, []*File{ + tests := []struct { + name string + files *File + want []*File + }{ { - path: "aaa/a/a.txt", + name: "success", + files: f, + want: []*File{ + { + path: "aaa/a/a.txt", + }, + { + path: "aaa/b.txt", + }, + }, }, { - path: "aaa/b.txt", + name: "file object is empty", + files: nil, + want: nil, }, - }, f.FlattenChildren()) + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + flatten := tt.files.FlattenChildren() + assert.Equal(t, flatten, tt.want) + }) + } } func TestFile_SetFiles(t *testing.T) { @@ -194,5 +223,89 @@ func Test_FoldFiles(t *testing.T) { } func Test_File_RootPath(t *testing.T) { - assert.Equal(t, "xx/xxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx/hoge.zip", (&File{path: "hoge.zip"}).RootPath("xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx")) + tests := []struct { + name string + file *File + uuid string + want string + }{ + { + name: "success", + file: &File{path: "hoge.zip"}, + uuid: "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx", + want: "xx/xxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx/hoge.zip", + }, + { + name: "File object is nil", + file: nil, + uuid: "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx", + want: "", + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + result := tt.file.RootPath(tt.uuid) + assert.Equal(t, result, tt.want) + }) + } +} + +func Test_Clone(t *testing.T) { + tests := []struct { + name string + file *File + want *File + }{ + { + name: "success", + file: &File{ + name: "test", + size: 1, + contentType: "type", + path: "hoge.zip", + children: []*File{ + {name: "a.txt", path: "/hello/good/a.txt", size: 10, contentType: "text/plain"}, + {name: "b.txt", path: "/hello/good/b.txt", size: 10, contentType: "text/plain"}, + }, + }, + want: &File{ + name: "test", + size: 1, + contentType: "type", + path: "hoge.zip", + children: []*File{ + {name: "a.txt", path: "/hello/good/a.txt", size: 10, contentType: "text/plain"}, + {name: "b.txt", path: "/hello/good/b.txt", size: 10, contentType: "text/plain"}, + }, + }, + }, + { + name: "file is nil", + file: nil, + want: nil, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + cloned := tt.file.Clone() + assert.Equal(t, cloned, tt.want) + }) + } +} + +func Test_FilePath(t *testing.T) { + t.Parallel() + assert.Equal(t, + []string{ + "/hello/c.txt", + }, + (&File{ + name: "hello.zip", path: "/hello.zip", size: 100, contentType: "application/zip", + files: []*File{ + {name: "c.txt", path: "/hello/c.txt", size: 20, contentType: "text/plain"}, + }, + }).FilePaths(), + ) } diff --git a/server/pkg/asset/preview_type_test.go b/server/pkg/asset/preview_type_test.go index c7705d3df2..bdd324bce8 
100644 --- a/server/pkg/asset/preview_type_test.go +++ b/server/pkg/asset/preview_type_test.go @@ -212,7 +212,7 @@ func TestPreviewType_DetectPreviewType(t *testing.T) { f1 := file.File{ Name: "image.png", ContentType: "image/png", - } + } want1 := PreviewTypeImage got1 := DetectPreviewType(&f1) assert.Equal(t, want1, *got1) @@ -220,7 +220,7 @@ func TestPreviewType_DetectPreviewType(t *testing.T) { f2 := file.File{ Name: "file.geojson", ContentType: "application/json", - } + } want2 := PreviewTypeGeo got2 := DetectPreviewType(&f2) assert.Equal(t, want2, *got2) @@ -278,6 +278,10 @@ func TestPreviewType_PreviewTypeFromExtension(t *testing.T) { want6 := PreviewTypeGeoMvt got6 := PreviewTypeFromExtension(ext6) assert.Equal(t, want6, got6) + + want7 := PreviewTypeUnknown + got7 := PreviewTypeFromExtension("") + assert.Equal(t, want7, got7) } func TestPreviewType_String(t *testing.T) { @@ -316,3 +320,8 @@ func TestPreviewType_StringRef(t *testing.T) { }) } } + +func TestPreviewType_Prev(t *testing.T) { + t.Parallel() + assert.Equal(t, func() *PreviewType { pt := PreviewType("image"); return &pt }(), PreviewTypeImage.Ref()) +} diff --git a/server/pkg/asset/status_test.go b/server/pkg/asset/status_test.go index 7a6bf316db..7b6eac53d9 100644 --- a/server/pkg/asset/status_test.go +++ b/server/pkg/asset/status_test.go @@ -75,6 +75,10 @@ func TestStatus_StatusFromRef(t *testing.T) { res = ArchiveExtractionStatusFromRef(s) assert.Equal(t, &f, res) + s = lo.ToPtr("test") + res = ArchiveExtractionStatusFromRef(s) + assert.Nil(t, res) + s = nil res = ArchiveExtractionStatusFromRef(s) assert.Nil(t, res) diff --git a/server/pkg/asset/upload_builder_test.go b/server/pkg/asset/upload_builder_test.go new file mode 100644 index 0000000000..ad03a6dd52 --- /dev/null +++ b/server/pkg/asset/upload_builder_test.go @@ -0,0 +1,202 @@ +package asset + +import ( + "testing" + "time" + + "github.com/reearth/reearth-cms/server/pkg/id" + "github.com/stretchr/testify/assert" +) + +func TestUploadBuilder_NewUpload(t *testing.T) { + tests := []struct { + name string + want *UploadBuilder + }{ + { + name: "success", + want: &UploadBuilder{ + u: &Upload{}, + }, + }, + } + for _, test := range tests { + t.Run(string(test.name), func(t *testing.T) { + t.Parallel() + result := NewUpload() + assert.Equal(t, test.want, result) + }) + } +} + +func TestUploadBuilderUUID(t *testing.T) { + + tests := []struct { + name string + input string + want *UploadBuilder + }{ + { + name: "success", + input: "123", + want: &UploadBuilder{ + &Upload{ + uuid: "123", + }, + }, + }, + } + for _, test := range tests { + t.Run(string(test.name), func(t *testing.T) { + t.Parallel() + ub := NewUpload() + result := ub.UUID(test.input) + assert.Equal(t, test.want, result) + }) + } +} + +func TestUploadBuilderProject(t *testing.T) { + projectID := id.NewProjectID() + tests := []struct { + name string + input ProjectID + want *UploadBuilder + }{ + { + name: "success", + input: projectID, + want: &UploadBuilder{ + &Upload{ + project: projectID, + }, + }, + }, + } + for _, test := range tests { + t.Run(string(test.name), func(t *testing.T) { + t.Parallel() + ub := NewUpload() + result := ub.Project(test.input) + assert.Equal(t, test.want, result) + }) + } +} + +func TestUploadBuilderFileName(t *testing.T) { + tests := []struct { + name string + input string + want *UploadBuilder + }{ + { + name: "success", + input: "file.test", + want: &UploadBuilder{ + &Upload{ + fileName: "file.test", + }, + }, + }, + } + for _, test := range tests { + 
t.Run(string(test.name), func(t *testing.T) { + t.Parallel() + ub := NewUpload() + result := ub.FileName(test.input) + assert.Equal(t, test.want, result) + }) + } +} + +func TestUploadBuilderContentLength(t *testing.T) { + tests := []struct { + name string + input int64 + want *UploadBuilder + }{ + { + name: "success", + input: 2, + want: &UploadBuilder{ + &Upload{ + contentLength: 2, + }, + }, + }, + } + for _, test := range tests { + t.Run(string(test.name), func(t *testing.T) { + t.Parallel() + ub := NewUpload() + result := ub.ContentLength(test.input) + assert.Equal(t, test.want, result) + }) + } +} + +func TestUploadBuilderExpiresAt(t *testing.T) { + fixedTime := time.Date(2024, 1, 1, 0, 0, 0, 0, time.UTC) + tests := []struct { + name string + input time.Time + want *UploadBuilder + }{ + { + name: "success", + input: fixedTime, + want: &UploadBuilder{ + &Upload{ + expiresAt: fixedTime, + }, + }, + }, + } + for _, test := range tests { + t.Run(string(test.name), func(t *testing.T) { + t.Parallel() + ub := NewUpload() + result := ub.ExpiresAt(test.input) + assert.Equal(t, test.want, result) + }) + } +} + +func TestUploadBuilderBuild(t *testing.T) { + now := time.Now() + projectID := NewProjectID() + ubWithData := &UploadBuilder{ + u: &Upload{ + uuid: "1", + project: projectID, + fileName: "file.test", + contentLength: int64(1), + expiresAt: now, + }, + } + + tests := []struct { + name string + input time.Time + want *Upload + }{ + { + name: "success", + input: now, + want: &Upload{ + uuid: "1", + project: projectID, + fileName: "file.test", + contentLength: int64(1), + expiresAt: now, + }, + }, + } + for _, test := range tests { + t.Run(string(test.name), func(t *testing.T) { + t.Parallel() + result := ubWithData.Build() + assert.Equal(t, test.want, result) + }) + } +} diff --git a/server/pkg/asset/upload_test.go b/server/pkg/asset/upload_test.go new file mode 100644 index 0000000000..22069842be --- /dev/null +++ b/server/pkg/asset/upload_test.go @@ -0,0 +1,28 @@ +package asset + +import ( + "testing" + "time" + + "github.com/stretchr/testify/assert" +) + +func TestUpload_Upload(t *testing.T) { + t.Parallel() + projectID := NewProjectID() + timeNow := time.Now() + uploadWithData := &Upload{ + uuid: "1", + project: projectID, + fileName: "file.test", + contentLength: int64(1), + expiresAt: timeNow, + } + + assert.Equal(t, "1", uploadWithData.UUID()) + assert.Equal(t, projectID, uploadWithData.Project()) + assert.Equal(t, "file.test", uploadWithData.FileName()) + assert.Equal(t, int64(1), uploadWithData.ContentLength()) + assert.Equal(t, false, uploadWithData.Expired(timeNow)) + assert.Equal(t, timeNow, uploadWithData.ExpiresAt()) +} diff --git a/server/pkg/exporters/schema_json.go b/server/pkg/exporters/schema_json.go new file mode 100644 index 0000000000..8be4229217 --- /dev/null +++ b/server/pkg/exporters/schema_json.go @@ -0,0 +1,175 @@ +package exporters + +import ( + "github.com/reearth/reearth-cms/server/pkg/id" + "github.com/reearth/reearth-cms/server/pkg/schema" + "github.com/reearth/reearth-cms/server/pkg/value" + "github.com/samber/lo" +) + +const defaultJSONSchemaVersion = "https://json-schema.org/draft/2020-12/schema" + +type SchemaJSON struct { + Id *string `json:"$id,omitempty"` + Schema *string `json:"$schema,omitempty"` + Description *string `json:"description,omitempty"` + Properties map[string]SchemaJSONProperties `json:"properties"` + Title *string `json:"title,omitempty"` + Type string `json:"type"` +} + +type SchemaJSONProperties struct { + Description *string 
`json:"description,omitempty"` + Format *string `json:"format,omitempty"` + Items *SchemaJSON `json:"items,omitempty"` + MaxLength *int `json:"maxLength,omitempty"` + Maximum *float64 `json:"maximum,omitempty"` + Minimum *float64 `json:"minimum,omitempty"` + Title *string `json:"title,omitempty"` + Type string `json:"type"` +} + +func NewSchemaJSON(id, title, description *string, pp map[string]SchemaJSONProperties) SchemaJSON { + res := SchemaJSON{ + Schema: lo.ToPtr(defaultJSONSchemaVersion), + Type: "object", + Properties: pp, + } + if id != nil && *id != "" { + res.Id = id + } + if title != nil && *title != "" { + res.Title = title + } + if description != nil && *description != "" { + res.Description = description + } + return res +} + +func buildPropertiesMap(f schema.FieldList, gsMap map[id.GroupID]*schema.Schema) map[string]SchemaJSONProperties { + properties := make(map[string]SchemaJSONProperties) + for _, field := range f { + fieldType, format := determineTypeAndFormat(field.Type()) + fieldSchema := SchemaJSONProperties{Type: fieldType} + if field.Name() != "" { + fieldSchema.Title = lo.ToPtr(field.Name()) + } + if field.Description() != "" { + fieldSchema.Description = lo.ToPtr(field.Description()) + } + if format != "" { + fieldSchema.Format = lo.ToPtr(format) + } + + field.TypeProperty().Match(schema.TypePropertyMatch{ + Text: func(f *schema.FieldText) { + if maxLength := f.MaxLength(); maxLength != nil { + fieldSchema.MaxLength = maxLength + properties[field.Key().String()] = fieldSchema + } + }, + TextArea: func(f *schema.FieldTextArea) { + if maxLength := f.MaxLength(); maxLength != nil { + fieldSchema.MaxLength = maxLength + } + }, + RichText: func(f *schema.FieldRichText) { + if maxLength := f.MaxLength(); maxLength != nil { + fieldSchema.MaxLength = maxLength + } + }, + Markdown: func(f *schema.FieldMarkdown) { + if maxLength := f.MaxLength(); maxLength != nil { + fieldSchema.MaxLength = maxLength + } + }, + Integer: func(f *schema.FieldInteger) { + if min := f.Min(); min != nil { + fieldSchema.Minimum = int64ToFloat64(min) + } + if max := f.Max(); max != nil { + fieldSchema.Maximum = int64ToFloat64(max) + } + }, + Number: func(f *schema.FieldNumber) { + if min := f.Min(); min != nil { + fieldSchema.Minimum = min + } + if max := f.Max(); max != nil { + fieldSchema.Maximum = max + } + }, + Group: func(f *schema.FieldGroup) { + if gsMap != nil { + gs := gsMap[f.Group()] + if gs != nil { + fieldSchema.Items = BuildItems(gs.Fields()) + } + } + }, + }) + + properties[field.Key().String()] = fieldSchema + } + return properties +} + +func BuildProperties(f schema.FieldList, gsMap map[id.GroupID]*schema.Schema) map[string]SchemaJSONProperties { + return buildPropertiesMap(f, gsMap) +} + +func BuildItems(f schema.FieldList) *SchemaJSON { + return &SchemaJSON{ + Type: "object", + Properties: buildPropertiesMap(f, nil), + } +} + +func int64ToFloat64(input *int64) *float64 { + if input == nil { + return nil + } + value := float64(*input) + return &value +} + +func determineTypeAndFormat(t value.Type) (string, string) { + switch t { + case value.TypeText, value.TypeTextArea, value.TypeRichText, value.TypeMarkdown, value.TypeSelect, value.TypeTag, value.TypeReference: + return "string", "" + case value.TypeInteger: + return "integer", "" + case value.TypeNumber: + return "number", "" + case value.TypeBool, value.TypeCheckbox: + return "boolean", "" + case value.TypeDateTime: + return "string", "date-time" + case value.TypeURL: + return "string", "uri" + case value.TypeAsset: + return 
"string", "binary" + case value.TypeGroup: + return "array", "" + case value.TypeGeometryObject, value.TypeGeometryEditor: + return "object", "" + default: + return "string", "" + } +} + +func BuildGroupSchemaMap(sp *schema.Package) map[id.GroupID]*schema.Schema { + groupSchemaMap := make(map[id.GroupID]*schema.Schema) + for _, field := range sp.Schema().Fields() { + field.TypeProperty().Match(schema.TypePropertyMatch{ + Group: func(fg *schema.FieldGroup) { + groupSchema := sp.GroupSchema(fg.Group()) + if groupSchema != nil { + groupSchemaMap[fg.Group()] = groupSchema + } + }, + }) + } + return groupSchemaMap +} diff --git a/server/pkg/exporters/schema_json_test.go b/server/pkg/exporters/schema_json_test.go new file mode 100644 index 0000000000..7da3265811 --- /dev/null +++ b/server/pkg/exporters/schema_json_test.go @@ -0,0 +1,175 @@ +package exporters + +import ( + "testing" + + "github.com/reearth/reearth-cms/server/pkg/group" + "github.com/reearth/reearth-cms/server/pkg/id" + "github.com/reearth/reearth-cms/server/pkg/schema" + "github.com/reearth/reearth-cms/server/pkg/value" + "github.com/reearth/reearthx/account/accountdomain" + "github.com/samber/lo" + "github.com/stretchr/testify/assert" +) + +func TestBuildProperties(t *testing.T) { + wid := accountdomain.NewWorkspaceID() + pid := id.NewProjectID() + + // text field + fId1 := id.NewFieldID() + sfKey1 := id.RandomKey() + sf1 := schema.NewField(schema.NewText(lo.ToPtr(100)).TypeProperty()).ID(fId1).Key(sfKey1).MustBuild() + + // number field + fId2 := id.NewFieldID() + sfKey2 := id.RandomKey() + intField, err := schema.NewInteger(lo.ToPtr(int64(1)), lo.ToPtr(int64(100))) + assert.NoError(t, err) + sf2 := schema.NewField(intField.TypeProperty()).ID(fId2).Key(sfKey2).MustBuild() + + // asset field + gsfKey := id.NewKey("asset-key") + gsfId2 := id.NewFieldID() + gsf := schema.NewField(schema.NewAsset().TypeProperty()).ID(gsfId2).Key(gsfKey).Multiple(true).MustBuild() + + // group schema + gs := schema.New().ID(id.NewSchemaID()).Workspace(wid).Project(pid).Fields([]*schema.Field{gsf}).MustBuild() + + // group + gid := id.NewGroupID() + gkey := id.RandomKey() + g := group.New().ID(gid).Name("group").Project(pid).Key(gkey).Schema(gs.ID()).MustBuild() + + // group field + fId3 := id.NewFieldID() + sfKey3 := id.NewKey("group-key") + sf3 := schema.NewField(schema.NewGroup(g.ID()).TypeProperty()).ID(fId3).Key(sfKey3).Multiple(true).MustBuild() + + // bool field + fId4 := id.NewFieldID() + sfKey4 := id.RandomKey() + sf4 := schema.NewField(schema.NewBool().TypeProperty()).ID(fId4).Key(sfKey4).MustBuild() + + // date field + fId5 := id.NewFieldID() + sfKey5 := id.RandomKey() + sf5 := schema.NewField(schema.NewDateTime().TypeProperty()).ID(fId5).Key(sfKey5).MustBuild() + + // url field + fId6 := id.NewFieldID() + sfKey6 := id.RandomKey() + sf6 := schema.NewField(schema.NewURL().TypeProperty()).ID(fId6).Key(sfKey6).MustBuild() + + fieldList := schema.FieldList{sf1, sf2, sf3, sf4, sf5, sf6} + gsMap := map[id.GroupID]*schema.Schema{gid: gs} + + expectedProperties := map[string]SchemaJSONProperties{ + sfKey1.String(): { + Type: "string", + MaxLength: lo.ToPtr(100), + }, + sfKey2.String(): { + Type: "integer", + Minimum: lo.ToPtr(float64(1)), + Maximum: lo.ToPtr(float64(100)), + }, + sfKey3.String(): { + Type: "array", + Items: &SchemaJSON{ + Type: "object", + Properties: map[string]SchemaJSONProperties{ + "asset-key": { + Format: lo.ToPtr("binary"), + Type: "string", + }, + }}, + }, + sfKey4.String(): { + Type: "boolean", + }, + sfKey5.String(): { + 
Type: "string", + Format: lo.ToPtr("date-time"), + }, + sfKey6.String(): { + Type: "string", + Format: lo.ToPtr("uri"), + }, + } + + properties := BuildProperties(fieldList, gsMap) + assert.Equal(t, expectedProperties, properties) +} + +func TestDetermineTypeAndFormat(t *testing.T) { + tests := []struct { + input value.Type + wantType string + wantFmt string + }{ + {value.TypeText, "string", ""}, + {value.TypeTextArea, "string", ""}, + {value.TypeRichText, "string", ""}, + {value.TypeMarkdown, "string", ""}, + {value.TypeSelect, "string", ""}, + {value.TypeTag, "string", ""}, + {value.TypeReference, "string", ""}, + {value.TypeInteger, "integer", ""}, + {value.TypeNumber, "number", ""}, + {value.TypeBool, "boolean", ""}, + {value.TypeDateTime, "string", "date-time"}, + {value.TypeURL, "string", "uri"}, + {value.TypeAsset, "string", "binary"}, + {value.TypeGroup, "array", ""}, + {value.TypeGeometryObject, "object", ""}, + {value.TypeGeometryEditor, "object", ""}, + {"unknown", "string", ""}, + } + + for _, tt := range tests { + t.Run(string(tt.input), func(t *testing.T) { + gotType, gotFmt := determineTypeAndFormat(tt.input) + assert.Equal(t, tt.wantType, gotType) + assert.Equal(t, tt.wantFmt, gotFmt) + }) + } +} + +func TestBuildGroupSchemaMap(t *testing.T) { + wid := accountdomain.NewWorkspaceID() + pid := id.NewProjectID() + + textFieldID := id.NewFieldID() + textFieldKey := id.RandomKey() + textField := schema.NewField(schema.NewText(lo.ToPtr(100)).TypeProperty()).ID(textFieldID).Key(textFieldKey).MustBuild() + + assetField1 := schema.NewField(schema.NewAsset().TypeProperty()).ID(id.NewFieldID()).Key(id.NewKey("asset-key-1")).Multiple(true).MustBuild() + groupSchema1 := schema.New().ID(id.NewSchemaID()).Workspace(wid).Project(pid).Fields([]*schema.Field{assetField1}).MustBuild() + + groupID1 := id.NewGroupID() + groupKey1 := id.RandomKey() + group1 := group.New().ID(groupID1).Name("group-1").Project(pid).Key(groupKey1).Schema(groupSchema1.ID()).MustBuild() + + groupFieldID1 := id.NewFieldID() + groupFieldKey1 := id.NewKey("group-key-1") + groupField1 := schema.NewField(schema.NewGroup(group1.ID()).TypeProperty()).ID(groupFieldID1).Key(groupFieldKey1).Multiple(true).MustBuild() + + textField2 := schema.NewField(schema.NewText(nil).TypeProperty()).ID(id.NewFieldID()).Key(id.NewKey("text-key-2")).Multiple(false).MustBuild() + groupSchema2 := schema.New().ID(id.NewSchemaID()).Workspace(wid).Project(pid).Fields([]*schema.Field{textField2}).MustBuild() + + groupID2 := id.NewGroupID() + groupKey2 := id.RandomKey() + group2 := group.New().ID(groupID2).Name("group-2").Project(pid).Key(groupKey2).Schema(groupSchema2.ID()).MustBuild() + + groupFieldID2 := id.NewFieldID() + groupFieldKey2 := id.NewKey("group-key-2") + groupField2 := schema.NewField(schema.NewGroup(group2.ID()).TypeProperty()).ID(groupFieldID2).Key(groupFieldKey2).Multiple(false).MustBuild() + + mainSchema := schema.New().ID(id.NewSchemaID()).Workspace(wid).Project(pid).Fields([]*schema.Field{textField, groupField1, groupField2}).MustBuild() + schemaPackage := schema.NewPackage(mainSchema, nil, map[id.GroupID]*schema.Schema{groupID1: groupSchema1, groupID2: groupSchema2}, nil) + + expected := map[id.GroupID]*schema.Schema{groupID1: groupSchema1, groupID2: groupSchema2} + result := BuildGroupSchemaMap(schemaPackage) + assert.Equal(t, expected, result) +} diff --git a/server/pkg/id/id.go b/server/pkg/id/id.go index 2269e4c3ea..c278a6a9ec 100644 --- a/server/pkg/id/id.go +++ b/server/pkg/id/id.go @@ -243,7 +243,7 @@ var 
RequestIDFromRef = idx.FromRef[Request] type View struct{} -func (View) Type() string { return "request" } +func (View) Type() string { return "view" } type ViewID = idx.ID[View] type ViewIDList = idx.List[View] diff --git a/server/pkg/id/id_test.go b/server/pkg/id/id_test.go index ef2bebb78c..b25852d19f 100644 --- a/server/pkg/id/id_test.go +++ b/server/pkg/id/id_test.go @@ -55,4 +55,16 @@ func TestAsset_Type(t *testing.T) { r := Resource{} assert.Equal(t, "resource", r.Type()) + + v := View{} + assert.Equal(t, "view", v.Type()) + + tag := Tag{} + assert.Equal(t, "tag", tag.Type()) + + g := Group{} + assert.Equal(t, "group", g.Type()) + + ig := ItemGroup{} + assert.Equal(t, "item_group", ig.Type()) } diff --git a/server/pkg/id/key_test.go b/server/pkg/id/key_test.go index fcddc667fc..6c70356da8 100644 --- a/server/pkg/id/key_test.go +++ b/server/pkg/id/key_test.go @@ -55,3 +55,14 @@ func TestKey_Clone(t *testing.T) { assert.Equal(t, k, c) assert.NotSame(t, k, c) } + +func TestKey_NewKeyFromPtr(t *testing.T) { + + str := "test-key" + wantKey := Key{ + key: lo.FromPtr(&str), + } + result := NewKeyFromPtr(&str) + assert.NotNil(t, result, "Result should not be nil") + assert.Equal(t, &wantKey, result, "Key value should match the input string") +} diff --git a/server/pkg/integrationapi/asset_test.go b/server/pkg/integrationapi/asset_test.go index 1198903635..a52cce19cf 100644 --- a/server/pkg/integrationapi/asset_test.go +++ b/server/pkg/integrationapi/asset_test.go @@ -2,8 +2,12 @@ package integrationapi import ( "testing" + "time" "github.com/reearth/reearth-cms/server/pkg/asset" + "github.com/reearth/reearth-cms/server/pkg/id" + "github.com/reearth/reearth-cms/server/pkg/project" + "github.com/reearth/reearthx/account/accountdomain/user" "github.com/samber/lo" "github.com/stretchr/testify/assert" ) @@ -46,4 +50,182 @@ func TestToAssetFile(t *testing.T) { ContentType: lo.ToPtr(""), } assert.Equal(t, e, a) + + assert.Nil(t, ToAssetFile(nil, true)) +} + +func Test_NewAsset(t *testing.T) { + timeNow := time.Now() + name := "aaa" + path := "a/aaa" + uid := user.NewID() + pid := project.NewID() + a := asset.New().NewID().Project(pid).Size(100).NewUUID(). 
+ CreatedByUser(uid).Thread(id.NewThreadID()).CreatedAt(timeNow).MustBuild() + + f1 := asset.NewFile().Name(name).Path(path).ContentType("s").Size(10).Build() + + tests := []struct { + name string + a *asset.Asset + f *asset.File + url string + all bool + want *Asset + }{ + { + name: "success", + a: a, + f: f1, + url: "www.", + all: true, + want: &Asset{ + Name: lo.ToPtr("aaa"), + Id: a.ID(), + Url: "www.", + CreatedAt: timeNow, + File: &File{ + Name: lo.ToPtr("aaa"), + Path: lo.ToPtr("/a/aaa"), + ContentType: lo.ToPtr("s"), + Size: lo.ToPtr(float32(10)), + }, + ContentType: lo.ToPtr("s"), + TotalSize: lo.ToPtr(float32(100)), + PreviewType: lo.ToPtr(Unknown), + ProjectId: pid, + }, + }, + { + name: "asset and file input is nil", + a: nil, + f: nil, + url: "www.", + all: false, + want: nil, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + result := NewAsset(tt.a, tt.f, tt.url, tt.all) + + assert.Equal(t, result, tt.want) + }) + } +} + +func TestToAssetArchiveExtractionStatus(t *testing.T) { + tests := []struct { + name string + input *asset.ArchiveExtractionStatus + expected *AssetArchiveExtractionStatus + }{ + { + name: "Nil input", + input: nil, + expected: nil, + }, + { + name: "Status done", + input: lo.ToPtr(asset.ArchiveExtractionStatusDone), + expected: lo.ToPtr(Done), + }, + { + name: "Status failed", + input: lo.ToPtr(asset.ArchiveExtractionStatusFailed), + expected: lo.ToPtr(Failed), + }, + { + name: "Status in progress", + input: lo.ToPtr(asset.ArchiveExtractionStatusInProgress), + expected: lo.ToPtr(InProgress), + }, + { + name: "Status pending", + input: lo.ToPtr(asset.ArchiveExtractionStatusPending), + expected: lo.ToPtr(Pending), + }, + { + name: "Unknown status", + input: lo.ToPtr(asset.ArchiveExtractionStatus("unknown")), + expected: nil, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + result := ToAssetArchiveExtractionStatus(tt.input) + assert.Equal(t, result, tt.expected) + }) + } +} + +func TestToPreviewType(t *testing.T) { + tests := []struct { + name string + input *asset.PreviewType + expected *AssetPreviewType + }{ + { + name: "Nil input", + input: nil, + expected: lo.ToPtr(Unknown), + }, + { + name: "PreviewTypeGeo", + input: lo.ToPtr(asset.PreviewTypeGeo), + expected: lo.ToPtr(Geo), + }, + { + name: "PreviewTypeGeo3dTiles", + input: lo.ToPtr(asset.PreviewTypeGeo3dTiles), + expected: lo.ToPtr(Geo3dTiles), + }, + { + name: "PreviewTypeGeoMvt", + input: lo.ToPtr(asset.PreviewTypeGeoMvt), + expected: lo.ToPtr(GeoMvt), + }, + { + name: "PreviewTypeModel3d", + input: lo.ToPtr(asset.PreviewTypeModel3d), + expected: lo.ToPtr(Model3d), + }, + { + name: "PreviewTypeImage", + input: lo.ToPtr(asset.PreviewTypeImage), + expected: lo.ToPtr(Image), + }, + { + name: "PreviewTypeImageSvg", + input: lo.ToPtr(asset.PreviewTypeImageSvg), + expected: lo.ToPtr(ImageSvg), + }, + { + name: "PreviewTypeCSV", + input: lo.ToPtr(asset.PreviewTypeCSV), + expected: lo.ToPtr(Csv), + }, + { + name: "PreviewTypeUnknown", + input: lo.ToPtr(asset.PreviewTypeUnknown), + expected: lo.ToPtr(Unknown), + }, + { + name: "Unrecognized PreviewType", + input: lo.ToPtr(asset.PreviewType("unrecognized")), + expected: lo.ToPtr(Unknown), + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + result := ToPreviewType(tt.input) + assert.Equal(t, tt.expected, result) + }) + } } diff --git a/server/pkg/integrationapi/comment_test.go b/server/pkg/integrationapi/comment_test.go new file 
mode 100644 index 0000000000..d299f5fe67 --- /dev/null +++ b/server/pkg/integrationapi/comment_test.go @@ -0,0 +1,62 @@ +package integrationapi + +import ( + "testing" + + "github.com/reearth/reearth-cms/server/pkg/operator" + "github.com/reearth/reearth-cms/server/pkg/thread" + "github.com/samber/lo" + "github.com/stretchr/testify/assert" +) + +func TestNewComment(t *testing.T) { + uid := thread.NewUserID() + iid := operator.NewIntegrationID() + authorUser := operator.OperatorFromUser(uid) + authorIntegration := operator.OperatorFromIntegration(iid) + c := thread.NewComment(thread.NewCommentID(), authorUser, "test") + cIntegration := thread.NewComment(thread.NewCommentID(), authorIntegration, "test") + authorID := c.Author().User().Ref() + authorIntegrationID := cIntegration.Author().Integration().Ref() + tests := []struct { + name string + input *thread.Comment + expected *Comment + }{ + { + name: "Nil input", + input: nil, + expected: nil, + }, + { + name: "User author", + input: c, + expected: &Comment{ + Content: lo.ToPtr("test"), + CreatedAt: lo.ToPtr(c.CreatedAt()), + Id: c.ID().Ref(), + AuthorType: lo.ToPtr(User), + AuthorId: lo.ToPtr(any(authorID)), + }, + }, + { + name: "Integration author", + input: cIntegration, + expected: &Comment{ + Content: lo.ToPtr("test"), + CreatedAt: lo.ToPtr(cIntegration.CreatedAt()), + Id: cIntegration.ID().Ref(), + AuthorType: lo.ToPtr(Integrtaion), + AuthorId: lo.ToPtr(any(authorIntegrationID)), + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + result := NewComment(tt.input) + assert.Equal(t, tt.expected, result) + }) + } +} diff --git a/server/pkg/integrationapi/condition_test.go b/server/pkg/integrationapi/condition_test.go new file mode 100644 index 0000000000..bee07e9d14 --- /dev/null +++ b/server/pkg/integrationapi/condition_test.go @@ -0,0 +1,720 @@ +package integrationapi + +import ( + "testing" + "time" + + "github.com/reearth/reearth-cms/server/pkg/id" + "github.com/reearth/reearth-cms/server/pkg/item/view" + "github.com/samber/lo" + "github.com/stretchr/testify/assert" +) + +func TestFieldSelectorTypeInto(t *testing.T) { + tests := []struct { + name string + input FieldSelectorType + expected view.FieldType + }{ + { + name: "FieldSelectorTypeId", + input: FieldSelectorTypeId, + expected: view.FieldTypeId, + }, + { + name: "FieldSelectorTypeCreationDate", + input: FieldSelectorTypeCreationDate, + expected: view.FieldTypeCreationDate, + }, + { + name: "FieldSelectorTypeModificationDate", + input: FieldSelectorTypeModificationDate, + expected: view.FieldTypeModificationDate, + }, + { + name: "FieldSelectorTypeStatus", + input: FieldSelectorTypeStatus, + expected: view.FieldTypeStatus, + }, + { + name: "FieldSelectorTypeCreationUser", + input: FieldSelectorTypeCreationUser, + expected: view.FieldTypeCreationUser, + }, + { + name: "FieldSelectorTypeModificationUser", + input: FieldSelectorTypeModificationUser, + expected: view.FieldTypeModificationUser, + }, + { + name: "FieldSelectorTypeField", + input: FieldSelectorTypeField, + expected: view.FieldTypeField, + }, + { + name: "FieldSelectorTypeMetaField", + input: FieldSelectorTypeMetaField, + expected: view.FieldTypeMetaField, + }, + { + name: "Default case", + input: FieldSelectorType("999"), // An unrecognized type + expected: view.FieldTypeId, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + result := tt.input.Into() + assert.Equal(t, tt.expected, result) + }) + } +} + +func 
TestStringOperatorInto(t *testing.T) { + tests := []struct { + name string + input ConditionStringOperator + expected view.StringOperator + }{ + { + name: "contains", + input: Contains, + expected: view.StringOperatorContains, + }, + { + name: "NotContains", + input: NotContains, + expected: view.StringOperatorNotContains, + }, + { + name: "StartsWith", + input: StartsWith, + expected: view.StringOperatorStartsWith, + }, + { + name: "NotStartsWith", + input: NotStartsWith, + expected: view.StringOperatorNotStartsWith, + }, + { + name: "EndsWith", + input: EndsWith, + expected: view.StringOperatorEndsWith, + }, + { + name: "NotEndsWith", + input: NotEndsWith, + expected: view.StringOperatorNotEndsWith, + }, + { + name: "Default case", + input: ConditionStringOperator("999"), // An unrecognized type + expected: "", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + result := tt.input.Into() + assert.Equal(t, tt.expected, result) + }) + } +} + +func TestNumberOperatorInto(t *testing.T) { + tests := []struct { + name string + input ConditionNumberOperator + expected view.NumberOperator + }{ + { + name: "GreaterThan", + input: GreaterThan, + expected: view.NumberOperatorGreaterThan, + }, + { + name: "GreaterThanOrEqualTo", + input: GreaterThanOrEqualTo, + expected: view.NumberOperatorGreaterThanOrEqualTo, + }, + { + name: "LessThan", + input: LessThan, + expected: view.NumberOperatorLessThan, + }, + { + name: "LessThanOrEqualTo", + input: LessThanOrEqualTo, + expected: view.NumberOperatorLessThanOrEqualTo, + }, + { + name: "Default case", + input: ConditionNumberOperator("999"), // An unrecognized type + expected: "", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + result := tt.input.Into() + assert.Equal(t, tt.expected, result) + }) + } +} + +func TestBasicOperatorInto(t *testing.T) { + tests := []struct { + name string + input ConditionBasicOperator + expected view.BasicOperator + }{ + { + name: "Equals", + input: ConditionBasicOperatorEquals, + expected: view.BasicOperatorEquals, + }, + { + name: "NotEquals", + input: ConditionBasicOperatorNotEquals, + expected: view.BasicOperatorNotEquals, + }, + { + name: "Default case", + input: ConditionBasicOperator("999"), // An unrecognized type + expected: "", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + result := tt.input.Into() + assert.Equal(t, tt.expected, result) + }) + } +} + +func TestTimeOperatorInto(t *testing.T) { + tests := []struct { + name string + input ConditionTimeOperator + expected view.TimeOperator + }{ + { + name: "After", + input: After, + expected: view.TimeOperatorAfter, + }, + { + name: "AfterOrOn", + input: AfterOrOn, + expected: view.TimeOperatorAfterOrOn, + }, + { + name: "Before", + input: Before, + expected: view.TimeOperatorBefore, + }, + { + name: "BeforeOrOn", + input: BeforeOrOn, + expected: view.TimeOperatorBeforeOrOn, + }, + { + name: "OfThisWeek", + input: OfThisWeek, + expected: view.TimeOperatorOfThisWeek, + }, + { + name: "OfThisMonth", + input: OfThisMonth, + expected: view.TimeOperatorOfThisMonth, + }, + { + name: "OfThisYear", + input: OfThisYear, + expected: view.TimeOperatorOfThisYear, + }, + { + name: "Default case", + input: ConditionTimeOperator("999"), // An unrecognized type + expected: "", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + result := tt.input.Into() + assert.Equal(t, tt.expected, result) + }) + } +} + 
+func TestBoolOperatorInto(t *testing.T) { + tests := []struct { + name string + input ConditionBoolOperator + expected view.BoolOperator + }{ + { + name: "equals", + input: ConditionBoolOperatorEquals, + expected: view.BoolOperatorEquals, + }, + { + name: "not equals", + input: ConditionBoolOperatorNotEquals, + expected: view.BoolOperatorNotEquals, + }, + { + name: "Default case", + input: ConditionBoolOperator("999"), // An unrecognized type + expected: "", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + result := tt.input.Into() + assert.Equal(t, tt.expected, result) + }) + } +} + +func TestConditionNullableOperator_Into(t *testing.T) { + tests := []struct { + name string + input ConditionNullableOperator + expected view.NullableOperator + }{ + {"success nullable operator", Empty, view.NullableOperatorEmpty}, + {"success nullable operator not empty", NotEmpty, view.NullableOperatorNotEmpty}, + {"success default case", ConditionNullableOperator("99"), ""}, // Test for default case + } + + for _, test := range tests { + t.Run(string(test.name), func(t *testing.T) { + t.Parallel() + result := test.input.Into() + assert.Equal(t, test.expected, result) + }) + } +} + +func TestConditionMultipleOperator_Into(t *testing.T) { + tests := []struct { + name string + input ConditionMultipleOperator + expected view.MultipleOperator + }{ + {"success IncludeAny", IncludesAny, view.MultipleOperatorIncludesAny}, + {"success NotIncludesAny", NotIncludesAny, view.MultipleOperatorNotIncludesAny}, + {"success IncludesAll", IncludesAll, view.MultipleOperatorIncludesAll}, + {"success NotIncludesAll", NotIncludesAll, view.MultipleOperatorNotIncludesAll}, + {"success default case", ConditionMultipleOperator("99"), ""}, // Test for default case + } + + for _, test := range tests { + t.Run(string(test.name), func(t *testing.T) { + t.Parallel() + result := test.input.Into() + assert.Equal(t, test.expected, result) + }) + } +} + +func TestFieldSelector_Into(t *testing.T) { + fieldType := FieldSelector{ + FieldId: id.NewFieldID().Ref(), + Type: lo.ToPtr(FieldSelectorTypeId), + } + tests := []struct { + name string + input FieldSelector + expected view.FieldSelector + }{ + { + name: "success", + input: FieldSelector{ + FieldId: fieldType.FieldId, + Type: fieldType.Type, + }, + expected: view.FieldSelector{ + Type: view.FieldTypeId, + ID: fieldType.FieldId, + }, + }, + } + + for _, test := range tests { + t.Run(string(test.name), func(t *testing.T) { + t.Parallel() + result := test.input.Into() + assert.Equal(t, test.expected, result) + }) + } +} + +func TestConditionInto(t *testing.T) { + fieldID := id.NewFieldID().Ref() + var pIntf *interface{} + var emptyInterface interface{} + pIntf = &emptyInterface + *pIntf = "test" + timeNow := time.Now() + + tests := []struct { + name string + condition *Condition + want *view.Condition + }{ + { + name: "success bool", + condition: &Condition{ + Bool: &struct { + FieldId FieldSelector "json:\"fieldId\"" + Operator ConditionBoolOperator "json:\"operator\"" + Value bool "json:\"value\"" + }{ + FieldId: FieldSelector{ + FieldId: fieldID, + Type: lo.ToPtr(FieldSelectorTypeId), + }, + Operator: ConditionBoolOperatorEquals, + Value: true, + }, + }, + want: &view.Condition{ + ConditionType: view.ConditionTypeBool, + BoolCondition: &view.BoolCondition{ + Field: view.FieldSelector{ + Type: view.FieldTypeId, + ID: fieldID, + }, + Op: view.BoolOperatorEquals, + Value: true, + }, + }, + }, + { + name: "success type string", + condition: 
&Condition{ + String: &struct { + FieldId FieldSelector "json:\"fieldId\"" + Operator ConditionStringOperator "json:\"operator\"" + Value string "json:\"value\"" + }{ + FieldId: FieldSelector{ + FieldId: fieldID, + Type: lo.ToPtr(FieldSelectorTypeId), + }, + Operator: Contains, + Value: "CONTAINS", + }, + }, + want: &view.Condition{ + ConditionType: view.ConditionTypeString, + StringCondition: &view.StringCondition{ + Field: view.FieldSelector{ + Type: view.FieldTypeId, + ID: fieldID, + }, + Op: view.StringOperatorContains, + Value: "CONTAINS", + }, + }, + }, + { + name: "success number", + condition: &Condition{ + Number: &struct { + FieldId FieldSelector "json:\"fieldId\"" + Operator ConditionNumberOperator "json:\"operator\"" + Value float32 "json:\"value\"" + }{ + FieldId: FieldSelector{ + FieldId: fieldID, + Type: lo.ToPtr(FieldSelectorTypeId), + }, + Operator: GreaterThanOrEqualTo, + Value: float32(2), + }, + }, + want: &view.Condition{ + ConditionType: view.ConditionTypeNumber, + NumberCondition: &view.NumberCondition{ + Field: view.FieldSelector{ + Type: view.FieldTypeId, + ID: fieldID, + }, + Op: view.NumberOperatorGreaterThanOrEqualTo, + Value: float64(2), + }, + }, + }, + { + name: "success basic", + condition: &Condition{ + Basic: &struct { + FieldId *FieldSelector "json:\"fieldId,omitempty\"" + Operator *ConditionBasicOperator "json:\"operator,omitempty\"" + Value *interface{} "json:\"value,omitempty\"" + }{ + FieldId: &FieldSelector{ + FieldId: fieldID, + Type: lo.ToPtr(FieldSelectorTypeId), + }, + Operator: lo.ToPtr(ConditionBasicOperatorEquals), + Value: pIntf, + }, + }, + want: &view.Condition{ + ConditionType: view.ConditionTypeBasic, + BasicCondition: &view.BasicCondition{ + Field: view.FieldSelector{ + Type: view.FieldTypeId, + ID: fieldID, + }, + Op: view.BasicOperatorEquals, + Value: "test", + }, + }, + }, + { + name: "success time", + condition: &Condition{ + Time: &struct { + FieldId FieldSelector "json:\"fieldId\"" + Operator ConditionTimeOperator "json:\"operator\"" + Value time.Time "json:\"value\"" + }{ + FieldId: FieldSelector{ + FieldId: fieldID, + Type: lo.ToPtr(FieldSelectorTypeId), + }, + Operator: After, + Value: timeNow, + }, + }, + want: &view.Condition{ + ConditionType: view.ConditionTypeTime, + TimeCondition: &view.TimeCondition{ + Field: view.FieldSelector{ + Type: view.FieldTypeId, + ID: fieldID, + }, + Op: view.TimeOperatorAfter, + Value: timeNow, + }, + }, + }, + { + name: "success nullable", + condition: &Condition{ + Nullable: &struct { + FieldId *FieldSelector "json:\"fieldId,omitempty\"" + Operator *ConditionNullableOperator "json:\"operator,omitempty\"" + }{ + FieldId: &FieldSelector{ + FieldId: fieldID, + Type: lo.ToPtr(FieldSelectorTypeId), + }, + Operator: lo.ToPtr(Empty), + }, + }, + want: &view.Condition{ + ConditionType: view.ConditionTypeNullable, + NullableCondition: &view.NullableCondition{ + Field: view.FieldSelector{ + Type: view.FieldTypeId, + ID: fieldID, + }, + Op: view.NullableOperatorEmpty, + }, + }, + }, + { + name: "success multiple", + condition: &Condition{ + Multiple: &struct { + FieldId FieldSelector "json:\"fieldId\"" + Operator ConditionMultipleOperator "json:\"operator\"" + Value []interface{} "json:\"value\"" + }{ + FieldId: FieldSelector{ + FieldId: fieldID, + Type: lo.ToPtr(FieldSelectorTypeId), + }, + Operator: IncludesAll, + Value: []any{ + pIntf, + }, + }, + }, + want: &view.Condition{ + ConditionType: view.ConditionTypeMultiple, + MultipleCondition: &view.MultipleCondition{ + Field: view.FieldSelector{ + ID: 
fieldID, + Type: view.FieldTypeId, + }, + Op: view.MultipleOperatorIncludesAll, + Value: []any{ + pIntf, + }, + }, + }, + }, + { + name: "success and", + condition: &Condition{ + And: &[]Condition{ + { + Bool: &struct { + FieldId FieldSelector "json:\"fieldId\"" + Operator ConditionBoolOperator "json:\"operator\"" + Value bool "json:\"value\"" + }{ + FieldId: FieldSelector{ + FieldId: fieldID, + Type: lo.ToPtr(FieldSelectorTypeId), + }, + Operator: ConditionBoolOperatorEquals, + Value: true, + }, + }, + { + Bool: &struct { + FieldId FieldSelector "json:\"fieldId\"" + Operator ConditionBoolOperator "json:\"operator\"" + Value bool "json:\"value\"" + }{ + FieldId: FieldSelector{ + FieldId: fieldID, + Type: lo.ToPtr(FieldSelectorTypeId), + }, + Operator: ConditionBoolOperatorEquals, + Value: true, + }, + }, + }, + }, + want: &view.Condition{ + ConditionType: view.ConditionTypeAnd, + AndCondition: &view.AndCondition{ + Conditions: []view.Condition{ + { + ConditionType: view.ConditionTypeBool, + BoolCondition: &view.BoolCondition{ + Field: view.FieldSelector{ + Type: view.FieldTypeId, + ID: fieldID, + }, + Op: view.BoolOperatorEquals, + Value: true, + }, + }, + { + ConditionType: view.ConditionTypeBool, + BoolCondition: &view.BoolCondition{ + Field: view.FieldSelector{ + Type: view.FieldTypeId, + ID: fieldID, + }, + Op: view.BoolOperatorEquals, + Value: true, + }, + }, + }, + }, + }, + }, + { + name: "success or", + condition: &Condition{ + Or: &[]Condition{ + { + Bool: &struct { + FieldId FieldSelector "json:\"fieldId\"" + Operator ConditionBoolOperator "json:\"operator\"" + Value bool "json:\"value\"" + }{ + FieldId: FieldSelector{ + FieldId: fieldID, + Type: lo.ToPtr(FieldSelectorTypeId), + }, + Operator: ConditionBoolOperatorEquals, + Value: true, + }, + }, + { + Bool: &struct { + FieldId FieldSelector "json:\"fieldId\"" + Operator ConditionBoolOperator "json:\"operator\"" + Value bool "json:\"value\"" + }{ + FieldId: FieldSelector{ + FieldId: fieldID, + Type: lo.ToPtr(FieldSelectorTypeId), + }, + Operator: ConditionBoolOperatorEquals, + Value: true, + }, + }, + }, + }, + want: &view.Condition{ + ConditionType: view.ConditionTypeOr, + OrCondition: &view.OrCondition{ + Conditions: []view.Condition{ + { + ConditionType: view.ConditionTypeBool, + BoolCondition: &view.BoolCondition{ + Field: view.FieldSelector{ + Type: view.FieldTypeId, + ID: fieldID, + }, + Op: view.BoolOperatorEquals, + Value: true, + }, + }, + { + ConditionType: view.ConditionTypeBool, + BoolCondition: &view.BoolCondition{ + Field: view.FieldSelector{ + Type: view.FieldTypeId, + ID: fieldID, + }, + Op: view.BoolOperatorEquals, + Value: true, + }, + }, + }, + }, + }, + }, + { + name: "success nil", + condition: nil, + want: nil, + }, + { + name: "empty condition", + condition: &Condition{}, + want: nil, + }, + } + for _, tc := range tests { + tc := tc + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tc.want, tc.condition.Into()) + }) + } +} diff --git a/server/pkg/integrationapi/event_test.go b/server/pkg/integrationapi/event_test.go new file mode 100644 index 0000000000..eae75b0366 --- /dev/null +++ b/server/pkg/integrationapi/event_test.go @@ -0,0 +1,168 @@ +package integrationapi + +import ( + "testing" + "time" + + "github.com/reearth/reearth-cms/server/pkg/asset" + "github.com/reearth/reearth-cms/server/pkg/event" + "github.com/reearth/reearth-cms/server/pkg/id" + "github.com/reearth/reearth-cms/server/pkg/operator" + "github.com/reearth/reearth-cms/server/pkg/project" + 
"github.com/reearth/reearthx/account/accountdomain" + "github.com/reearth/reearthx/account/accountdomain/user" + "github.com/stretchr/testify/assert" +) + +func Test_NewOperator(t *testing.T) { + + uid := accountdomain.NewUserID() + integrationID := id.NewIntegrationID() + opUser := operator.OperatorFromUser(uid) + opIntegration := operator.OperatorFromIntegration(integrationID) + opMachine := operator.OperatorFromMachine() + tests := []struct { + name string + input operator.Operator + want Operator + }{ + { + name: "success user operator", + input: opUser, + want: Operator{ + User: &OperatorUser{ + ID: uid.String(), + }, + }, + }, + { + name: "success integration operator", + input: opIntegration, + want: Operator{ + Integration: &OperatorIntegration{ + ID: integrationID.String(), + }, + }, + }, + { + name: "success machine operator", + input: opMachine, + want: Operator{ + Machine: &OperatorMachine{}, + }, + }, + { + name: "success unknown operator", + input: operator.Operator{}, + want: Operator{}, + }, + } + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + t.Parallel() + result := NewOperator(test.input) + assert.Equal(t, result, test.want) + }) + + } +} + +func TestNewEventWith(t *testing.T) { + mockTime := time.Date(2024, 1, 1, 0, 0, 0, 0, time.UTC) + u := user.New().NewID().Email("hoge@example.com").Name("John").MustBuild() + a := asset.New().NewID().Project(project.NewID()).Size(100).NewUUID(). + CreatedByUser(u.ID()).Thread(id.NewThreadID()).MustBuild() + eID1 := event.NewID() + prj := event.Project{ + ID: "testID", + Alias: "testAlias", + } + + ev := event.New[any]().ID(eID1).Timestamp(mockTime).Type(event.AssetCreate).Operator(operator.OperatorFromUser(u.ID())).Object(a).Project(&prj).MustBuild() + ev1 := event.New[any]().ID(eID1).Timestamp(mockTime).Type(event.Type("test")).Operator(operator.OperatorFromUser(u.ID())).Object("test").Project(&prj).MustBuild() + d1, _ := New(ev, "test", func(a *asset.Asset) string { + return "test.com" + }) + d2, _ := New(ev.Object(), "test", func(a *asset.Asset) string { + return "test.com" + }) + type args struct { + event *event.Event[any] + override any + v string + urlResolver asset.URLResolver + } + tests := []struct { + name string + args args + want Event + wantErr error + }{ + { + name: "success", + args: args{ + event: ev, + override: ev, + v: "test", + urlResolver: func(a *asset.Asset) string { + return "test.com" + }, + }, + want: Event{ + ID: ev.ID().String(), + Type: string(ev.Type()), + Timestamp: ev.Timestamp(), + Data: d1, + Project: &ProjectIdAlias{ + ID: ev.Project().ID, + Alias: ev.Project().Alias, + }, + Operator: NewOperator(ev.Operator()), + }, + wantErr: nil, + }, + { + name: "success when override is nil", + args: args{ + event: ev, + override: nil, + v: "test", + urlResolver: func(a *asset.Asset) string { + return "test.com" + }, + }, + want: Event{ + ID: ev.ID().String(), + Type: string(ev.Type()), + Timestamp: ev.Timestamp(), + Data: d2, + Project: &ProjectIdAlias{ + ID: ev.Project().ID, + Alias: ev.Project().Alias, + }, + Operator: NewOperator(ev.Operator()), + }, + wantErr: nil, + }, + { + name: "error new returns error", + args: args{ + event: ev, + override: ev1, + v: "", + urlResolver: nil, + }, + want: Event{}, + wantErr: ErrUnsupportedEntity, + }, + } + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + t.Parallel() + + result, err := NewEventWith(test.args.event, test.args.override, test.args.v, test.args.urlResolver) + assert.Equal(t, result, test.want) + 
assert.Equal(t, err, test.wantErr) + }) + } +} diff --git a/server/pkg/integrationapi/item_export_test.go b/server/pkg/integrationapi/item_export_test.go new file mode 100644 index 0000000000..63f1811243 --- /dev/null +++ b/server/pkg/integrationapi/item_export_test.go @@ -0,0 +1,157 @@ +package integrationapi + +import ( + "encoding/json" + "testing" + + "github.com/reearth/reearth-cms/server/pkg/exporters" + "github.com/reearth/reearth-cms/server/pkg/id" + "github.com/reearth/reearth-cms/server/pkg/item" + "github.com/reearth/reearth-cms/server/pkg/schema" + "github.com/reearth/reearth-cms/server/pkg/value" + "github.com/reearth/reearth-cms/server/pkg/version" + "github.com/reearth/reearthx/account/accountdomain" + "github.com/reearth/reearthx/util" + "github.com/samber/lo" + "github.com/stretchr/testify/assert" +) + +func TestToGeometryType(t *testing.T) { + tests := []struct { + name string + input *exporters.GeometryType + expected *GeometryType + }{ + { + name: "success type point", + input: lo.ToPtr(exporters.GeometryTypePoint), + expected: lo.ToPtr(GeometryTypePoint), + }, + { + name: "success type multi point", + input: lo.ToPtr(exporters.GeometryTypeMultiPoint), + expected: lo.ToPtr(GeometryTypeMultiPoint), + }, + { + name: "success type line string", + input: lo.ToPtr(exporters.GeometryTypeLineString), + expected: lo.ToPtr(GeometryTypeLineString), + }, + { + name: "success type multi line string", + input: lo.ToPtr(exporters.GeometryTypeMultiLineString), + expected: lo.ToPtr(GeometryTypeMultiLineString), + }, + { + name: "success type polygon", + input: lo.ToPtr(exporters.GeometryTypePolygon), + expected: lo.ToPtr(GeometryTypePolygon), + }, + { + name: "success type multi polygon", + input: lo.ToPtr(exporters.GeometryTypeMultiPolygon), + expected: lo.ToPtr(GeometryTypeMultiPolygon), + }, + { + name: "success type geometry collection", + input: lo.ToPtr(exporters.GeometryTypeGeometryCollection), + expected: lo.ToPtr(GeometryTypeGeometryCollection), + }, + { + name: "success nil", + input: nil, + expected: nil, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + result := toGeometryType(tt.input) + assert.Equal(t, tt.expected, result) + }) + } +} + +func TestFeatureCollectionFromItems(t *testing.T) { + iid := id.NewItemID() + sid := id.NewSchemaID() + mid := id.NewModelID() + tid := id.NewThreadID() + pid := id.NewProjectID() + gst := schema.GeometryObjectSupportedTypeList{schema.GeometryObjectSupportedTypePoint, schema.GeometryObjectSupportedTypeLineString} + sf1 := schema.NewField(schema.NewGeometryObject(gst).TypeProperty()).NewID().Name("geo1").Key(id.RandomKey()).MustBuild() + + s1 := schema.New().ID(sid).Fields([]*schema.Field{sf1}).Workspace(accountdomain.NewWorkspaceID()).Project(pid).MustBuild() + str := "{\"coordinates\":[139.28179282584915,36.58570985749664],\"type\":\"Point\"}" + fi1 := item.NewField(sf1.ID(), value.TypeGeometryObject.Value(str).AsMultiple(), nil) + + i1 := item.New(). + ID(iid). + Schema(sid). + Project(pid). + Fields([]*item.Field{fi1}). + Model(mid). + Thread(tid). 
+ MustBuild() + v1 := version.New() + vi1 := version.MustBeValue(v1, nil, version.NewRefs(version.Latest), util.Now(), i1) + // with geometry fields + ver1 := item.VersionedList{vi1} + + tests := []struct { + name string + inputVer item.VersionedList + inputSchema *schema.Schema + expected *FeatureCollection + expectError bool + }{ + { + name: "Valid input", + inputVer: ver1, + inputSchema: s1, + expected: &FeatureCollection{ + Features: &[]Feature{ + { + Id: &iid, + Geometry: &Geometry{ + Coordinates: &Geometry_Coordinates{ + union: json.RawMessage([]byte("[139.28179282584915,36.58570985749664]")), + }, + Type: lo.ToPtr(GeometryTypePoint), + }, + Properties: &map[string]interface{}{}, + Type: lo.ToPtr(FeatureTypeFeature), + }, + }, + Type: lo.ToPtr(FeatureCollectionTypeFeatureCollection), + }, + expectError: false, + }, + { + name: "Invalid input - nil schema", + inputVer: item.VersionedList{}, + inputSchema: nil, + expectError: true, + }, + { + name: "Invalid input - empty VersionedList", + inputVer: item.VersionedList{}, + inputSchema: &schema.Schema{}, + expectError: true, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + result, err := FeatureCollectionFromItems(tt.inputVer, tt.inputSchema) + assert.Equal(t, tt.expected, result, "FeatureCollectionFromItems() expected %v but got %v", tt.expected, result) + if tt.expectError { + assert.Error(t, err, "Expected an error but got none") + } else { + assert.NoError(t, err, "Expected no error but got: %v", err) + } + }) + } +} diff --git a/server/pkg/integrationapi/project_test.go b/server/pkg/integrationapi/project_test.go new file mode 100644 index 0000000000..25244d48cc --- /dev/null +++ b/server/pkg/integrationapi/project_test.go @@ -0,0 +1,42 @@ +package integrationapi + +import ( + "testing" + "time" + + "github.com/reearth/reearth-cms/server/pkg/project" + "github.com/samber/lo" + "github.com/stretchr/testify/assert" +) + +func Test_NewProject(t *testing.T) { + timeNow := time.Now() + p1 := project.New().ID(project.NewID()).Workspace(project.NewWorkspaceID()). 
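// Editor's note (sketch, not the actual converter): the expected struct in the table below
// implies a one-to-one mapping from the domain getters into pointer fields, roughly:
//
//	func NewProject(p *project.Project) Project {
//		return Project{
//			Id:          p.ID().Ref(),
//			WorkspaceId: p.Workspace().Ref(),
//			Alias:       lo.ToPtr(p.Alias()),
//			Name:        lo.ToPtr(p.Name()),
//			Description: lo.ToPtr(p.Description()),
//			CreatedAt:   lo.ToPtr(p.CreatedAt()),
//			UpdatedAt:   lo.ToPtr(p.UpdatedAt()),
//		}
//	}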
+ Name("test").Description("testing").Alias("testalias").UpdatedAt(timeNow).MustBuild() + tests := []struct { + name string + p *project.Project + want Project + }{ + { + name: "success", + p: p1, + want: Project{ + Id: p1.ID().Ref(), + WorkspaceId: p1.Workspace().Ref(), + Alias: lo.ToPtr(p1.Alias()), + Name: lo.ToPtr(p1.Name()), + Description: lo.ToPtr(p1.Description()), + CreatedAt: lo.ToPtr(p1.CreatedAt()), + UpdatedAt: lo.ToPtr(p1.UpdatedAt()), + }, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + result := NewProject(tt.p) + assert.Equal(t, tt.want, result) + }) + } +} diff --git a/server/pkg/integrationapi/schema_test.go b/server/pkg/integrationapi/schema_test.go new file mode 100644 index 0000000000..b5adbb580e --- /dev/null +++ b/server/pkg/integrationapi/schema_test.go @@ -0,0 +1,144 @@ +package integrationapi + +import ( + "testing" + "time" + + "github.com/reearth/reearth-cms/server/pkg/id" + "github.com/reearth/reearth-cms/server/pkg/item" + "github.com/reearth/reearth-cms/server/pkg/model" + "github.com/reearth/reearth-cms/server/pkg/schema" + "github.com/reearth/reearth-cms/server/pkg/value" + "github.com/reearth/reearthx/account/accountdomain" + "github.com/reearth/reearthx/util" + "github.com/samber/lo" + "github.com/stretchr/testify/assert" +) + +func TestNewModel(t *testing.T) { + type args struct { + m *model.Model + sp *schema.Package + lastModified time.Time + } + mockTime := time.Date(2024, 1, 1, 0, 0, 0, 0, time.UTC) + pID := id.NewProjectID() + sf1 := schema.NewField(schema.NewText(nil).TypeProperty()).NewID().RandomKey().MustBuild() + sf2 := schema.NewField(lo.Must1(schema.NewInteger(nil, nil)).TypeProperty()).NewID().RandomKey().MustBuild() + s1 := schema.New().NewID().Project(pID).Workspace(accountdomain.NewWorkspaceID()).Fields([]*schema.Field{sf1, sf2}).MustBuild() + s2 := schema.New().NewID().Project(pID).Workspace(accountdomain.NewWorkspaceID()).Fields([]*schema.Field{sf1, sf2}).TitleField(sf1.ID().Ref()).MustBuild() + schemaPackage1 := schema.NewPackage(s1, nil, nil, nil) + schemaPackage2 := schema.NewPackage(s2, nil, nil, nil) + model1 := model.New().ID(id.NewModelID()).Metadata(s1.ID().Ref()).Project(pID).Schema(s1.ID()).Key(id.NewKey("mmm123")).UpdatedAt(mockTime).MustBuild() + model2 := model.New().ID(id.NewModelID()).Metadata(s2.ID().Ref()).Project(pID).Schema(s2.ID()).Key(id.NewKey("mmm123")).UpdatedAt(mockTime).MustBuild() + + tests := []struct { + name string + args args + want Model + }{ + { + name: "success", + args: args{ + m: model1, + sp: schemaPackage1, + lastModified: mockTime, + }, + want: Model{ + Id: model1.ID().Ref(), + Key: util.ToPtrIfNotEmpty(model1.Key().String()), + Name: util.ToPtrIfNotEmpty(model1.Name()), + Description: util.ToPtrIfNotEmpty(model1.Description()), + Public: util.ToPtrIfNotEmpty(model1.Public()), + ProjectId: model1.Project().Ref(), + SchemaId: model1.Schema().Ref(), + Schema: util.ToPtrIfNotEmpty(NewSchema(schemaPackage1.Schema())), + MetadataSchemaId: model1.Metadata().Ref(), + MetadataSchema: util.ToPtrIfNotEmpty(NewSchema(schemaPackage1.MetaSchema())), + CreatedAt: lo.ToPtr(model1.ID().Timestamp()), + UpdatedAt: lo.ToPtr(model1.UpdatedAt()), + LastModified: util.ToPtrIfNotEmpty(mockTime), + }, + }, + { + name: "success with item field in schema", + args: args{ + m: model2, + sp: schemaPackage2, + lastModified: mockTime, + }, + want: Model{ + Id: model2.ID().Ref(), + Key: util.ToPtrIfNotEmpty(model2.Key().String()), + Name: util.ToPtrIfNotEmpty(model2.Name()), + 
Description: util.ToPtrIfNotEmpty(model2.Description()), + Public: util.ToPtrIfNotEmpty(model2.Public()), + ProjectId: model2.Project().Ref(), + SchemaId: model2.Schema().Ref(), + Schema: util.ToPtrIfNotEmpty(NewSchema(schemaPackage2.Schema())), + MetadataSchemaId: model2.Metadata().Ref(), + MetadataSchema: util.ToPtrIfNotEmpty(NewSchema(schemaPackage2.MetaSchema())), + CreatedAt: lo.ToPtr(model2.ID().Timestamp()), + UpdatedAt: lo.ToPtr(model2.UpdatedAt()), + LastModified: util.ToPtrIfNotEmpty(mockTime), + }, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + result := NewModel(tt.args.m, tt.args.sp, tt.args.lastModified) + assert.Equal(t, tt.want, result) + }) + } +} + +func TestNewItemFieldChanges(t *testing.T) { + + fID := id.NewFieldID() + v0 := value.MultipleFrom(value.TypeBool, []*value.Value{ + value.New(value.TypeBool, false), + }) + v1 := value.MultipleFrom(value.TypeBool, []*value.Value{ + value.New(value.TypeBool, true), + }) + + type args struct { + change item.FieldChanges + } + + tests := []struct { + name string + args args + want []FieldChange + }{ + { + name: "success", + args: args{ + change: item.FieldChanges{ + item.FieldChange{ + ID: fID, + Type: item.FieldChangeTypeAdd, + CurrentValue: value.MultipleFrom(v1.Type(), []*value.Value{v1.First()}), + PreviousValue: value.MultipleFrom(v0.Type(), []*value.Value{v0.First()}), + }, + }, + }, + want: []FieldChange{ + { + ID: fID, + Type: item.FieldChangeTypeAdd, + CurrentValue: v1.Interface(), + PreviousValue: v0.Interface(), + }, + }, + }, + } + for _, test := range tests { + t.Run(string(test.name), func(t *testing.T) { + t.Parallel() + result := NewItemFieldChanges(test.args.change) + assert.Equal(t, test.want, result) + }) + } +} diff --git a/server/pkg/integrationapi/types.gen.go b/server/pkg/integrationapi/types.gen.go index 2056450597..ac4daf94f9 100644 --- a/server/pkg/integrationapi/types.gen.go +++ b/server/pkg/integrationapi/types.gen.go @@ -551,6 +551,28 @@ type SchemaField struct { Type *ValueType `json:"type,omitempty"` } +// SchemaJSON defines model for schemaJSON. +type SchemaJSON struct { + Id *string `json:"$id,omitempty"` + Schema *string `json:"$schema,omitempty"` + Description *string `json:"description,omitempty"` + Properties map[string]SchemaJSONProperties `json:"properties"` + Title *string `json:"title,omitempty"` + Type string `json:"type"` +} + +// SchemaJSONProperties defines model for schemaJSONProperties. +type SchemaJSONProperties struct { + Description *string `json:"description,omitempty"` + Format *string `json:"format,omitempty"` + Items *SchemaJSON `json:"items,omitempty"` + MaxLength *int `json:"maxLength,omitempty"` + Maximum *float64 `json:"maximum,omitempty"` + Minimum *float64 `json:"minimum,omitempty"` + Title *string `json:"title,omitempty"` + Type string `json:"type"` +} + // TagResponse defines model for tagResponse. 
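// Editor's note (illustrative only, not generated code): the SchemaJSON / SchemaJSONProperties
// types above back the new schema.json export endpoints added to integration.yml below. A
// minimal, hypothetical value — the id and field keys are made up — marshals with encoding/json
// (samber/lo for the pointer helpers) as a JSON Schema-style object; unset optional pointers
// are dropped via omitempty:
//
//	s := SchemaJSON{
//		Id:   lo.ToPtr("example-schema-id"),
//		Type: "object",
//		Properties: map[string]SchemaJSONProperties{
//			"title": {Type: "string", MaxLength: lo.ToPtr(100)},
//			"price": {Type: "number", Minimum: lo.ToPtr(0.0)},
//		},
//	}
//	b, _ := json.Marshal(s)
//	// {"$id":"example-schema-id","properties":{"price":{"minimum":0,"type":"number"},
//	//  "title":{"maxLength":100,"type":"string"}},"type":"object"}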
type TagResponse struct { Color *string `json:"color,omitempty"` diff --git a/server/pkg/item/builder_test.go b/server/pkg/item/builder_test.go index 69140737a0..9a63bc4108 100644 --- a/server/pkg/item/builder_test.go +++ b/server/pkg/item/builder_test.go @@ -219,3 +219,21 @@ func TestBuilder_IsMetadata(t *testing.T) { b := New().IsMetadata(true) assert.Equal(t, true, b.i.isMetadata) } + +func TestBuilder_UpdatedByUser(t *testing.T) { + uId := accountdomain.NewUserID() + uuid := New().UpdatedByUser(&uId) + assert.Equal(t, &uId, uuid.i.updatedByUser) +} + +func TestBuilder_UpdatedByIntegration(t *testing.T) { + iid := id.NewIntegrationID() + uuid := New().UpdatedByIntegration(&iid) + assert.Equal(t, &iid, uuid.i.updatedByIntegration) +} + +func TestBuilder_OriginalItem(t *testing.T) { + iId := id.NewItemID().Ref() + b := New().OriginalItem(iId) + assert.Equal(t, iId, b.i.originalItem) +} diff --git a/server/schemas/integration.yml b/server/schemas/integration.yml index 4b9d187750..5f62f94e7a 100644 --- a/server/schemas/integration.yml +++ b/server/schemas/integration.yml @@ -162,6 +162,63 @@ paths: description: Not found '500': description: Internal server error + '/schemata/{schemaId}/schema.json': + parameters: + - $ref: '#/components/parameters/schemaIdParam' + get: + operationId: SchemaByIDAsJSON + security: + - bearerAuth: [] + summary: Returns a schema as json by schema ID + tags: + - Schema + - JSON + description: Returns a schema as json by schema ID + responses: + '200': + description: A JSON object + content: + application/json: + schema: + $ref: '#/components/schemas/schemaJSON' + format: binary + '400': + description: Invalid request parameter value + '401': + $ref: '#/components/responses/UnauthorizedError' + '404': + description: Not found + '500': + description: Internal server error + '/projects/{projectIdOrAlias}/schemata/{schemaId}/schema.json': + parameters: + - $ref: '#/components/parameters/projectIdOrAliasParam' + - $ref: '#/components/parameters/schemaIdParam' + get: + operationId: SchemaByIDWithProjectAsJSON + security: + - bearerAuth: [] + summary: Returns a schema as json by project and schema ID + tags: + - Schema + - JSON + description: Returns a schema as json by project and schema ID + responses: + '200': + description: A JSON object + content: + application/json: + schema: + $ref: '#/components/schemas/schemaJSON' + format: binary + '400': + description: Invalid request parameter value + '401': + $ref: '#/components/responses/UnauthorizedError' + '404': + description: Not found + '500': + description: Internal server error '/models/{modelId}': parameters: - $ref: '#/components/parameters/modelIdParam' @@ -242,6 +299,62 @@ paths: description: Invalid request parameter value '401': $ref: '#/components/responses/UnauthorizedError' + /models/{modelId}/schema.json: + parameters: + - $ref: '#/components/parameters/modelIdParam' + get: + operationId: SchemaByModelAsJSON + security: + - bearerAuth: [] + summary: Returns a schema as json by model ID + tags: + - Schema + - JSON + description: Returns a schema as json by model ID + responses: + '200': + description: A JSON object + content: + application/json: + schema: + $ref: '#/components/schemas/schemaJSON' + format: binary + '400': + description: Invalid request parameter value + '401': + $ref: '#/components/responses/UnauthorizedError' + '404': + description: Not found + '500': + description: Internal server error + /models/{modelId}/metadata_schema.json: + parameters: + - $ref: 
'#/components/parameters/modelIdParam' + get: + operationId: MetadataSchemaByModelAsJSON + security: + - bearerAuth: [] + summary: Returns a metadata schema as json by model ID + tags: + - MetadataSchema + - JSON + description: Returns a metadata schema as json by model ID + responses: + '200': + description: A JSON object + content: + application/json: + schema: + $ref: '#/components/schemas/schemaJSON' + format: binary + '400': + description: Invalid request parameter value + '401': + $ref: '#/components/responses/UnauthorizedError' + '404': + description: Not found + '500': + description: Internal server error '/models/{modelId}/import': parameters: - $ref: '#/components/parameters/modelIdParam' @@ -925,6 +1038,64 @@ paths: description: Not found '500': description: Internal server error + '/projects/{projectIdOrAlias}/models/{modelIdOrKey}/schema.json': + parameters: + - $ref: '#/components/parameters/projectIdOrAliasParam' + - $ref: '#/components/parameters/modelIdOrKeyParam' + get: + operationId: SchemaByModelWithProjectAsJSON + security: + - bearerAuth: [] + summary: Returns a schema as json by project and model ID + tags: + - Schema + - JSON + description: Returns a schema as json by project and model ID + responses: + '200': + description: A JSON object + content: + application/json: + schema: + $ref: '#/components/schemas/schemaJSON' + format: binary + '400': + description: Invalid request parameter value + '401': + $ref: '#/components/responses/UnauthorizedError' + '404': + description: Not found + '500': + description: Internal server error + '/projects/{projectIdOrAlias}/models/{modelIdOrKey}/metadata_schema.json': + parameters: + - $ref: '#/components/parameters/projectIdOrAliasParam' + - $ref: '#/components/parameters/modelIdOrKeyParam' + get: + operationId: MetadataSchemaByModelWithProjectAsJSON + security: + - bearerAuth: [] + summary: Returns a metadata schema as json by project and model ID + tags: + - MetadataSchema + - JSON + description: Returns a metadata schema as json by project and model ID + responses: + '200': + description: A JSON object + content: + application/json: + schema: + $ref: '#/components/schemas/schemaJSON' + format: binary + '400': + description: Invalid request parameter value + '401': + $ref: '#/components/responses/UnauthorizedError' + '404': + description: Not found + '500': + description: Internal server error '/items/{itemId}': parameters: - $ref: '#/components/parameters/itemIdParam' @@ -1658,6 +1829,46 @@ components: createdAt: type: string format: date-time + schemaJSON: + type: object + required: ["type", "properties"] + properties: + $schema: + type: string + $id: + type: string + title: + type: string + description: + type: string + type: + type: string + properties: + type: object + additionalProperties: + $ref: '#/components/schemas/schemaJSONProperties' + schemaJSONProperties: + type: object + required: ["type"] + properties: + type: + type: string + title: + type: string + description: + type: string + format: + type: string + minimum: + type: number + format: double + maximum: + type: number + format: double + maxLength: + type: integer + items: + $ref: '#/components/schemas/schemaJSON' valueType: type: string enum: