Merge pull request #26 from d-ylee/fileDatasetsValidFix
Modified IS_FILE_VALID in files API, refactor tests to handle PUT requests
vkuznet authored Apr 13, 2022
2 parents d94d584 + 588612f commit ec7c015
Showing 9 changed files with 193 additions and 44 deletions.
2 changes: 1 addition & 1 deletion dbs/datasets.go
@@ -29,7 +29,7 @@ func (a *API) Datasets() error {
tmpl["ParentDataset"] = false
tmpl["Detail"] = false

// run_num shouhld come first since it may produce TokenGenerator
// run_num should come first since it may produce TokenGenerator
// whose bind parameters should appear first
runs, err := ParseRuns(getValues(a.Params, "run_num"))
if err != nil {
10 changes: 4 additions & 6 deletions dbs/files.go
@@ -437,11 +437,6 @@ func (a *API) InsertFiles() error {
records = pyrec.Records
}

// check if is_file_valid was present in request, if not set it to 1
isFileValid := 0
if !strings.Contains(string(data), "is_file_valid") {
isFileValid = 1
}
for _, rec := range records {
if rec.CREATE_BY == "" {
rec.CREATE_BY = a.CreateBy
@@ -452,7 +447,10 @@
if utils.VERBOSE > 1 {
log.Printf("insert %+v", rec)
}
rec.IS_FILE_VALID = int64(isFileValid)
// check if is_file_valid was present in request, if not set it to 1
if !strings.Contains(string(data), "is_file_valid") {
rec.IS_FILE_VALID = 1
}

// set dependent's records
frec := Files{
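Net effect of the two `dbs/files.go` hunks above: the `is_file_valid` default moves from a single API-level flag into the per-record insertion loop. A condensed sketch of the resulting loop, based only on the lines shown here (other fields, logging, and error handling elided):

```go
// Condensed sketch of the loop in (*API).InsertFiles after this change.
for _, rec := range records {
	if rec.CREATE_BY == "" {
		rec.CREATE_BY = a.CreateBy
	}
	// if is_file_valid was absent from the request payload, default it to 1
	if !strings.Contains(string(data), "is_file_valid") {
		rec.IS_FILE_VALID = 1
	}
	// ... build the dependent Files record (frec) and insert it
}
```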
23 changes: 23 additions & 0 deletions test/data/integration/README.md
@@ -0,0 +1,23 @@
# Integration Test Data
This folder contains data for integration tests. The main file, `integration_data.json`, contains most of the metadata that is used in all of the integration tests (`test/int_*.go`).

The data is generated in `test/integration_cases.go`. The JSON structure is defined in the `initialData` struct in the same file. When running `make test-integration`, the function `TestIntegration` in `test/integration_test.go` is run, which does the following (sketched below):
1. Load the data from the file defined in `INTEGRATION_DATA_FILE`. The default in the `Makefile` is `test/data/integration_data.json`. The data is loaded into the variable `TestData` in `test/integration_cases.go`.
2. Populate the test cases with this initial data.
3. Iterate over the test cases and run each through `runTestWorkflow`.
4. Each testCase issues a `GET`, `POST`, or `PUT` request, depending on the fields in the `testCase` structure.
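
A minimal sketch of that flow, assuming `INTEGRATION_DATA_FILE` is exposed to the test via the environment (the exact wiring lives in the `Makefile` and `test/integration_test.go`):

```go
package main

import (
	"os"
	"testing"
)

// Sketch only: LoadTestCases and runTestWorkflow are the real helpers from
// test/integration_cases.go and test/integration_test.go; how the data file
// path reaches the test and the generateBaseData signature are assumptions.
func TestIntegration(t *testing.T) {
	// 1. locate the initial data file (assumption: passed via the environment)
	filepath := os.Getenv("INTEGRATION_DATA_FILE")
	if _, err := os.Stat(filepath); os.IsNotExist(err) {
		generateBaseData(t, filepath) // hypothetical call: fills TestData and writes it as JSON
	}

	// 2. populate the per-endpoint test tables with the loaded data
	testCases := LoadTestCases(t, filepath)

	// 3. and 4. run each table; every testCase issues a GET, POST, or PUT
	for _, c := range testCases {
		runTestWorkflow(t, c)
	}
}
```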

When running `make test-integration`, if the file at `INTEGRATION_DATA_FILE` does not exist, the function `generateBaseData` in `test/integration_cases.go` will be run, populating the fields in `TestData` and then writing the data as JSON into the file.

## Writing Test Cases
The test cases are written as table-driven tests, in which only the inputs and expected outputs for each case need to be specified in a list of structs.

Each endpoint of the API has a corresponding `test/int_*.go` file.
The test cases related to an endpoint are created in `get*TestTable` functions in each file, each of which returns an `EndpointTestCase`.
The `EndpointTestCase` struct contains default information about an endpoint.

Within `EndpointTestCase`, the field `testCases` contains a list of test cases that exercise the endpoint.
An individual test case is described by the `testCase` struct in `test/integration_cases.go`, which defines the basic elements of a single request and its expected response.
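
For illustration, a hypothetical table entry might look like the following; it would live alongside the other `get*TestTable` functions, the field names mirror `EndpointTestCase` and `testCase`, and the handler, endpoint, and parameter values are placeholders:

```go
// Hypothetical example table; values are placeholders, but the field names
// match the structs in test/integration_cases.go.
func getExampleTestTable(t *testing.T) EndpointTestCase {
	return EndpointTestCase{
		description:     "Example endpoint tests",
		defaultHandler:  web.DatasetsHandler,
		defaultEndpoint: "/dbs/datasets",
		testCases: []testCase{
			{
				description: "GET a single dataset",
				serverType:  "DBSReader",
				method:      "GET",
				params: url.Values{
					"dataset": []string{TestData.Dataset},
				},
				output:   []Response{},
				respCode: http.StatusOK,
			},
		},
	}
}
```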

## Processing Test Cases
Each `EndpointTestCase` is run through `runTestWorkflow` in `test/integration_test.go`, which in turn iterates over the `testCases` field and uses the fields of each individual `testCase` to build and verify the request.
2 changes: 1 addition & 1 deletion test/int_blocks.go
@@ -1,7 +1,7 @@
package main

// this file contains logic for the blocks API
// the HTTP requests body is defined by dbs.Blocks struct defined in this dbs/blocks.go
// the HTTP requests body is defined by dbs.Blocks struct defined in dbs/blocks.go
// the HTTP response body is defined by blockResponse struct defined in this file
// the HTTP response body for the `detail` query is defined by blockDetailResponse struct defined in this file
// the HTTP handlers and endpoints are defined in the EndpointTestCase struct defined in test/integration_cases.go
71 changes: 71 additions & 0 deletions test/int_datasets.go
@@ -439,3 +439,74 @@ func getDatasetsTestTable2(t *testing.T) EndpointTestCase {
},
}
}

// struct for a datasets update request body
type datasetsUpdateRequest struct {
DATASET string `json:"dataset"`
DATASET_ACCESS_TYPE string `json:"dataset_access_type"`
}

// third datasets endpoint tests for update datasets
func getDatasetsTestTable3(t *testing.T) EndpointTestCase {
// basic responses
dsResp := createDSResponse(TestData.Dataset)

// detail responses
// dsDetailResp := createDetailDSResponse(1, TestData.Dataset, TestData.ProcDataset, TestData.DatasetAccessType)

// setting dsResp to PRODUCTION
dsUpdateReq := datasetsUpdateRequest{
DATASET: TestData.Dataset,
DATASET_ACCESS_TYPE: TestData.DatasetAccessType2,
}
return EndpointTestCase{
description: "Test datasets update",
defaultHandler: web.DatasetsHandler,
defaultEndpoint: "/dbs/datasets",
testCases: []testCase{
{
description: "Check dataset to be updated",
serverType: "DBSReader",
method: "GET",
params: url.Values{
"dataset": []string{TestData.Dataset},
},
output: []Response{
dsResp,
},
respCode: http.StatusOK,
},
{
description: "Test PUT update dataset type", // DBSClientWriter_t.test20
serverType: "DBSWriter",
method: "PUT",
input: dsUpdateReq,
output: []Response{},
respCode: http.StatusOK,
},
{
description: "Ensure update removes dataset valid",
serverType: "DBSReader",
method: "GET",
params: url.Values{
"dataset": []string{TestData.Dataset},
},
output: []Response{},
respCode: http.StatusOK,
},
{
description: "Check dataset access type is PRODUCTION",
serverType: "DBSReader",
method: "GET",
params: url.Values{
"is_dataset_valid": []string{"0"},
"dataset_access_type": []string{"PRODUCTION"},
},
output: []Response{
dsResp,
},
respCode: http.StatusOK,
},
},
}
}
37 changes: 36 additions & 1 deletion test/int_files.go
@@ -11,6 +11,11 @@ import (
)

// this file contains logic for files API
// the HTTP request body is defined by dbs.FileRecord struct defined in dbs/files.go
// the basic HTTP response body is defined by fileResponse struct in this file
// the detailed HTTP response body is defined by fileDetailResponse struct in this file
// the HTTP response body for run_num param is defined by fileRunResponse struct in this file
// the HTTP handlers and endpoints are defined in the EndpointTestCase struct defined in test/integration_cases.go

// basic files API response
type fileResponse struct {
Expand Down Expand Up @@ -92,7 +97,7 @@ func createDetailedResponse(i int, blockID int64, datasetID int64, fileRecord db
FILE_SIZE: fileRecord.FILE_SIZE,
FILE_TYPE: fileRecord.FILE_TYPE,
FILE_TYPE_ID: 1,
IS_FILE_VALID: 0,
IS_FILE_VALID: 1,
LAST_MODIFICATION_DATE: 0,
LAST_MODIFIED_BY: TestData.CreateBy,
LOGICAL_FILE_NAME: fileRecord.LOGICAL_FILE_NAME,
@@ -102,6 +107,7 @@

// files endpoint tests
// TODO: handle BRANCH_HASH_ID
// TODO: Test with a request that does not contain is_file_valid
func getFilesTestTable(t *testing.T) EndpointTestCase {
parentFileLumiList := []dbs.FileLumi{
{LumiSectionNumber: 27414, RunNumber: 97, EventCount: 66},
@@ -264,3 +270,32 @@ func getFilesTestTable(t *testing.T) EndpointTestCase {
},
}
}

// files PUT request body struct
type filesPUTRequest struct {
LOGICAL_FILE_NAME string `json:"logical_file_name"`
IS_FILE_VALID int64 `json:"is_file_valid" validate:"number"`
}

// files endpoint tests part 2
func getFilesTestTable2(t *testing.T) EndpointTestCase {
lfn := fmt.Sprintf("/store/mc/Fall08/BBJets250to500-madgraph/GEN-SIM-RAW/IDEAL_/%v/%v.root", TestData.UID, 1)
fileReq := filesPUTRequest{
LOGICAL_FILE_NAME: lfn,
IS_FILE_VALID: 0,
}
return EndpointTestCase{
description: "Test files 2",
defaultHandler: web.FilesHandler,
defaultEndpoint: "/dbs/files",
testCases: []testCase{
{
description: "Test update file status",
method: "PUT",
serverType: "DBSWriter",
input: fileReq,
respCode: http.StatusOK,
},
},
}
}
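
A minimal sketch of what the `filesPUTRequest` above serializes to on the wire, assuming only the JSON tags shown in the diff (the `validate` tag is dropped since it does not affect serialization, and the LFN value is a placeholder for the one built from `TestData.UID`):

```go
package main

import (
	"encoding/json"
	"fmt"
)

// Mirrors the filesPUTRequest struct added in test/int_files.go above.
type filesPUTRequest struct {
	LOGICAL_FILE_NAME string `json:"logical_file_name"`
	IS_FILE_VALID     int64  `json:"is_file_valid"`
}

func main() {
	req := filesPUTRequest{
		LOGICAL_FILE_NAME: "/store/mc/Fall08/.../1.root", // placeholder LFN
		IS_FILE_VALID:     0,                             // mark the file invalid
	}
	b, err := json.Marshal(req)
	if err != nil {
		panic(err)
	}
	fmt.Println(string(b))
	// prints: {"logical_file_name":"/store/mc/Fall08/.../1.root","is_file_valid":0}
}
```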
19 changes: 13 additions & 6 deletions test/int_primary_datasets.go
@@ -125,8 +125,10 @@ func getPrimaryDatasetTestTable(t *testing.T) EndpointTestCase {
serverType: "DBSWriter",
params: nil,
input: primaryDSReq,
output: nil,
respCode: http.StatusOK,
output: []Response{
primaryDSResp,
},
respCode: http.StatusOK,
},
{
description: "Test primarydatasets GET after POST",
@@ -230,8 +232,10 @@ func getPrimaryDatasetTestTable(t *testing.T) EndpointTestCase {
serverType: "DBSWriter",
params: nil,
input: primaryDSReq,
output: nil,
respCode: http.StatusOK,
output: []Response{
primaryDSResp,
},
respCode: http.StatusOK,
},
{
description: "Test primarydatasets GET after duplicate POST",
@@ -248,8 +252,11 @@
serverType: "DBSWriter",
params: nil,
input: primaryDSReq2,
output: nil,
respCode: http.StatusOK,
output: []Response{
primaryDSResp,
primaryDSResp2,
},
respCode: http.StatusOK,
},
{
description: "Test primarydatasets GET after second POST",
22 changes: 13 additions & 9 deletions test/integration_cases.go
@@ -32,15 +32,15 @@ type BadRequest struct {

// basic elements to define a test case
type testCase struct {
description string // test case description
serverType string // DBSWriter, DBSReader, DBSMigrate
method string // http method
endpoint string // url endpoint
params url.Values // url parameters
handler func(http.ResponseWriter, *http.Request)
input RequestBody // POST record
output []Response // expected response
respCode int // expected HTTP response code
description string // test case description
serverType string // DBSWriter, DBSReader, DBSMigrate
method string // http method
endpoint string // url endpoint, optional if EndpointTestCase.defaultEndpoint is defined
params url.Values // url parameters, optional
handler func(http.ResponseWriter, *http.Request) // optional if EndpointTestCase.defaultHandler is defined
input RequestBody // POST and PUT body, optional for GET request
output []Response // expected response
respCode int // expected HTTP response code
}

// initialData struct for test data generation
@@ -223,6 +223,8 @@ func LoadTestCases(t *testing.T, filepath string) []EndpointTestCase {
blocksTestCase := getBlocksTestTable(t)
filesTestCase := getFilesTestTable(t)
datasetsTestCase2 := getDatasetsTestTable2(t)
filesTestCase2 := getFilesTestTable2(t)
datasetsTestCase3 := getDatasetsTestTable3(t)

return []EndpointTestCase{
primaryDatasetAndTypesTestCase,
@@ -236,5 +238,7 @@
blocksTestCase,
filesTestCase,
datasetsTestCase2,
filesTestCase2,
datasetsTestCase3,
}
}
51 changes: 31 additions & 20 deletions test/integration_test.go
@@ -249,14 +249,6 @@ func runTestWorkflow(t *testing.T, c EndpointTestCase) {
t.Run(c.description, func(t *testing.T) {
for _, v := range c.testCases {
t.Run(v.description, func(t *testing.T) {
var handler func(http.ResponseWriter, *http.Request)

// set handler
handler = c.defaultHandler
if v.handler != nil {
handler = v.handler
}

// set the endpoint
endpoint := c.defaultEndpoint
if v.endpoint != "" {
@@ -267,22 +259,41 @@
server = runTestServer(t, v.serverType)
defer server.Close()

//var req *http.Request
if v.method == "GET" {
d, _ := getData(t, server.URL, endpoint, v.params, v.respCode)
// create request body
data, err := json.Marshal(v.input)
if err != nil {
t.Fatal(err.Error())
}
reader := bytes.NewReader(data)

// req = newreq(t, v.method, server.URL, endpoint, nil, v.params, nil)
verifyResponse(t, d, v.output)
} else if v.method == "POST" {
injectDBRecord(t, v.input, server.URL, endpoint, v.params, handler, v.respCode)
// Set headers
headers := http.Header{}
headers.Set("Accept", "application/json")
if v.method == "POST" || v.method == "PUT" {
headers.Set("Content-Type", "application/json")
}
req := newreq(t, v.method, server.URL, endpoint, reader, v.params, headers)

r, err := http.DefaultClient.Do(req)
if err != nil {
t.Fatal(err.Error())
}
defer r.Body.Close()

// ensure returned status code is same as expected status code
if r.StatusCode != v.respCode {
t.Fatalf("Different HTTP Status: Expected %v, Received %v", v.respCode, r.StatusCode)
}
/*
r, err := http.DefaultClient.Do(req)

// decode and verify a GET request
if v.method == "GET" {
var d []dbs.Record
err = json.NewDecoder(r.Body).Decode(&d)
if err != nil {
t.Fatal(err)
t.Fatalf("Failed to decode body, %v", err)
}
defer r.Body.Close()
*/
verifyResponse(t, d, v.output)
}
})
}
})
