diff --git a/test/data/integration/bulkblocks_data.json b/test/data/integration/bulkblocks_data.json
index a12a0054..1a657ed4 100755
--- a/test/data/integration/bulkblocks_data.json
+++ b/test/data/integration/bulkblocks_data.json
@@ -1,5 +1,5 @@
 {
-    "parent_bulk": {
+    "con_parent_bulk": {
         "dataset_conf_list": [
             {
                 "release_version": "CMSSW_1_2_3",
@@ -232,19 +232,19 @@
             "create_by": "WMAgent",
             "primary_ds_type": "test",
             "primary_ds_name": "unittest_web_primary_ds_name_8268_stepchain",
-            "creation_date": 1652729861
+            "creation_date": 1652798955
         },
         "dataset": {
             "dataset_id": 0,
             "create_by": "WMAgent",
-            "creation_date": 1652729861,
+            "creation_date": 1652798955,
             "physics_group_name": "Tracker",
             "dataset_access_type": "PRODUCTION",
             "data_tier_name": "GEN-SIM-RAW",
             "last_modified_by": "WMAgent",
             "processed_ds_name": "acq_era_8268-ptsr-v8268",
             "xtcrosssection": 0,
-            "last_modification_date": 1652729861,
+            "last_modification_date": 1652798955,
             "dataset": "/unittest_web_primary_ds_name_8268_stepchain/acq_era_8268-ptsr-v8268/GEN-SIM-RAW",
             "prep_id": "TestPrepID"
         },
@@ -274,7 +274,7 @@
         "dataset_parent_list": [],
         "ds_parent_list": null
     },
-    "child_bulk": {
+    "con_child_bulk": {
         "dataset_conf_list": [
             {
                 "release_version": "CMSSW_1_2_3",
@@ -507,19 +507,19 @@
             "create_by": "WMAgent",
             "primary_ds_type": "test",
             "primary_ds_name": "unittest_web_primary_ds_name_8268_stepchain",
-            "creation_date": 1652729861
+            "creation_date": 1652798955
         },
         "dataset": {
             "dataset_id": 0,
             "create_by": "WMAgent",
-            "creation_date": 1652729861,
+            "creation_date": 1652798955,
             "physics_group_name": "Tracker",
             "dataset_access_type": "PRODUCTION",
             "data_tier_name": "GEN-SIM-RAW",
             "last_modified_by": "WMAgent",
             "processed_ds_name": "acq_era_8268-v8268",
             "xtcrosssection": 0,
-            "last_modification_date": 1652729861,
+            "last_modification_date": 1652798955,
             "dataset": "/unittest_web_primary_ds_name_8268_stepchain/acq_era_8268-v8268/GEN-SIM-RAW",
             "prep_id": "TestPrepID"
         },
@@ -551,7 +551,7 @@
         ],
         "ds_parent_list": null
     },
-    "parent_bulk2": {
+    "seq_parent_bulk": {
         "dataset_conf_list": [
             {
                 "release_version": "CMSSW_1_2_3",
@@ -728,19 +728,19 @@
             "create_by": "WMAgent",
             "primary_ds_type": "test",
             "primary_ds_name": "unittest_web_primary_ds_name_8268_stepchain2",
-            "creation_date": 1652729861
+            "creation_date": 1652798955
         },
         "dataset": {
             "dataset_id": 0,
             "create_by": "WMAgent",
-            "creation_date": 1652729861,
+            "creation_date": 1652798955,
             "physics_group_name": "Tracker",
             "dataset_access_type": "PRODUCTION",
             "data_tier_name": "GEN-SIM-RAW",
             "last_modified_by": "WMAgent",
             "processed_ds_name": "acq_era_8268-ptsr-v82682",
             "xtcrosssection": 0,
-            "last_modification_date": 1652729861,
+            "last_modification_date": 1652798955,
             "dataset": "/unittest_web_primary_ds_name_8268_stepchain/acq_era_8268-ptsr-v8268/GEN-SIM-RAW2",
             "prep_id": "TestPrepID"
         },
@@ -770,7 +770,7 @@
         "dataset_parent_list": [],
         "ds_parent_list": null
     },
-    "child_bulk2": {
+    "seq_child_bulk": {
         "dataset_conf_list": [
             {
                 "release_version": "CMSSW_1_2_3",
@@ -947,19 +947,19 @@
             "create_by": "WMAgent",
             "primary_ds_type": "test",
             "primary_ds_name": "unittest_web_primary_ds_name_8268_stepchain2",
-            "creation_date": 1652729861
+            "creation_date": 1652798955
         },
         "dataset": {
             "dataset_id": 0,
             "create_by": "WMAgent",
-            "creation_date": 1652729861,
+            "creation_date": 1652798955,
             "physics_group_name": "Tracker",
             "dataset_access_type": "PRODUCTION",
             "data_tier_name": "GEN-SIM-RAW",
             "last_modified_by": "WMAgent",
             "processed_ds_name": "acq_era_8268-v82682",
             "xtcrosssection": 0,
-            "last_modification_date": 1652729861,
+            "last_modification_date": 1652798955,
             "dataset": "/unittest_web_primary_ds_name_8268_stepchain/acq_era_8268-v8268/GEN-SIM-RAW2",
             "prep_id": "TestPrepID"
         },
diff --git a/test/int_bulkblocks.go b/test/int_bulkblocks.go
index 7b6f9a34..4d6027a1 100644
--- a/test/int_bulkblocks.go
+++ b/test/int_bulkblocks.go
@@ -11,10 +11,12 @@ import (
 // this file contains logic for bulkblocks API
 // both sequential and concurrent bulkblocks are tested
 // HTTP request body data is defined in test/data/integration/bulkblocks_data.json. This is generated using generateBulkBlocksData in test/integration_cases.go
-// Sequential bulkblocks data is under the parentBulk and childBulk fields in test/data/integration/bulkblocks_data.json
-// Concurrent bulkblocks data is under the parentBulk2 and childBulk2 fields in test/data/integration/bulkblocks_data.json
+// sequential bulkblocks data is under the seq_parent_bulk and seq_child_bulk fields in test/data/integration/bulkblocks_data.json
+// concurrent bulkblocks data is under the con_parent_bulk and con_child_bulk fields in test/data/integration/bulkblocks_data.json
 // bulkblocks_data.json is loaded into BulkBlocksData struct defined in test/integration_cases.go
 // the HTTP request body is defined by dbs.BulkBlocks struct defined in dbs/bulkblocks.go
+// sequential bulkblocks data is loaded into SequentialParentData and SequentialChildData in BulkBlocksData struct
+// concurrent bulkblocks data is loaded into ConcurrentParentData and ConcurrentChildData in BulkBlocksData struct
 // the HTTP handlers and endpoints are defined in the EndpointTestCase struct defined in test/integration_cases.go
 
 // bulkblocks test table
@@ -25,62 +27,52 @@ func getBulkBlocksTestTable(t *testing.T) EndpointTestCase {
         defaultEndpoint: "/dbs/bulkblocks",
         testCases: []testCase{
             {
-                description: "Test POST parent bulkblocks",
-                serverType:  "DBSWriter",
-                method:      "POST",
-                input:       BulkBlocksData.ParentData2,
+                description:          "Test POST concurrent parent bulkblocks",
+                serverType:           "DBSWriter",
+                concurrentBulkBlocks: true,
+                method:               "POST",
+                input:                BulkBlocksData.ConcurrentParentData,
+                output:               []Response{},
                 params: url.Values{
-                    "block_name": []string{TestData.ParentStepchainBlock + "2"},
+                    "block_name": []string{TestData.ParentStepchainBlock},
                 },
-                output:   []Response{},
                 handler:  web.FilesHandler,
                 respCode: http.StatusOK,
             },
             {
-                description: "Test POST child bulkblocks",
-                serverType:  "DBSWriter",
-                method:      "POST",
-                input:       BulkBlocksData.ChildData2,
+                description:          "Test POST concurrent child bulkblocks",
+                serverType:           "DBSWriter",
+                concurrentBulkBlocks: true,
+                method:               "POST",
+                input:                BulkBlocksData.ConcurrentChildData,
+                output:               []Response{},
                 params: url.Values{
-                    "block_name": []string{TestData.StepchainBlock + "2"},
+                    "block_name": []string{TestData.StepchainBlock},
                 },
-                output:   []Response{},
                 handler:  web.FilesHandler,
                 respCode: http.StatusOK,
             },
-        },
-    }
-}
-
-// concurrent bulkblocks test table
-func getConcurrentBulkBlocksTestTable(t *testing.T) EndpointTestCase {
-    return EndpointTestCase{
-        description:          "Test bulkblocks",
-        defaultHandler:       web.BulkBlocksHandler,
-        defaultEndpoint:      "/dbs/bulkblocks",
-        concurrentBulkBlocks: true,
-        testCases: []testCase{
             {
-                description: "Test POST parent bulkblocks",
+                description: "Test POST sequential parent bulkblocks",
                 serverType:  "DBSWriter",
                 method:      "POST",
-                input:       BulkBlocksData.ParentData,
-                output:      []Response{},
+                input:       BulkBlocksData.SequentialParentData,
                 params: url.Values{
-                    "block_name": []string{TestData.ParentStepchainBlock},
+                    "block_name": []string{TestData.ParentStepchainBlock + "2"},
                 },
+                output:   []Response{},
                 handler:  web.FilesHandler,
                 respCode: http.StatusOK,
             },
             {
-                description: "Test POST child bulkblocks",
+                description: "Test POST sequential child bulkblocks",
                 serverType:  "DBSWriter",
                 method:      "POST",
-                input:       BulkBlocksData.ChildData,
-                output:      []Response{},
+                input:       BulkBlocksData.SequentialChildData,
                 params: url.Values{
-                    "block_name": []string{TestData.StepchainBlock},
+                    "block_name": []string{TestData.StepchainBlock + "2"},
                 },
+                output:   []Response{},
                 handler:  web.FilesHandler,
                 respCode: http.StatusOK,
             },
diff --git a/test/int_files.go b/test/int_files.go
index caf6ec57..3eca7485 100644
--- a/test/int_files.go
+++ b/test/int_files.go
@@ -560,7 +560,7 @@ func getFilesTestTable2(t *testing.T) EndpointTestCase {
 // test files with lumi_list range
 func getFilesLumiListRangeTestTable(t *testing.T) EndpointTestCase {
     // filtered detailed response
-    childBulk := BulkBlocksData.ChildData
+    childBulk := BulkBlocksData.ConcurrentChildData
     var lfns []Response
     var detailResp3 []Response
     var detailRunResp []Response
diff --git a/test/integration_cases.go b/test/integration_cases.go
index c08ac8c7..55fc5f04 100644
--- a/test/integration_cases.go
+++ b/test/integration_cases.go
@@ -34,15 +34,16 @@ type BadRequest struct {
 
 // basic elements to define a test case
 type testCase struct {
-    description string // test case description
-    serverType  string // DBSWriter, DBSReader, DBSMigrate
-    method      string // http method
-    endpoint    string // url endpoint, optional if EndpointTestCase.defaultEndpoint is defined
-    params      url.Values // url parameters, optional
-    handler     func(http.ResponseWriter, *http.Request) // optional if EndpointTestCase.defaultHandler is defined
-    input       RequestBody // POST and PUT body, optional for GET request
-    output      []Response // expected response
-    respCode    int // expected HTTP response code
+    description          string // test case description
+    serverType           string // DBSWriter, DBSReader, DBSMigrate
+    concurrentBulkBlocks bool // true for concurrentBulkBlocks
+    method               string // http method
+    endpoint             string // url endpoint, optional if EndpointTestCase.defaultEndpoint is defined
+    params               url.Values // url parameters, optional
+    handler              func(http.ResponseWriter, *http.Request) // optional if EndpointTestCase.defaultHandler is defined
+    input                RequestBody // POST and PUT body, optional for GET request
+    output               []Response // expected response
+    respCode             int // expected HTTP response code
 }
 
 // initialData struct for test data generation
@@ -86,10 +87,10 @@ type initialData struct {
 
 // struct containing bulk blocks data
 type bulkBlocksData struct {
-    ParentData  dbs.BulkBlocks `json:"parent_bulk"` // for concurrent bulkblocks
-    ChildData   dbs.BulkBlocks `json:"child_bulk"` // for concurrent bulkblocks
-    ParentData2 dbs.BulkBlocks `json:"parent_bulk2"` // for sequential bulkblocks
-    ChildData2  dbs.BulkBlocks `json:"child_bulk2"` // for sequential bulkblocks
+    ConcurrentParentData dbs.BulkBlocks `json:"con_parent_bulk"` // for concurrent bulkblocks
+    ConcurrentChildData  dbs.BulkBlocks `json:"con_child_bulk"` // for concurrent bulkblocks
+    SequentialParentData dbs.BulkBlocks `json:"seq_parent_bulk"` // for sequential bulkblocks
+    SequentialChildData  dbs.BulkBlocks `json:"seq_child_bulk"` // for sequential bulkblocks
 }
 
 // TestData contains the generated data
@@ -101,11 +102,10 @@ var BulkBlocksData bulkBlocksData
 
 // defines a testcase for an endpoint
 type EndpointTestCase struct {
-    description          string
-    defaultHandler       func(http.ResponseWriter, *http.Request)
-    defaultEndpoint      string
-    concurrentBulkBlocks bool
-    testCases            []testCase
+    description     string
+    defaultHandler  func(http.ResponseWriter, *http.Request)
+    defaultEndpoint string
+    testCases       []testCase
 }
 
 // get a UUID time_mid as an int
@@ -359,10 +359,10 @@ func generateBulkBlocksData(t *testing.T, filepath string) {
     bulk2.Files = childFileList2
 
     BulkBlocksData = bulkBlocksData{
-        ParentData:  parentBulk,
-        ChildData:   bulk,
-        ParentData2: parentBulk2,
-        ChildData2:  bulk2,
+        ConcurrentParentData: parentBulk,
+        ConcurrentChildData:  bulk,
+        SequentialParentData: parentBulk2,
+        SequentialChildData:  bulk2,
     }
 
     file, err := json.MarshalIndent(BulkBlocksData, "", " ")
@@ -427,7 +427,6 @@ func LoadTestCases(t *testing.T, filepath string, bulkblockspath string) []Endpo
     outputConfigTestCase2 := getOutputConfigTestTable2(t)
     datasetParentsTestCase := getDatasetParentsTestTable(t)
     bulkBlocksTest := getBulkBlocksTestTable(t)
-    bulkBlocksConcurrentTest := getConcurrentBulkBlocksTestTable(t)
     filesReaderTestTable := getFilesLumiListRangeTestTable(t)
 
     return []EndpointTestCase{
@@ -447,8 +446,7 @@ func LoadTestCases(t *testing.T, filepath string, bulkblockspath string) []Endpo
         blockUpdateTestCase,
         outputConfigTestCase2,
         datasetParentsTestCase,
-        bulkBlocksConcurrentTest,
-        filesReaderTestTable,
         bulkBlocksTest,
+        filesReaderTestTable,
     }
 }
diff --git a/test/integration_test.go b/test/integration_test.go
index 457045a2..679fbd87 100644
--- a/test/integration_test.go
+++ b/test/integration_test.go
@@ -48,7 +48,7 @@ func runTestWorkflow(t *testing.T, c EndpointTestCase) {
             }
 
             // run a test server for a single test case
-            server = dbsServer(t, "dbs", "DBS_DB_FILE", v.serverType, c.concurrentBulkBlocks)
+            server = dbsServer(t, "dbs", "DBS_DB_FILE", v.serverType, v.concurrentBulkBlocks)
             defer server.Close()
 
             // create request body
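
As a quick illustration (not part of the patch), the standalone Go sketch below loads bulkblocks_data.json and checks that the four renamed top-level keys are present, showing how the new json tags map onto the renamed struct fields. It is hypothetical: json.RawMessage stands in for dbs.BulkBlocks so the snippet is self-contained, and the real loading code lives in test/integration_cases.go.

// Minimal sketch: verify the renamed top-level keys in bulkblocks_data.json.
// json.RawMessage is used instead of dbs.BulkBlocks to keep the example
// self-contained; the actual struct is defined in test/integration_cases.go.
package main

import (
	"encoding/json"
	"fmt"
	"log"
	"os"
)

type bulkBlocksData struct {
	ConcurrentParentData json.RawMessage `json:"con_parent_bulk"` // for concurrent bulkblocks
	ConcurrentChildData  json.RawMessage `json:"con_child_bulk"`  // for concurrent bulkblocks
	SequentialParentData json.RawMessage `json:"seq_parent_bulk"` // for sequential bulkblocks
	SequentialChildData  json.RawMessage `json:"seq_child_bulk"`  // for sequential bulkblocks
}

func main() {
	// read the generated test data file
	data, err := os.ReadFile("test/data/integration/bulkblocks_data.json")
	if err != nil {
		log.Fatal(err)
	}
	// unmarshal into the struct; each renamed key fills its renamed field
	var b bulkBlocksData
	if err := json.Unmarshal(data, &b); err != nil {
		log.Fatal(err)
	}
	fmt.Println("con_parent_bulk loaded:", len(b.ConcurrentParentData) > 0)
	fmt.Println("con_child_bulk loaded:", len(b.ConcurrentChildData) > 0)
	fmt.Println("seq_parent_bulk loaded:", len(b.SequentialParentData) > 0)
	fmt.Println("seq_child_bulk loaded:", len(b.SequentialChildData) > 0)
}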