Moved bulkblocks POST to single function, renamed bulkblocks data keys
d-ylee committed May 17, 2022
1 parent a90a946 commit c935851
Showing 5 changed files with 67 additions and 77 deletions.
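For orientation, here is a minimal sketch (not code from this commit) of what the change amounts to: the concurrent/sequential switch moves from the EndpointTestCase level onto each individual testCase, so one test table covers both modes, and the bulkblocks data fields are renamed to say which mode they feed. The case descriptions and field names below are taken from the diff; the two struct types are simplified stand-ins for the real ones in test/integration_cases.go.

package main

import "fmt"

// Simplified stand-in for the testCase struct in test/integration_cases.go;
// only the fields touched by this commit are shown.
type testCase struct {
	description          string
	concurrentBulkBlocks bool // new per-case flag (previously lived on EndpointTestCase)
}

// Simplified stand-in for EndpointTestCase.
type EndpointTestCase struct {
	description string
	testCases   []testCase
}

func main() {
	// A single bulkblocks test table now carries both modes,
	// replacing the separate getConcurrentBulkBlocksTestTable function.
	bulkBlocksTest := EndpointTestCase{
		description: "Test bulkblocks",
		testCases: []testCase{
			{description: "Test POST concurrent parent bulkblocks", concurrentBulkBlocks: true},
			{description: "Test POST concurrent child bulkblocks", concurrentBulkBlocks: true},
			{description: "Test POST sequential parent bulkblocks"}, // flag defaults to false
			{description: "Test POST sequential child bulkblocks"},
		},
	}
	for _, tc := range bulkBlocksTest.testCases {
		fmt.Printf("%-45s concurrent=%v\n", tc.description, tc.concurrentBulkBlocks)
	}
}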
32 changes: 16 additions & 16 deletions test/data/integration/bulkblocks_data.json
@@ -1,5 +1,5 @@
{
- "parent_bulk": {
+ "con_parent_bulk": {
"dataset_conf_list": [
{
"release_version": "CMSSW_1_2_3",
@@ -232,19 +232,19 @@
"create_by": "WMAgent",
"primary_ds_type": "test",
"primary_ds_name": "unittest_web_primary_ds_name_8268_stepchain",
- "creation_date": 1652729861
+ "creation_date": 1652798955
},
"dataset": {
"dataset_id": 0,
"create_by": "WMAgent",
- "creation_date": 1652729861,
+ "creation_date": 1652798955,
"physics_group_name": "Tracker",
"dataset_access_type": "PRODUCTION",
"data_tier_name": "GEN-SIM-RAW",
"last_modified_by": "WMAgent",
"processed_ds_name": "acq_era_8268-ptsr-v8268",
"xtcrosssection": 0,
- "last_modification_date": 1652729861,
+ "last_modification_date": 1652798955,
"dataset": "/unittest_web_primary_ds_name_8268_stepchain/acq_era_8268-ptsr-v8268/GEN-SIM-RAW",
"prep_id": "TestPrepID"
},
@@ -274,7 +274,7 @@
"dataset_parent_list": [],
"ds_parent_list": null
},
- "child_bulk": {
+ "con_child_bulk": {
"dataset_conf_list": [
{
"release_version": "CMSSW_1_2_3",
@@ -507,19 +507,19 @@
"create_by": "WMAgent",
"primary_ds_type": "test",
"primary_ds_name": "unittest_web_primary_ds_name_8268_stepchain",
- "creation_date": 1652729861
+ "creation_date": 1652798955
},
"dataset": {
"dataset_id": 0,
"create_by": "WMAgent",
- "creation_date": 1652729861,
+ "creation_date": 1652798955,
"physics_group_name": "Tracker",
"dataset_access_type": "PRODUCTION",
"data_tier_name": "GEN-SIM-RAW",
"last_modified_by": "WMAgent",
"processed_ds_name": "acq_era_8268-v8268",
"xtcrosssection": 0,
- "last_modification_date": 1652729861,
+ "last_modification_date": 1652798955,
"dataset": "/unittest_web_primary_ds_name_8268_stepchain/acq_era_8268-v8268/GEN-SIM-RAW",
"prep_id": "TestPrepID"
},
@@ -551,7 +551,7 @@
],
"ds_parent_list": null
},
- "parent_bulk2": {
+ "seq_parent_bulk": {
"dataset_conf_list": [
{
"release_version": "CMSSW_1_2_3",
@@ -728,19 +728,19 @@
"create_by": "WMAgent",
"primary_ds_type": "test",
"primary_ds_name": "unittest_web_primary_ds_name_8268_stepchain2",
- "creation_date": 1652729861
+ "creation_date": 1652798955
},
"dataset": {
"dataset_id": 0,
"create_by": "WMAgent",
- "creation_date": 1652729861,
+ "creation_date": 1652798955,
"physics_group_name": "Tracker",
"dataset_access_type": "PRODUCTION",
"data_tier_name": "GEN-SIM-RAW",
"last_modified_by": "WMAgent",
"processed_ds_name": "acq_era_8268-ptsr-v82682",
"xtcrosssection": 0,
- "last_modification_date": 1652729861,
+ "last_modification_date": 1652798955,
"dataset": "/unittest_web_primary_ds_name_8268_stepchain/acq_era_8268-ptsr-v8268/GEN-SIM-RAW2",
"prep_id": "TestPrepID"
},
@@ -770,7 +770,7 @@
"dataset_parent_list": [],
"ds_parent_list": null
},
- "child_bulk2": {
+ "seq_child_bulk": {
"dataset_conf_list": [
{
"release_version": "CMSSW_1_2_3",
@@ -947,19 +947,19 @@
"create_by": "WMAgent",
"primary_ds_type": "test",
"primary_ds_name": "unittest_web_primary_ds_name_8268_stepchain2",
- "creation_date": 1652729861
+ "creation_date": 1652798955
},
"dataset": {
"dataset_id": 0,
"create_by": "WMAgent",
- "creation_date": 1652729861,
+ "creation_date": 1652798955,
"physics_group_name": "Tracker",
"dataset_access_type": "PRODUCTION",
"data_tier_name": "GEN-SIM-RAW",
"last_modified_by": "WMAgent",
"processed_ds_name": "acq_era_8268-v82682",
"xtcrosssection": 0,
- "last_modification_date": 1652729861,
+ "last_modification_date": 1652798955,
"dataset": "/unittest_web_primary_ds_name_8268_stepchain/acq_era_8268-v8268/GEN-SIM-RAW2",
"prep_id": "TestPrepID"
},
60 changes: 26 additions & 34 deletions test/int_bulkblocks.go
@@ -11,10 +11,12 @@ import (
// this file contains logic for bulkblocks API
// both sequential and concurrent bulkblocks are tested
// HTTP request body data is defined in test/data/integration/bulkblocks_data.json. This is generated using generateBulkBlocksData in test/integration_cases.go
- // Sequential bulkblocks data is under the parentBulk and childBulk fields in test/data/integration/bulkblocks_data.json
- // Concurrent bulkblocks data is under the parentBulk2 and childBulk2 fields in test/data/integration/bulkblocks_data.json
+ // sequential bulkblocks data is under the seq_parent_bulk and seq_child_bulk fields in test/data/integration/bulkblocks_data.json
+ // concurrent bulkblocks data is under the con_parent_bulk and con_child_bulk fields in test/data/integration/bulkblocks_data.json
// bulkblocks_data.json is loaded into BulkBlocksData struct defined in test/integration_cases.go
// the HTTP request body is defined by dbs.BulkBlocks struct defined in dbs/bulkblocks.go
+ // sequential bulkblocks data is loaded into SequentialParentData and SequentialChildData in BulkBlocksData struct
+ // concurrent bulkblocks data is loaded into ConcurrentParentData and ConcurrentChildData in BulkBlocksData struct
// the HTTP handlers and endpoints are defined in the EndpointTestCase struct defined in test/integration_cases.go

// bulkblocks test table
@@ -25,62 +25,52 @@ func getBulkBlocksTestTable(t *testing.T) EndpointTestCase {
defaultEndpoint: "/dbs/bulkblocks",
testCases: []testCase{
{
- description: "Test POST parent bulkblocks",
- serverType: "DBSWriter",
- method: "POST",
- input: BulkBlocksData.ParentData2,
+ description: "Test POST concurrent parent bulkblocks",
+ serverType: "DBSWriter",
+ concurrentBulkBlocks: true,
+ method: "POST",
+ input: BulkBlocksData.ConcurrentParentData,
+ output: []Response{},
params: url.Values{
- "block_name": []string{TestData.ParentStepchainBlock + "2"},
+ "block_name": []string{TestData.ParentStepchainBlock},
},
- output: []Response{},
handler: web.FilesHandler,
respCode: http.StatusOK,
},
{
- description: "Test POST child bulkblocks",
- serverType: "DBSWriter",
- method: "POST",
- input: BulkBlocksData.ChildData2,
+ description: "Test POST concurrent child bulkblocks",
+ serverType: "DBSWriter",
+ concurrentBulkBlocks: true,
+ method: "POST",
+ input: BulkBlocksData.ConcurrentChildData,
+ output: []Response{},
params: url.Values{
- "block_name": []string{TestData.StepchainBlock + "2"},
+ "block_name": []string{TestData.StepchainBlock},
},
- output: []Response{},
handler: web.FilesHandler,
respCode: http.StatusOK,
},
- },
- }
- }
-
- // concurrent bulkblocks test table
- func getConcurrentBulkBlocksTestTable(t *testing.T) EndpointTestCase {
- return EndpointTestCase{
- description: "Test bulkblocks",
- defaultHandler: web.BulkBlocksHandler,
- defaultEndpoint: "/dbs/bulkblocks",
- concurrentBulkBlocks: true,
- testCases: []testCase{
{
- description: "Test POST parent bulkblocks",
+ description: "Test POST sequential parent bulkblocks",
serverType: "DBSWriter",
method: "POST",
- input: BulkBlocksData.ParentData,
- output: []Response{},
+ input: BulkBlocksData.SequentialParentData,
params: url.Values{
- "block_name": []string{TestData.ParentStepchainBlock},
+ "block_name": []string{TestData.ParentStepchainBlock + "2"},
},
+ output: []Response{},
handler: web.FilesHandler,
respCode: http.StatusOK,
},
{
- description: "Test POST child bulkblocks",
+ description: "Test POST sequential child bulkblocks",
serverType: "DBSWriter",
method: "POST",
- input: BulkBlocksData.ChildData,
- output: []Response{},
+ input: BulkBlocksData.SequentialChildData,
params: url.Values{
- "block_name": []string{TestData.StepchainBlock},
+ "block_name": []string{TestData.StepchainBlock + "2"},
},
+ output: []Response{},
handler: web.FilesHandler,
respCode: http.StatusOK,
},
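As a rough illustration of the file comments above (again, not code from this commit): the renamed JSON keys in bulkblocks_data.json map onto the renamed fields of the bulkBlocksData struct through Go struct tags. The key names, field names, and file path come from the diff; the BulkBlocks map type here is only a stand-in for the real dbs.BulkBlocks struct from dbs/bulkblocks.go.

package main

import (
	"encoding/json"
	"fmt"
	"os"
)

// BulkBlocks is a stand-in for dbs.BulkBlocks; the real struct models the full bulkblocks POST body.
type BulkBlocks map[string]interface{}

// Mirrors the renamed bulkBlocksData struct in test/integration_cases.go.
type bulkBlocksData struct {
	ConcurrentParentData BulkBlocks `json:"con_parent_bulk"`
	ConcurrentChildData  BulkBlocks `json:"con_child_bulk"`
	SequentialParentData BulkBlocks `json:"seq_parent_bulk"`
	SequentialChildData  BulkBlocks `json:"seq_child_bulk"`
}

func main() {
	raw, err := os.ReadFile("test/data/integration/bulkblocks_data.json")
	if err != nil {
		panic(err)
	}
	var bb bulkBlocksData
	if err := json.Unmarshal(raw, &bb); err != nil {
		panic(err)
	}
	// Each section holds one complete request body (dataset_conf_list, dataset, files, ...).
	fmt.Println("con_parent_bulk top-level keys:", len(bb.ConcurrentParentData))
	fmt.Println("seq_child_bulk top-level keys:", len(bb.SequentialChildData))
}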
2 changes: 1 addition & 1 deletion test/int_files.go
@@ -560,7 +560,7 @@ func getFilesTestTable2(t *testing.T) EndpointTestCase {
// test files with lumi_list range
func getFilesLumiListRangeTestTable(t *testing.T) EndpointTestCase {
// filtered detailed response
- childBulk := BulkBlocksData.ChildData
+ childBulk := BulkBlocksData.ConcurrentChildData
var lfns []Response
var detailResp3 []Response
var detailRunResp []Response
48 changes: 23 additions & 25 deletions test/integration_cases.go
@@ -34,15 +34,16 @@ type BadRequest struct {

// basic elements to define a test case
type testCase struct {
- description string // test case description
- serverType string // DBSWriter, DBSReader, DBSMigrate
- method string // http method
- endpoint string // url endpoint, optional if EndpointTestCase.defaultEndpoint is defined
- params url.Values // url parameters, optional
- handler func(http.ResponseWriter, *http.Request) // optional if EndpointTestCase.defaultHandler is defined
- input RequestBody // POST and PUT body, optional for GET request
- output []Response // expected response
- respCode int // expected HTTP response code
+ description string // test case description
+ serverType string // DBSWriter, DBSReader, DBSMigrate
+ concurrentBulkBlocks bool // true for concurrentBulkBlocks
+ method string // http method
+ endpoint string // url endpoint, optional if EndpointTestCase.defaultEndpoint is defined
+ params url.Values // url parameters, optional
+ handler func(http.ResponseWriter, *http.Request) // optional if EndpointTestCase.defaultHandler is defined
+ input RequestBody // POST and PUT body, optional for GET request
+ output []Response // expected response
+ respCode int // expected HTTP response code
}

// initialData struct for test data generation
@@ -86,10 +87,10 @@ type initialData struct {

// struct containing bulk blocks data
type bulkBlocksData struct {
- ParentData dbs.BulkBlocks `json:"parent_bulk"` // for concurrent bulkblocks
- ChildData dbs.BulkBlocks `json:"child_bulk"` // for concurrent bulkblocks
- ParentData2 dbs.BulkBlocks `json:"parent_bulk2"` // for sequential bulkblocks
- ChildData2 dbs.BulkBlocks `json:"child_bulk2"` // for sequential bulkblocks
+ ConcurrentParentData dbs.BulkBlocks `json:"con_parent_bulk"` // for concurrent bulkblocks
+ ConcurrentChildData dbs.BulkBlocks `json:"con_child_bulk"` // for concurrent bulkblocks
+ SequentialParentData dbs.BulkBlocks `json:"seq_parent_bulk"` // for sequential bulkblocks
+ SequentialChildData dbs.BulkBlocks `json:"seq_child_bulk"` // for sequential bulkblocks
}

// TestData contains the generated data
@@ -101,11 +102,10 @@ var BulkBlocksData bulkBlocksData

// defines a testcase for an endpoint
type EndpointTestCase struct {
- description string
- defaultHandler func(http.ResponseWriter, *http.Request)
- defaultEndpoint string
- concurrentBulkBlocks bool
- testCases []testCase
+ description string
+ defaultHandler func(http.ResponseWriter, *http.Request)
+ defaultEndpoint string
+ testCases []testCase
}

// get a UUID time_mid as an int
@@ -359,10 +359,10 @@ func generateBulkBlocksData(t *testing.T, filepath string) {
bulk2.Files = childFileList2

BulkBlocksData = bulkBlocksData{
- ParentData: parentBulk,
- ChildData: bulk,
- ParentData2: parentBulk2,
- ChildData2: bulk2,
+ ConcurrentParentData: parentBulk,
+ ConcurrentChildData: bulk,
+ SequentialParentData: parentBulk2,
+ SequentialChildData: bulk2,
}

file, err := json.MarshalIndent(BulkBlocksData, "", " ")
@@ -427,7 +427,6 @@ func LoadTestCases(t *testing.T, filepath string, bulkblockspath string) []Endpo
outputConfigTestCase2 := getOutputConfigTestTable2(t)
datasetParentsTestCase := getDatasetParentsTestTable(t)
bulkBlocksTest := getBulkBlocksTestTable(t)
- bulkBlocksConcurrentTest := getConcurrentBulkBlocksTestTable(t)
filesReaderTestTable := getFilesLumiListRangeTestTable(t)

return []EndpointTestCase{
@@ -447,8 +446,7 @@ func LoadTestCases(t *testing.T, filepath string, bulkblockspath string) []Endpo
blockUpdateTestCase,
outputConfigTestCase2,
datasetParentsTestCase,
- bulkBlocksConcurrentTest,
- filesReaderTestTable,
bulkBlocksTest,
+ filesReaderTestTable,
}
}
2 changes: 1 addition & 1 deletion test/integration_test.go
@@ -48,7 +48,7 @@ func runTestWorkflow(t *testing.T, c EndpointTestCase) {
}

// run a test server for a single test case
- server = dbsServer(t, "dbs", "DBS_DB_FILE", v.serverType, c.concurrentBulkBlocks)
+ server = dbsServer(t, "dbs", "DBS_DB_FILE", v.serverType, v.concurrentBulkBlocks)
defer server.Close()

// create request body
