Skip to content

Commit

Permalink
Merge pull request #34 from d-ylee/datasetsValid
Browse files Browse the repository at this point in the history
IS_DATASETS_VALID logic
  • Loading branch information
vkuznet authored Apr 28, 2022
2 parents e896169 + 20fa5bc commit 002af65
Show file tree
Hide file tree
Showing 4 changed files with 187 additions and 31 deletions.
19 changes: 9 additions & 10 deletions dbs/datasets.go
Original file line number Diff line number Diff line change
Expand Up @@ -70,15 +70,6 @@ func (a *API) Datasets() error {
conds, args = AddParam("dataset", "D.DATASET", a.Params, conds, args)
}

// parse is_dataset_valid argument
isValid, _ := getSingleValue(a.Params, "is_dataset_valid")
if isValid == "" {
isValid = "1"
}
cond := fmt.Sprintf("D.IS_DATASET_VALID = %s", placeholder("is_dataset_valid"))
conds = append(conds, cond)
args = append(args, isValid)

// parse dataset_id argument
datasetAccessType, _ := getSingleValue(a.Params, "dataset_access_type")
oper := "="
Expand All @@ -88,11 +79,19 @@ func (a *API) Datasets() error {
datasetAccessType = "%"
oper = "like"
}
cond = fmt.Sprintf("DP.DATASET_ACCESS_TYPE %s %s", oper, placeholder("dataset_access_type"))
cond := fmt.Sprintf("DP.DATASET_ACCESS_TYPE %s %s", oper, placeholder("dataset_access_type"))
conds = append(conds, cond)
args = append(args, datasetAccessType)
// }

// parse is_dataset_valid argument
isValid, _ := getSingleValue(a.Params, "is_dataset_valid")
if isValid != "" {
cond = fmt.Sprintf("D.IS_DATASET_VALID = %s", placeholder("is_dataset_valid"))
conds = append(conds, cond)
args = append(args, isValid)
}

// optional arguments
if _, e := getSingleValue(a.Params, "parent_dataset"); e == nil {
tmpl["ParentDataset"] = true
Expand Down
98 changes: 78 additions & 20 deletions test/int_datasets.go
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
package main

// this file contains logic for datasets API
// the HTTP requests body is defined by datasetsRequest struct defined in this file
// the HTTP requests body is defined by dbs.DatasetRecord struct defined in dbs/datasets.go
// the HTTP response body is defined by datasetsResponse struct defined in this file
// the HTTP response body for the `detail` query is defined by datasetsDetailResponse struct defined in this file
// the HTTP handlers and endpoints are defined in the EndpointTestCase struct defined in test/integration_cases.go
Expand All @@ -17,25 +17,6 @@ import (
"github.com/dmwm/dbs2go/web"
)

// struct for datasets POST request body
type datasetsRequest struct {
DATASET string `json:"dataset" validate:"required"`
PRIMARY_DS_NAME string `json:"primary_ds_name" validate:"required"`
PRIMARY_DS_TYPE string `json:"primary_ds_type" validate:"required"`
PROCESSED_DS_NAME string `json:"processed_ds_name" validate:"required"`
DATA_TIER_NAME string `json:"data_tier_name" validate:"required"`
ACQUISITION_ERA_NAME string `json:"acquisition_era_name" validate:"required"`
DATASET_ACCESS_TYPE string `json:"dataset_access_type" validate:"required"`
PROCESSING_VERSION int64 `json:"processing_version" validate:"required,number,gt=0"`
PHYSICS_GROUP_NAME string `json:"physics_group_name" validate:"required"`
XTCROSSSECTION float64 `json:"xtcrosssection" validate:"required,number"`
CREATION_DATE int64 `json:"creation_date" validate:"required,number,gt=0"`
CREATE_BY string `json:"create_by" validate:"required"`
LAST_MODIFICATION_DATE int64 `json:"last_modification_date" validate:"required,number,gt=0"`
LAST_MODIFIED_BY string `json:"last_modified_by" validate:"required"`
OUTPUT_CONFIGS []dbs.OutputConfigRecord `json:"output_configs"`
}

// struct for datasets GET response
type datasetsResponse struct {
DATASET string `json:"dataset"`
Expand Down Expand Up @@ -548,6 +529,40 @@ func getDatasetsTestTable(t *testing.T) EndpointTestCase {
},
respCode: http.StatusOK,
},
{
description: "Test is_dataset_valid true",
serverType: "DBSReader",
method: "GET",
params: url.Values{
"is_dataset_valid": []string{"1"},
},
output: []Response{
dsResp,
dsParentResp,
},
respCode: http.StatusOK,
},
{
description: "Test is_dataset_valid false",
serverType: "DBSReader",
method: "GET",
params: url.Values{
"is_dataset_valid": []string{"0"},
},
output: []Response{},
respCode: http.StatusOK,
},
{
description: "Test is_dataset_valid none",
serverType: "DBSReader",
method: "GET",
params: url.Values{},
output: []Response{
dsResp,
dsParentResp,
},
respCode: http.StatusOK,
},
},
}
}
Expand Down Expand Up @@ -766,6 +781,7 @@ type datasetsUpdateRequest struct {
func getDatasetsTestTable3(t *testing.T) EndpointTestCase {
// basic responses
dsResp := createDSResponse(TestData.Dataset)
// dsParentResp := createDSResponse(TestData.ParentDataset)

// detail responses
// dsDetailResp := createDetailDSResponse(1, TestData.Dataset, TestData.ProcDataset, TestData.DatasetAccessType)
Expand Down Expand Up @@ -826,3 +842,45 @@ func getDatasetsTestTable3(t *testing.T) EndpointTestCase {
},
}
}

// datasetParentResponse represents one row of the JSON body returned by
// the /dbs/datasetparents endpoint: a parent dataset linked to the dataset
// that was queried.
type datasetParentResponse struct {
PARENT_DATASET string `json:"parent_dataset"` // parent dataset name
PARENT_DS_ID int `json:"parent_dataset_id"` // database id of the parent dataset
THIS_DATASET string `json:"this_dataset"` // the queried (child) dataset
}

// getDatasetParentsTestTable builds the EndpointTestCase for the
// /dbs/datasetparents endpoint: one GET that matches the seeded
// parent/child dataset pair and one GET for a dataset that does not exist.
func getDatasetParentsTestTable(t *testing.T) EndpointTestCase {
	// expected response for the seeded parent/child dataset pair;
	// NOTE(review): parent id 2 presumably matches insertion order of the
	// integration fixtures — confirm against the seeding test cases
	parentResp := datasetParentResponse{
		PARENT_DATASET: TestData.ParentDataset,
		PARENT_DS_ID:   2,
		THIS_DATASET:   TestData.Dataset,
	}

	// query for the seeded dataset returns its parent record
	existingCase := testCase{
		description: "Test GET", // DBSClientReader_t.test030
		method:      "GET",
		serverType:  "DBSReader",
		params: url.Values{
			"dataset": []string{TestData.Dataset},
		},
		output:   []Response{parentResp},
		respCode: http.StatusOK,
	}

	// query for an unknown dataset returns an empty list, not an error
	missingCase := testCase{
		description: "Test GET for non-existing", // DBSClientReader_t.test031
		method:      "GET",
		serverType:  "DBSReader",
		params: url.Values{
			"dataset": []string{"/does/not/EXIST"},
		},
		output:   []Response{},
		respCode: http.StatusOK,
	}

	return EndpointTestCase{
		description:     "Test datasetparents",
		defaultHandler:  web.DatasetParentsHandler,
		defaultEndpoint: "/dbs/datasetparents",
		testCases:       []testCase{existingCase, missingCase},
	}
}
99 changes: 98 additions & 1 deletion test/int_files.go
Original file line number Diff line number Diff line change
Expand Up @@ -232,7 +232,7 @@ func getFilesTestTable(t *testing.T) EndpointTestCase {
respCode: http.StatusOK,
},
{
description: "Test GET",
description: "Test GET", // DBSClientReader_t.test032
method: "GET",
serverType: "DBSReader",
params: url.Values{
Expand All @@ -241,6 +241,26 @@ func getFilesTestTable(t *testing.T) EndpointTestCase {
output: lfns,
respCode: http.StatusOK,
},
{
description: "Test GET validFileOnly", // DBSClientReader_t.test033
method: "GET",
serverType: "DBSReader",
params: url.Values{
"validFileOnly": []string{"1"},
},
output: append(parentLFNs, lfns...),
respCode: http.StatusOK,
},
{
description: "Test GET validFileOnly false", // DBSClientReader_t.test034
method: "GET",
serverType: "DBSReader",
params: url.Values{
"validFileOnly": []string{"0"},
},
output: append(parentLFNs, lfns...),
respCode: http.StatusOK,
},
{
description: "Test detail GET",
method: "GET",
Expand Down Expand Up @@ -287,6 +307,73 @@ type filesPUTRequest struct {

// files endpoint update tests
func getFilesTestTable2(t *testing.T) EndpointTestCase {
parentFileLumiList := []dbs.FileLumi{
{LumiSectionNumber: 27414, RunNumber: 97, EventCount: 66},
{LumiSectionNumber: 26422, RunNumber: 97, EventCount: 67},
{LumiSectionNumber: 29838, RunNumber: 97, EventCount: 68},
{LumiSectionNumber: 248, RunNumber: 97, EventCount: 69},
{LumiSectionNumber: 250, RunNumber: 97, EventCount: 70},
{LumiSectionNumber: 300, RunNumber: 97, EventCount: 71},
{LumiSectionNumber: 534, RunNumber: 97, EventCount: 72},
{LumiSectionNumber: 546, RunNumber: 97, EventCount: 73},
{LumiSectionNumber: 638, RunNumber: 97, EventCount: 74},
{LumiSectionNumber: 650, RunNumber: 97, EventCount: 75},
{LumiSectionNumber: 794, RunNumber: 97, EventCount: 76},
{LumiSectionNumber: 1313, RunNumber: 97, EventCount: 77},
{LumiSectionNumber: 1327, RunNumber: 97, EventCount: 78},
{LumiSectionNumber: 1339, RunNumber: 97, EventCount: 79},
{LumiSectionNumber: 1353, RunNumber: 97, EventCount: 80},
{LumiSectionNumber: 1428, RunNumber: 97, EventCount: 81},
{LumiSectionNumber: 1496, RunNumber: 97, EventCount: 82},
{LumiSectionNumber: 1537, RunNumber: 97, EventCount: 83},
{LumiSectionNumber: 1652, RunNumber: 97, EventCount: 84},
{LumiSectionNumber: 1664, RunNumber: 97, EventCount: 85},
{LumiSectionNumber: 1743, RunNumber: 97, EventCount: 86},
{LumiSectionNumber: 1755, RunNumber: 97, EventCount: 87},
{LumiSectionNumber: 1860, RunNumber: 97, EventCount: 88},
{LumiSectionNumber: 1872, RunNumber: 97, EventCount: 89},
}
var parentFiles []dbs.FileRecord
var parentLFNs []Response
var testDataParentFiles []string
var parentDetailResp []Response
for i := 1; i <= 10; i++ {
parentLFN := fmt.Sprintf("/store/mc/Fall08/BBJets250to500-madgraph/GEN-SIM-RAW/IDEAL_/p%v/%v.root", TestData.UID, i)
parentLFNs = append(parentLFNs, fileResponse{LOGICAL_FILE_NAME: parentLFN})
testDataParentFiles = append(testDataParentFiles, parentLFN)
fileRecord := createFileRecord(i, TestData.ParentDataset, TestData.ParentBlock, parentFileLumiList, parentLFN, []dbs.FileParentLFNRecord{})
parentFiles = append(parentFiles, fileRecord)
parentDetailResp = append(parentDetailResp, createDetailedResponse(i, 2, 2, fileRecord))
}

TestData.ParentFiles = testDataParentFiles

fileLumiList := []dbs.FileLumi{
{LumiSectionNumber: 27414, RunNumber: 97},
{LumiSectionNumber: 26422, RunNumber: 98},
{LumiSectionNumber: 29838, RunNumber: 99},
}

var files []dbs.FileRecord
var lfns []Response
var detailResp []Response
var testDataFiles []string
for i := 1; i <= 10; i++ {
lfn := fmt.Sprintf("/store/mc/Fall08/BBJets250to500-madgraph/GEN-SIM-RAW/IDEAL_/%v/%v.root", TestData.UID, i)
lfns = append(lfns, fileResponse{LOGICAL_FILE_NAME: lfn})
testDataFiles = append(testDataFiles, lfn)
fileParentLFN := fmt.Sprintf("/store/mc/Fall08/BBJets250to500-madgraph/GEN-SIM-RAW/IDEAL_/p%v/%v.root", TestData.UID, i)
fileParentList := []dbs.FileParentLFNRecord{
{
FILE_PARENT_LFN: fileParentLFN,
},
}
fileRecord := createFileRecord(i, TestData.Dataset, TestData.Block, fileLumiList, lfn, fileParentList)
files = append(files, fileRecord)
detailResp = append(detailResp, createDetailedResponse(i+10, 1, 1, fileRecord))
}

TestData.Files = testDataFiles
lfn := fmt.Sprintf("/store/mc/Fall08/BBJets250to500-madgraph/GEN-SIM-RAW/IDEAL_/%v/%v.root", TestData.UID, 1)
fileReq := filesPUTRequest{
LOGICAL_FILE_NAME: lfn,
Expand Down Expand Up @@ -357,6 +444,16 @@ func getFilesTestTable2(t *testing.T) EndpointTestCase {
},
respCode: http.StatusOK,
},
{
description: "Test GET validFileOnly false", // DBSClientReader_t.test034
method: "GET",
serverType: "DBSReader",
params: url.Values{
"validFileOnly": []string{"0"},
},
output: append(parentLFNs, lfns...),
respCode: http.StatusOK,
},
},
}
}
2 changes: 2 additions & 0 deletions test/integration_cases.go
Original file line number Diff line number Diff line change
Expand Up @@ -227,6 +227,7 @@ func LoadTestCases(t *testing.T, filepath string) []EndpointTestCase {
datasetsUpdateTestCase := getDatasetsTestTable3(t)
blockUpdateTestCase := getBlocksTestTable2(t)
outputConfigTestCase2 := getOutputConfigTestTable2(t)
datasetParentsTestCase := getDatasetParentsTestTable(t)

return []EndpointTestCase{
primaryDatasetAndTypesTestCase,
Expand All @@ -244,5 +245,6 @@ func LoadTestCases(t *testing.T, filepath string) []EndpointTestCase {
datasetsUpdateTestCase,
blockUpdateTestCase,
outputConfigTestCase2,
datasetParentsTestCase,
}
}

0 comments on commit 002af65

Please sign in to comment.