Add support for DBS API parameters checks via parameters.json file
vkuznet committed May 17, 2022
1 parent 289dbca commit bd9b59b
Showing 11 changed files with 400 additions and 25 deletions.
95 changes: 80 additions & 15 deletions Makefile
@@ -43,35 +43,94 @@ test-lexicon: test-lexicon-writer-pos test-lexicon-writer-neg test-lexicon-reade
test-errors:
cd test && LD_LIBRARY_PATH=${odir} DYLD_LIBRARY_PATH=${odir} go test -v -run TestDBSError
test-dbs:
cd test && rm -f /tmp/dbs-test.db && sqlite3 /tmp/dbs-test.db < ../static/schema/sqlite-schema.sql && LD_LIBRARY_PATH=${odir} DYLD_LIBRARY_PATH=${odir} DBS_LEXICON_FILE=../static/lexicon_writer.json go test -v -run TestDBS
cd test && rm -f /tmp/dbs-test.db && \
sqlite3 /tmp/dbs-test.db < ../static/schema/sqlite-schema.sql && \
LD_LIBRARY_PATH=${odir} DYLD_LIBRARY_PATH=${odir} \
DBS_API_PARAMETERS_FILE=../static/parameters.json \
DBS_LEXICON_FILE=../static/lexicon_writer.json \
go test -v -run TestDBS
test-bulk:
cd test && rm -f /tmp/dbs-test.db && sqlite3 /tmp/dbs-test.db < ../static/schema/sqlite-schema.sql && LD_LIBRARY_PATH=${odir} DYLD_LIBRARY_PATH=${odir} DBS_LEXICON_FILE=../static/lexicon_writer.json go test -v -run Bulk
cd test && rm -f /tmp/dbs-test.db && \
sqlite3 /tmp/dbs-test.db < ../static/schema/sqlite-schema.sql && \
LD_LIBRARY_PATH=${odir} DYLD_LIBRARY_PATH=${odir} \
DBS_API_PARAMETERS_FILE=../static/parameters.json \
DBS_LEXICON_FILE=../static/lexicon_writer.json \
go test -v -run Bulk
test-sql:
cd test && rm -f /tmp/dbs-test.db && sqlite3 /tmp/dbs-test.db < ../static/schema/sqlite-schema.sql && LD_LIBRARY_PATH=${odir} DYLD_LIBRARY_PATH=${odir} DBS_LEXICON_FILE=../static/lexicon_writer.json go test -v -run SQL
cd test && rm -f /tmp/dbs-test.db && \
sqlite3 /tmp/dbs-test.db < ../static/schema/sqlite-schema.sql && \
LD_LIBRARY_PATH=${odir} DYLD_LIBRARY_PATH=${odir} \
DBS_API_PARAMETERS_FILE=../static/parameters.json \
DBS_LEXICON_FILE=../static/lexicon_writer.json \
go test -v -run SQL
test-validator:
cd test && LD_LIBRARY_PATH=${odir} DYLD_LIBRARY_PATH=${odir} DBS_LEXICON_FILE=../static/lexicon_writer.json go test -v -run Validator
cd test && LD_LIBRARY_PATH=${odir} DYLD_LIBRARY_PATH=${odir} \
DBS_API_PARAMETERS_FILE=../static/parameters.json \
DBS_LEXICON_FILE=../static/lexicon_writer.json \
go test -v -run Validator
test-http:
cd test && rm -f /tmp/dbs-test.db && sqlite3 /tmp/dbs-test.db < ../static/schema/sqlite-schema.sql && LD_LIBRARY_PATH=${odir} DYLD_LIBRARY_PATH=${odir} DBS_LEXICON_FILE=../static/lexicon_writer.json go test -v -run HTTP
cd test && rm -f /tmp/dbs-test.db && \
sqlite3 /tmp/dbs-test.db < ../static/schema/sqlite-schema.sql && \
LD_LIBRARY_PATH=${odir} DYLD_LIBRARY_PATH=${odir} \
DBS_API_PARAMETERS_FILE=../static/parameters.json \
DBS_LEXICON_FILE=../static/lexicon_writer.json \
go test -v -run HTTP
test-writer:
cd test && rm -f /tmp/dbs-test.db && sqlite3 /tmp/dbs-test.db < ../static/schema/sqlite-schema.sql && LD_LIBRARY_PATH=${odir} DYLD_LIBRARY_PATH=${odir} DBS_LEXICON_FILE=../static/lexicon_writer.json go test -v -run DBSWriter
cd test && rm -f /tmp/dbs-test.db && \
sqlite3 /tmp/dbs-test.db < ../static/schema/sqlite-schema.sql && \
LD_LIBRARY_PATH=${odir} DYLD_LIBRARY_PATH=${odir} \
DBS_API_PARAMETERS_FILE=../static/parameters.json \
DBS_LEXICON_FILE=../static/lexicon_writer.json \
go test -v -run DBSWriter
test-utils:
cd test && LD_LIBRARY_PATH=${odir} DYLD_LIBRARY_PATH=${odir} DBS_LEXICON_FILE=../static/lexicon_writer.json go test -v -run Utils
cd test && LD_LIBRARY_PATH=${odir} DYLD_LIBRARY_PATH=${odir} \
DBS_API_PARAMETERS_FILE=../static/parameters.json \
DBS_LEXICON_FILE=../static/lexicon_writer.json \
go test -v -run Utils
test-migrate:
cd test && rm -f /tmp/dbs-test.db && sqlite3 /tmp/dbs-test.db < ../static/schema/sqlite-schema.sql && LD_LIBRARY_PATH=${odir} DYLD_LIBRARY_PATH=${odir} DBS_LEXICON_FILE=../static/lexicon_writer.json go test -v -run Migrate
cd test && rm -f /tmp/dbs-test.db && \
sqlite3 /tmp/dbs-test.db < ../static/schema/sqlite-schema.sql && \
LD_LIBRARY_PATH=${odir} DYLD_LIBRARY_PATH=${odir} \
DBS_API_PARAMETERS_FILE=../static/parameters.json \
DBS_LEXICON_FILE=../static/lexicon_writer.json \
go test -v -run Migrate
test-filelumis:
cd test && rm -f /tmp/dbs-test.db && sqlite3 /tmp/dbs-test.db < ../static/schema/sqlite-schema.sql && LD_LIBRARY_PATH=${odir} DYLD_LIBRARY_PATH=${odir} DBS_LEXICON_FILE=../static/lexicon_writer.json go test -v -run FileLumisInjection
cd test && rm -f /tmp/dbs-test.db && \
sqlite3 /tmp/dbs-test.db < ../static/schema/sqlite-schema.sql && \
LD_LIBRARY_PATH=${odir} DYLD_LIBRARY_PATH=${odir} \
DBS_API_PARAMETERS_FILE=../static/parameters.json \
DBS_LEXICON_FILE=../static/lexicon_writer.json \
go test -v -run FileLumisInjection
test-lexicon-writer-pos:
cd test && LD_LIBRARY_PATH=${odir} DYLD_LIBRARY_PATH=${odir} DBS_LEXICON_FILE=../static/lexicon_writer.json DBS_LEXICON_SAMPLE_FILE=../static/lexicon_writer_positive.json go test -v -run LexiconPositive
cd test && LD_LIBRARY_PATH=${odir} DYLD_LIBRARY_PATH=${odir} \
DBS_API_PARAMETERS_FILE=../static/parameters.json \
DBS_LEXICON_FILE=../static/lexicon_writer.json \
DBS_LEXICON_SAMPLE_FILE=../static/lexicon_writer_positive.json \
go test -v -run LexiconPositive
test-lexicon-writer-neg:
cd test && LD_LIBRARY_PATH=${odir} DYLD_LIBRARY_PATH=${odir} DBS_LEXICON_FILE=../static/lexicon_writer.json DBS_LEXICON_SAMPLE_FILE=../static/lexicon_writer_negative.json go test -v -run LexiconNegative
cd test && LD_LIBRARY_PATH=${odir} DYLD_LIBRARY_PATH=${odir} \
DBS_API_PARAMETERS_FILE=../static/parameters.json \
DBS_LEXICON_FILE=../static/lexicon_writer.json \
DBS_LEXICON_SAMPLE_FILE=../static/lexicon_writer_negative.json \
go test -v -run LexiconNegative
test-lexicon-reader-pos:
cd test && LD_LIBRARY_PATH=${odir} DYLD_LIBRARY_PATH=${odir} DBS_LEXICON_FILE=../static/lexicon_reader.json DBS_LEXICON_SAMPLE_FILE=../static/lexicon_reader_positive.json go test -v -run LexiconPositive
cd test && LD_LIBRARY_PATH=${odir} DYLD_LIBRARY_PATH=${odir} \
DBS_API_PARAMETERS_FILE=../static/parameters.json \
DBS_LEXICON_FILE=../static/lexicon_reader.json \
DBS_LEXICON_SAMPLE_FILE=../static/lexicon_reader_positive.json \
go test -v -run LexiconPositive
test-lexicon-reader-neg:
cd test && LD_LIBRARY_PATH=${odir} DYLD_LIBRARY_PATH=${odir} DBS_LEXICON_FILE=../static/lexicon_reader.json DBS_LEXICON_SAMPLE_FILE=../static/lexicon_reader_negative.json go test -v -run LexiconNegative
cd test && LD_LIBRARY_PATH=${odir} DYLD_LIBRARY_PATH=${odir} \
DBS_API_PARAMETERS_FILE=../static/parameters.json \
DBS_LEXICON_FILE=../static/lexicon_reader.json \
DBS_LEXICON_SAMPLE_FILE=../static/lexicon_reader_negative.json \
go test -v -run LexiconNegative
test-integration:
cd test && rm -f /tmp/dbs-test.db && sqlite3 /tmp/dbs-test.db < ../static/schema/sqlite-schema.sql && \
cd test && rm -f /tmp/dbs-test.db && \
sqlite3 /tmp/dbs-test.db < ../static/schema/sqlite-schema.sql && \
echo "\"sqlite3 /tmp/dbs-test.db sqlite\"" > ./dbfile && \
LD_LIBRARY_PATH=${odir} DYLD_LIBRARY_PATH=${odir} \
DBS_API_PARAMETERS_FILE=../static/parameters.json \
DBS_READER_LEXICON_FILE=../static/lexicon_reader.json \
DBS_WRITER_LEXICON_FILE=../static/lexicon_writer.json \
DBS_DB_FILE=./dbfile \
@@ -86,11 +145,17 @@ test-migration:
sqlite3 /tmp/dbs-two.db < ../static/schema/sqlite-schema.sql && \
echo "\"sqlite3 /tmp/dbs-two.db sqlite\"" > ./dbfile_2 && \
LD_LIBRARY_PATH=${odir} DYLD_LIBRARY_PATH=${odir} \
DBS_API_PARAMETERS_FILE=../static/parameters.json \
DBS_READER_LEXICON_FILE=../static/lexicon_reader.json \
DBS_WRITER_LEXICON_FILE=../static/lexicon_writer.json \
DBS_DB_FILE_1=./dbfile_1 \
DBS_DB_FILE_2=./dbfile_2 \
INTEGRATION_DATA_FILE=./data/integration/integration_data.json \
go test -v -run Migration
bench:
cd test && rm -f /tmp/dbs-test.db && sqlite3 /tmp/dbs-test.db < ../static/schema/sqlite-schema.sql && LD_LIBRARY_PATH=${odir} DYLD_LIBRARY_PATH=${odir} DBS_LEXICON_FILE=../static/lexicon_writer.json go test -run Benchmark -bench=.
cd test && rm -f /tmp/dbs-test.db && \
sqlite3 /tmp/dbs-test.db < ../static/schema/sqlite-schema.sql && \
LD_LIBRARY_PATH=${odir} DYLD_LIBRARY_PATH=${odir} \
DBS_API_PARAMETERS_FILE=../static/parameters.json \
DBS_LEXICON_FILE=../static/lexicon_writer.json \
go test -run Benchmark -bench=.
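Each test target above now exports DBS_API_PARAMETERS_FILE next to the existing lexicon variables. The following is a rough, hypothetical sketch (not part of this commit) of how a server or test setup might consume that variable, falling back to the repository default when it is unset:

package main

import (
	"fmt"
	"log"
	"os"

	"github.com/dmwm/dbs2go/dbs"
)

func main() {
	// honor the variable exported by the Makefile targets, with a fallback
	fname := os.Getenv("DBS_API_PARAMETERS_FILE")
	if fname == "" {
		fname = "static/parameters.json"
	}
	dbs.ApiParametersFile = fname

	// load eagerly so a malformed file fails fast instead of on the first request
	pmap, err := dbs.LoadApiParameters(fname)
	if err != nil {
		log.Fatalf("unable to load API parameters from %s: %v", fname, err)
	}
	fmt.Printf("loaded parameter lists for %d DBS APIs\n", len(pmap))
}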
73 changes: 73 additions & 0 deletions dbs/parameters.go
@@ -0,0 +1,73 @@
package dbs

import (
"encoding/json"
"fmt"
"io/ioutil"
"log"
"net/http"

"github.com/dmwm/dbs2go/utils"
)

// ApiParametersFile represents the API parameters file
var ApiParametersFile string

// ApiParameters represents an API parameters record
type ApiParameters struct {
Api string
Parameters []string
}

// ApiParametersMap maps an API name to its list of accepted parameters
type ApiParametersMap map[string][]string

// ApiParamMap holds the loaded API parameter records
var ApiParamMap ApiParametersMap

// LoadApiParameters loads API parameters and constructs the ApiParametersMap
func LoadApiParameters(fname string) (ApiParametersMap, error) {
data, err := ioutil.ReadFile(fname)
if err != nil {
log.Printf("Unable to read, file '%s', error: %v\n", fname, err)
return nil, Error(err, ReaderErrorCode, "", "dbs.parameters.LoadApiParameters")
}
var records []ApiParameters
err = json.Unmarshal(data, &records)
if err != nil {
log.Printf("Unable to parse, file '%s', error: %v\n", fname, err)
return nil, Error(err, UnmarshalErrorCode, "", "dbs.parameters.LoadApiParameters")
}
pmap := make(ApiParametersMap)
for _, rec := range records {
pmap[rec.Api] = rec.Parameters
}
return pmap, nil
}

// CheckQueryParameters checks query parameters against API parameters map
func CheckQueryParameters(r *http.Request, api string) error {
var err error
if ApiParamMap == nil {
log.Println("loading", ApiParametersFile)
ApiParamMap, err = LoadApiParameters(ApiParametersFile)
if err != nil {
return Error(GenericErr, LoadErrorCode, "", "dbs.parameters.CheckQueryParameters")
}
}
for k := range r.URL.Query() {
if params, ok := ApiParamMap[api]; ok {
if !utils.InList(k, params) {
msg := fmt.Sprintf("parameter '%s' is not accepted by '%s' API", k, api)
return Error(
InvalidParamErr,
ParametersErrorCode,
msg,
"dbs.parameters.CheckQueryParameters")
}
} else {
log.Printf("DBS %s API is not presented in ApiParamMap", api)
}
}
return nil
}
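For context, here is a minimal sketch of how an HTTP handler might call CheckQueryParameters before dispatching a DBS API; the handler name and route are hypothetical illustrations, not code from this commit:

package main

import (
	"log"
	"net/http"

	"github.com/dmwm/dbs2go/dbs"
)

// datatiersHandler is a hypothetical handler used only for this sketch
func datatiersHandler(w http.ResponseWriter, r *http.Request) {
	// reject requests that carry parameters the "datatiers" API does not accept
	if err := dbs.CheckQueryParameters(r, "datatiers"); err != nil {
		http.Error(w, err.Error(), http.StatusBadRequest)
		return
	}
	// ... the normal DBS datatiers logic would follow here ...
	w.WriteHeader(http.StatusOK)
}

func main() {
	dbs.ApiParametersFile = "static/parameters.json"
	http.HandleFunc("/dbs/datatiers", datatiersHandler)
	log.Fatal(http.ListenAndServe(":8080", nil))
}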
186 changes: 186 additions & 0 deletions static/parameters.json
@@ -0,0 +1,186 @@
[
{
"api": "datasets",
"parameters": [
"dataset", "parent_dataset", "release_version", "pset_hash", "app_name",
"output_module_label", "global_tag", "processing_version", "acquisition_era_name",
"run_num", "physics_group_name", "logical_file_name", "primary_ds_name",
"primary_ds_type", "processed_ds_name", "data_tier_name", "dataset_access_type",
"prep_id", "create_by", "last_modified_by", "min_cdate", "max_cdate", "min_ldate",
"max_ldate", "cdate", "ldate", "detail", "dataset_id", "is_dataset_valid"
]
},
{
"api": "datatiers",
"parameters": [
"data_tier_name"
]
},
{
"api": "blocks",
"parameters": [
"dataset", "block_name", "data_tier_name", "origin_site_name",
"logical_file_name", "run_num", "min_cdate", "max_cdate", "min_ldate", "max_ldate",
"cdate", "ldate", "open_for_writing", "detail"
]
},
{
"api": "blockTrio",
"parameters": [
"block_name"
]
},
{
"api": "files",
"parameters": [
"dataset", "block_name", "logical_file_name", "release_version",
"pset_hash", "app_name", "output_module_label", "run_num", "origin_site_name",
"lumi_list", "detail", "validFileOnly", "sumOverLumi"
]
},
{
"api": "primarydatasets",
"parameters": [
"primary_ds_name", "primary_ds_type"
]
},
{
"api": "parentDSTrio",
"parameters": [
"dataset"
]
},
{
"api": "acquisitioneras",
"parameters": [
"acquisition_era_name"
]
},
{
"api": "acquisitioneras_ci",
"parameters": [
"acquisition_era_name"
]
},
{
"api": "releaseversions",
"parameters": [
"release_version", "dataset", "logical_file_name"
]
},
{
"api": "physicsgroups",
"parameters": [
"physics_group_name"
]
},
{
"api": "primarydstypes",
"parameters": [
"primary_ds_type", "dataset"
]
},
{
"api": "datatypes",
"parameters": [
"datatype", "dataset"
]
},
{
"api": "processingeras",
"parameters": [
"processing_version"
]
},
{
"api": "outputconfigs",
"parameters": [
"dataset", "logical_file_name", "release_version", "pset_hash",
"app_name", "output_module_label", "block_id", "global_tag"
]
},
{
"api": "datasetaccesstypes",
"parameters": [
"dataset_access_type"
]
},
{
"api": "runs",
"parameters": [
"run_num", "logical_file_name", "block_name", "dataset"
]
},
{
"api": "runsummaries",
"parameters": [
"dataset", "run_num"
]
},
{
"api": "blockorigin",
"parameters": [
"origin_site_name", "dataset", "block_name"
]
},
{
"api": "blockdump",
"parameters": [
"block_name"
]
},
{
"api": "blockchildren",
"parameters": [
"block_name"
]
},
{
"api": "blockparents",
"parameters": [
"block_name"
]
},
{
"api": "blocksummaries",
"parameters": [
"block_name", "dataset", "detail"
]
},
{
"api": "filechildren",
"parameters": [
"logical_file_name", "block_name", "block_id"
]
},
{
"api": "fileparents",
"parameters": [
"logical_file_name", "block_name", "block_id"
]
},
{
"api": "filesummaries",
"parameters": [
"block_name", "dataset", "run_num", "validFileOnly", "sumOverLumi"
]
},
{
"api": "filelumis",
"parameters": [
"logical_file_name", "block_name", "run_num", "validFileOnly"
]
},
{
"api": "datasetchildren",
"parameters": [
"dataset"
]
},
{
"api": "datasetparents",
"parameters": [
"dataset"
]
}
]
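As a rough illustration of how the records above are enforced, a hypothetical test sketch follows: a call to the datatiers API with an unlisted parameter such as foo should be rejected, while data_tier_name should pass. The test file location and the relative path to parameters.json are assumptions made for the sketch:

package dbs_test

import (
	"net/http/httptest"
	"testing"

	"github.com/dmwm/dbs2go/dbs"
)

// TestUnknownParameterRejected is a hypothetical sketch, not part of this commit;
// it assumes the test file sits next to the dbs package so the relative path resolves
func TestUnknownParameterRejected(t *testing.T) {
	dbs.ApiParametersFile = "../static/parameters.json"

	// "foo" is not listed for the "datatiers" API in parameters.json
	req := httptest.NewRequest("GET", "/dbs/datatiers?foo=1", nil)
	if err := dbs.CheckQueryParameters(req, "datatiers"); err == nil {
		t.Fatal("expected an error for unknown parameter 'foo'")
	}

	// "data_tier_name" is listed, so the check should pass
	req = httptest.NewRequest("GET", "/dbs/datatiers?data_tier_name=RAW", nil)
	if err := dbs.CheckQueryParameters(req, "datatiers"); err != nil {
		t.Fatalf("unexpected error: %v", err)
	}
}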
3 changes: 2 additions & 1 deletion test/README.md
@@ -20,6 +20,7 @@ The following environment variables are required:
- `PKG_CONFIG_PATH`: This is the location of the `oci8.pc` file
- `DYLD_LIBRARY_PATH`: This is the location of the Oracle instantclient files
- The instructions to prepare the files and directories for these are in the [Installation instructions](docs/Installation.md)
- `DBS_API_PARAMETERS_FILE`: DBS API parameters file for the DBS server; default: `static/parameters.json`
- `DBS_READER_LEXICON_FILE`: Lexicon file for DBSReader server; default: `static/lexicon_reader.json`
- `DBS_WRITER_LEXICON_FILE`: Lexicon file for DBSWriter server; default: `static/lexicon_writer.json`
- `INTEGRATION_DATA_FILE`: File for initial data for test case tables; default: `test/data/integration/integration_data.json`