diff --git a/.github/workflows/ci-build-deploy.yaml b/.github/workflows/ci-build-deploy.yaml
index 281fb82bd..a4abaa718 100644
--- a/.github/workflows/ci-build-deploy.yaml
+++ b/.github/workflows/ci-build-deploy.yaml
@@ -264,9 +264,9 @@ jobs:
NEXT_PUBLIC_HELP_API_DOCS_URL:
value: '/ds/api/v2/console/'
NEXT_PUBLIC_HELP_SUPPORT_URL:
- value: 'https://mvp.developer.gov.bc.ca/docs/default/component/aps-infra-platform-docs/'
+ value: 'https://developer.gov.bc.ca/docs/default/component/aps-infra-platform-docs/'
NEXT_PUBLIC_HELP_RELEASE_URL:
- value: 'https://mvp.developer.gov.bc.ca/docs/default/component/aps-infra-platform-docs/reference/releases/'
+ value: 'https://developer.gov.bc.ca/docs/default/component/aps-infra-platform-docs/reference/releases/'
NEXT_PUBLIC_HELP_STATUS_URL:
value: 'https://uptime.com/s/bcgov-dss'
NEXT_PUBLIC_DEVELOPER_IDS:
diff --git a/.github/workflows/ci-feat-sonar.yaml b/.github/workflows/ci-feat-sonar.yaml
index d28dde2da..34ccb59ea 100644
--- a/.github/workflows/ci-feat-sonar.yaml
+++ b/.github/workflows/ci-feat-sonar.yaml
@@ -2,7 +2,7 @@ name: Sonar Scanner
on:
push:
- branches: [dev, feat/*]
+ branches: [dev, feature/*]
env:
REGISTRY: ghcr.io
@@ -23,10 +23,24 @@ jobs:
- name: Run Tests
run: |
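+ # start the database the integration tests connect to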
+ docker compose up kong-db -d
+
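+ # export everything from .env.local (allexport) plus overrides pointing at the dockerized kong-db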
+ set -o allexport
+ source ./.env.local
+ LOG_LEVEL=debug
+ KNEX_HOST=kong-db.localtest.me
+ NEXT_PUBLIC_MOCKS=off
+ set +o allexport
+
cd src
+
npm i
+ npm run intg-build
npm test
+ cd ..
+ docker compose down
+
- name: SonarCloud Scan
uses: sonarsource/sonarcloud-github-action@master
with:
diff --git a/README.md b/README.md
index 2640d003e..fbbeaf8bc 100644
--- a/README.md
+++ b/README.md
@@ -29,7 +29,7 @@ The repo is setup to create a local deployment of the Portal along with required
1. Build: Back in `api-services-portal`, run `docker compose --profile testsuite build`.
1. Run: `docker compose up`. Wait for startup to complete - look for `Swagger UI registered`.
1. The Portal is now live at http://oauth2proxy.localtest.me:4180
- 1. To login, use username `local` and password `local`, or username `janis@idir` and password `awsummer`.
+ 1. To login, use username `janis@idir` and password `awsummer` (or username `local` and password `local`).
1. If you have made any changes to the app code, update images by running `docker compose build` then `docker compose up`.
1. Clean up: `docker compose down` removes all the hosted services
@@ -67,11 +67,16 @@ Use the following configuration to run the Portal locally (outside of Docker) ag
1. Turn off the docker compose Portal: `docker stop apsportal`
1. Configure the `oauth2-proxy` that is running in Docker:
- 1. Update `upstreams` in `oauth2-proxy/oauth2-proxy-local.cfg` to include the IP address of your local machine, e.g. `upstreams=["http://172.100.100.01:3000"]`
+ 1. Update `upstreams` in `local/oauth2-proxy/oauth2-proxy-local.cfg` to include the IP address of your local machine, e.g. `upstreams=["http://172.100.100.01:3000"]`
You can obtain the IP address using `hostname -I`.
1. Restart the oauth2-proxy: `docker compose restart oauth2-proxy`
+ 1. Update `DESTINATION_URL` in `local/feeds/.env.local` to include the IP address of your local machine
+ 1. Restart the feeder: `docker compose restart feeder`
+ 1. Update `PORTAL_ACTIVITY_URL` in `local/gwa-api/.env.local` to include the IP address of your local machine
+ 1. Restart the gwa-api: `docker compose restart gwa-api`
+
1. Start the Portal locally:
```sh
diff --git a/docker-compose.yml b/docker-compose.yml
index c5eb616f2..f8a230546 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -83,7 +83,7 @@ services:
context: ./feeds
dockerfile: Dockerfile
env_file:
- - ./feeds/.env.local
+ - ./local/feeds/.env.local
restart: on-failure
ports:
- 6000:6000
@@ -117,15 +117,15 @@ services:
- ./local/db/database-init.sql:/docker-entrypoint-initdb.d/1-init.sql
- ./local/db/keystone-init.sql:/docker-entrypoint-initdb.d/2-init.sql
networks:
- aps-net:
- aliases:
- - kong-db.localtest.me
+ - aps-net
kong-migrations:
image: kong:kong-local
command: kong migrations bootstrap
depends_on:
- kong-db
- environment: *common-variables
+ environment:
+ <<: *common-variables
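+ # resolve upstream hosts via A records (skip SRV lookups)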
+ KONG_DNS_ORDER: 'LAST,A'
networks:
- aps-net
restart: on-failure
@@ -138,7 +138,9 @@ services:
depends_on:
- kong-db
- kong-migrations
- environment: *common-variables
+ environment:
+ <<: *common-variables
+ KONG_DNS_ORDER: 'LAST,A'
networks:
- aps-net
restart: on-failure
@@ -150,6 +152,7 @@ services:
- kong-migrations-up
environment:
<<: *common-variables
+ KONG_LOG_LEVEL: debug
KONG_ADMIN_ACCESS_LOG: /dev/stdout
KONG_ADMIN_ERROR_LOG: /dev/stderr
KONG_CASSANDRA_CONTACT_POINTS: kong-db
@@ -157,6 +160,9 @@ services:
KONG_PROXY_ACCESS_LOG: /dev/stdout
KONG_PROXY_ERROR_LOG: /dev/stderr
KONG_ADMIN_LISTEN: 0.0.0.0:8001
+ KONG_DNS_ORDER: 'LAST,A'
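+ # trust X-Forwarded-For from any source so the client IP survives the oauth2-proxy hop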
+ KONG_TRUSTED_IPS: '0.0.0.0/0,::/0'
+ KONG_REAL_IP_HEADER: X-Forwarded-For
ports:
- 8000:8000
- 8001:8001
diff --git a/local/feeder-init/init.sh b/local/feeder-init/init.sh
index 31de8f274..513462f42 100755
--- a/local/feeder-init/init.sh
+++ b/local/feeder-init/init.sh
@@ -32,6 +32,6 @@ while true; do
else
echo "Waiting for Keycloak....."
- sleep 1m
+ sleep 5
fi
done
diff --git a/feeds/.env.local b/local/feeds/.env.local
similarity index 100%
rename from feeds/.env.local
rename to local/feeds/.env.local
diff --git a/local/gwa-api/entrypoint.sh b/local/gwa-api/entrypoint.sh
index 30aa32045..e6b975ef8 100755
--- a/local/gwa-api/entrypoint.sh
+++ b/local/gwa-api/entrypoint.sh
@@ -63,6 +63,6 @@ while true; do
break
else
echo "Waiting for Keycloak....."
- sleep 1m
+ sleep 5
fi
done
diff --git a/local/gwa-cli/gw-config.yml b/local/gwa-cli/gw-config.yml
deleted file mode 100644
index c878bd092..000000000
--- a/local/gwa-cli/gw-config.yml
+++ /dev/null
@@ -1,99 +0,0 @@
-kind: Namespace
-name: gw-12345
-displayName: gw-12345 Display Name
----
-kind: GatewayService
-name: my-service-dev
-tags: [ns.gw-12345]
-host: httpbin.org
-port: 443
-protocol: https
-retries: 0
-routes:
- - name: my-service-dev
- tags: [ns.gw-12345]
- hosts:
- - my-service.dev.api.gov.bc.ca
- methods:
- - GET
- strip_path: false
- https_redirect_status_code: 426
- path_handling: v0
- request_buffering: true
- response_buffering: true
-plugins:
- - name: jwt-keycloak
- tags: [ns.gw-12345]
- enabled: true
- config:
- allowed_iss:
- - http://keycloak.localtest.me:9081/auth/realms/master
- allowed_aud: ap-gw-12345-default-test
- run_on_preflight: true
- iss_key_grace_period: 10
- maximum_expiration: 0
- algorithm: RS256
- claims_to_verify:
- - exp
- uri_param_names:
- - jwt
- cookie_names: []
- scope: null
- roles: null
- realm_roles: null
- client_roles: null
- anonymous: null
- consumer_match: true
- consumer_match_claim: azp
- consumer_match_claim_custom_id: true
- consumer_match_ignore_not_found: false
- - name: request-transformer
- tags: [ns.gw-12345]
- enabled: true
- config:
- http_method: null
----
-kind: CredentialIssuer
-name: gw-12345 default
-description: Default Authorization Profile for gw-12345 Gateway
-flow: client-credentials
-mode: auto
-authPlugin: jwt-keycloak
-clientAuthenticator: client-secret
-clientRoles: []
-inheritFrom: Sample Shared IdP
----
-kind: DraftDataset
-name: my-service-dataset
-title: my-service
-organization: ministry-of-health
-organizationUnit: planning-and-innovation-division
-notes: Some information about the my-service service
-tags: [my-service, openapi]
-license_title: Access Only
-view_audience: Government
-security_class: LOW-PUBLIC
-record_publish_date: '2021-05-27'
----
-kind: Product
-appId: '242925AE01CF'
-name: my-service API
-dataset: my-service-dataset
-environments:
- - name: test
- appId: 'AF13BB19'
- active: false
- approval: false
- flow: client-credentials
- credentialIssuer: gw-12345 default
- services: [my-service-dev]
-# ---
-# kind: ProductEnvironment
-# name: dev
-# product: my-service API
-# appId: 'A308A21A'
-# active: false
-# approval: true
-# flow: client-credentials
-# credentialIssuer: gw-12345 default
-# services: [my-service-dev]
diff --git a/local/keycloak/master-realm.json b/local/keycloak/master-realm.json
index 7c9787de8..df8ae3277 100644
--- a/local/keycloak/master-realm.json
+++ b/local/keycloak/master-realm.json
@@ -2317,7 +2317,9 @@
}
}
],
- "defaultClientScopes": [],
+ "defaultClientScopes": [
+ "profile"
+ ],
"optionalClientScopes": []
},
{
diff --git a/src/authz/matrix.csv b/src/authz/matrix.csv
index 19bfab619..718639ecd 100644
--- a/src/authz/matrix.csv
+++ b/src/authz/matrix.csv
@@ -87,6 +87,7 @@ API Owner Role Rules,,,Dataset,read,,,,,api-owner,,,allow,
API Owner Role Rules,,,Environment,create,,active,,,api-owner,,,allow,
API Owner Role Rules,,,Environment,,"update,delete,read",active,,,,,"api-owner,provider-user",allow,
API Owner Role Rules,,forceDeleteEnvironment,,,,,,,api-owner,,,allow,
+API Owner Role Rules,,deleteEnvironments,,,,,,,api-owner,,,allow,filterByPackageNS
API Owner Role Rules,,,Environment,read,,,,,,,"api-owner,provider-user",allow,
API Owner Role Rules,,,Environment,create,,,,,api-owner,,,allow,
API Owner Role Rules,,,Environment,update,,,,,api-owner,,,allow,
diff --git a/src/batch/data-rules.js b/src/batch/data-rules.js
index 38f8bb7ef..1d243a22b 100644
--- a/src/batch/data-rules.js
+++ b/src/batch/data-rules.js
@@ -376,11 +376,12 @@ const metadata = {
Product: {
query: 'allProducts',
refKey: 'appId',
- sync: ['name', 'namespace'],
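+ // when appId is absent, fall back to matching an existing Product by name and namespace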
+ compositeRefKey: ['name', 'namespace'],
+ sync: ['name', 'description', 'namespace'],
transformations: {
dataset: { name: 'connectOne', list: 'allDatasets', refKey: 'name' },
environments: {
- name: 'connectExclusiveList',
+ name: 'connectExclusiveListCreate',
list: 'Environment',
syncFirst: true,
refKey: 'appId',
@@ -403,6 +404,10 @@ const metadata = {
Environment: {
query: 'allEnvironments',
refKey: 'appId',
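+ // fall back to (name, parent product id) when appId is absent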
+ compositeRefKey: [
+ 'name',
+ { key: 'parent.id', whereClause: 'product: { id: $parent_id }' },
+ ],
sync: ['name', 'active', 'approval', 'flow', 'additionalDetailsToRequest'],
ownedBy: 'product',
transformations: {
@@ -410,12 +415,14 @@ const metadata = {
name: 'connectMany',
list: 'allGatewayServices',
refKey: 'name',
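+ // service names are only unique per namespace, so scope the lookup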
+ filterByNamespace: true,
},
legal: { name: 'connectOne', list: 'allLegals', refKey: 'reference' },
credentialIssuer: {
name: 'connectOne',
list: 'allCredentialIssuers',
refKey: 'name',
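+ // issuer names are namespace-scoped, so qualify the lookup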
+ filterByNamespace: true,
},
},
validations: {
diff --git a/src/batch/feed-worker.ts b/src/batch/feed-worker.ts
index b2e368a3c..2d0ce490a 100644
--- a/src/batch/feed-worker.ts
+++ b/src/batch/feed-worker.ts
@@ -4,6 +4,7 @@ import {
alwaysFalse,
byKey,
connectExclusiveList,
+ connectExclusiveListCreate,
connectExclusiveOne,
connectMany,
connectOne,
@@ -17,6 +18,7 @@ import { BatchResult, BatchSyncException } from './types';
import {
BatchService,
BatchWhereClause,
+ CompositeKeyValue,
} from '../services/keystone/batch-service';
import { Logger } from '../logger';
@@ -45,6 +47,7 @@ const transformations = {
byKey: byKey,
mapNamespace: mapNamespace,
connectExclusiveList: connectExclusiveList,
+ connectExclusiveListCreate: connectExclusiveListCreate,
connectExclusiveOne: connectExclusiveOne,
connectMany: connectMany,
connectOne: connectOne,
@@ -55,6 +58,7 @@ const transformations = {
export const putFeedWorker = async (context: any, req: any, res: any) => {
const entity = req.params['entity'];
assert.strictEqual(entity in metadata, true);
+ logger.info('putFeedWorker %s', entity);
const md = metadata[entity];
const refKey = md.refKey;
@@ -164,7 +168,8 @@ export const getFeedWorker = async (context: any, req: any, res: any) => {
const syncListOfRecords = async function (
keystone: any,
transformInfo: any,
- records: any
+ records: any,
+ parentRecord?: any
): Promise<BatchResult[]> {
const result: BatchResult[] = [];
if (records == null || typeof records == 'undefined') {
@@ -179,7 +184,8 @@ const syncListOfRecords = async function (
transformInfo.list,
record[recordKey],
record,
- true
+ true,
+ parentRecord
)
);
}
@@ -203,9 +209,13 @@ function buildQueryResponse(md: any, children: string[] = undefined): string[] {
const relationshipFields = Object.keys(
md.transformations
).filter((tranField: any) =>
- ['byKey', 'connectOne', 'connectExclusiveList', 'connectMany'].includes(
- md.transformations[tranField].name
- )
+ [
+ 'byKey',
+ 'connectOne',
+ 'connectExclusiveList',
+ 'connectExclusiveListCreate',
+ 'connectMany',
+ ].includes(md.transformations[tranField].name)
);
const response = md.sync
.filter((s: string) => !relationshipFields.includes(s))
@@ -245,7 +255,7 @@ function buildQueryResponse(md: any, children: string[] = undefined): string[] {
});
}
if ('ownedBy' in md) {
- response.push(`${md.ownedBy} { id }`);
+ response.push(`${md.ownedBy} { id, namespace }`);
}
logger.debug('[buildQueryResponse] FINAL (%s) %j', md.query, response);
@@ -307,7 +317,8 @@ export const syncRecords = async function (
feedEntity: string,
eid: string,
json: any,
- children = false
+ children = false,
+ parentRecord: any = undefined
): Promise<BatchResult> {
const md = (metadata as any)[feedEntity];
const entity = 'entity' in md ? md['entity'] : feedEntity;
@@ -318,13 +329,31 @@ export const syncRecords = async function (
'This entity is only part of a child.'
);
- assert.strictEqual(
- typeof eid === 'string' && eid.length > 0,
- true,
- `Invalid ID for ${feedEntity} ${eid}`
- );
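+ // build the lookup key: use compositeRefKey when defined, else the single refKey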
+ const compositeKeyValues: CompositeKeyValue[] = [];
+ if (md.compositeRefKey) {
+ md.compositeRefKey.forEach((refKey: string | CompositeKeyValue) => {
+ if (typeof refKey === 'string') {
+ compositeKeyValues.push({ key: refKey, value: json[refKey] });
+ } else {
+ // if the value is missing, then ignore the composite value
+ // and include the 'refKey' value
+ const value = dot({ ...json, ...{ parent: parentRecord } }, refKey.key);
+ compositeKeyValues.push({
+ key: refKey.key,
+ value: value,
+ whereClause: refKey.whereClause,
+ });
+ }
+ });
+ } else {
+ assert.strictEqual(
+ typeof eid === 'string' && eid.length > 0,
+ true,
+ `Invalid ID for ${feedEntity} ${md.refKey} = ${eid || 'blank'}`
+ );
- const batchService = new BatchService(context);
+ compositeKeyValues.push({ key: md.refKey, return: md.refKey, value: eid });
+ }
// pre-lookup hook that can be used to handle special cases,
// such as for Kong, cleaning up records where the service or route has been renamed
@@ -342,10 +371,11 @@ export const syncRecords = async function (
let childResults: BatchResult[] = [];
- const localRecord = await batchService.lookup(
+ const batchService = new BatchService(context);
+
+ const localRecord = await batchService.lookupUsingCompositeKey(
md.query,
- md.refKey,
- eid,
+ compositeKeyValues,
buildQueryResponse(md)
);
if (localRecord == null) {
@@ -361,34 +391,43 @@ export const syncRecords = async function (
for (const transformKey of Object.keys(md.transformations)) {
const transformInfo = md.transformations[transformKey];
if (transformInfo.syncFirst) {
- // handle these children independently first - return a list of IDs
- const allIds = await syncListOfRecords(
- context,
- transformInfo,
- json[transformKey]
- );
- logger.debug('CHILDREN [%s] %j', transformKey, allIds);
- assert.strictEqual(
- allIds.filter((record) => record.status != 200).length,
- 0,
- 'Failed updating children'
- );
- assert.strictEqual(
- allIds.filter((record) => typeof record.ownedBy != 'undefined')
- .length,
- 0,
- 'There are some child records that have exclusive ownership already!'
- );
- json[transformKey + '_ids'] = allIds.map((status) => status.id);
-
- childResults.push(...allIds);
+ // since localRecord is null, this is related to creating
+ // and syncFirst is only used for `connectExclusiveList` or `connectExclusiveOne`
+ // so we want the transformer to return a "create" statement
+ if (transformInfo.name === 'connectExclusiveListCreate') {
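+ // children will be sent as a nested "create" with the parent mutation, so no pre-sync here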
+ } else {
+ const allIds = await syncListOfRecords(
+ context,
+ transformInfo,
+ json[transformKey],
+ json
+ );
+ logger.debug('CHILDREN [%s] %j', transformKey, allIds);
+ childResults.push(...allIds);
+ assert.strictEqual(
+ allIds.filter((record) => record.status != 200).length,
+ 0,
+ 'Failed updating children'
+ );
+ assert.strictEqual(
+ allIds.filter((record) => typeof record.ownedBy != 'undefined')
+ .length,
+ 0,
+ 'There are some child records that have exclusive ownership already!'
+ );
+ json[transformKey + '_ids'] = allIds.map((status) => status.id);
+ }
+ }
+ if (transformInfo.filterByNamespace) {
+ json['_namespace'] = parentRecord['namespace'];
}
const transformMutation = await transformations[transformInfo.name](
context,
transformInfo,
null,
json,
- transformKey
+ transformKey,
+ parentRecord
);
if (transformMutation != null) {
logger.debug(
@@ -403,13 +442,20 @@ export const syncRecords = async function (
}
}
}
- data[md.refKey] = eid;
+ if (eid) {
+ data[md.refKey] = eid;
+ }
const nr = await batchService.create(entity, data);
- if (nr == null) {
+ if (!nr.id) {
logger.error('CREATE FAILED (%s) %j', nr, data);
- return { status: 400, result: 'create-failed', childResults };
+ return {
+ status: 400,
+ result: 'create-failed',
+ reason: nr.error,
+ childResults,
+ };
} else {
- return { status: 200, result: 'created', id: nr, childResults };
+ return { status: 200, result: 'created', id: nr.id, childResults };
}
} catch (ex) {
logger.error('Caught exception %s', ex);
@@ -422,6 +468,12 @@ export const syncRecords = async function (
}
} else {
try {
+ if (
+ json.hasOwnProperty(md['refKey']) &&
+ json[md['refKey']] != localRecord[md['refKey']]
+ ) {
+ throw new Error('Unexpected ' + md['refKey']);
+ }
const transformKeys =
'transformations' in md ? Object.keys(md.transformations) : [];
const data: any = {};
@@ -452,9 +504,13 @@ export const syncRecords = async function (
const allIds = await syncListOfRecords(
context,
transformInfo,
- json[transformKey]
+ json[transformKey],
+ localRecord
);
+
logger.debug('CHILDREN [%s] %j', transformKey, allIds);
+ childResults.push(...allIds);
+
assert.strictEqual(
allIds.filter((record) => record.status != 200).length,
0,
@@ -468,19 +524,21 @@ export const syncRecords = async function (
record.ownedBy != localRecord.id
).length,
0,
- 'There are some child records that had ownership already (w/ local record)!'
+ 'There are some child records that have ownership already (update not allowed)!'
);
json[transformKey + '_ids'] = allIds.map((status) => status.id);
- childResults.push(...allIds);
}
-
+ if (transformInfo.filterByNamespace) {
+ json['_namespace'] = parentRecord['namespace'];
+ }
const transformMutation = await transformations[transformInfo.name](
context,
transformInfo,
localRecord,
json,
- transformKey
+ transformKey,
+ parentRecord
);
if (transformMutation && transformMutation != null) {
logger.debug(
@@ -518,14 +576,19 @@ export const syncRecords = async function (
Object.keys(data)
);
const nr = await batchService.update(entity, localRecord.id, data);
- if (nr == null) {
+ if (!nr.id) {
logger.error('UPDATE FAILED (%s) %j', nr, data);
- return { status: 400, result: 'update-failed', childResults };
+ return {
+ status: 400,
+ result: 'update-failed',
+ reason: nr.error,
+ childResults,
+ };
} else {
return {
status: 200,
result: 'updated',
- id: nr,
+ id: nr.id,
childResults,
ownedBy:
md.ownedBy && localRecord[md.ownedBy]
@@ -545,6 +608,57 @@ export const syncRecords = async function (
}
};
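+// Resolves relationship fields on child records that are about to be
+// created inline with a new parent (used by connectExclusiveListCreate).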
+export const applyTransformationsToNewCreation = async (
+ keystone: any,
+ transformInfo: any,
+ inputData: any,
+ parentRecord: any
+) => {
+ if (!inputData) {
+ return;
+ }
+ const feedEntity = transformInfo['list'];
+
+ const md = (metadata as any)[feedEntity];
+ logger.debug('[applyTransformations] %j', md);
+ logger.debug('[applyTransformations] parent %j', parentRecord);
+ logger.debug('[applyTransformations] input %j', inputData);
+
+ const transformKeys =
+ 'transformations' in md ? Object.keys(md.transformations) : [];
+
+ for (const inputDataRecord of inputData) {
+ for (const transformKey of transformKeys) {
+ logger.debug(
+ ' -- (applyTransformations) changed trans? (%s)',
+ transformKey
+ );
+ const transformInfo = md.transformations[transformKey];
+
+ if (transformInfo.filterByNamespace && parentRecord) {
+ inputDataRecord['_namespace'] = parentRecord['namespace'];
+ }
+
+ const transformMutation = await transformations[transformInfo.name](
+ keystone,
+ transformInfo,
+ null,
+ inputDataRecord,
+ transformKey
+ );
+ delete inputDataRecord['_namespace'];
+ if (transformMutation && transformMutation != null) {
+ logger.debug(
+ ' -- (applyTransformations) trans (%s) %j',
+ transformKey,
+ transformMutation
+ );
+ inputDataRecord[transformKey] = transformMutation;
+ }
+ }
+ }
+};
+
export const removeEmpty = (obj: object) => {
Object.entries(obj).forEach(
([key, val]) =>
diff --git a/src/batch/transformations/connectExclusiveListCreate.ts b/src/batch/transformations/connectExclusiveListCreate.ts
new file mode 100644
index 000000000..88fa1949c
--- /dev/null
+++ b/src/batch/transformations/connectExclusiveListCreate.ts
@@ -0,0 +1,43 @@
+import { BatchService } from '../../services/keystone/batch-service';
+import { Logger } from '../../logger';
+import { strict as assert } from 'assert';
+import { connectExclusiveList } from './connectExclusiveList';
+import { applyTransformationsToNewCreation } from '../feed-worker';
+
+const logger = Logger('batch.connectExclusiveListCreate');
+
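+// Variant of connectExclusiveList for new parents: returns a nested
+// "create" statement so children are created with the parent mutation.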
+export async function connectExclusiveListCreate(
+ keystone: any,
+ transformInfo: any,
+ currentData: any,
+ inputData: any,
+ fieldKey: string,
+ parentRecord: any
+) {
+ logger.debug('%s %j %j %j', fieldKey, currentData, inputData, parentRecord);
+
+ await applyTransformationsToNewCreation(
+ keystone,
+ transformInfo,
+ inputData[fieldKey],
+ inputData
+ );
+
+ if (currentData != null) {
+ return connectExclusiveList(
+ keystone,
+ transformInfo,
+ currentData,
+ inputData,
+ fieldKey
+ );
+ }
+
+ if (inputData[fieldKey]) {
+ return {
+ create: inputData[fieldKey],
+ };
+ } else {
+ return null;
+ }
+}
diff --git a/src/batch/transformations/connectMany.ts b/src/batch/transformations/connectMany.ts
index f26dde80a..a43e473a9 100644
--- a/src/batch/transformations/connectMany.ts
+++ b/src/batch/transformations/connectMany.ts
@@ -1,4 +1,7 @@
-import { BatchService } from '../../services/keystone/batch-service';
+import {
+ BatchService,
+ CompositeKeyValue,
+} from '../../services/keystone/batch-service';
import { dot } from '../feed-worker';
import { Logger } from '../../logger';
@@ -17,12 +20,31 @@ export async function connectMany(
const batchService = new BatchService(keystone);
if (idList != null) {
for (const uniqueKey of idList) {
- const lkup = await batchService.lookup(
- transformInfo['list'],
- transformInfo['refKey'],
- uniqueKey,
- []
- );
+ logger.debug('[connectMany] %s -- %j %j', uniqueKey, inputData, currentData);
+ let lkup;
+ if (transformInfo['filterByNamespace']) {
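+ // scope the lookup by namespace to avoid cross-namespace name collisions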
+ const compositeKeyValues: CompositeKeyValue[] = [];
+ compositeKeyValues.push({
+ key: 'namespace',
+ value: inputData['_namespace'],
+ });
+ compositeKeyValues.push({
+ key: transformInfo['refKey'],
+ value: uniqueKey,
+ });
+ lkup = await batchService.lookupUsingCompositeKey(
+ transformInfo['list'],
+ compositeKeyValues,
+ []
+ );
+ } else {
+ lkup = await batchService.lookup(
+ transformInfo['list'],
+ transformInfo['refKey'],
+ uniqueKey,
+ []
+ );
+ }
if (lkup == null) {
logger.error(
`Lookup failed for ${transformInfo['list']} ${transformInfo['refKey']}!`
diff --git a/src/batch/transformations/connectOne.ts b/src/batch/transformations/connectOne.ts
index ca95335e0..8b8e02bdb 100644
--- a/src/batch/transformations/connectOne.ts
+++ b/src/batch/transformations/connectOne.ts
@@ -1,4 +1,7 @@
-import { BatchService } from '../../services/keystone/batch-service';
+import {
+ BatchService,
+ CompositeKeyValue,
+} from '../../services/keystone/batch-service';
import { Logger } from '../../logger';
import { dot } from '../feed-worker';
@@ -16,6 +19,7 @@ export async function connectOne(
// fieldKey: The field that has the new value in the input
const fieldKey = 'key' in transformInfo ? transformInfo['key'] : _fieldKey;
+ logger.debug('[connectOne] %j %s', inputData, fieldKey);
const value = dot(inputData, fieldKey);
// undefined value is one that was never passed in (rather than explicitely passed in null)
@@ -29,12 +33,31 @@ export async function connectOne(
}
}
- const lkup = await batchService.lookup(
- transformInfo['list'],
- transformInfo['refKey'],
- value,
- []
- );
+ let lkup;
+ if (transformInfo['filterByNamespace']) {
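+ // namespace-scoped lookup: names are only unique within a namespace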
+ const compositeKeyValues: CompositeKeyValue[] = [];
+ compositeKeyValues.push({
+ key: 'namespace',
+ value: inputData['_namespace'],
+ });
+ compositeKeyValues.push({
+ key: transformInfo['refKey'],
+ value: value,
+ });
+ lkup = await batchService.lookupUsingCompositeKey(
+ transformInfo['list'],
+ compositeKeyValues,
+ []
+ );
+ } else {
+ lkup = await batchService.lookup(
+ transformInfo['list'],
+ transformInfo['refKey'],
+ value,
+ []
+ );
+ }
+
if (lkup == null) {
logger.error(
`Lookup failed for ${transformInfo['list']} ${transformInfo['refKey']}!`
@@ -48,7 +71,7 @@ export async function connectOne(
) {
return null;
} else {
- logger.debug('Adding: ' + JSON.stringify({ connect: { id: lkup['id'] } }));
+ logger.debug('Adding: %s = %j', fieldKey, { connect: { id: lkup['id'] } });
return { connect: { id: lkup['id'] } };
}
}
diff --git a/src/batch/transformations/index.ts b/src/batch/transformations/index.ts
index b6d0a8480..12f96ef9b 100644
--- a/src/batch/transformations/index.ts
+++ b/src/batch/transformations/index.ts
@@ -6,6 +6,8 @@ export { byKey } from './byKey';
export { connectExclusiveList } from './connectExclusiveList';
+export { connectExclusiveListCreate } from './connectExclusiveListCreate';
+
export { connectExclusiveOne } from './connectExclusiveOne';
export { connectMany } from './connectMany';
diff --git a/src/controllers/v2/ProductController.ts b/src/controllers/v2/ProductController.ts
index 81340dc13..44268b8ae 100644
--- a/src/controllers/v2/ProductController.ts
+++ b/src/controllers/v2/ProductController.ts
@@ -64,6 +64,7 @@ export class ProductController extends Controller {
@Body() body: Product,
@Request() request: any
): Promise<BatchResult> {
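+ // stamp the namespace from the route so batch lookups are namespace-scoped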
+ body['namespace'] = ns;
return await syncRecordsThrowErrors(
this.keystone.createContext(request),
'Product',
diff --git a/src/controllers/v2/openapi.yaml b/src/controllers/v2/openapi.yaml
index 06cbe5a6a..6696f64e2 100644
--- a/src/controllers/v2/openapi.yaml
+++ b/src/controllers/v2/openapi.yaml
@@ -608,6 +608,8 @@ components:
type: string
name:
type: string
+ description:
+ type: string
namespace:
type: string
dataset:
diff --git a/src/controllers/v2/routes.ts b/src/controllers/v2/routes.ts
index 46048762e..4d6561ced 100644
--- a/src/controllers/v2/routes.ts
+++ b/src/controllers/v2/routes.ts
@@ -393,6 +393,7 @@ const models: TsoaRoute.Models = {
"properties": {
"appId": {"dataType":"string"},
"name": {"dataType":"string"},
+ "description": {"dataType":"string"},
"namespace": {"dataType":"string"},
"dataset": {"ref":"DraftDatasetRefID"},
"environments": {"dataType":"array","array":{"dataType":"refObject","ref":"Environment"}},
diff --git a/src/controllers/v2/types.ts b/src/controllers/v2/types.ts
index e8a3b9bdc..a5ed8e61e 100644
--- a/src/controllers/v2/types.ts
+++ b/src/controllers/v2/types.ts
@@ -278,6 +278,7 @@ export interface Application {
export interface Product {
appId?: string; // Primary Key
name?: string;
+ description?: string;
namespace?: string;
dataset?: DraftDatasetRefID;
environments?: Environment[];
diff --git a/src/jest.config.js b/src/jest.config.js
index 9a77d4a66..76cb7f5d5 100644
--- a/src/jest.config.js
+++ b/src/jest.config.js
@@ -1,7 +1,7 @@
module.exports = {
verbose: true,
testEnvironment: 'node',
- testMatch: ['**/?(*.)+(test.{ts,js,jsx})'],
+ testMatch: ['**/?(*.)+(test.{js,jsx})'],
collectCoverageFrom: ['services/**/*.js', 'services/**/*.ts'],
coveragePathIgnorePatterns: ['.*/__mocks__/.*', '.*/@types/.*'],
coverageDirectory: '__coverage__',
diff --git a/src/lists/Dataset.js b/src/lists/Dataset.js
index 106e58868..e7ff317b5 100644
--- a/src/lists/Dataset.js
+++ b/src/lists/Dataset.js
@@ -5,6 +5,7 @@ const { Wysiwyg } = require('@keystonejs/fields-wysiwyg-tinymce');
const { externallySourced } = require('../components/ExternalSource');
const { EnforcementPoint } = require('../authz/enforcement');
+const { regExprValidation } = require('../services/utils');
/*
@@ -102,4 +103,13 @@ module.exports = {
},
access: EnforcementPoint,
plugins: [externallySourced({ isRequired: false })],
+ hooks: {
+ // validateInput: ({ resolvedData }) => {
+ // regExprValidation(
+ // '^[a-z0-9-]{3,100}$',
+ // resolvedData['name'],
+ // "Dataset name must be between 3 and 100 lowercase alpha-numeric characters (including special character '-')"
+ // );
+ // },
+ },
};
diff --git a/src/lists/Product.js b/src/lists/Product.js
index 06d74d686..6719819c5 100644
--- a/src/lists/Product.js
+++ b/src/lists/Product.js
@@ -10,8 +10,9 @@ const {
DeleteProductValidate,
DeleteProductEnvironments,
} = require('../services/workflow/delete-product');
-const { strict: assert } = require('assert');
+const { strict: assert, AssertionError } = require('assert');
const { StructuredActivityService } = require('../services/workflow');
+const { regExprValidation } = require('../services/utils');
module.exports = {
fields: {
@@ -46,7 +47,11 @@ module.exports = {
access: EnforcementPoint,
hooks: {
resolveInput: ({ context, operation, resolvedData }) => {
- logger.debug('[List.Product] Auth %j', context['authedItem']);
+ logger.debug(
+ '[List.Product] Auth %s %j',
+ operation,
+ context['authedItem']
+ );
if (operation == 'create') {
if ('appId' in resolvedData && isProductID(resolvedData['appId'])) {
} else {
@@ -59,7 +64,21 @@ module.exports = {
logger.debug('[List.Product] Resolved %j', resolvedData);
return resolvedData;
},
-
+ validateInput: ({ resolvedData, addValidationError }) => {
+ try {
+ regExprValidation(
+ '^[a-zA-Z0-9 ()&-]{3,100}$',
+ resolvedData['name'],
+ "Product name must be between 3 and 100 alpha-numeric characters (including special characters ' ()&-')"
+ );
+ } catch (ex) {
+ if (ex instanceof AssertionError) {
+ addValidationError(ex.message);
+ } else {
+ throw ex;
+ }
+ }
+ },
validateDelete: async function ({ existingItem, context }) {
await DeleteProductValidate(
context,
@@ -98,12 +117,12 @@ module.exports = {
);
},
- // beforeDelete: async function ({ existingItem, context }) {
- // await DeleteProductEnvironments(
- // context,
- // context.authedItem['namespace'],
- // existingItem.id
- // );
- // },
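+ // cascade-delete this product's environments; access control is skipped
+ // because validateDelete has already vetted the namespace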
+ beforeDelete: async function ({ existingItem, context }) {
+ await DeleteProductEnvironments(
+ context.createContext({ skipAccessControl: true }),
+ context.authedItem['namespace'],
+ existingItem.id
+ );
+ },
},
};
diff --git a/src/nextapp/.env.local b/src/nextapp/.env.local
index 6b0c3e9e9..31bf55355 100644
--- a/src/nextapp/.env.local
+++ b/src/nextapp/.env.local
@@ -5,7 +5,7 @@ NEXT_PUBLIC_HELP_DESK_URL=https://dpdd.atlassian.net/servicedesk/customer/portal
NEXT_PUBLIC_HELP_CHAT_URL=https://chat.developer.gov.bc.ca/channel/aps-ops
NEXT_PUBLIC_HELP_ISSUE_URL=https://github.com/bcgov/api-services-portal/issues
NEXT_PUBLIC_HELP_API_DOCS_URL=/ds/api/v2/console/
-NEXT_PUBLIC_HELP_SUPPORT_URL=https://mvp.developer.gov.bc.ca/docs/default/component/aps-infra-platform-docs/
-NEXT_PUBLIC_HELP_RELEASE_URL=https://mvp.developer.gov.bc.ca/docs/default/component/aps-infra-platform-docs/reference/releases/
+NEXT_PUBLIC_HELP_SUPPORT_URL=https://developer.gov.bc.ca/docs/default/component/aps-infra-platform-docs/
+NEXT_PUBLIC_HELP_RELEASE_URL=https://developer.gov.bc.ca/docs/default/component/aps-infra-platform-docs/reference/releases/
NEXT_PUBLIC_HELP_STATUS_URL=https://uptime.com/s/bcgov-dss
diff --git a/src/nextapp/components/products-list/delete-product.tsx b/src/nextapp/components/products-list/delete-product.tsx
index 4252f791b..37a32268c 100644
--- a/src/nextapp/components/products-list/delete-product.tsx
+++ b/src/nextapp/components/products-list/delete-product.tsx
@@ -42,9 +42,10 @@ const DeleteProduct: React.FC = ({
});
onDeleted();
client.invalidateQueries(queryKey);
- } catch {
+ } catch (e) {
toast({
title: 'Product delete failed',
+ description: e.message,
status: 'error',
isClosable: true,
});
diff --git a/src/nextapp/pages/devportal/api-directory/[id].tsx b/src/nextapp/pages/devportal/api-directory/[id].tsx
index 5bdeb2c47..9a8475efb 100644
--- a/src/nextapp/pages/devportal/api-directory/[id].tsx
+++ b/src/nextapp/pages/devportal/api-directory/[id].tsx
@@ -150,7 +150,7 @@ const ApiPage: React.FC<
- {data?.products?.map((p) => (
+ {data?.products?.sort((a,b) => (a.name > b.name) ? 1 : ((b.name > a.name) ? -1 : 0)).map((p) => (
diff --git a/src/services/keystone/batch-service.ts b/src/services/keystone/batch-service.ts
--- a/src/services/keystone/batch-service.ts
+++ b/src/services/keystone/batch-service.ts
+ if (result['data'][query].length > 1) {
+ throw Error(
+ 'Expecting zero or one rows ' + query + ' ' + JSON.stringify(variables)
+ );
+ }
+ return result['data'][query].length == 0 ? null : result['data'][query][0];
+ }
+
public async lookup(
query: string,
refKey: string,
@@ -166,7 +240,10 @@ export class BatchService {
return result['data'][query].length == 0 ? null : result['data'][query][0];
}
- public async create(entity: string, data: any) {
+ public async create(
+ entity: string,
+ data: any
+ ): Promise<{ id?: string; error?: string }> {
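+ // resolves { id } on success or { error } carrying the GraphQL message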
logger.debug('[create] : (%s) %j', entity, data);
const result = await this.context.executeGraphQL({
query: `mutation ($data: ${entity}CreateInput) {
@@ -182,10 +259,16 @@ export class BatchService {
logger.debug('[create] RESULT %j', result);
}
- return 'errors' in result ? null : result['data'][`create${entity}`].id;
+ return 'errors' in result
+ ? { error: result['errors'][0].message }
+ : { id: result['data'][`create${entity}`].id };
}
- public async update(entity: string, id: string, data: any): Promise<string> {
+ public async update(
+ entity: string,
+ id: string,
+ data: any
+ ): Promise<{ id?: string; error?: string }> {
logger.debug('[update] : %s %s', entity, id);
const result = await this.context.executeGraphQL({
query: `mutation ($id: ID!, $data: ${entity}UpdateInput) {
@@ -200,7 +283,10 @@ export class BatchService {
} else {
logger.debug('[update] RESULT %j', result);
}
- return 'errors' in result ? null : result['data'][`update${entity}`].id;
+
+ return 'errors' in result
+ ? { error: result['errors'][0].message }
+ : { id: result['data'][`update${entity}`].id };
}
public async remove(entity: string, id: string): Promise<string> {
diff --git a/src/services/workflow/delete-environment.ts b/src/services/workflow/delete-environment.ts
index cb1f87ad2..1292700c9 100644
--- a/src/services/workflow/delete-environment.ts
+++ b/src/services/workflow/delete-environment.ts
@@ -71,6 +71,13 @@ export const DeleteEnvironment = async (
prodEnvId
);
+ // no longer doing a cascade delete of service access / consumer data
+ assert.strictEqual(
+ force,
+ false,
+ 'Force delete environment no longer supported'
+ );
+
const envDetail = await lookupEnvironmentAndIssuerById(context, prodEnvId);
const accessList = await lookupServiceAccessesByEnvironment(context, ns, [
@@ -78,7 +85,7 @@ export const DeleteEnvironment = async (
]);
assert.strictEqual(
- force == true || accessList.length == 0,
+ accessList.length == 0,
true,
`${accessList.length} ${
accessList.length == 1 ? 'consumer has' : 'consumers have'
@@ -102,21 +109,21 @@ export const CascadeDeleteEnvironment = async (
ns: string,
prodEnvId: string
): Promise<void> => {
- await deleteRecords(
- context,
- 'ServiceAccess',
- { productEnvironment: { id: prodEnvId } },
- true,
- ['id']
- );
-
- await deleteRecords(
- context,
- 'AccessRequest',
- { productEnvironment: { id: prodEnvId } },
- true,
- ['id']
- );
+ // await deleteRecords(
+ // context,
+ // 'ServiceAccess',
+ // { productEnvironment: { id: prodEnvId } },
+ // true,
+ // ['id']
+ // );
+
+ // await deleteRecords(
+ // context,
+ // 'AccessRequest',
+ // { productEnvironment: { id: prodEnvId } },
+ // true,
+ // ['id']
+ // );
await deleteRecords(context, 'Environment', { id: prodEnvId }, false, ['id']);
};
diff --git a/src/services/workflow/delete-namespace.ts b/src/services/workflow/delete-namespace.ts
index 4abd6063a..2fa4151d5 100644
--- a/src/services/workflow/delete-namespace.ts
+++ b/src/services/workflow/delete-namespace.ts
@@ -25,7 +25,7 @@ import { Environment } from '../keystone/types';
import { lookupEnvironmentsByNS } from '../keystone/product-environment';
import { FieldErrors } from 'tsoa';
import { updateActivity } from '../keystone/activity';
-import { CascadeDeleteEnvironment } from './delete-environment';
+//import { CascadeDeleteEnvironment } from './delete-environment';
import { GWAService } from '../gwaapi';
import getSubjectToken from '../../auth/auth-token';
@@ -128,13 +128,17 @@ export const DeleteNamespace = async (
const envs = await lookupEnvironmentsByNS(context, ns);
const ids = envs.map((e: Environment) => e.id);
- for (const envId of ids) {
- await CascadeDeleteEnvironment(context, ns, envId);
- }
-
- await deleteRecords(context, 'ServiceAccess', { namespace: ns }, true, [
- 'id',
- ]);
+ // "DeleteNamespaceValidate" is called prior to this one, so
+ // it won't reach here if there are Service Access records
+ // but to be extra safe, lets keep this code
+ //
+ // for (const envId of ids) {
+ // await CascadeDeleteEnvironment(context, ns, envId);
+ // }
+
+ // await deleteRecords(context, 'ServiceAccess', { namespace: ns }, true, [
+ // 'id',
+ // ]);
await deleteRecords(context, 'Product', { namespace: ns }, true, ['id']);
diff --git a/src/services/workflow/delete-product.ts b/src/services/workflow/delete-product.ts
index d39525e34..ab1a179f7 100644
--- a/src/services/workflow/delete-product.ts
+++ b/src/services/workflow/delete-product.ts
@@ -46,19 +46,19 @@ export const DeleteProductValidate = async (
);
};
-// export const DeleteProductEnvironments = async (
-// context: any,
-// ns: string,
-// id: string
-// ) => {
-// logger.debug('Deleting Product ns=%s, id=%s', ns, id);
+export const DeleteProductEnvironments = async (
+ context: any,
+ ns: string,
+ id: string
+) => {
+ logger.debug('Deleting environments for ns=%s, product=%s', ns, id);
-// const product = await lookupProduct(context, ns, id);
-// logger.error('Product %j', product);
+ const product = await lookupProduct(context, ns, id);
+ logger.info('Deleting product environments %j', product);
-// const ids = product.environments.map((e: Environment) => e.id);
+ const ids = product.environments.map((e: Environment) => e.id);
-// for (const envId of ids) {
-// await deleteRecords(context, 'Environment', { id: envId }, false, ['id']);
-// }
-// };
+ for (const envId of ids) {
+ await deleteRecords(context, 'Environment', { id: envId }, false, ['id']);
+ }
+};
diff --git a/src/test/integrated/batchworker/product.ts b/src/test/integrated/batchworker/product.ts
new file mode 100644
index 000000000..eac8c069b
--- /dev/null
+++ b/src/test/integrated/batchworker/product.ts
@@ -0,0 +1,59 @@
+/*
+Wire up directly with Keycloak and use the Services
+To run:
+npm run ts-build
+npm run ts-watch
+node dist/test/integrated/batchworker/product.js
+*/
+
+import InitKeystone from '../keystonejs/init';
+import {
+ getRecords,
+ parseJsonString,
+ transformAllRefID,
+ removeEmpty,
+ removeKeys,
+ syncRecords,
+} from '../../../batch/feed-worker';
+import { o } from '../util';
+import { BatchService } from '../../../services/keystone/batch-service';
+
+(async () => {
+ const keystone = await InitKeystone();
+ console.log('K = ' + keystone);
+
+ const ns = 'refactortime';
+ const skipAccessControl = false;
+
+ const identity = {
+ id: null,
+ username: 'sample_username',
+ namespace: ns,
+ roles: JSON.stringify(['api-owner']),
+ scopes: [],
+ userId: null,
+ } as any;
+
+ const ctx = keystone.createContext({
+ skipAccessControl,
+ authentication: { item: identity },
+ });
+
+ const json = {
+ name: 'Refactor Time Test',
+ namespace: ns,
+ environments: [
+ {
+ name: 'stage',
+ appId: '0A021EB0',
+ //services: [] as any,
+ services: ['a-service-for-refactortime'],
+ // services: ['a-service-for-refactortime', 'a-service-for-aps-moh-proto'],
+ },
+ ] as any,
+ };
+ const res = await syncRecords(ctx, 'Product', null, json);
+ o(res);
+
+ await keystone.disconnect();
+})();
diff --git a/src/test/integrated/keystonejs/init.ts b/src/test/integrated/keystonejs/init.ts
index 1930b2ec8..e69f7e11e 100644
--- a/src/test/integrated/keystonejs/init.ts
+++ b/src/test/integrated/keystonejs/init.ts
@@ -1,8 +1,6 @@
/*
node dist/test/integrated/keystonejs/test.js
*/
-import { syncRecords } from '../../../batch/feed-worker';
-
import { loadRulesAndWatch } from '../../../authz/enforcement';
loadRulesAndWatch(false);
@@ -17,6 +15,20 @@ export default async function InitKeystone(
const session = require('express-session');
//const MongoStore = require('connect-mongo')(session);
+ const { KnexAdapter } = require('@keystonejs/adapter-knex');
+ const knexAdapterConfig = {
+ knexOptions: {
debug: process.env.LOG_LEVEL === 'debug' ? true : false,
+ connection: {
+ host: process.env.KNEX_HOST,
+ port: process.env.KNEX_PORT,
+ user: process.env.KNEX_USER,
+ password: process.env.KNEX_PASSWORD,
+ database: process.env.KNEX_DATABASE,
+ },
+ },
+ };
+
const { MongooseAdapter } = require('@keystonejs/adapter-mongoose');
const mongooseAdapterConfig = {
mongoUri: process.env.MONGO_URL,
@@ -24,8 +36,13 @@ export default async function InitKeystone(
pass: process.env.MONGO_PASSWORD,
};
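+ // pick the adapter via ADAPTER=knex|mongoose (defaults to mongoose)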
+ const adapter = process.env.ADAPTER ? process.env.ADAPTER : 'mongoose';
+
const keystone = new Keystone({
- adapter: new MongooseAdapter(mongooseAdapterConfig),
+ adapter:
+ adapter == 'knex'
+ ? new KnexAdapter(knexAdapterConfig)
+ : new MongooseAdapter(mongooseAdapterConfig),
cookieSecret: process.env.COOKIE_SECRET,
cookie: {
secure: process.env.COOKIE_SECURE === 'true', // Default to true in production
diff --git a/src/test/services/batch/integrated-batch.test.ts b/src/test/services/batch/integrated-batch.test.ts
new file mode 100644
index 000000000..711491160
--- /dev/null
+++ b/src/test/services/batch/integrated-batch.test.ts
@@ -0,0 +1,107 @@
+/*
+Wire up directly with Keycloak and use the Services
+To run:
+npm run intg-build
+npm test
+*/
+
+import InitKeystone from '../../integrated/keystonejs/init';
+import {
+ removeKeys,
+ syncRecords,
+ getRecords,
+} from '../../../batch/feed-worker';
+import yaml from 'js-yaml';
+import { strict as assert } from 'assert';
+
+import testdata from './testdata';
+import { Logger } from '../../../logger';
+import { BatchWhereClause } from '@/services/keystone/batch-service';
+
+const logger = Logger('testsuite');
+
+function equalPayload(a: any, e: any) {
+ assert.strictEqual(
+ yaml.dump(a, { indent: 2, lineWidth: 100 }),
+ yaml.dump(e, { indent: 2, lineWidth: 100 })
+ );
+}
+
+function testHeading(index: number, name: string) {
+ logger.info('\x1b[33m --------------------------------------------- \x1b[0m');
+ logger.info('\x1b[33m ' + index + ' ' + name + ' \x1b[0m');
+ logger.info('\x1b[33m --------------------------------------------- \x1b[0m');
+}
+
+describe('Batch Tests', function () {
+ jest.setTimeout(120 * 1000);
+ it(`should pass all tests`, async function () {
+ const keystone = await InitKeystone();
+ console.log('K = ' + keystone);
+
+ const ns = 'refactortime';
+ const skipAccessControl = true;
+
+ const identity = {
+ id: null,
+ username: 'sample_username',
+ namespace: ns,
+ roles: JSON.stringify(['api-owner']),
+ scopes: [],
+ userId: null,
+ } as any;
+
+ const ctx = keystone.createContext({
+ skipAccessControl,
+ authentication: { item: identity },
+ });
+
+ //await cleanupDatabase();
+
+ let index = 1;
+ for (const test of testdata.tests) {
+ const json: any = test.data;
+ testHeading(index++, test.name);
+ try {
+ if ((test.method || 'PUT') === 'PUT') {
+ const res = await syncRecords(
+ ctx,
+ test.entity,
+ json[test.refKey],
+ json
+ );
+ equalPayload(
+ removeKeys(res, ['id', 'ownedBy']),
+ test.expected.payload
+ );
+ } else {
+ const where: BatchWhereClause = test.whereClause;
+ const records: any[] = await getRecords(
+ ctx,
+ test.entity,
+ null,
+ test.responseFields,
+ where
+ );
+ const payload = records.map((o) => removeKeys(o, ['id', 'appId']));
+ equalPayload(payload, test.expected.payload);
+ }
+ } catch (e) {
+ logger.error(e.message);
+ if (
+ !test.expected?.exception ||
+ test.expected?.exception != `${e.message}`
+ ) {
+ await keystone.disconnect();
+
+ throw e;
+ }
+ }
+ }
+
+ testHeading(index, 'DONE');
+
+ await keystone.disconnect();
+ });
+});
diff --git a/src/test/services/batch/testdata.js b/src/test/services/batch/testdata.js
new file mode 100644
index 000000000..93c4124ae
--- /dev/null
+++ b/src/test/services/batch/testdata.js
@@ -0,0 +1,707 @@
+export default {
+ tests: [
+ {
+ name: 'create an organization',
+ entity: 'Organization',
+ refKey: 'extForeignKey',
+ data: {
+ name: 'ministry-of-health',
+ title: 'Ministry of Health',
+ extForeignKey: '01',
+ extSource: 'ckan',
+ extRecordHash: '',
+ orgUnits: [
+ {
+ id: '319b3297-846d-4b97-8095-ceb3ec505fb8',
+ name: 'planning-and-innovation-division',
+ title: 'Planning and Innovation Division',
+ extSource: 'ckan',
+ extRecordHash: '',
+ },
+ {
+ id: '319b3297-846d-4b97-8095-ceb3ec505fb7',
+ name: 'public-health',
+ title: 'Public Health',
+ extSource: 'ckan',
+ extRecordHash: '',
+ },
+ ],
+ },
+ expected: {
+ payload: {
+ status: 200,
+ result: 'created',
+ childResults: [
+ {
+ status: 200,
+ result: 'created',
+ childResults: [],
+ },
+ {
+ status: 200,
+ result: 'created',
+ childResults: [],
+ },
+ ],
+ },
+ },
+ },
+ {
+ name: 'create a new product',
+ entity: 'Product',
+ data: {
+ name: 'Refactor time test',
+ namespace: 'refactortime',
+ environments: [{ name: 'dev', appId: '0A021EB0' }],
+ },
+ expected: {
+ payload: {
+ status: 200,
+ result: 'created',
+ childResults: [],
+ },
+ },
+ },
+ {
+ name: 'update same product',
+ entity: 'Product',
+ data: {
+ name: 'Refactor time test',
+ namespace: 'refactortime',
+ environments: [{ name: 'dev', appId: '0A021EB0' }],
+ },
+ expected: {
+ payload: {
+ status: 200,
+ result: 'no-change',
+ childResults: [
+ { status: 200, result: 'no-change', childResults: [] },
+ ],
+ },
+ },
+ },
+ {
+ name: 'update same product but with invalid appId',
+ entity: 'Product',
+ data: {
+ name: 'Refactor time test',
+ namespace: 'refactortime',
+ environments: [{ name: 'dev', appId: '22021EB0' }],
+ },
+ expected: {
+ payload: {
+ status: 400,
+ result: 'update-failed',
+ reason: 'Failed updating children',
+ childResults: [
+ {
+ status: 400,
+ result: 'update-failed',
+ reason: 'Unexpected appId',
+ childResults: [],
+ },
+ ],
+ },
+ },
+ },
+ {
+ name: 'update description of same product',
+ entity: 'Product',
+ data: {
+ name: 'Refactor time test',
+ description: 'Good info to have',
+ namespace: 'refactortime',
+ environments: [{ name: 'dev', appId: '0A021EB0' }],
+ },
+ expected: {
+ payload: {
+ status: 200,
+ result: 'updated',
+ childResults: [
+ { status: 200, result: 'no-change', childResults: [] },
+ ],
+ },
+ },
+ },
+ {
+ name: 'update same product add environment',
+ entity: 'Product',
+ data: {
+ name: 'Refactor time test',
+ description: 'Good info to have',
+ namespace: 'refactortime',
+ environments: [
+ { name: 'dev', appId: '0A021EB0' },
+ { name: 'test', appId: '0A021FB0' },
+ ],
+ },
+ expected: {
+ payload: {
+ status: 200,
+ result: 'updated',
+ childResults: [
+ { status: 200, result: 'no-change', childResults: [] },
+ { status: 200, result: 'created', childResults: [] },
+ ],
+ },
+ },
+ },
+ {
+ name: 'update same product remove environment',
+ entity: 'Product',
+ data: {
+ name: 'Refactor time test',
+ description: 'Good info to have',
+ namespace: 'refactortime',
+ environments: [{ name: 'test', appId: '0A021FB0' }],
+ },
+ expected: {
+ payload: {
+ status: 200,
+ result: 'updated',
+ childResults: [
+ { status: 200, result: 'no-change', childResults: [] },
+ ],
+ },
+ },
+ },
+ {
+ name: 'try updating product from different namespace',
+ entity: 'Product',
+ data: {
+ name: 'Refactor time test',
+ namespace: 'diffnamespace',
+ environments: [{ name: 'test', appId: '0A021FB0' }],
+ },
+ expected: {
+ payload: {
+ status: 400,
+ result: 'create-failed',
+ reason:
+ 'Unable to create and/or connect 1 Product.environments',
+ childResults: [],
+ },
+ },
+ },
+ {
+ name: 'create a new product with no environments',
+ entity: 'Product',
+ data: {
+ name: 'Refactor number two',
+ namespace: 'refactortime',
+ },
+ expected: {
+ payload: {
+ status: 200,
+ result: 'created',
+ childResults: [],
+ },
+ },
+ },
+ {
+ name: 'create a new product with same name as before but diff appId',
+ entity: 'Product',
+ data: {
+ name: 'Refactor number two',
+ appId: '040FA2D8138D',
+ namespace: 'refactortime',
+ },
+ expected: {
+ payload: {
+ status: 400,
+ result: 'update-failed',
+ reason: 'Unexpected appId',
+ childResults: [],
+ },
+ },
+ },
+ {
+ name: 'create a new product with no name or appId',
+ entity: 'Product',
+ data: {
+ namespace: 'refactortime',
+ },
+ expected: { exception: 'Missing value for key name' },
+ },
+ {
+ name: 'create a new product with lots of environments',
+ entity: 'Product',
+ data: {
+ name: 'All Env Product',
+ namespace: 'refactortime',
+ environments: [
+ { name: 'dev', appId: '1B021EB0' },
+ { name: 'test', appId: '2B021EB0' },
+ { name: 'prod', appId: '3B021EB0' },
+ ],
+ },
+ expected: {
+ payload: {
+ status: 200,
+ result: 'created',
+ childResults: [],
+ },
+ },
+ },
+ {
+ name: 'update product just created',
+ entity: 'Product',
+ data: {
+ name: 'All Env Product',
+ namespace: 'refactortime',
+ environments: [
+ { name: 'dev', appId: '1B021EB0' },
+ { name: 'test', appId: '2B021EB0' },
+ { name: 'prod', appId: '3B021EB0' },
+ ],
+ },
+ expected: {
+ payload: {
+ status: 200,
+ result: 'no-change',
+ childResults: [
+ { status: 200, result: 'no-change', childResults: [] },
+ { status: 200, result: 'no-change', childResults: [] },
+ { status: 200, result: 'no-change', childResults: [] },
+ ],
+ },
+ },
+ },
+
+ {
+ name: 'update product just created using no appIds',
+ entity: 'Product',
+ data: {
+ name: 'All Env Product',
+ namespace: 'refactortime',
+ environments: [{ name: 'dev' }, { name: 'test' }, { name: 'prod' }],
+ },
+ expected: {
+ payload: {
+ status: 200,
+ result: 'no-change',
+ childResults: [
+ { status: 200, result: 'no-change', childResults: [] },
+ { status: 200, result: 'no-change', childResults: [] },
+ { status: 200, result: 'no-change', childResults: [] },
+ ],
+ },
+ },
+ },
+ {
+ name: 'create a new product with missing appIds',
+ entity: 'Product',
+ data: {
+ name: 'All Env Product New',
+ namespace: 'refactortime',
+ environments: [{ name: 'dev' }, { name: 'test' }, { name: 'prod' }],
+ },
+ expected: {
+ payload: {
+ status: 200,
+ result: 'created',
+ childResults: [],
+ },
+ },
+ },
+ {
+ name: 'create a product with invalid name',
+ entity: 'Product',
+ data: {
+ name: '@#$&(#@&$*(#@&',
+ namespace: 'refactortime',
+ },
+ expected: {
+ payload: {
+ status: 400,
+ result: 'create-failed',
+ reason: 'You attempted to perform an invalid mutation',
+ // reason:
+ // "Product name must be between 3 and 100 alpha-numeric characters (including special characters ' {}&-')",
+ childResults: [],
+ },
+ },
+ },
+ {
+ name: 'create an invalid product with short name',
+ entity: 'Product',
+ data: {
+ name: 'tw',
+ namespace: 'refactortime',
+ },
+ expected: {
+ payload: {
+ status: 400,
+ result: 'create-failed',
+ reason: 'You attempted to perform an invalid mutation',
+ // reason:
+ // "Product name must be between 3 and 100 alpha-numeric characters (including special characters ' {}&-')",
+ childResults: [],
+ },
+ },
+ },
+ {
+ name: 'create a product with all good characters',
+ entity: 'Product',
+ data: {
+ name: 'abc (ABC) & 123',
+ namespace: 'refactortime',
+ },
+ expected: {
+ payload: {
+ status: 200,
+ result: 'created',
+ childResults: [],
+ },
+ },
+ },
+ {
+ name: 'create a product with an environment with no appId',
+ entity: 'Product',
+ data: {
+ name: 'Prod with a dev env',
+ namespace: 'refactortime',
+ environments: [{ name: 'dev' }],
+ },
+ expected: {
+ payload: {
+ status: 200,
+ result: 'created',
+ childResults: [],
+ },
+ },
+ },
+ {
+ name: 'update a product with an environment with no appId',
+ entity: 'Product',
+ data: {
+ name: 'Prod with a dev env',
+ namespace: 'refactortime',
+ environments: [{ name: 'dev' }],
+ },
+ expected: {
+ payload: {
+ status: 200,
+ result: 'no-change',
+ childResults: [
+ { status: 200, result: 'no-change', childResults: [] },
+ ],
+ },
+ },
+ },
+ {
+ name: 'get a product',
+ entity: 'Product',
+ method: 'GET',
+ whereClause: {
+ query: '$name: String',
+ clause: '{ name: $name }',
+ variables: {
+ name: 'Prod with a dev env',
+ },
+ },
+ responseFields: ['environments', 'datasets'],
+ expected: {
+ payload: [
+ {
+ name: 'Prod with a dev env',
+ description: null,
+ namespace: 'refactortime',
+ dataset: null,
+ environments: [
+ {
+ name: 'dev',
+ active: false,
+ approval: false,
+ flow: 'public',
+ additionalDetailsToRequest: null,
+ services: [],
+ legal: null,
+ credentialIssuer: null,
+ product: { namespace: 'refactortime' },
+ },
+ ],
+ },
+ ],
+ },
+ },
+ {
+ name: 'create a dataset',
+ entity: 'DraftDataset',
+ refKey: 'name',
+ data: {
+ name: 'my-dataset',
+ title: 'My Dataset',
+ },
+ expected: {
+ payload: {
+ status: 200,
+ result: 'created',
+ childResults: [],
+ },
+ },
+ },
+ {
+ name: 'create a product with missing dataset',
+ entity: 'Product',
+ data: {
+ name: 'Prod with dataset',
+ dataset: 'missing-dataset',
+ namespace: 'refactortime',
+ environments: [{ name: 'dev' }],
+ },
+ expected: {
+ payload: {
+ status: 400,
+ result: 'create-failed',
+ reason: 'Record not found [dataset] missing-dataset',
+ childResults: [],
+ },
+ },
+ },
+ {
+ name: 'create a product with the dataset',
+ entity: 'Product',
+ data: {
+ name: 'Prod with dataset',
+ dataset: 'my-dataset',
+ namespace: 'refactortime',
+ environments: [{ name: 'dev' }],
+ },
+ expected: {
+ payload: {
+ status: 200,
+ result: 'created',
+ childResults: [],
+ },
+ },
+ },
+ {
+ name: 'try create a DraftDataset with invalid name',
+ entity: 'DraftDataset',
+ refKey: 'name',
+ data: {
+ name: 'my dataset',
+ title: 'My Dataset',
+ },
+ expected: {
+ payload: {
+ status: 200,
+ result: 'created',
+ childResults: [],
+ },
+ },
+ },
+ {
+ name: 'create DraftDataset',
+ entity: 'DraftDataset',
+ refKey: 'name',
+ data: {
+ name: 'my-draft-product',
+ title: 'My Draft Product',
+ notes:
+ 'API Gateway Services provides a way to configure services on the API Gateway, manage access to APIs and get insight into the use of them.',
+ tags: ['gateway', 'kong', 'openapi'],
+ sector: 'Service',
+ license_title: 'Access Only',
+ view_audience: 'Government',
+ security_class: 'LOW-PUBLIC',
+ organization: 'ministry-of-health',
+ organizationUnit: 'planning-and-innovation-division',
+ },
+ expected: {
+ payload: {
+ status: 200,
+ result: 'created',
+ childResults: [],
+ },
+ },
+ },
+ {
+ name: 'update security class in existing DraftDataset',
+ entity: 'DraftDataset',
+ refKey: 'name',
+ data: {
+ name: 'my-draft-product',
+ title: 'My Draft Product',
+ notes:
+ 'API Gateway Services provides a way to configure services on the API Gateway, manage access to APIs and get insight into the use of them.',
+ tags: ['gateway', 'kong', 'openapi'],
+ sector: 'Service',
+ license_title: 'Access Only',
+ view_audience: 'Government',
+ security_class: 'PUBLIC',
+ organization: 'ministry-of-health',
+ organizationUnit: 'planning-and-innovation-division',
+ },
+ expected: {
+ payload: {
+ status: 200,
+ result: 'updated',
+ childResults: [],
+ },
+ },
+ },
+ {
+ name: 'update DraftDataset no change',
+ entity: 'DraftDataset',
+ refKey: 'name',
+ data: {
+ name: 'my-draft-product',
+ title: 'My Draft Product',
+ notes:
+ 'API Gateway Services provides a way to configure services on the API Gateway, manage access to APIs and get insight into the use of them.',
+ tags: ['gateway', 'kong', 'openapi'],
+ sector: 'Service',
+ license_title: 'Access Only',
+ view_audience: 'Government',
+ security_class: 'PUBLIC',
+ organization: 'ministry-of-health',
+ organizationUnit: 'planning-and-innovation-division',
+ },
+ expected: {
+ payload: {
+ status: 200,
+ result: 'no-change',
+ childResults: [],
+ },
+ },
+ },
+ {
+ name: 'legal',
+ entity: 'Legal',
+ refKey: 'reference',
+ data: {
+ reference: 'terms-of-use-for-api-gateway-1',
+ title: 'Terms of Use for API Gateway',
+ link:
+ 'https://www2.gov.bc.ca/gov/content/data/open-data/api-terms-of-use-for-ogl-information',
+ document: 'terms-of-use',
+ version: 1,
+ },
+ expected: {
+ payload: {
+ status: 200,
+ result: 'created',
+ childResults: [],
+ },
+ },
+ },
+ {
+ name: 'create Dataset',
+ entity: 'Dataset',
+ refKey: 'extForeignKey',
+ data: {
+ extForeignKey: '000-000-001',
+ name: 'external-dataset',
+ title: 'External DS',
+ notes: 'Small notes',
+ tags: ['gateway', 'kong', 'openapi'],
+ sector: 'Service',
+ license_title: 'Access Only',
+ view_audience: 'Government',
+ security_class: 'PUBLIC',
+ organization: { id: '319b3297-846d-4b97-8095-ceb3ec505fb8' },
+ },
+ expected: {
+ payload: {
+ status: 200,
+ result: 'created',
+ childResults: [],
+ },
+ },
+ },
+ {
+ name: 'create a new product with legal',
+ entity: 'Product',
+ data: {
+ name: 'Product with Legal',
+ namespace: 'refactortime',
+ environments: [
+ { name: 'dev', legal: 'terms-of-use-for-api-gateway-1' },
+ ],
+ },
+ expected: {
+ payload: {
+ status: 200,
+ result: 'created',
+ childResults: [],
+ },
+ },
+ },
+ {
+ name: 'get the legal product',
+ entity: 'Product',
+ method: 'GET',
+ whereClause: {
+ query: '$name: String',
+ clause: '{ name: $name }',
+ variables: {
+ name: 'Product with Legal',
+ },
+ },
+ responseFields: ['environments', 'datasets'],
+ expected: {
+ payload: [
+ {
+ name: 'Product with Legal',
+ description: null,
+ namespace: 'refactortime',
+ dataset: null,
+ environments: [
+ {
+ name: 'dev',
+ active: false,
+ approval: false,
+ flow: 'public',
+ additionalDetailsToRequest: null,
+ services: [],
+ legal: { reference: 'terms-of-use-for-api-gateway-1' },
+ credentialIssuer: null,
+ product: { namespace: 'refactortime' },
+ },
+ ],
+ },
+ ],
+ },
+ },
+ {
+ name: 'create issuer',
+ entity: 'CredentialIssuer',
+ refKey: 'name',
+ data: {
+ name: 'issuer',
+ namespace: 'refactortime',
+ },
+ expected: {
+ payload: {
+ status: 200,
+ result: 'created',
+ childResults: [],
+ },
+ },
+ },
+ {
+ name: 'create a new product with issuer',
+ entity: 'Product',
+ data: {
+ name: 'Product with Issuer',
+ namespace: 'refactortime',
+ environments: [
+ {
+ name: 'dev',
+ flow: 'client-credentials',
+ credentialIssuer: 'issuer',
+ },
+ ],
+ },
+ expected: {
+ payload: {
+ status: 200,
+ result: 'created',
+ childResults: [],
+ },
+ },
+ },
+ ],
+};