diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json
deleted file mode 100644
index a4e6e67..0000000
--- a/.devcontainer/devcontainer.json
+++ /dev/null
@@ -1,14 +0,0 @@
-{
-  "image": "mcr.microsoft.com/devcontainers/typescript-node:20-buster",
-  "features": {
-    "ghcr.io/devcontainers-contrib/features/prettier:1": {
-      "version": "latest"
-    },
-    "ghcr.io/devcontainers-contrib/features/jest:2": { "version": "latest" }
-  },
-  "customizations": {
-    "vscode": {
-      "extensions": ["Orta.vscode-jest", "esbenp.prettier-vscode"]
-    }
-  }
-}
diff --git a/.github/workflows/v2-automated-testing.yml b/.github/workflows/v2-automated-testing.yml
new file mode 100644
index 0000000..f3b8850
--- /dev/null
+++ b/.github/workflows/v2-automated-testing.yml
@@ -0,0 +1,82 @@
+---
+name: DataDog Service Catalog
+on:
+  push:
+  workflow_dispatch:
+
+jobs:
+  automated-testing:
+    runs-on: ubuntu-latest
+    steps:
+
+      - name: Get current date
+        id: date
+        run: echo "date=$(date +'%Y-%m-%d -- %s')" >> $GITHUB_OUTPUT
+
+      - name: Checkout
+        uses: actions/checkout@v3
+
+      - name: Run CI Tests
+        env:
+          DATADOG_API_KEY: ${{ secrets.DATADOG_API_KEY }}
+          DATADOG_APPLICATION_KEY: ${{ secrets.DATADOG_APPLICATION_KEY }}
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        run: |
+          npm install
+          npm run test
+
+      - name: Datadog Service Catalog Metadata Provider Manual Test
+        uses: ./
+        with:
+          schema-version: v2
+          github-token: ${{ secrets.GITHUB_TOKEN }}
+          datadog-hostname: api.us5.datadoghq.com
+          datadog-key: ${{ secrets.DATADOG_API_KEY }}
+          datadog-app-key: ${{ secrets.DATADOG_APPLICATION_KEY }}
+          service-name: v2-automated-test
+          team: Team Name Here
+          email: team-name-here@fakeemaildomainthatdoesntexist.com
+          slack-support-channel: 'https://fakeorg.slack.com/archives/A0000000000'
+          repo: https://github.com/arcxp/datadog-service-catalog-metadata-provider
+          contacts: |
+            - name: Testy McTester
+              type: email
+              contact: testy@mctester.com
+          tags: |
+            - schema-version: v2
+            - 'application:GitHub Action Config Test'
+            - intended-env:prod
+            - infrastructure:serverless
+            - language:nodejs
+            - updated:${{ steps.date.outputs.date }}
+            - internet_accessible: false
+          repos: |
+            - name: .github
+              url: https://github.com/arcxp/.github
+              provider: github
+          links: |
+            - url: https://github.com/actions/toolkit
+              type: repo
+              provider: Github
+              name: '@actions/toolkit'
+            - name: AMI Version Status Dashboard
+              url: https://thisisanentirelyfakeurl.seriouslythisisafakehostname.com/dashboard
+              type: dashboard
+            - name: GitHub Actions!
+              url: https://github.com/features/actions
+              provider: Github Docs
+              type: doc
+            - name: Some Runbook
+              url: https://thisisanentirelyfakeurl.seriouslythisisafakehostname.com/runbook
+              type: runbook
+              provider: Confluence
+          docs: |
+            - name: Some Docs
+              url: https://thisisanentirelyfakeurl.seriouslythisisafakehostname.com/docs
+              provider: Confluence
+            - name: Datadog Blog
+              url: https://www.datadoghq.com/blog/
+          integrations: |
+            opsgenie:
+              service-url: https://yourorghere.app.opsgenie.com/service/00000000-0000-0000-0000-000000000000
+              region: US
diff --git a/.github/workflows/v2.1-automated-testing.yml b/.github/workflows/v2.1-automated-testing.yml
new file mode 100644
index 0000000..37f6e45
--- /dev/null
+++ b/.github/workflows/v2.1-automated-testing.yml
@@ -0,0 +1,79 @@
+---
+name: DataDog Service Catalog
+on:
+  push:
+  workflow_dispatch:
+
+jobs:
+  automated-testing:
+    runs-on: ubuntu-latest
+    steps:
+
+      - name: Get current date
+        id: date
+        run: echo "date=$(date +'%Y-%m-%d -- %s')" >> $GITHUB_OUTPUT
+
+      - name: Checkout
+        uses: actions/checkout@v3
+
+      - name: Run CI Tests
+        env:
+          DATADOG_API_KEY: ${{ secrets.DATADOG_API_KEY }}
+          DATADOG_APPLICATION_KEY: ${{ secrets.DATADOG_APPLICATION_KEY }}
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        run: |
+          npm install
+          npm run test
+
+      - name: Datadog Service Catalog Metadata Provider Manual Test
+        uses: ./
+        with:
+          schema-version: v2.1
+          github-token: ${{ secrets.GITHUB_TOKEN }}
+          datadog-hostname: api.us5.datadoghq.com
+          datadog-key: ${{ secrets.DATADOG_API_KEY }}
+          datadog-app-key: ${{ secrets.DATADOG_APPLICATION_KEY }}
+          service-name: v2.1-automated-test
+          application: Datadog Service Catalog Metadata Provider
+          description: This is a test of the Datadog Service Catalog Metadata Provider
+          lifecycle: production
+          tier: p3
+          team: Team Name Here
+          email: team-name-here@fakeemaildomainthatdoesntexist.com
+          slack-support-channel: 'https://fakeorg.slack.com/archives/A0000000000'
+          repo: https://github.com/arcxp/datadog-service-catalog-metadata-provider
+          tags: |
+            - schema-version: v2.1
+            - 'application:GitHub Action Config Test'
+            - intended-env:prod
+            - infrastructure:serverless
+            - language:nodejs
+            - updated:${{ steps.date.outputs.date }}
+            - internet_accessible: false
+          links: |
+            - url: https://github.com/actions/toolkit
+              type: repo
+              provider: Github
+              name: '@actions/toolkit'
+            - name: AMI Version Status Dashboard
+              url: https://thisisanentirelyfakeurl.seriouslythisisafakehostname.com/dashboard
+              type: dashboard
+            - name: GitHub Actions!
+              url: https://github.com/features/actions
+              provider: Github Docs
+              type: doc
+            - name: Some Runbook
+              url: https://thisisanentirelyfakeurl.seriouslythisisafakehostname.com/runbook
+              type: runbook
+              provider: Confluence
+            - name: Some Docs
+              url: https://thisisanentirelyfakeurl.seriouslythisisafakehostname.com/docs
+              type: doc
+              provider: Confluence
+            - name: Datadog Blog
+              type: other
+              url: https://www.datadoghq.com/blog/
+          integrations: |
+            opsgenie:
+              service-url: https://yourorghere.app.opsgenie.com/service/00000000-0000-0000-0000-000000000000
+              region: US
diff --git a/.github/workflows/automated-testing.yml b/.github/workflows/v2.2-automated-testing.yml
similarity index 97%
rename from .github/workflows/automated-testing.yml
rename to .github/workflows/v2.2-automated-testing.yml
index 5e762ff..a8db6a0 100644
--- a/.github/workflows/automated-testing.yml
+++ b/.github/workflows/v2.2-automated-testing.yml
@@ -33,7 +33,7 @@ jobs:
           datadog-hostname: api.us5.datadoghq.com
           datadog-key: ${{ secrets.DATADOG_API_KEY }}
           datadog-app-key: ${{ secrets.DATADOG_APPLICATION_KEY }}
-          service-name: datadog-service-catalog-metadata-provider-test
+          service-name: v2.2-automated-test
           application: Datadog Service Catalog Metadata Provider
           description: This is a test of the Datadog Service Catalog Metadata Provider
           lifecycle: production
@@ -54,6 +54,7 @@ jobs:
             - language:nodejs
             - updated:${{ steps.date.outputs.date }}
             - internet_accessible: false
+            - schema-version: 2.2
           links: |
             - url: https://github.com/actions/toolkit
               type: repo
diff --git a/README.md b/README.md
index 5730600..7fdb17b 100644
--- a/README.md
+++ b/README.md
@@ -86,7 +86,7 @@ Using a field which is not supported in the schema version you've selected will
 | `repos[].name` | The name of the repository. | Yes | | `v2` |
 | `repos[].url` | The URL of the repository. | Yes | | `v2` |
 | `repos[].provider` | The provider of the repository. Acceptable values are: `Github`. | No | | `v2` |
-| `tags` | The list of tags that are associated with the service. This should be a list of key-value pairs separated by colons. | No | |
+| `tags` | The list of tags that are associated with the service. This should be a list of key-value pairs separated by colons. | No | | `v2`, `v2.1`, `v2.2` |
 | `links` | A list of links associated with the service. These links are objects with a variety of properties, but due to the limitations of GitHub Actions, please supply these object properties as a multi-line string. | No | `[]` | `v2`, `v2.1`, `v2.2` |
 | `links[].name` | The name of the link. | Yes | | `v2`, `v2.1`, `v2.2` |
 | `links[].url` | The URL of the link. | Yes | | `v2`, `v2.1`, `v2.2` |
diff --git a/__tests__/lib/fieldMappings-convenience.test.cjs b/__tests__/lib/fieldMappings-convenience.test.cjs
index e2f56c9..1441296 100644
--- a/__tests__/lib/fieldMappings-convenience.test.cjs
+++ b/__tests__/lib/fieldMappings-convenience.test.cjs
@@ -2,7 +2,7 @@
  * @fileoverview This test covers all of the field mappings across versions.
  * @jest-environment node
  * @group ci
- * @author Mike Stemle
+ * @author Mike Stemle
 **/
 
 const core = require('@actions/core')
diff --git a/__tests__/lib/fieldMappings-schema.test.cjs b/__tests__/lib/fieldMappings-schema.test.cjs
index 6420998..993736d 100644
--- a/__tests__/lib/fieldMappings-schema.test.cjs
+++ b/__tests__/lib/fieldMappings-schema.test.cjs
@@ -2,7 +2,7 @@
  * @fileoverview This test covers all of the field mappings across versions.
  * @jest-environment node
  * @group ci
- * @author Mike Stemle
+ * @author Mike Stemle
 **/
 
 const core = require('@actions/core')
diff --git a/__tests__/lib/fieldMappings.test.cjs b/__tests__/lib/fieldMappings.test.cjs
index 377aecb..8e3e16b 100644
--- a/__tests__/lib/fieldMappings.test.cjs
+++ b/__tests__/lib/fieldMappings.test.cjs
@@ -2,7 +2,7 @@
  * @fileoverview This test covers all of the field mappings across versions.
  * @jest-environment node
  * @group ci
- * @author Mike Stemle
+ * @author Mike Stemle
 **/
 
 const core = require('@actions/core')
@@ -34,4 +34,4 @@ describe('constants', () => {
     core.setFailed.mockReset()
     core.setFailed.mockClear()
   })
-})
\ No newline at end of file
+})
diff --git a/__tests__/lib/input-to-registry-document.test.cjs b/__tests__/lib/input-to-registry-document.test.cjs
index 8068e04..ad90b4c 100644
--- a/__tests__/lib/input-to-registry-document.test.cjs
+++ b/__tests__/lib/input-to-registry-document.test.cjs
@@ -2,7 +2,7 @@
  * Tests.
  * @jest-environment node
  * @group ci
- * @author Mike Stemle
+ * @author Mike Stemle
 **/
 
 const YAML = require('yaml')
diff --git a/__tests__/self-workflow-validation.test.cjs b/__tests__/self-workflow-validation.test.cjs
index 25a8135..51e736f 100644
--- a/__tests__/self-workflow-validation.test.cjs
+++ b/__tests__/self-workflow-validation.test.cjs
@@ -2,7 +2,7 @@ const path = require('path')
 
 process.env.GITHUB_EVENT_PATH = path.join(
   __dirname,
-  './data/github-context-payload.json'
+  './data/github-context-payload.json',
 )
 process.env.GITHUB_REPOSITORY =
   'arcxp/datadog-service-catalog-metadata-provider'
@@ -34,20 +34,79 @@ const {
 } = require('../lib/org-rules')
 
 const Ajv = require('ajv')
-const ddSchema_v2_2 = require('./data/datadog-service-catalog-schema-v2.2.json')
-const validate_v2_2 = new Ajv({
-  strict: false,
-  validateFormats: false,
-}).compile(ddSchema_v2_2)
 
-describe('Read and validate the automated testing workflow', () => {
+describe('Validate for schema v2', () => {
+  const ddSchema_v2 = require('./data/datadog-service-catalog-schema-v2.json')
+  const validate_v2 = new Ajv({
+    strict: false,
+    validateFormats: false,
+  }).compile(ddSchema_v2)
+
+  test('read and validate workflow', async () => {
+    const workflowContent = await readFile(
+      '.github/workflows/v2-automated-testing.yml',
+      { encoding: 'utf8' },
+    )
+    const parsedWorkflow = _.last(
+      YAML.parse(workflowContent)?.jobs?.['automated-testing']?.steps,
+    )?.with
+
+    core.__setInputsObject(parsedWorkflow)
+    const serviceDefinition = await inputsToRegistryDocument()
+
+    console.log({ parsedWorkflow, serviceDefinition })
+    const isValid = validate_v2(serviceDefinition)
+    if (!isValid) {
+      console.log(validate_v2.errors)
+      console.log(validate_v2)
+    }
+    expect(isValid).toBeTruthy()
+  })
+})
+
+describe('Validate for schema v2.1', () => {
+  const ddSchema_v2_1 = require('./data/datadog-service-catalog-schema-v2.1.json')
+  const validate_v2_1 = new Ajv({
+    strict: false,
+    validateFormats: false,
+  }).compile(ddSchema_v2_1)
+
+  test('read and validate workflow', async () => {
+    const workflowContent = await readFile(
+      '.github/workflows/v2.1-automated-testing.yml',
+      { encoding: 'utf8' },
+    )
+    const parsedWorkflow = _.last(
+      YAML.parse(workflowContent)?.jobs?.['automated-testing']?.steps,
+    )?.with
+
+    core.__setInputsObject(parsedWorkflow)
+    const serviceDefinition = await inputsToRegistryDocument()
+
+    console.log({ parsedWorkflow, serviceDefinition })
+    const isValid = validate_v2_1(serviceDefinition)
+    if (!isValid) {
+      console.log(validate_v2_1.errors)
+      console.log(validate_v2_1)
+    }
+    expect(isValid).toBeTruthy()
+  })
+})
+
+describe('Validate for schema v2.2', () => {
+  const ddSchema_v2_2 = require('./data/datadog-service-catalog-schema-v2.2.json')
+  const validate_v2_2 = new Ajv({
+    strict: false,
+    validateFormats: false,
+  }).compile(ddSchema_v2_2)
+
   test('read and validate workflow', async () => {
     const workflowContent = await readFile(
-      '.github/workflows/automated-testing.yml',
-      { encoding: 'utf8' }
+      '.github/workflows/v2.2-automated-testing.yml',
+      { encoding: 'utf8' },
     )
     const parsedWorkflow = _.last(
-      YAML.parse(workflowContent)?.jobs?.['automated-testing']?.steps
+      YAML.parse(workflowContent)?.jobs?.['automated-testing']?.steps,
     )?.with
 
     core.__setInputsObject(parsedWorkflow)
diff --git a/action.yml b/action.yml
index e27ce70..2cbcc2e 100644
--- a/action.yml
+++ b/action.yml
@@ -1,7 +1,7 @@
 ---
 name: 'Datadog Service Catalog Metadata Provider'
 description: 'This is an action which allows you to provide your Datadog Service Catalog metadata to the Datadog Service Catalog'
-author: 'Mike Stemle '
+author: 'Mike Stemle '
 branding:
   icon: 'monitor'
   color: 'purple'
diff --git a/lib/fieldMappings.cjs b/lib/fieldMappings.cjs
index cdce219..70df7e2 100644
--- a/lib/fieldMappings.cjs
+++ b/lib/fieldMappings.cjs
@@ -1,7 +1,7 @@
 /**
  * @file This file contains the mappings between the GitHub Actions inputs and the Datadog Service Catalog schema.
  * @module lib/fieldMappings
- * @author Mike Stemle
+ * @author Mike Stemle
 **/
 
 const core = require('@actions/core')
@@ -76,8 +76,8 @@ const versionCompatibilityError =
   (field, chosenVersion, validVersions) =>
   (_input) =>
     core.setFailed(
       `Sorry, but the «${field}» field is not avaiable in version ${chosenVersion} of the Datadog Service Catalog schema; this field is only available in version(s): ${validVersions.join(
-        ','
-      )}`
+        ',',
+      )}`,
     )
 
 /**
@@ -90,15 +90,15 @@ const mappings = {
   'schema-version': useSharedMappings(
     ['v2', 'v2.1', 'v2.2'],
-    mapToUsing('schema-version', (input, value) => ({
+    mapToUsing('schema-version', (_, value) => ({
       // We default to `v2` because later versions should specify the schema version.
       'schema-version': value ?? 'v2',
-    }))
+    })),
   ),
 
   'service-name': useSharedMappings(
     ['v2', 'v2.1', 'v2.2'],
-    mapToUsing('dd-service', passThru)
+    mapToUsing('dd-service', passThru),
   ),
 
   team: useSharedMappings(['v2', 'v2.1', 'v2.2'], mapToUsing('team', passThru)),
 
@@ -108,7 +108,7 @@ const mappings = {
     {
       v2: versionCompatibilityError('application', 'v2', ['v2.1']),
     },
-    useSharedMappings(['v2.1', 'v2.2'], mapToUsing('application', passThru))
+    useSharedMappings(['v2.1', 'v2.2'], mapToUsing('application', passThru)),
   ),
 
   // New in v2.1
@@ -116,7 +116,7 @@ const mappings = {
     {
       v2: versionCompatibilityError('description', 'v2', ['v2.1']),
     },
-    useSharedMappings(['v2.1', 'v2.2'], mapToUsing('description', passThru))
+    useSharedMappings(['v2.1', 'v2.2'], mapToUsing('description', passThru)),
   ),
 
   // New in v2.1
@@ -124,7 +124,7 @@ const mappings = {
     {
       v2: versionCompatibilityError('tier', 'v2', ['v2.1']),
     },
-    useSharedMappings(['v2.1', 'v2.2'], mapToUsing('tier', passThru))
+    useSharedMappings(['v2.1', 'v2.2'], mapToUsing('tier', passThru)),
   ),
 
   // New in v2.1
@@ -132,7 +132,7 @@ const mappings = {
     {
       v2: versionCompatibilityError('lifecycle', 'v2', ['v2.1']),
     },
-    useSharedMappings(['v2.1', 'v2.2'], mapToUsing('lifecycle', passThru))
+    useSharedMappings(['v2.1', 'v2.2'], mapToUsing('lifecycle', passThru)),
   ),
 
   // New in v2.2
@@ -151,7 +151,7 @@ const mappings = {
 
   contacts: useSharedMappings(
     ['v2', 'v2.1', 'v2.2'],
-    mapToUsing('contacts', arrayYamlParse)
+    mapToUsing('contacts', arrayYamlParse),
   ),
 
   links: Object.assign(
@@ -159,13 +159,13 @@ const mappings = {
       v2: (input) => ({
         links: forceArray(expandObjectInputs(input)).map((x) =>
           // v2 doesn't have a provider field
-          _.omit(x, ['provider'])
+          _.omit(x, ['provider']),
         ),
       }),
     },
     useSharedMappings(['v2.1', 'v2.2'], (input) => ({
       links: forceArray(expandObjectInputs(input)),
-    }))
+    })),
   ),
 
   // This tags setup is a little hairy, but the biggest thing
@@ -179,17 +179,17 @@ const mappings = {
           _.head(_.toPairs(entry)).map((x) =>
             // This check is so that we trim strings, but don't break
             // numbers or boolean values.
-            typeof x === 'string' ? x.trim() : x
+            typeof x === 'string' ? x.trim() : x,
           ),
-          ':'
+          ':',
         )
-        : entry
+        : entry,
     ),
   })),
 
   integrations: useSharedMappings(
     ['v2', 'v2.1', 'v2.2'],
-    mapToUsing('integrations', objectYamlParse)
+    mapToUsing('integrations', objectYamlParse),
   ),
 
   docs: Object.assign(
@@ -198,8 +198,8 @@ const mappings = {
     },
     useSharedMappings(
       ['v2.1', 'v2.2'],
-      versionCompatibilityError('docs', 'v2.1', ['v2'])
-    )
+      versionCompatibilityError('docs', 'v2.1', ['v2']),
+    ),
   ),
 
   repos: Object.assign(
@@ -208,8 +208,8 @@ const mappings = {
     },
     useSharedMappings(
       ['v2.1', 'v2.2'],
-      versionCompatibilityError('repos', 'v2.1', ['v2'])
-    )
+      versionCompatibilityError('repos', 'v2.1', ['v2']),
+    ),
   ),
 
   'ci-pipeline-fingerprints': {
@@ -222,7 +222,7 @@ const mappings = {
 
   extensions: useSharedMappings(
     ['v2', 'v2.1', 'v2.2'],
-    mapToUsing('extensions', simpleYamlParse)
+    mapToUsing('extensions', simpleYamlParse),
   ),
 }
 Object.freeze(mappings)
@@ -261,16 +261,16 @@ const convenienceMappings = {
     incorporateConvenienceMapping(
       { contact: input, type: 'email' },
       doc,
-      'contacts'
-    )
+      'contacts',
+    ),
   ),
 
   slack: useSharedMappings(['v2', 'v2.1', 'v2.2'], (input, doc) =>
     incorporateConvenienceMapping(
       { contact: input, type: 'slack' },
       doc,
-      'contacts'
-    )
+      'contacts',
+    ),
   ),
 
   // These fields map into `repos` list in the registry document for v2, and into the `links` list in the registry document for v2.1.
@@ -280,16 +280,16 @@ const convenienceMappings = {
         incorporateConvenienceMapping(
           { name: 'Repo', url: input },
           doc,
-          'repos'
+          'repos',
         ),
     },
     useSharedMappings(['v2.1', 'v2.2'], (input, doc) =>
       incorporateConvenienceMapping(
         { name: 'Repo', type: 'repo', url: input },
         doc,
-        'links'
-      )
-    )
+        'links',
+      ),
+    ),
   ),
 
   // These fields map into `integrations` in the registry document.
@@ -297,8 +297,8 @@ const convenienceMappings = {
     incorporateConvenienceMappingToObject(
       { opsgenie: { 'service-url': input } },
       doc,
-      'integrations'
-    )
+      'integrations',
+    ),
   ),
   pagerduty: Object.assign(
     {
@@ -306,16 +306,16 @@ const convenienceMappings = {
       incorporateConvenienceMappingToObject(
         { pagerduty: input },
         doc,
-        'integrations'
+        'integrations',
       ),
     },
     useSharedMappings(['v2.1', 'v2.2'], (input, doc) =>
       incorporateConvenienceMappingToObject(
         { pagerduty: { 'service-url': input } },
         doc,
-        'integrations'
-      )
-    )
+        'integrations',
+      ),
+    ),
   ),
 }
 convenienceMappings['slack-support-channel'] = convenienceMappings.slack
diff --git a/lib/input-expander.cjs b/lib/input-expander.cjs
index 70f00e4..eb21cfe 100644
--- a/lib/input-expander.cjs
+++ b/lib/input-expander.cjs
@@ -4,7 +4,7 @@
  * @file This module contains all of the functions which expand scalar
  *       input from GitHub Actions into hydrated structures for the DD API.
  *
- * @author Mike Stemle
+ * @author Mike Stemle
 */
 
 // This is my little debug function. Please don't remove.
@@ -94,7 +94,7 @@ const _deserializeNestedStrings = (input) => {
     return Object.assign(
       ...Object.keys(input).map((x) => ({
         [x]: deserializeNestedStrings(input[x]),
-      }))
+      })),
     )
   }
 }
diff --git a/lib/input-to-registry-document.cjs b/lib/input-to-registry-document.cjs
index 50da0a8..b3293dc 100644
--- a/lib/input-to-registry-document.cjs
+++ b/lib/input-to-registry-document.cjs
@@ -5,7 +5,7 @@
  * @requires path
 * @requires @actions/core
 * @requires lodash
- * @author Mike Stemle
+ * @author Mike Stemle
 **/
 
 const fs = require('fs')
diff --git a/lib/input-validation.cjs b/lib/input-validation.cjs
index 0baf53f..050ead0 100644
--- a/lib/input-validation.cjs
+++ b/lib/input-validation.cjs
@@ -2,7 +2,7 @@
  * @file This file contains the input validation functions.
  * @module lib/input-validation
  * @see lib/input-expander
- * @author Mike Stemle
+ * @author Mike Stemle
 **/
 
 /**
diff --git a/lib/org-rules.cjs b/lib/org-rules.cjs
index 95852b3..9b3802c 100644
--- a/lib/org-rules.cjs
+++ b/lib/org-rules.cjs
@@ -1,7 +1,7 @@
 /**
  * @file This file contains the functions which apply the organization rules to service catalog entries.
  * @module lib/org-rules
- * @author Mike Stemle
+ * @author Mike Stemle
 **/
 
 const YAML = require('yaml')
@@ -31,12 +31,12 @@ const ghHandle = async (token = undefined) =>
   Promise.resolve()
     .then(
       () =>
-        token || core.getInput('github-token') || process.env['GITHUB_TOKEN']
+        token || core.getInput('github-token') || process.env['GITHUB_TOKEN'],
     )
     .then((token) =>
       !!token && token.length > 0
         ? github.getOctokit(token, {})
-        : core.warning('No GitHub token found, org rules cannot be applied.')
+        : core.warning('No GitHub token found, org rules cannot be applied.'),
     )
 
 /**
@@ -53,7 +53,7 @@ const currentOrg = (gh = undefined) => {
   } catch (err) {
     core.error(`Unable to determine current organization or owner: ${err}`)
     core.setFailed(
-      'This GitHub Actions environment does not have a valid context.'
+      'This GitHub Actions environment does not have a valid context.',
     )
   }
 
@@ -71,7 +71,7 @@ const currentOrg = (gh = undefined) => {
  */
 const fetchRemoteRules = async (
   gh = undefined,
-  rulesFileName = DEFAULT_RULES_NAME
+  rulesFileName = DEFAULT_RULES_NAME,
 ) => {
   try {
     const octokit = gh ?? (await ghHandle())
@@ -104,7 +104,7 @@ const fetchRemoteRules = async (
     })
     if (!data) {
       core.debug(
-        `The Org Rules File "${rulesFileName}" in the «${orgName}/.github» repository appears to contain no content.`
+        `The Org Rules File "${rulesFileName}" in the «${orgName}/.github» repository appears to contain no content.`,
       )
       return defaultPayload
     }
@@ -115,21 +115,21 @@ const fetchRemoteRules = async (
         `Org Rules File "${rulesFileName}" contents: ${JSON.stringify(
           data,
           undefined,
-          2
-        )}`
+          2,
+        )}`,
       )
     }
 
     // Start parsing the rules file.
     const orgRulesFileContents = YAML.parse(
       decodeURIComponent(
-        Buffer.from(data.content, data.encoding ?? 'base64').toString()
-      )
+        Buffer.from(data.content, data.encoding ?? 'base64').toString(),
+      ),
     )
 
     if (!orgRulesFileContents || typeof orgRulesFileContents !== 'object') {
       return core.setFailed(
-        `Org Rules File "${rulesFileName}" failed to parse.`
+        `Org Rules File "${rulesFileName}" failed to parse.`,
       )
     }
 
@@ -138,7 +138,7 @@ const fetchRemoteRules = async (
       orgName?.toLocaleLowerCase()
     ) {
       return core.warning(
-        `Org ${orgName} does not match the org in the Org Rules File. This isn't fatal, but it might be an indication that you're using the wrong Org Rules File.`
+        `Org ${orgName} does not match the org in the Org Rules File. This isn't fatal, but it might be an indication that you're using the wrong Org Rules File.`,
       )
     }
 
@@ -165,7 +165,7 @@ const selectionForTags = (tags, serviceDefinition) => {
   for (const [tagKey, tagValue] of Object.entries(tags)) {
     const foundTag =
       serviceDefinition?.tags?.find((tag) =>
-        _.startsWith(tag.toLocaleLowerCase(), tagKey.toLocaleLowerCase())
+        _.startsWith(tag.toLocaleLowerCase(), tagKey.toLocaleLowerCase()),
       ) || undefined
 
     if (!foundTag) {
@@ -192,7 +192,7 @@ const caseSensitiveFieldListMatch =
     }
     if (!Array.isArray(value)) {
      core.warning(
-        `Invalid value for ${fieldName}: ${value}; this should be either 'all' or an array of acceptable values.`
+        `Invalid value for ${fieldName}: ${value}; this should be either 'all' or an array of acceptable values.`,
      )
    }
 
@@ -247,8 +247,8 @@ const determineApplicabilityOfRule = (rule, serviceDescription) => {
    if (!selectionCheckers[key]) {
      throw new Error(
        `Field "${key}" is not a valid for selection criteria, only ${selectableFields.join(
-          ', '
-        )} are supported.`
+          ', ',
+        )} are supported.`,
      )
    }
 
@@ -269,7 +269,7 @@ const determineApplicabilityOfRule = (rule, serviceDescription) => {
  */
 const makeComplianceCheck_countOnly = (
   locationOfCountInRequirement,
-  locationOfCountInServiceDefinition
+  locationOfCountInServiceDefinition,
 ) => {
   const countField_req = (requirement) =>
     _.get(requirement, locationOfCountInRequirement, undefined)
@@ -304,7 +304,7 @@ const makeComplianceCheck_valueMatchAndCount = (
   locationOfCountInRequirement,
   locationOfFieldListInServiceDefinition,
   locationOfMatchInField,
-  locationOfCountInServiceDefinition
+  locationOfCountInServiceDefinition,
 ) => {
   const matchField_req = (requirement) =>
     _.get(requirement, locationOfMatchInRequirement, undefined)
@@ -314,7 +314,7 @@ const makeComplianceCheck_valueMatchAndCount = (
     _.get(field, locationOfMatchInField, undefined)
   const countEnforcer = makeComplianceCheck_countOnly(
     locationOfCountInRequirement,
-    locationOfCountInServiceDefinition
+    locationOfCountInServiceDefinition,
   )
 
   return (requirement, serviceDefinition) => {
@@ -330,7 +330,7 @@ const makeComplianceCheck_valueMatchAndCount = (
     if (
       permittedValues.length &&
       !sd_list.find((value) =>
-        _.includes(permittedValues, matchField_sd(value).toLocaleLowerCase())
+        _.includes(permittedValues, matchField_sd(value).toLocaleLowerCase()),
       )
     ) {
       return false
@@ -393,14 +393,14 @@ const checkTagsCompliance = (requirement, serviceDescription) => {
     ...(serviceDescription?.tags || []).map((tag) => {
       const [tagKey, tagValue] = tag.toLocaleLowerCase().split(/\s*?:\s*?/)
       return { [tagKey]: tagValue }
-    })
+    }),
   )
 
   const nonCompliantTags = _.filter(requiredTags, (tagObj) => {
     const [tagName, tagValue] = _.head(Object.entries(tagObj))
 
     validTagValues = (Array.isArray(tagValue) ? tagValue : [tagValue]).map(
-      (x) => (typeof x === 'string' ? x.toLocaleLowerCase() : x)
+      (x) => (typeof x === 'string' ? x.toLocaleLowerCase() : x),
    )
 
    return tagValue === 'ANY'
@@ -489,21 +489,21 @@ const determineRuleCompliance = (rule, serviceDescription) => {
       'count',
       'links',
       'type',
-      'links.length'
+      'links.length',
     ),
     docs: makeComplianceCheck_valueMatchAndCount(
       'provider',
       'count',
       'docs',
       'provider',
-      'docs.length'
+      'docs.length',
     ),
     contacts: makeComplianceCheck_valueMatchAndCount(
       'type',
       'count',
       'contacts',
       'type',
-      'contacts.length'
+      'contacts.length',
     ),
     repos: makeComplianceCheck_countOnly('count', 'repos.length'),
   }
@@ -512,7 +512,7 @@ const determineRuleCompliance = (rule, serviceDescription) => {
   _.each(rule?.requirements, (reqValue, reqName) => {
     if (!complianceCheckers[reqName]) {
       throw new Error(
-        `Field "${reqName}" is not available for requirements. Please see the documentation for available fields.`
+        `Field "${reqName}" is not available for requirements. Please see the documentation for available fields.`,
       )
     }
 
@@ -526,8 +526,8 @@ const determineRuleCompliance = (rule, serviceDescription) => {
     `Fields out of compliance: ${JSON.stringify(
       fieldsOutOfCompliance,
       undefined,
-      2
-    )}`
+      2,
+    )}`,
   )
 
   return fieldsOutOfCompliance.length === 0
@@ -547,7 +547,7 @@ const determineRuleCompliance = (rule, serviceDescription) => {
 const applyOrgRules = (serviceDescription, orgRules) => {
   const rules = orgRules?.rules || []
   const brokenRules = rules.filter(
-    (rule) => !determineRuleCompliance(rule, serviceDescription)
+    (rule) => !determineRuleCompliance(rule, serviceDescription),
   )
 
   if (brokenRules.length > 0) {
@@ -555,8 +555,8 @@ const determineRuleCompliance = (rule, serviceDescription) => {
       `The service description violates the following rules: ${JSON.stringify(
         brokenRules,
         undefined,
-        2
-      )}`
+        2,
+      )}`,
     )
     return false
   }
@@ -579,8 +579,8 @@ const fetchAndApplyOrgRules = (serviceDescription) =>
     .then((gh) =>
       fetchRemoteRules(
        gh,
-        core.getInput('org-rules-file') || DEFAULT_RULES_NAME
-      )
+        core.getInput('org-rules-file') || DEFAULT_RULES_NAME,
+      ),
    )
    .then((remoteOrgRules) => {
      if (!remoteOrgRules) {
@@ -590,8 +590,8 @@ const fetchAndApplyOrgRules = (serviceDescription) =>
           `Rules found for the organization "${currentOrg()}": ${JSON.stringify(
             remoteOrgRules,
             undefined,
-            2
-          )}`
+            2,
+          )}`,
         )
       }
       return remoteOrgRules
@@ -600,8 +600,8 @@ const fetchAndApplyOrgRules = (serviceDescription) =>
       Promise.resolve(
         !!remoteOrgRules
          ? applyOrgRules(serviceDescription, remoteOrgRules)
-          : true
-      )
+          : true,
+      ),
     )
     .catch((err) => {
       core.warning('Failing with error: ' + err)
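
For reference, below is a minimal sketch of how a downstream repository might invoke this action against the v2.2 schema once this change lands. It mirrors the `with:` inputs exercised by the test workflows in this diff; the branch name, service/team/contact values, Slack URL, and the `@main` ref are placeholders only — pin whichever release tag your organization actually uses, and keep the Datadog keys in repository or organization secrets.

# Hypothetical consumer workflow — all org-specific values below are placeholders.
name: Service Catalog Metadata
on:
  push:
    branches: [main]

jobs:
  service-catalog:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - name: Publish service metadata to Datadog
        # Pin a real release tag here; @main is only for illustration.
        uses: arcxp/datadog-service-catalog-metadata-provider@main
        with:
          schema-version: v2.2
          github-token: ${{ secrets.GITHUB_TOKEN }}
          datadog-hostname: api.us5.datadoghq.com
          datadog-key: ${{ secrets.DATADOG_API_KEY }}
          datadog-app-key: ${{ secrets.DATADOG_APPLICATION_KEY }}
          service-name: my-service
          team: My Team
          email: my-team@example.com
          slack-support-channel: 'https://myorg.slack.com/archives/C0000000000'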