diff --git a/.github/workflows/api-dotnetcore.yml b/.github/workflows/api-dotnetcore.yml
index 0249fc5f72..7201b33fbc 100644
--- a/.github/workflows/api-dotnetcore.yml
+++ b/.github/workflows/api-dotnetcore.yml
@@ -9,7 +9,7 @@ on:
jobs:
# JOB to run change detection
check-changes:
- runs-on: ubuntu-latest
+ runs-on: ubuntu-22.04
# Set job outputs to values from filter step
outputs:
backend: ${{ steps.filter.outputs.backend }}
@@ -24,11 +24,19 @@ jobs:
build-backend:
needs: check-changes
- runs-on: ubuntu-latest
+ runs-on: ubuntu-22.04
if: ${{ needs.check-changes.outputs.backend == 'true' }}
strategy:
matrix:
- services: [{directory: ./source/backend/api, solution: 'Pims.sln'}, {directory: ./source/backend/proxy, solution: 'Proxy.sln'}]
+ services:
+ [
+ { directory: ./source/backend/api, solution: "Pims.sln" },
+ { directory: ./source/backend/proxy, solution: "Proxy.sln" },
+ {
+ directory: ./source/backend/scheduler,
+ solution: "Scheduler.sln",
+ },
+ ]
env:
working-directory: ${{ matrix.services.directory }}
solution-name: ${{ matrix.services.solution }}
@@ -147,7 +155,7 @@ jobs:
sonarHostname: ${{secrets.SONAR_URL}}
post-build:
needs: build-backend
- runs-on: ubuntu-latest
+ runs-on: ubuntu-22.04
env:
working-directory: ./source/backend
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
diff --git a/.github/workflows/app-logging.yml b/.github/workflows/app-logging.yml
index da0d235d23..a98977ee74 100644
--- a/.github/workflows/app-logging.yml
+++ b/.github/workflows/app-logging.yml
@@ -6,14 +6,11 @@ on:
pull_request:
branches: [master, test, dev]
-
jobs:
-
build:
-
name: build-logging
if: github.event.ref == 'refs/heads/master' || github.event.ref == 'refs/heads/test' || github.event.ref == 'refs/heads/dev'
- runs-on: ubuntu-latest
+ runs-on: ubuntu-22.04
env:
SLEEP_TIME: 60
STORAGE_TYPE: Amazon_S3
@@ -32,44 +29,44 @@ jobs:
working-directory: ./openshift/4.0/templates/Logging
steps:
- - uses: actions/checkout@v4
- - name: Set ENV variable
- run: |
- if [[ ${{github.event.ref}} == 'refs/heads/test' ]]; then
- echo "FRONTEND_APP_NAME=pims-app-test" >> $GITHUB_ENV
- echo "API_NAME=pims-api-test" >> $GITHUB_ENV
- elif [[ ${{github.event.ref}} == 'refs/heads/master' ]]; then
- echo "FRONTEND_APP_NAME=pims-app-uat" >> $GITHUB_ENV
- echo "API_NAME=pims-api-uat" >> $GITHUB_ENV
- echo "PROJECT_NAMESPACE=3cd915-test" >> $GITHUB_ENV
- else
- echo "FRONTEND_APP_NAME=pims-app" >> $GITHUB_ENV
- echo "API_NAME=pims-api" >> $GITHUB_ENV
- fi
- - name: Build the pims-logging docker-compose stack
- run: docker-compose -f docker-compose.yml up -d
- working-directory: ${{env.working-directory}}
- - name: Sleep for 180 seconds
- uses: jakejarvis/wait-action@master
- with:
- time: '180s'
- - name: Check Extracted Logs
- run: |
- docker cp pims-logging:/logging/. .
- exitcode=$(docker inspect pims-logging --format='{{.State.ExitCode}}')
- if [[ "$(ls -A pims* 2>/dev/null | wc -l)" != "0" ]]; then
- ls -A pims* && rm -f pims*
- elif [[ $exitcode == 0 ]]; then
- echo "Info: No log captured between sleep time"
- else
- echo "There's an error capturing pims logs" && exit 1
- fi
- - name: Check running containers
- run: docker ps -a
- - name: Check pims-logging logs
- if: always()
- run: docker logs pims-logging
- - name: Stop containers
- if: always()
- run: docker-compose -f "docker-compose.yml" down
- working-directory: ${{env.working-directory}}
+ - uses: actions/checkout@v4
+ - name: Set ENV variable
+ run: |
+ if [[ ${{github.event.ref}} == 'refs/heads/test' ]]; then
+ echo "FRONTEND_APP_NAME=pims-app-test" >> $GITHUB_ENV
+ echo "API_NAME=pims-api-test" >> $GITHUB_ENV
+ elif [[ ${{github.event.ref}} == 'refs/heads/master' ]]; then
+ echo "FRONTEND_APP_NAME=pims-app-uat" >> $GITHUB_ENV
+ echo "API_NAME=pims-api-uat" >> $GITHUB_ENV
+ echo "PROJECT_NAMESPACE=3cd915-test" >> $GITHUB_ENV
+ else
+ echo "FRONTEND_APP_NAME=pims-app" >> $GITHUB_ENV
+ echo "API_NAME=pims-api" >> $GITHUB_ENV
+ fi
+ - name: Build the pims-logging docker-compose stack
+ run: docker-compose -f docker-compose.yml up -d
+ working-directory: ${{env.working-directory}}
+ - name: Sleep for 180 seconds
+ uses: jakejarvis/wait-action@master
+ with:
+ time: "180s"
+ - name: Check Extracted Logs
+ run: |
+ docker cp pims-logging:/logging/. .
+ exitcode=$(docker inspect pims-logging --format='{{.State.ExitCode}}')
+ if [[ "$(ls -A pims* 2>/dev/null | wc -l)" != "0" ]]; then
+ ls -A pims* && rm -f pims*
+ elif [[ $exitcode == 0 ]]; then
+ echo "Info: No log captured between sleep time"
+ else
+ echo "There's an error capturing pims logs" && exit 1
+ fi
+ - name: Check running containers
+ run: docker ps -a
+ - name: Check pims-logging logs
+ if: always()
+ run: docker logs pims-logging
+ - name: Stop containers
+ if: always()
+ run: docker-compose -f "docker-compose.yml" down
+ working-directory: ${{env.working-directory}}
diff --git a/.github/workflows/app-react.yml b/.github/workflows/app-react.yml
index 87453e04f3..7ec1afe7d7 100644
--- a/.github/workflows/app-react.yml
+++ b/.github/workflows/app-react.yml
@@ -9,7 +9,7 @@ on:
jobs:
# JOB to run change detection
check-changes:
- runs-on: ubuntu-latest
+ runs-on: ubuntu-22.04
# Set job outputs to values from filter step
outputs:
frontend: ${{ steps.filter.outputs.frontend }}
@@ -25,7 +25,7 @@ jobs:
build-frontend:
needs: check-changes
if: ${{ needs.check-changes.outputs.frontend == 'true' }}
- runs-on: ubuntu-latest
+ runs-on: ubuntu-22.04
env:
CI: true
working-directory: ./source/frontend
diff --git a/.github/workflows/ci-cd-pims-dev.yml b/.github/workflows/ci-cd-pims-dev.yml
index 9a343c2439..fc879705e4 100644
--- a/.github/workflows/ci-cd-pims-dev.yml
+++ b/.github/workflows/ci-cd-pims-dev.yml
@@ -27,15 +27,12 @@ env:
DEPLOYMENT_NAMESPACE: "3cd915-dev"
on:
- pull_request_target:
- branches: [dev]
- types: [closed]
+ workflow_dispatch:
jobs:
ci-cd-start-notification:
- if: github.event.pull_request.merged == true
name: CI-CD Start Notification to Teams Channel
- runs-on: ubuntu-latest
+ runs-on: ubuntu-22.04
steps:
- name: Start notification to Teams Channel
uses: dragos-cojocari/ms-teams-notification@v1.0.2
@@ -49,7 +46,7 @@ jobs:
build-frontend:
name: Build frontend
needs: ci-cd-start-notification
- runs-on: ubuntu-latest
+ runs-on: ubuntu-22.04
steps:
- name: Checkout Source Code
uses: actions/checkout@v4
@@ -68,7 +65,7 @@ jobs:
build-api:
name: Build api
needs: ci-cd-start-notification
- runs-on: ubuntu-latest
+ runs-on: ubuntu-22.04
steps:
- name: Checkout Source Code
uses: actions/checkout@v4
@@ -83,11 +80,12 @@ jobs:
run: |
./openshift/4.0/player.sh build api -apply
./openshift/4.0/player.sh build proxy -apply
+ ./openshift/4.0/player.sh build scheduler -apply
deploy:
name: Deploy to OpenShift
needs: [build-frontend, build-api]
- runs-on: ubuntu-latest
+ runs-on: ubuntu-22.04
steps:
- name: Checkout Source Code
uses: actions/checkout@v4
@@ -116,6 +114,12 @@ jobs:
oc tag pims-proxy:latest-$DESTINATION pims-proxy:$DESTINATION
oc -n $DEPLOYMENT_NAMESPACE rollout restart deployment/pims-proxy-$DESTINATION
oc -n $DEPLOYMENT_NAMESPACE rollout status --timeout=600s deployment/pims-proxy-$DESTINATION
+ - name: Deploy scheduler microservice
+ shell: bash
+ run: |
+ oc tag pims-scheduler:latest-$DESTINATION pims-scheduler:$DESTINATION
+ oc -n $DEPLOYMENT_NAMESPACE rollout restart deployment/pims-scheduler-$DESTINATION
+ oc -n $DEPLOYMENT_NAMESPACE rollout status --timeout=600s deployment/pims-scheduler-$DESTINATION
- name: Deploy mayan
shell: bash
run: |
@@ -128,7 +132,7 @@ jobs:
database-upgrade:
name: Upgrade database
needs: [deploy]
- runs-on: ubuntu-latest
+ runs-on: ubuntu-22.04
steps:
- name: Checkout Source Code
uses: actions/checkout@v4
@@ -151,7 +155,7 @@ jobs:
mayan-sync:
name: sync mayan
needs: database-upgrade
- runs-on: ubuntu-latest
+ runs-on: ubuntu-22.04
steps:
- name: Checkout Source Code
uses: actions/checkout@v4
@@ -173,7 +177,7 @@ jobs:
keycloak-sync:
name: sync keycloak
needs: database-upgrade
- runs-on: ubuntu-latest
+ runs-on: ubuntu-22.04
steps:
- name: Checkout Source Code
uses: actions/checkout@v4
@@ -191,9 +195,8 @@ jobs:
ci-cd-end-notification:
name: CI-CD End Notification to Teams Channel
- runs-on: ubuntu-latest
+ runs-on: ubuntu-22.04
needs: keycloak-sync
- if: ${{ always() && github.event.pull_request.merged == true }}
steps:
- name: check workflow status
uses: martialonline/workflow-status@v4
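
Note that ci-cd-pims-dev.yml above now runs only on manual dispatch instead of on merged pull requests. A minimal sketch of triggering it from the GitHub CLI, assuming gh is authenticated against this repository and the workflow file exists on the target branch:

$ gh workflow run ci-cd-pims-dev.yml --ref dev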
diff --git a/.github/workflows/codecov-comment-pr.yml b/.github/workflows/codecov-comment-pr.yml
index e7b73d6545..987102e947 100644
--- a/.github/workflows/codecov-comment-pr.yml
+++ b/.github/workflows/codecov-comment-pr.yml
@@ -10,7 +10,7 @@ on:
jobs:
upload:
- runs-on: ubuntu-latest
+ runs-on: ubuntu-22.04
if: >
github.event.workflow_run.event == 'pull_request' &&
github.event.workflow_run.conclusion == 'success'
diff --git a/.github/workflows/credentials-comment-pr.yml b/.github/workflows/credentials-comment-pr.yml
index d214955d67..fb6a19921e 100644
--- a/.github/workflows/credentials-comment-pr.yml
+++ b/.github/workflows/credentials-comment-pr.yml
@@ -10,7 +10,7 @@ on:
jobs:
# this action will leave a comment in response to credential scans performed on pull requests
on-completed:
- runs-on: ubuntu-latest
+ runs-on: ubuntu-22.04
if: github.event.workflow_run.event == 'pull_request'
steps:
- uses: actions/checkout@v4
diff --git a/.github/workflows/credentials-scan.yml b/.github/workflows/credentials-scan.yml
index 7ecfb6cfb5..409877e8ab 100644
--- a/.github/workflows/credentials-scan.yml
+++ b/.github/workflows/credentials-scan.yml
@@ -10,7 +10,7 @@ on:
jobs:
credentials-scan:
- runs-on: ubuntu-latest
+ runs-on: ubuntu-22.04
steps:
- uses: actions/checkout@v4
diff --git a/.github/workflows/db-schma.yml b/.github/workflows/db-schma.yml
index 951b95de25..04fae0de53 100644
--- a/.github/workflows/db-schma.yml
+++ b/.github/workflows/db-schma.yml
@@ -11,7 +11,7 @@ on:
jobs:
build:
name: db-schema
- runs-on: ubuntu-latest
+ runs-on: ubuntu-22.04
permissions:
actions: read
contents: read
diff --git a/.github/workflows/deploy-prod-end.yml b/.github/workflows/deploy-prod-end.yml
index 12d22b02d2..6842681833 100644
--- a/.github/workflows/deploy-prod-end.yml
+++ b/.github/workflows/deploy-prod-end.yml
@@ -11,7 +11,7 @@ on:
jobs:
maintenance-page:
name: Hide the maintenance page
- runs-on: ubuntu-latest
+ runs-on: ubuntu-22.04
steps:
- name: Checkout Source Code
uses: actions/checkout@v4
@@ -31,7 +31,7 @@ jobs:
ci-cd-end-notification:
name: CI-CD End Notification to Teams Channel
- runs-on: ubuntu-latest
+ runs-on: ubuntu-22.04
needs: maintenance-page
steps:
- name: check workflow status
diff --git a/.github/workflows/deploy-prod-start-argo.yml b/.github/workflows/deploy-prod-start-argo.yml
index fc92bf5cbd..27694729d3 100644
--- a/.github/workflows/deploy-prod-start-argo.yml
+++ b/.github/workflows/deploy-prod-start-argo.yml
@@ -35,7 +35,7 @@ on:
jobs:
ci-cd-start-notification:
name: CI-CD Start Notification to Teams Channel
- runs-on: ubuntu-latest
+ runs-on: ubuntu-22.04
steps:
- name: Start notification to Teams Channel
uses: dragos-cojocari/ms-teams-notification@v1.0.2
@@ -48,7 +48,7 @@ jobs:
deploy:
name: Deploy frontend and api to OpenShift
- runs-on: ubuntu-latest
+ runs-on: ubuntu-22.04
needs: ci-cd-start-notification
steps:
- name: Checkout Source Code
@@ -94,7 +94,7 @@ jobs:
database-upgrade:
name: Upgrade database
needs: [deploy]
- runs-on: ubuntu-latest
+ runs-on: ubuntu-22.04
steps:
- name: Checkout Source Code
uses: actions/checkout@v4
@@ -117,7 +117,7 @@ jobs:
mayan-sync:
name: sync mayan
needs: database-upgrade
- runs-on: ubuntu-latest
+ runs-on: ubuntu-22.04
steps:
- name: Checkout Source Code
uses: actions/checkout@v4
@@ -139,7 +139,7 @@ jobs:
keycloak-sync:
name: sync keycloak
needs: database-upgrade
- runs-on: ubuntu-latest
+ runs-on: ubuntu-22.04
steps:
- name: Checkout Source Code
uses: actions/checkout@v4
diff --git a/.github/workflows/deploy-prod-start.yml b/.github/workflows/deploy-prod-start.yml
index e72d6c11a7..f0a1170762 100644
--- a/.github/workflows/deploy-prod-start.yml
+++ b/.github/workflows/deploy-prod-start.yml
@@ -34,7 +34,7 @@ on:
jobs:
ci-cd-start-notification:
name: CI-CD Start Notification to Teams Channel
- runs-on: ubuntu-latest
+ runs-on: ubuntu-22.04
steps:
- name: Start notification to Teams Channel
uses: dragos-cojocari/ms-teams-notification@v1.0.2
@@ -47,7 +47,7 @@ jobs:
deploy:
name: Deploy frontend and api to OpenShift
- runs-on: ubuntu-latest
+ runs-on: ubuntu-22.04
needs: ci-cd-start-notification
steps:
- name: Checkout Source Code
@@ -72,6 +72,8 @@ jobs:
run: |
[[ -z ${{github.event.inputs.OVERRIDE_VERSION}} ]] && RELEASE_VERSION=${{steps.previoustag.outputs.tag}}-master || RELEASE_VERSION=${{github.event.inputs.OVERRIDE_VERSION}}-master
RELEASE_TAG=$RELEASE_VERSION ./openshift/4.0/player.sh deploy api $DESTINATION -apply
+ RELEASE_TAG=$RELEASE_VERSION ./openshift/4.0/player.sh deploy proxy $DESTINATION -apply
+ RELEASE_TAG=$RELEASE_VERSION ./openshift/4.0/player.sh deploy scheduler $DESTINATION -apply
RELEASE_TAG=$RELEASE_VERSION ./openshift/4.0/player.sh deploy app $DESTINATION -apply
oc tag mayan-bcgov:$RELEASE_VERSION mayan-bcgov:master
@@ -82,7 +84,7 @@ jobs:
database-upgrade:
name: Upgrade database
needs: [deploy]
- runs-on: ubuntu-latest
+ runs-on: ubuntu-22.04
steps:
- name: Checkout Source Code
uses: actions/checkout@v4
@@ -105,7 +107,7 @@ jobs:
mayan-sync:
name: sync mayan
needs: database-upgrade
- runs-on: ubuntu-latest
+ runs-on: ubuntu-22.04
steps:
- name: Checkout Source Code
uses: actions/checkout@v4
@@ -123,11 +125,11 @@ jobs:
oc process -f ./openshift/4.0/templates/jobs/mayan-sync.yaml -p NAMESPACE=3cd915-prod -p TOKEN_URL=https://loginproxy.gov.bc.ca:443/auth/realms/standard/protocol/openid-connect/token -p CLIENT_ID=property-services-project-api-4380 -p MAYAN_SYNC_URL=https://pims-app-3cd915-prod.apps.silver.devops.gov.bc.ca/documents/sync/documenttype -p KEYCLOAK_SECRET_NAME=pims-api-sso | oc create -f - | grep -oP "(?<=\/)[^\s]*" | (read TASK_NAME; oc wait --for=condition=succeeded taskruns/$TASK_NAME --timeout=80s)
oc process -f ./openshift/4.0/templates/jobs/mayan-sync.yaml -p NAMESPACE=3cd915-prod -p TOKEN_URL=https://loginproxy.gov.bc.ca:443/auth/realms/standard/protocol/openid-connect/token -p CLIENT_ID=property-services-project-api-4380 -p MAYAN_SYNC_URL=https://pims-app-3cd915-prod.apps.silver.devops.gov.bc.ca/api/documents/sync/mayan -p KEYCLOAK_SECRET_NAME=pims-api-sso | oc create -f - | grep -oP "(?<=\/)[^\s]*" | (read TASK_NAME; oc wait --for=condition=succeeded taskruns/$TASK_NAME --timeout=80s)
-## Call the tekton pipeline that executes the keycloak sync. Dependent on the pims-api being accessible. Can run in parallel with the mayan sync.
+ ## Call the tekton pipeline that executes the keycloak sync. Dependent on the pims-api being accessible. Can run in parallel with the mayan sync.
keycloak-sync:
name: sync keycloak
needs: database-upgrade
- runs-on: ubuntu-latest
+ runs-on: ubuntu-22.04
steps:
- name: Checkout Source Code
uses: actions/checkout@v4
@@ -142,4 +144,3 @@ jobs:
shell: bash
run: |
oc process -f ./openshift/4.0/templates/jobs/keycloak-sync-pipeline-run.yaml -p ASPNETCORE_ENVIRONMENT=$ASPNETCORE_ENVIRONMENT -p NAMESPACE=3cd915-prod -p BRANCH=$DESTINATION -p API_URL=http://pims-api:8080/api | oc create -f - | grep -oP "(?<=\/)[^\s]*" | (read PIPELINE_NAME; oc wait --for=condition=succeeded pipelineruns/$PIPELINE_NAME --timeout=500s)
-
diff --git a/.github/workflows/image-scan-analysis.yml b/.github/workflows/image-scan-analysis.yml
index e31f88a92b..7d116332de 100644
--- a/.github/workflows/image-scan-analysis.yml
+++ b/.github/workflows/image-scan-analysis.yml
@@ -12,7 +12,7 @@ jobs:
nodejs-base-image:
name: nodejs-base-image
if: github.event.pull_request.head.repo.full_name == github.repository
- runs-on: ubuntu-latest
+ runs-on: ubuntu-22.04
permissions:
actions: read
contents: read
@@ -75,7 +75,7 @@ jobs:
# this action will leave a comment in response to vulnerability scans performed on container image
if: always() && needs.nodejs-base-image.result == 'failure' && github.event_name == 'pull_request' && github.event.pull_request.head.repo.full_name == github.repository
needs: nodejs-base-image
- runs-on: ubuntu-latest
+ runs-on: ubuntu-22.04
env:
working-directory: ./source/frontend
image-name: nodejs-14-ubi8
@@ -145,7 +145,7 @@ jobs:
nginx-base-image:
name: nginx-base-image
if: github.event.pull_request.head.repo.full_name == github.repository
- runs-on: ubuntu-latest
+ runs-on: ubuntu-22.04
permissions:
actions: read
contents: read
@@ -205,7 +205,7 @@ jobs:
# this action will leave a comment in response to vulnerability scans performed on container image
if: always() && needs.nginx-base-image.result == 'failure' && github.event_name == 'pull_request' && github.event.pull_request.head.repo.full_name == github.repository
needs: nginx-base-image
- runs-on: ubuntu-latest
+ runs-on: ubuntu-22.04
env:
working-directory: ./source/frontend
image-name: nginx-base
@@ -276,7 +276,7 @@ jobs:
if: always() && github.event.pull_request.head.repo.full_name == github.repository
needs: [nodejs-base-image, nginx-base-image]
name: pims-frontend
- runs-on: ubuntu-latest
+ runs-on: ubuntu-22.04
permissions:
actions: read
contents: read
@@ -338,7 +338,7 @@ jobs:
# this action will leave a comment in response to credential scans performed on pull requests
if: always() && needs.build_frontend.result == 'failure' && github.event_name == 'pull_request' && github.event.pull_request.head.repo.full_name == github.repository
needs: build_frontend
- runs-on: ubuntu-latest
+ runs-on: ubuntu-22.04
env:
working-directory: ./source/frontend
image-name: pims-app
@@ -408,7 +408,7 @@ jobs:
aspnet-runtime:
name: aspnet-runtime
if: github.event.pull_request.head.repo.full_name == github.repository
- runs-on: ubuntu-latest
+ runs-on: ubuntu-22.04
permissions:
actions: read
contents: read
@@ -468,7 +468,7 @@ jobs:
# this action will leave a comment in response to credential scans performed on pull requests
if: always() && needs.aspnet-runtime.result == 'failure' && github.event_name == 'pull_request' && github.event.pull_request.head.repo.full_name == github.repository
needs: aspnet-runtime
- runs-on: ubuntu-latest
+ runs-on: ubuntu-22.04
env:
working-directory: ./openshift/4.0/templates/base-images/dotnet50
image-name: dotnet-aspnet
@@ -538,7 +538,7 @@ jobs:
dotnet-sdk:
name: dotnet5-sdk
if: github.event.pull_request.head.repo.full_name == github.repository
- runs-on: ubuntu-latest
+ runs-on: ubuntu-22.04
permissions:
actions: read
contents: read
@@ -598,7 +598,7 @@ jobs:
# this action will leave a comment in response to credential scans performed on pull requests
if: always() && needs.dotnet-sdk.result == 'failure' && github.event_name == 'pull_request' && github.event.pull_request.head.repo.full_name == github.repository
needs: dotnet-sdk
- runs-on: ubuntu-latest
+ runs-on: ubuntu-22.04
env:
working-directory: ./openshift/4.0/templates/base-images/dotnet50
image-name: dotnet-sdk
@@ -669,7 +669,7 @@ jobs:
if: always() && github.event.pull_request.head.repo.full_name == github.repository
needs: [aspnet-runtime, dotnet-sdk]
name: pims-backend
- runs-on: ubuntu-latest
+ runs-on: ubuntu-22.04
permissions:
actions: read
contents: read
@@ -729,7 +729,7 @@ jobs:
# this action will leave a comment in response to credential scans performed on pull requests
if: always() && needs.build_backend.result == 'failure' && github.event_name == 'pull_request' && github.event.pull_request.head.repo.full_name == github.repository
needs: build_backend
- runs-on: ubuntu-latest
+ runs-on: ubuntu-22.04
env:
working-directory: ./source/backend
image-name: pims-api
@@ -800,7 +800,7 @@ jobs:
if: always() && github.event.pull_request.head.repo.full_name == github.repository
needs: [build_frontend, build_backend]
name: logging
- runs-on: ubuntu-latest
+ runs-on: ubuntu-22.04
permissions:
actions: read
contents: read
@@ -861,7 +861,7 @@ jobs:
# this action will leave a comment in response to Image scans performed on pull requests
if: always() && needs.pims_logging.result == 'failure' && github.event_name == 'pull_request' && github.event.pull_request.head.repo.full_name == github.repository
needs: pims_logging
- runs-on: ubuntu-latest
+ runs-on: ubuntu-22.04
env:
working-directory: ./openshift/4.0/templates/jenkins-slaves/jenkins-slave-zap
image-name: pims-logging
@@ -930,7 +930,7 @@ jobs:
jenkins-agent-dotnet:
if: github.event.pull_request.head.repo.full_name == github.repository
name: jenkins-agent-dotnet
- runs-on: ubuntu-latest
+ runs-on: ubuntu-22.04
permissions:
actions: read
contents: read
@@ -992,7 +992,7 @@ jobs:
# this action will leave a comment in response to credential scans performed on pull requests
if: always() && needs.jenkins-agent-dotnet.result == 'failure' && github.event_name == 'pull_request' && github.event.pull_request.head.repo.full_name == github.repository
needs: jenkins-agent-dotnet
- runs-on: ubuntu-latest
+ runs-on: ubuntu-22.04
env:
working-directory: ./openshift/4.0/templates/jenkins-slaves/jenkins-slave-zap
image-name: jenkins-agent-dotnet
diff --git a/.github/workflows/integration-test.yml b/.github/workflows/integration-test.yml
index 3f6d9ce617..ae50f5e065 100644
--- a/.github/workflows/integration-test.yml
+++ b/.github/workflows/integration-test.yml
@@ -19,7 +19,7 @@ on:
jobs:
smoke-test:
- runs-on: ubuntu-latest
+ runs-on: ubuntu-22.04
steps:
- uses: actions/checkout@v4
diff --git a/.github/workflows/keycloak-sync.yml b/.github/workflows/keycloak-sync.yml
index c6033e1920..27cfc2a96a 100644
--- a/.github/workflows/keycloak-sync.yml
+++ b/.github/workflows/keycloak-sync.yml
@@ -13,7 +13,7 @@ on:
jobs:
sync-keycloak:
name: Sync Keycloak
- runs-on: ubuntu-latest
+ runs-on: ubuntu-22.04
steps:
- name: Checkout Source Code
uses: actions/checkout@v4
diff --git a/.github/workflows/mayan-sync.yml b/.github/workflows/mayan-sync.yml
index 342e7a010f..008a3f9f1a 100644
--- a/.github/workflows/mayan-sync.yml
+++ b/.github/workflows/mayan-sync.yml
@@ -32,7 +32,7 @@ on:
jobs:
mayan-sync:
name: mayan-sync
- runs-on: ubuntu-latest
+ runs-on: ubuntu-22.04
steps:
- name: Checkout Source Code
uses: actions/checkout@v4
diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
index 816bc26baf..8891eb641b 100644
--- a/.github/workflows/release.yml
+++ b/.github/workflows/release.yml
@@ -9,7 +9,7 @@ on:
jobs:
release:
- runs-on: ubuntu-latest
+ runs-on: ubuntu-22.04
steps:
- name: Checkout code
uses: actions/checkout@v4
diff --git a/.github/workflows/retag-dev-to-test.yml b/.github/workflows/retag-dev-to-test.yml
index 89d979e77e..06b9cbeca2 100644
--- a/.github/workflows/retag-dev-to-test.yml
+++ b/.github/workflows/retag-dev-to-test.yml
@@ -34,7 +34,7 @@ on: workflow_dispatch
jobs:
ci-cd-start-notification:
name: CI-CD Start Notification to Teams Channel
- runs-on: ubuntu-latest
+ runs-on: ubuntu-22.04
steps:
- name: Start notification to Teams Channel
uses: dragos-cojocari/ms-teams-notification@v1.0.2
@@ -48,7 +48,7 @@ jobs:
deploy:
name: Retag/Deploy to OpenShift
needs: ci-cd-start-notification
- runs-on: ubuntu-latest
+ runs-on: ubuntu-22.04
steps:
- name: Checkout Source Code
uses: actions/checkout@v4
@@ -77,6 +77,12 @@ jobs:
oc tag pims-proxy:$RELEASE_TAG pims-proxy:$DESTINATION
oc -n $DEPLOYMENT_NAMESPACE rollout restart deployment/pims-proxy-$DESTINATION
oc -n $DEPLOYMENT_NAMESPACE rollout status --timeout=600s deployment/pims-proxy-$DESTINATION
+ - name: Deploy scheduler microservice
+ shell: bash
+ run: |
+ oc tag pims-scheduler:$RELEASE_TAG pims-scheduler:$DESTINATION
+ oc -n $DEPLOYMENT_NAMESPACE rollout restart deployment/pims-scheduler-$DESTINATION
+ oc -n $DEPLOYMENT_NAMESPACE rollout status --timeout=600s deployment/pims-scheduler-$DESTINATION
- name: Deploy mayan
shell: bash
run: |
@@ -89,7 +95,7 @@ jobs:
database-upgrade:
name: Upgrade database
needs: [deploy]
- runs-on: ubuntu-latest
+ runs-on: ubuntu-22.04
steps:
- name: Checkout Source Code
uses: actions/checkout@v4
@@ -107,12 +113,12 @@ jobs:
oc wait --for=condition=complete job/$JOB_NAME --timeout=120s
oc get pods -o custom-columns=POD:.metadata.name --no-headers | grep -Eo $JOB_NAME-[^\s].* | (read POD_NAME; oc logs $POD_NAME)
-## Call the mayan sync task three times, once for each mayan sync endpoint. The task will wait for the job to complete before exiting.
-## Note: this depends on the mayan-sync configmap for the target namespace being up to date.
+ ## Call the mayan sync task three times, once for each mayan sync endpoint. The task will wait for the job to complete before exiting.
+ ## Note: this depends on the mayan-sync configmap for the target namespace being up to date.
mayan-sync:
name: sync mayan
needs: database-upgrade
- runs-on: ubuntu-latest
+ runs-on: ubuntu-22.04
steps:
- name: Checkout Source Code
uses: actions/checkout@v4
@@ -130,11 +136,11 @@ jobs:
oc process -f ./openshift/4.0/templates/jobs/mayan-sync.yaml -p NAMESPACE=3cd915-dev -p TOKEN_URL=https://dev.loginproxy.gov.bc.ca:443/auth/realms/standard/protocol/openid-connect/token -p CLIENT_ID=property-services-project-api-4380 -p MAYAN_SYNC_URL=https://pims-app-test-3cd915-dev.apps.silver.devops.gov.bc.ca:443/api/documents/sync/documenttype -p KEYCLOAK_SECRET_NAME=pims-api-sso-test | oc create -f - | grep -oP "(?<=\/)[^\s]*" | (read TASK_NAME; oc wait --for=condition=succeeded taskruns/$TASK_NAME --timeout=80s)
oc process -f ./openshift/4.0/templates/jobs/mayan-sync.yaml -p NAMESPACE=3cd915-dev -p TOKEN_URL=https://dev.loginproxy.gov.bc.ca:443/auth/realms/standard/protocol/openid-connect/token -p CLIENT_ID=property-services-project-api-4380 -p MAYAN_SYNC_URL=https://pims-app-test-3cd915-dev.apps.silver.devops.gov.bc.ca:443/api/documents/sync/mayan -p KEYCLOAK_SECRET_NAME=pims-api-sso-test | oc create -f - | grep -oP "(?<=\/)[^\s]*" | (read TASK_NAME; oc wait --for=condition=succeeded taskruns/$TASK_NAME --timeout=80s)
-## Call the tekton pipeline that executes the keycloak sync. Dependent on the pims-api being accessible. Can run in parallel with the mayan sync.
+ ## Call the tekton pipeline that executes the keycloak sync. Dependent on the pims-api being accessible. Can run in parallel with the mayan sync.
keycloak-sync:
name: sync keycloak
needs: database-upgrade
- runs-on: ubuntu-latest
+ runs-on: ubuntu-22.04
steps:
- name: Checkout Source Code
uses: actions/checkout@v4
@@ -150,10 +156,9 @@ jobs:
run: |
oc process -f ./openshift/4.0/templates/jobs/keycloak-sync-pipeline-run.yaml -p ASPNETCORE_ENVIRONMENT=$ASPNETCORE_ENVIRONMENT -p NAMESPACE=$NAMESPACE_OVERRIDE -p BRANCH=dev -p KEYCLOAK_SECRET_NAME=pims-api-sso-test -p KEYCLOAK_SERVICE_ACCOUNT_SECRET_NAME=pims-api-sso-test -p API_URL=http://pims-api-test:8080/api | oc create -f - | grep -oP "(?<=\/)[^\s]*" | (read PIPELINE_NAME; oc wait --for=condition=succeeded pipelineruns/$PIPELINE_NAME --timeout=600s)
-
ci-cd-end-notification:
name: CI-CD End Notification to Teams Channel
- runs-on: ubuntu-latest
+ runs-on: ubuntu-22.04
needs: keycloak-sync
steps:
- name: check workflow status
diff --git a/.github/workflows/retag-test-to-uat-argo.yml b/.github/workflows/retag-test-to-uat-argo.yml
index 963b21dbf3..6626fa8e0e 100644
--- a/.github/workflows/retag-test-to-uat-argo.yml
+++ b/.github/workflows/retag-test-to-uat-argo.yml
@@ -35,7 +35,7 @@ on: workflow_dispatch
jobs:
ci-cd-start-notification:
name: CI-CD Start Notification to Teams Channel
- runs-on: ubuntu-latest
+ runs-on: ubuntu-22.04
steps:
- name: Start notification to Teams Channel
uses: dragos-cojocari/ms-teams-notification@v1.0.2
@@ -49,7 +49,7 @@ jobs:
deploy:
name: Retag/Deploy to OpenShift
needs: ci-cd-start-notification
- runs-on: ubuntu-latest
+ runs-on: ubuntu-22.04
steps:
- name: Checkout Source Code
uses: actions/checkout@v4
@@ -78,6 +78,12 @@ jobs:
oc tag pims-proxy:$RELEASE_TAG pims-proxy:$DESTINATION
oc -n $DEPLOYMENT_NAMESPACE rollout restart deployment/pims-proxy-$DESTINATION
oc -n $DEPLOYMENT_NAMESPACE rollout status --timeout=600s deployment/pims-proxy-$DESTINATION
+ - name: Deploy scheduler microservice
+ shell: bash
+ run: |
+ oc tag pims-scheduler:$RELEASE_TAG pims-scheduler:$DESTINATION
+ oc -n $DEPLOYMENT_NAMESPACE rollout restart deployment/pims-scheduler-$DESTINATION
+ oc -n $DEPLOYMENT_NAMESPACE rollout status --timeout=600s deployment/pims-scheduler-$DESTINATION
- name: Deploy mayan
shell: bash
run: |
@@ -90,7 +96,7 @@ jobs:
database-upgrade:
name: Upgrade database
needs: [deploy]
- runs-on: ubuntu-latest
+ runs-on: ubuntu-22.04
steps:
- name: Checkout Source Code
uses: actions/checkout@v4
@@ -113,7 +119,7 @@ jobs:
mayan-sync:
name: sync mayan
needs: database-upgrade
- runs-on: ubuntu-latest
+ runs-on: ubuntu-22.04
steps:
- name: Checkout Source Code
uses: actions/checkout@v4
@@ -135,7 +141,7 @@ jobs:
keycloak-sync:
name: sync keycloak
needs: database-upgrade
- runs-on: ubuntu-latest
+ runs-on: ubuntu-22.04
steps:
- name: Checkout Source Code
uses: actions/checkout@v4
@@ -154,7 +160,7 @@ jobs:
tag-release-image:
name: Release Tag
needs: [deploy]
- runs-on: ubuntu-latest
+ runs-on: ubuntu-22.04
steps:
- name: Checkout Source Code
uses: actions/checkout@v4
@@ -172,11 +178,12 @@ jobs:
oc tag pims-app:uat pims-app:v${VERSION}-master
oc tag pims-api:uat pims-api:v${VERSION}-master
oc tag pims-proxy:uat pims-proxy:v${VERSION}-master
+ oc tag pims-scheduler:uat pims-scheduler:v${VERSION}-master
oc tag mayan-bcgov:uat mayan-bcgov:v${VERSION}-master
ci-cd-end-notification:
name: CI-CD End Notification to Teams Channel
- runs-on: ubuntu-latest
+ runs-on: ubuntu-22.04
needs: [keycloak-sync, mayan-sync]
if: always()
steps:
diff --git a/.github/workflows/retag-test-to-uat.yml b/.github/workflows/retag-test-to-uat.yml
index 065219d3e7..6b12fdc362 100644
--- a/.github/workflows/retag-test-to-uat.yml
+++ b/.github/workflows/retag-test-to-uat.yml
@@ -35,7 +35,7 @@ on: workflow_dispatch
jobs:
ci-cd-start-notification:
name: CI-CD Start Notification to Teams Channel
- runs-on: ubuntu-latest
+ runs-on: ubuntu-22.04
steps:
- name: Start notification to Teams Channel
uses: dragos-cojocari/ms-teams-notification@v1.0.2
@@ -49,7 +49,7 @@ jobs:
deploy:
name: Retag/Deploy frontend and api to OpenShift
needs: ci-cd-start-notification
- runs-on: ubuntu-latest
+ runs-on: ubuntu-22.04
steps:
- name: Checkout Source Code
uses: actions/checkout@v4
@@ -63,6 +63,8 @@ jobs:
- name: call scripts to deploy api and frontend
run: |
./openshift/4.0/player.sh deploy api $DESTINATION -apply
+ ./openshift/4.0/player.sh deploy proxy $DESTINATION -apply
+ ./openshift/4.0/player.sh deploy scheduler $DESTINATION -apply
./openshift/4.0/player.sh deploy app $DESTINATION -apply
oc tag mayan-bcgov:test mayan-bcgov:$DESTINATION
# the proxy can only be deployed via DEPLOYMENTS (ArgoCD way)
@@ -80,7 +82,7 @@ jobs:
database-upgrade:
name: Upgrade database
needs: [deploy]
- runs-on: ubuntu-latest
+ runs-on: ubuntu-22.04
steps:
- name: Checkout Source Code
uses: actions/checkout@v4
@@ -103,7 +105,7 @@ jobs:
mayan-sync:
name: sync mayan
needs: database-upgrade
- runs-on: ubuntu-latest
+ runs-on: ubuntu-22.04
steps:
- name: Checkout Source Code
uses: actions/checkout@v4
@@ -125,7 +127,7 @@ jobs:
keycloak-sync:
name: sync keycloak
needs: database-upgrade
- runs-on: ubuntu-latest
+ runs-on: ubuntu-22.04
steps:
- name: Checkout Source Code
uses: actions/checkout@v4
@@ -144,7 +146,7 @@ jobs:
tag-release-image:
name: Release Tag
needs: [deploy]
- runs-on: ubuntu-latest
+ runs-on: ubuntu-22.04
steps:
- name: Checkout Source Code
uses: actions/checkout@v4
@@ -162,11 +164,12 @@ jobs:
oc tag pims-app:uat pims-app:v${VERSION}-master
oc tag pims-api:uat pims-api:v${VERSION}-master
oc tag pims-proxy:uat pims-proxy:v${VERSION}-master
+ oc tag pims-scheduler:uat pims-scheduler:v${VERSION}-master
oc tag mayan-bcgov:uat mayan-bcgov:v${VERSION}-master
ci-cd-end-notification:
name: CI-CD End Notification to Teams Channel
- runs-on: ubuntu-latest
+ runs-on: ubuntu-22.04
needs: [keycloak-sync, mayan-sync]
if: always()
steps:
diff --git a/.github/workflows/tag.yml b/.github/workflows/tag.yml
index 408cf3a934..69d71ce36b 100644
--- a/.github/workflows/tag.yml
+++ b/.github/workflows/tag.yml
@@ -9,7 +9,7 @@ jobs:
prepare-release:
# this job will only run if the PR has been merged
if: github.event.pull_request.merged == true
- runs-on: ubuntu-latest
+ runs-on: ubuntu-22.04
steps:
- name: Checkout code
uses: actions/checkout@v4
diff --git a/.github/workflows/uat_hotfix.yml b/.github/workflows/uat_hotfix.yml
index 366df8bd37..8301332c5f 100644
--- a/.github/workflows/uat_hotfix.yml
+++ b/.github/workflows/uat_hotfix.yml
@@ -33,7 +33,7 @@ on:
jobs:
ci-cd-start-notification:
name: CI-CD Start Notification to Teams Channel
- runs-on: ubuntu-latest
+ runs-on: ubuntu-22.04
steps:
- name: Start notification to Teams Channel
uses: dragos-cojocari/ms-teams-notification@v1.0.2
@@ -47,7 +47,7 @@ jobs:
build-frontend:
name: Build frontend
needs: ci-cd-start-notification
- runs-on: ubuntu-latest
+ runs-on: ubuntu-22.04
steps:
- name: Checkout Source Code
uses: actions/checkout@v4
@@ -66,7 +66,7 @@ jobs:
build-api:
name: Build api
needs: ci-cd-start-notification
- runs-on: ubuntu-latest
+ runs-on: ubuntu-22.04
steps:
- name: Checkout Source Code
uses: actions/checkout@v4
@@ -81,11 +81,12 @@ jobs:
run: |
./openshift/4.0/player.sh build api -apply
./openshift/4.0/player.sh build proxy -apply
+ ./openshift/4.0/player.sh build scheduler -apply
deploy:
name: Deploy frontend and api to OpenShift
needs: [build-frontend, build-api]
- runs-on: ubuntu-latest
+ runs-on: ubuntu-22.04
steps:
- name: Checkout Source Code
uses: actions/checkout@v4
@@ -100,6 +101,7 @@ jobs:
run: |
./openshift/4.0/player.sh deploy api $DESTINATION -apply
./openshift/4.0/player.sh deploy proxy $DESTINATION -apply
+ ./openshift/4.0/player.sh deploy scheduler $DESTINATION -apply
./openshift/4.0/player.sh deploy app $DESTINATION -apply
# the command:
@@ -109,7 +111,7 @@ jobs:
database-upgrade:
name: Upgrade database
needs: [deploy]
- runs-on: ubuntu-latest
+ runs-on: ubuntu-22.04
steps:
- name: Checkout Source Code
uses: actions/checkout@v4
@@ -130,7 +132,7 @@ jobs:
mayan-sync:
name: sync mayan
needs: database-upgrade
- runs-on: ubuntu-latest
+ runs-on: ubuntu-22.04
steps:
- name: Checkout Source Code
uses: actions/checkout@v4
@@ -152,7 +154,7 @@ jobs:
keycloak-sync:
name: sync keycloak
needs: database-upgrade
- runs-on: ubuntu-latest
+ runs-on: ubuntu-22.04
steps:
- name: Checkout Source Code
uses: actions/checkout@v4
@@ -171,7 +173,7 @@ jobs:
tag-release-image:
name: Release Tag
needs: [deploy]
- runs-on: ubuntu-latest
+ runs-on: ubuntu-22.04
steps:
- name: Checkout Source Code
uses: actions/checkout@v4
@@ -188,11 +190,13 @@ jobs:
VERSION=$(make version)
oc tag pims-app:uat pims-app:v${VERSION}-master
oc tag pims-api:uat pims-api:v${VERSION}-master
+ oc tag pims-proxy:uat pims-proxy:v${VERSION}-master
+ oc tag pims-scheduler:uat pims-scheduler:v${VERSION}-master
ci-cd-end-notification:
if: always()
name: CI-CD End Notification to Teams Channel
- runs-on: ubuntu-latest
+ runs-on: ubuntu-22.04
needs: [mayan-sync, keycloak-sync]
steps:
- name: check workflow status
diff --git a/.github/workflows/uat_hotfix_argo.yml b/.github/workflows/uat_hotfix_argo.yml
index 6b656b22b9..90b4c9832c 100644
--- a/.github/workflows/uat_hotfix_argo.yml
+++ b/.github/workflows/uat_hotfix_argo.yml
@@ -34,7 +34,7 @@ on:
jobs:
ci-cd-start-notification:
name: CI-CD Start Notification to Teams Channel
- runs-on: ubuntu-latest
+ runs-on: ubuntu-22.04
steps:
- name: Start notification to Teams Channel
uses: dragos-cojocari/ms-teams-notification@v1.0.2
@@ -48,7 +48,7 @@ jobs:
build-frontend:
name: Build frontend
needs: ci-cd-start-notification
- runs-on: ubuntu-latest
+ runs-on: ubuntu-22.04
steps:
- name: Checkout Source Code
uses: actions/checkout@v4
@@ -67,7 +67,7 @@ jobs:
build-api:
name: Build api
needs: ci-cd-start-notification
- runs-on: ubuntu-latest
+ runs-on: ubuntu-22.04
steps:
- name: Checkout Source Code
uses: actions/checkout@v4
@@ -82,11 +82,12 @@ jobs:
run: |
./openshift/4.0/player.sh build api -apply
./openshift/4.0/player.sh build proxy -apply
+ ./openshift/4.0/player.sh build scheduler -apply
deploy:
name: Deploy to OpenShift
needs: [build-frontend, build-api]
- runs-on: ubuntu-latest
+ runs-on: ubuntu-22.04
steps:
- name: Checkout Source Code
uses: actions/checkout@v4
@@ -115,6 +116,12 @@ jobs:
oc tag pims-proxy:latest-$DESTINATION pims-proxy:$DESTINATION
oc -n $DEPLOYMENT_NAMESPACE rollout restart deployment/pims-proxy-$DESTINATION
oc -n $DEPLOYMENT_NAMESPACE rollout status --timeout=600s deployment/pims-proxy-$DESTINATION
+ - name: Deploy scheduler microservice
+ shell: bash
+ run: |
+ oc tag pims-scheduler:latest-$DESTINATION pims-scheduler:$DESTINATION
+ oc -n $DEPLOYMENT_NAMESPACE rollout restart deployment/pims-scheduler-$DESTINATION
+ oc -n $DEPLOYMENT_NAMESPACE rollout status --timeout=600s deployment/pims-scheduler-$DESTINATION
# the command:
# 1) creates an openshift job with generated name to avoid name conflict, substituting the variables in the template.
@@ -123,7 +130,7 @@ jobs:
database-upgrade:
name: Upgrade database
needs: [deploy]
- runs-on: ubuntu-latest
+ runs-on: ubuntu-22.04
steps:
- name: Checkout Source Code
uses: actions/checkout@v4
@@ -144,7 +151,7 @@ jobs:
mayan-sync:
name: sync mayan
needs: database-upgrade
- runs-on: ubuntu-latest
+ runs-on: ubuntu-22.04
steps:
- name: Checkout Source Code
uses: actions/checkout@v4
@@ -166,7 +173,7 @@ jobs:
keycloak-sync:
name: sync keycloak
needs: database-upgrade
- runs-on: ubuntu-latest
+ runs-on: ubuntu-22.04
steps:
- name: Checkout Source Code
uses: actions/checkout@v4
@@ -185,7 +192,7 @@ jobs:
tag-release-image:
name: Release Tag
needs: [deploy]
- runs-on: ubuntu-latest
+ runs-on: ubuntu-22.04
steps:
- name: Checkout Source Code
uses: actions/checkout@v4
@@ -203,12 +210,13 @@ jobs:
oc tag pims-app:uat pims-app:v${VERSION}-master
oc tag pims-api:uat pims-api:v${VERSION}-master
oc tag pims-proxy:uat pims-proxy:v${VERSION}-master
+ oc tag pims-scheduler:uat pims-scheduler:v${VERSION}-master
oc tag mayan-bcgov:uat mayan-bcgov:v${VERSION}-master
ci-cd-end-notification:
if: always()
name: CI-CD End Notification to Teams Channel
- runs-on: ubuntu-latest
+ runs-on: ubuntu-22.04
needs: [mayan-sync, keycloak-sync]
steps:
- name: check workflow status
diff --git a/.github/workflows/uat_pre_release_hotfix.yml b/.github/workflows/uat_pre_release_hotfix.yml
index 2713184f32..b5895e9893 100644
--- a/.github/workflows/uat_pre_release_hotfix.yml
+++ b/.github/workflows/uat_pre_release_hotfix.yml
@@ -37,7 +37,7 @@ on:
jobs:
ci-cd-start-notification:
name: CI-CD Start Notification to Teams Channel
- runs-on: ubuntu-latest
+ runs-on: ubuntu-22.04
steps:
- name: Start notification to Teams Channel
uses: dragos-cojocari/ms-teams-notification@v1.0.2
@@ -51,7 +51,7 @@ jobs:
create-builds:
name: create builds
needs: ci-cd-start-notification
- runs-on: ubuntu-latest
+ runs-on: ubuntu-22.04
steps:
- name: Checkout Source Code
uses: actions/checkout@v4
@@ -72,7 +72,7 @@ jobs:
build-frontend:
name: Build frontend
needs: create-builds
- runs-on: ubuntu-latest
+ runs-on: ubuntu-22.04
steps:
- name: Checkout Source Code
uses: actions/checkout@v4
@@ -92,7 +92,7 @@ jobs:
build-api:
name: Build api
needs: create-builds
- runs-on: ubuntu-latest
+ runs-on: ubuntu-22.04
steps:
- name: Checkout Source Code
uses: actions/checkout@v4
@@ -107,11 +107,12 @@ jobs:
run: |
OC_JOB_NAME=${{github.event.inputs.HOTFIX_BRANCH}} && ./openshift/4.0/player.sh build api -apply
OC_JOB_NAME=${{github.event.inputs.HOTFIX_BRANCH}} && ./openshift/4.0/player.sh build proxy -apply
+ OC_JOB_NAME=${{github.event.inputs.HOTFIX_BRANCH}} && ./openshift/4.0/player.sh build scheduler -apply
deploy:
name: Deploy frontend and api to OpenShift
needs: [build-frontend, build-api]
- runs-on: ubuntu-latest
+ runs-on: ubuntu-22.04
steps:
- name: Checkout Source Code
uses: actions/checkout@v4
@@ -126,6 +127,7 @@ jobs:
run: |
RELEASE_TAG=latest-${{github.event.inputs.HOTFIX_BRANCH}} && ./openshift/4.0/player.sh deploy api $DESTINATION -apply
RELEASE_TAG=latest-${{github.event.inputs.HOTFIX_BRANCH}} && ./openshift/4.0/player.sh deploy proxy $DESTINATION -apply
+ RELEASE_TAG=latest-${{github.event.inputs.HOTFIX_BRANCH}} && ./openshift/4.0/player.sh deploy scheduler $DESTINATION -apply
RELEASE_TAG=latest-${{github.event.inputs.HOTFIX_BRANCH}} && ./openshift/4.0/player.sh deploy app $DESTINATION -apply
# the command:
@@ -135,7 +137,7 @@ jobs:
database-upgrade:
name: Upgrade database
needs: [deploy]
- runs-on: ubuntu-latest
+ runs-on: ubuntu-22.04
steps:
- name: Checkout Source Code
uses: actions/checkout@v4
@@ -156,7 +158,7 @@ jobs:
mayan-sync:
name: sync mayan
needs: database-upgrade
- runs-on: ubuntu-latest
+ runs-on: ubuntu-22.04
steps:
- name: Checkout Source Code
uses: actions/checkout@v4
@@ -178,7 +180,7 @@ jobs:
keycloak-sync:
name: sync keycloak
needs: database-upgrade
- runs-on: ubuntu-latest
+ runs-on: ubuntu-22.04
steps:
- name: Checkout Source Code
uses: actions/checkout@v4
@@ -197,7 +199,7 @@ jobs:
tag-release-image:
name: Release Tag
needs: [deploy]
- runs-on: ubuntu-latest
+ runs-on: ubuntu-22.04
steps:
- name: Checkout Source Code
uses: actions/checkout@v4
@@ -214,12 +216,13 @@ jobs:
VERSION=$(make version)
oc tag pims-app:uat pims-app:v${VERSION}-master
oc tag pims-api:uat pims-api:v${VERSION}-master
- oc tag pims-proxy:uat pims-api:v${VERSION}-master
+ oc tag pims-proxy:uat pims-proxy:v${VERSION}-master
+ oc tag pims-scheduler:uat pims-scheduler:v${VERSION}-master
ci-cd-end-notification:
if: always()
name: CI-CD End Notification to Teams Channel
- runs-on: ubuntu-latest
+ runs-on: ubuntu-22.04
needs: [mayan-sync, keycloak-sync]
steps:
- name: check workflow status
@@ -238,7 +241,7 @@ jobs:
if: always()
name: cleanup builds
needs: ci-cd-end-notification
- runs-on: ubuntu-latest
+ runs-on: ubuntu-22.04
steps:
- name: Checkout Source Code
uses: actions/checkout@v4
diff --git a/.github/workflows/uat_pre_release_hotfix_argo.yml b/.github/workflows/uat_pre_release_hotfix_argo.yml
index 6fb30a0591..a8a3c2a026 100644
--- a/.github/workflows/uat_pre_release_hotfix_argo.yml
+++ b/.github/workflows/uat_pre_release_hotfix_argo.yml
@@ -38,7 +38,7 @@ on:
jobs:
ci-cd-start-notification:
name: CI-CD Start Notification to Teams Channel
- runs-on: ubuntu-latest
+ runs-on: ubuntu-22.04
steps:
- name: Start notification to Teams Channel
uses: dragos-cojocari/ms-teams-notification@v1.0.2
@@ -52,7 +52,7 @@ jobs:
create-builds:
name: create builds
needs: ci-cd-start-notification
- runs-on: ubuntu-latest
+ runs-on: ubuntu-22.04
steps:
- name: Checkout Source Code
uses: actions/checkout@v4
@@ -73,7 +73,7 @@ jobs:
build-frontend:
name: Build frontend
needs: create-builds
- runs-on: ubuntu-latest
+ runs-on: ubuntu-22.04
steps:
- name: Checkout Source Code
uses: actions/checkout@v4
@@ -93,7 +93,7 @@ jobs:
build-api:
name: Build api
needs: create-builds
- runs-on: ubuntu-latest
+ runs-on: ubuntu-22.04
steps:
- name: Checkout Source Code
uses: actions/checkout@v4
@@ -108,11 +108,12 @@ jobs:
run: |
OC_JOB_NAME=${{github.event.inputs.HOTFIX_BRANCH}} && ./openshift/4.0/player.sh build api -apply
OC_JOB_NAME=${{github.event.inputs.HOTFIX_BRANCH}} && ./openshift/4.0/player.sh build proxy -apply
+ OC_JOB_NAME=${{github.event.inputs.HOTFIX_BRANCH}} && ./openshift/4.0/player.sh build scheduler -apply
deploy:
name: Deploy to OpenShift
needs: [build-frontend, build-api]
- runs-on: ubuntu-latest
+ runs-on: ubuntu-22.04
steps:
- name: Checkout Source Code
uses: actions/checkout@v4
@@ -144,6 +145,13 @@ jobs:
oc tag pims-proxy:$RELEASE_TAG pims-proxy:$DESTINATION
oc -n $DEPLOYMENT_NAMESPACE rollout restart deployment/pims-proxy-$DESTINATION
oc -n $DEPLOYMENT_NAMESPACE rollout status --timeout=600s deployment/pims-proxy-$DESTINATION
+ - name: Deploy scheduler microservice
+ shell: bash
+ run: |
+ export RELEASE_TAG=latest-${{github.event.inputs.HOTFIX_BRANCH}}
+ oc tag pims-scheduler:$RELEASE_TAG pims-scheduler:$DESTINATION
+ oc -n $DEPLOYMENT_NAMESPACE rollout restart deployment/pims-scheduler-$DESTINATION
+ oc -n $DEPLOYMENT_NAMESPACE rollout status --timeout=600s deployment/pims-scheduler-$DESTINATION
# the command:
# 1) creates an openshift job with generated name to avoid name conflict, substituting the variables in the template.
@@ -152,7 +160,7 @@ jobs:
database-upgrade:
name: Upgrade database
needs: [deploy]
- runs-on: ubuntu-latest
+ runs-on: ubuntu-22.04
steps:
- name: Checkout Source Code
uses: actions/checkout@v4
@@ -173,7 +181,7 @@ jobs:
mayan-sync:
name: sync mayan
needs: database-upgrade
- runs-on: ubuntu-latest
+ runs-on: ubuntu-22.04
steps:
- name: Checkout Source Code
uses: actions/checkout@v4
@@ -195,7 +203,7 @@ jobs:
keycloak-sync:
name: sync keycloak
needs: database-upgrade
- runs-on: ubuntu-latest
+ runs-on: ubuntu-22.04
steps:
- name: Checkout Source Code
uses: actions/checkout@v4
@@ -214,7 +222,7 @@ jobs:
tag-release-image:
name: Release Tag
needs: [deploy]
- runs-on: ubuntu-latest
+ runs-on: ubuntu-22.04
steps:
- name: Checkout Source Code
uses: actions/checkout@v4
@@ -231,12 +239,13 @@ jobs:
VERSION=$(make version)
oc tag pims-app:uat pims-app:v${VERSION}-master
oc tag pims-api:uat pims-api:v${VERSION}-master
- oc tag pims-proxy:uat pims-api:v${VERSION}-master
+ oc tag pims-proxy:uat pims-proxy:v${VERSION}-master
+ oc tag pims-scheduler:uat pims-scheduler:v${VERSION}-master
ci-cd-end-notification:
if: always()
name: CI-CD End Notification to Teams Channel
- runs-on: ubuntu-latest
+ runs-on: ubuntu-22.04
needs: [mayan-sync, keycloak-sync]
steps:
- name: check workflow status
@@ -255,7 +264,7 @@ jobs:
if: always()
name: cleanup builds
needs: ci-cd-end-notification
- runs-on: ubuntu-latest
+ runs-on: ubuntu-22.04
steps:
- name: Checkout Source Code
uses: actions/checkout@v4
diff --git a/.github/workflows/version.yml b/.github/workflows/version.yml
index 5027fbb558..3596a124d1 100644
--- a/.github/workflows/version.yml
+++ b/.github/workflows/version.yml
@@ -9,7 +9,7 @@ jobs:
bump-version:
# this job will only run if the PR has been merged
if: github.event.pull_request.merged == true
- runs-on: ubuntu-latest
+ runs-on: ubuntu-22.04
steps:
- uses: actions/checkout@v4
with:
diff --git a/.github/workflows/zap-scan.yml b/.github/workflows/zap-scan.yml
index efaf794407..b8ff77f9cb 100644
--- a/.github/workflows/zap-scan.yml
+++ b/.github/workflows/zap-scan.yml
@@ -6,7 +6,7 @@ on:
jobs:
zap_scan:
- runs-on: ubuntu-latest
+ runs-on: ubuntu-22.04
name: Scan the web application
env:
ZAP_REPORT: zap-report.xml
diff --git a/docker-compose.yml b/docker-compose.yml
index da85dba690..58ce6b72e1 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -92,6 +92,44 @@ services:
networks:
- psp
+ ####################### Scheduler #######################
+ scheduler:
+ profiles:
+ - all
+ - psp
+ restart: on-failure:1
+ container_name: psp-scheduler
+ build:
+ context: source/backend
+ dockerfile: Dockerfile.scheduler
+ args:
+ BUILD_CONFIGURATION: Debug
+ env_file: source/backend/scheduler/.env
+ environment:
+ - ASPNETCORE_ENVIRONMENT=Docker
+ ports:
+ - ${SCHEDULER_HTTPS_PORT:-8058}:443
+ - ${SCHEDULER_HTTP_PORT:-8057}:8080
+ depends_on:
+ - scheduler-redis
+ networks:
+ - psp
+
+ ####################### Redis #######################
+ scheduler-redis:
+ image: redis:6.2-alpine
+ profiles:
+ - all
+ - psp
+ restart: on-failure:1
+ ports:
+ - 6379:6379
+ networks:
+ - psp
+ command: redis-server --save 20 1 --loglevel warning
+ volumes:
+ - redis-scheduler:/data
+
####################### Frontend #######################
frontend:
profiles:
@@ -199,3 +237,5 @@ volumes:
name: psp-api-db-rhel-data
frontend-node-cache:
name: psp-frontend-node-cache
+ redis-scheduler:
+ driver: local
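
The scheduler container added above is gated behind the all/psp compose profiles and depends on its Redis sidecar. A minimal local sketch, assuming source/backend/scheduler/.env exists as referenced by env_file:

$ docker compose --profile psp up -d scheduler scheduler-redis
# host ports default to 8057 (HTTP) and 8058 (HTTPS) unless SCHEDULER_HTTP_PORT / SCHEDULER_HTTPS_PORT are set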
diff --git a/source/backend/Dockerfile.scheduler b/source/backend/Dockerfile.scheduler
new file mode 100644
index 0000000000..847c0fa060
--- /dev/null
+++ b/source/backend/Dockerfile.scheduler
@@ -0,0 +1,35 @@
+ARG BUILD_CONFIGURATION=Release
+FROM mcr.microsoft.com/dotnet/aspnet:8.0 AS base
+WORKDIR /app
+EXPOSE 5001 5000
+
+# Copy the scheduler and shared project sources, then restore dependencies
+FROM mcr.microsoft.com/dotnet/sdk:8.0 AS build
+ARG BUILD_CONFIGURATION
+WORKDIR /src
+COPY scheduler scheduler/
+COPY scheduler/Directory.Build.props ./
+COPY core core/
+COPY apimodels apimodels/
+COPY entities entities/
+COPY core.api core.api/
+COPY keycloak keycloak/
+COPY scheduler/*.csproj scheduler/
+
+RUN dotnet restore scheduler/Scheduler.sln
+ENV PATH="$PATH:/root/.dotnet/tools"
+# Build the scheduler project
+WORKDIR /src/scheduler
+RUN dotnet build "Pims.Scheduler.csproj" -c "$BUILD_CONFIGURATION" -o /app/build
+
+FROM build AS publish
+ARG BUILD_CONFIGURATION
+RUN dotnet publish "Pims.Scheduler.csproj" -c "$BUILD_CONFIGURATION" -o /app/publish
+
+# Runtime image
+FROM base AS final
+WORKDIR /app
+COPY --from=publish /app/publish .
+COPY entrypoint.scheduler.sh .
+RUN chmod +x /app/entrypoint.scheduler.sh
+ENTRYPOINT ["/app/entrypoint.scheduler.sh"]
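
For a standalone build of this image, the build context has to be source/backend so the COPY paths (scheduler/, core/, apimodels/, etc.) resolve; this mirrors what docker-compose.yml passes, and the image tag here is only illustrative:

$ docker build -f source/backend/Dockerfile.scheduler --build-arg BUILD_CONFIGURATION=Debug -t pims-scheduler:local source/backend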
diff --git a/source/backend/api/Areas/Disposition/Controllers/ChecklistController.cs b/source/backend/api/Areas/Disposition/Controllers/ChecklistController.cs
index af993497d6..c15cbcb0dd 100644
--- a/source/backend/api/Areas/Disposition/Controllers/ChecklistController.cs
+++ b/source/backend/api/Areas/Disposition/Controllers/ChecklistController.cs
@@ -2,11 +2,11 @@
using MapsterMapper;
using Microsoft.AspNetCore.Authorization;
using Microsoft.AspNetCore.Mvc;
-using Pims.Core.Api.Exceptions;
using Pims.Api.Models.Concepts.DispositionFile;
using Pims.Api.Models.Concepts.File;
-using Pims.Core.Api.Policies;
using Pims.Api.Services;
+using Pims.Core.Api.Exceptions;
+using Pims.Core.Api.Policies;
using Pims.Core.Json;
using Pims.Core.Security;
using Swashbuckle.AspNetCore.Annotations;
diff --git a/source/backend/api/Areas/Documents/DocumentQueueController.cs b/source/backend/api/Areas/Documents/DocumentQueueController.cs
new file mode 100644
index 0000000000..9fead6a65e
--- /dev/null
+++ b/source/backend/api/Areas/Documents/DocumentQueueController.cs
@@ -0,0 +1,64 @@
+using System.Collections.Generic;
+using MapsterMapper;
+using Microsoft.AspNetCore.Authorization;
+using Microsoft.AspNetCore.Mvc;
+using Pims.Api.Services;
+using Pims.Core.Api.Policies;
+using Pims.Core.Json;
+using Pims.Core.Security;
+using Pims.Dal.Entities.Models;
+using Swashbuckle.AspNetCore.Annotations;
+
+namespace Pims.Api.Controllers
+{
+ /// <summary>
+ /// DocumentQueueController class, provides endpoints to handle document queue requests.
+ /// </summary>
+ [Authorize]
+ [ApiController]
+ [ApiVersion("1.0")]
+ [Route("v{version:apiVersion}/documents/queue")]
+ [Route("/documents")]
+ public class DocumentQueueController : ControllerBase
+ {
+ #region Variables
+ private readonly IDocumentQueueService _documentQueueService;
+ private readonly IMapper _mapper;
+ #endregion
+
+ #region Constructors
+
+ /// <summary>
+ /// Creates a new instance of a DocumentQueueController class.
+ /// </summary>
+ /// <param name="documentQueueService"></param>
+ /// <param name="mapper"></param>
+ public DocumentQueueController(IDocumentQueueService documentQueueService, IMapper mapper)
+ {
+ _documentQueueService = documentQueueService;
+ _mapper = mapper;
+ }
+ #endregion
+
+ #region Endpoints
+
+ /// <summary>
+ /// Search for Document Queue items via filter.
+ /// </summary>
+ /// <returns></returns>
+ [HttpGet("search")]
+ [HasPermission(Permissions.SystemAdmin)]
+ [Produces("application/json")]
+ [ProducesResponseType(typeof(List<DocumentQueueModel>), 200)]
+ [SwaggerOperation(Tags = new[] { "document-queue" })]
+ [TypeFilter(typeof(NullJsonResultFilter))]
+ public IActionResult SearchDocumentQueue([FromBody] DocumentQueueFilter filter)
+ {
+ var queuedDocuments = _documentQueueService.SearchDocumentQueue(filter);
+ var documentQueueModels = _mapper.Map<List<DocumentQueueModel>>(queuedDocuments);
+ return new JsonResult(documentQueueModels);
+ }
+
+ #endregion
+ }
+}
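
A rough curl sketch against the new queue search endpoint. The /api/v1 prefix, the bearer token, and the empty filter body are assumptions (valid filter fields are whatever DocumentQueueFilter exposes); the caller needs the SystemAdmin claim, and the action reads its filter from the request body even though it is an HTTP GET:

$ curl -X GET "https://<pims-host>/api/v1/documents/queue/search" \
    -H "Authorization: Bearer $TOKEN" \
    -H "Content-Type: application/json" \
    -d '{}'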
diff --git a/source/backend/api/Pims.Api.csproj b/source/backend/api/Pims.Api.csproj
index bfc4c90bd1..79dcc9d373 100644
--- a/source/backend/api/Pims.Api.csproj
+++ b/source/backend/api/Pims.Api.csproj
@@ -2,8 +2,8 @@
<UserSecretsId>0ef6255f-9ea0-49ec-8c65-c172304b4926</UserSecretsId>
- <Version>5.7.0-94.20</Version>
- <AssemblyVersion>5.7.0.94</AssemblyVersion>
+ <Version>5.7.0-95.20</Version>
+ <AssemblyVersion>5.7.0.95</AssemblyVersion>
<GenerateDocumentationFile>true</GenerateDocumentationFile>
<ProjectGuid>{16BC0468-78F6-4C91-87DA-7403C919E646}</ProjectGuid>
<TargetFramework>net8.0</TargetFramework>
diff --git a/source/backend/README.md b/source/backend/api/README.md
similarity index 100%
rename from source/backend/README.md
rename to source/backend/api/README.md
diff --git a/source/backend/api/Repositories/Cdogs/CdogsBaseRepository.cs b/source/backend/api/Repositories/Cdogs/CdogsBaseRepository.cs
index f8df726c8e..d1d192b1d5 100644
--- a/source/backend/api/Repositories/Cdogs/CdogsBaseRepository.cs
+++ b/source/backend/api/Repositories/Cdogs/CdogsBaseRepository.cs
@@ -3,7 +3,7 @@
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.Logging;
using Pims.Api.Models.Config;
-using Pims.Api.Repositories.Rest;
+using Pims.Core.Api.Repositories.Rest;
namespace Pims.Api.Repositories.Cdogs
{
diff --git a/source/backend/api/Repositories/Mayan/MayanBaseRepository.cs b/source/backend/api/Repositories/Mayan/MayanBaseRepository.cs
index 4b7dfca7c9..ab06e068fc 100644
--- a/source/backend/api/Repositories/Mayan/MayanBaseRepository.cs
+++ b/source/backend/api/Repositories/Mayan/MayanBaseRepository.cs
@@ -4,7 +4,7 @@
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.Logging;
using Pims.Api.Models.Config;
-using Pims.Api.Repositories.Rest;
+using Pims.Core.Api.Repositories.Rest;
namespace Pims.Api.Repositories.Mayan
{
diff --git a/source/backend/api/Services/AcquisitionFileService.cs b/source/backend/api/Services/AcquisitionFileService.cs
index 54e1c32a0b..8f23f5fdce 100644
--- a/source/backend/api/Services/AcquisitionFileService.cs
+++ b/source/backend/api/Services/AcquisitionFileService.cs
@@ -315,6 +315,11 @@ public PimsAcquisitionFile UpdateProperties(PimsAcquisitionFile acquisitionFile,
// Check if the property is new or if it is being updated
foreach (var incomingAcquisitionProperty in acquisitionFile.PimsPropertyAcquisitionFiles)
{
+ var matchingProperty = currentFileProperties.FirstOrDefault(c => c.PropertyId == incomingAcquisitionProperty.PropertyId);
+ if (matchingProperty is not null && incomingAcquisitionProperty.Internal_Id == 0)
+ {
+ incomingAcquisitionProperty.Internal_Id = matchingProperty.Internal_Id;
+ }
// If the property is not new, check if the name has been updated.
if (incomingAcquisitionProperty.Internal_Id != 0)
{
diff --git a/source/backend/api/Services/DispositionFileService.cs b/source/backend/api/Services/DispositionFileService.cs
index aded90176d..89b870e499 100644
--- a/source/backend/api/Services/DispositionFileService.cs
+++ b/source/backend/api/Services/DispositionFileService.cs
@@ -501,6 +501,11 @@ public PimsDispositionFile UpdateProperties(PimsDispositionFile dispositionFile,
// Check if the property is new or if it is being updated
foreach (var incomingDispositionProperty in dispositionFile.PimsDispositionFileProperties)
{
+ var matchingProperty = currentFileProperties.FirstOrDefault(c => c.PropertyId == incomingDispositionProperty.PropertyId);
+ if (matchingProperty is not null && incomingDispositionProperty.Internal_Id == 0)
+ {
+ incomingDispositionProperty.Internal_Id = matchingProperty.Internal_Id;
+ }
// If the property is not new, check if the name has been updated.
if (incomingDispositionProperty.Internal_Id != 0)
{
diff --git a/source/backend/api/Services/DocumentFileService.cs b/source/backend/api/Services/DocumentFileService.cs
index f87910eb59..098e3e794c 100644
--- a/source/backend/api/Services/DocumentFileService.cs
+++ b/source/backend/api/Services/DocumentFileService.cs
@@ -5,11 +5,12 @@
using MapsterMapper;
using Microsoft.Extensions.Logging;
using Pims.Api.Constants;
-using Pims.Core.Api.Exceptions;
using Pims.Api.Models.CodeTypes;
using Pims.Api.Models.Concepts.Document;
using Pims.Api.Models.Requests.Document.Upload;
using Pims.Api.Models.Requests.Http;
+using Pims.Core.Api.Exceptions;
+using Pims.Core.Api.Services;
using Pims.Core.Extensions;
using Pims.Core.Security;
using Pims.Dal.Entities;
diff --git a/source/backend/api/Services/DocumentGenerationService.cs b/source/backend/api/Services/DocumentGenerationService.cs
index 2f9aef6ebb..a22e49b1ed 100644
--- a/source/backend/api/Services/DocumentGenerationService.cs
+++ b/source/backend/api/Services/DocumentGenerationService.cs
@@ -14,6 +14,7 @@
using Pims.Api.Models.Requests.Http;
using Pims.Api.Repositories.Cdogs;
using Pims.Av;
+using Pims.Core.Api.Services;
using Pims.Core.Extensions;
using Pims.Core.Http.Configuration;
using Pims.Core.Security;
diff --git a/source/backend/api/Services/DocumentQueueService.cs b/source/backend/api/Services/DocumentQueueService.cs
new file mode 100644
index 0000000000..4ffb2c4363
--- /dev/null
+++ b/source/backend/api/Services/DocumentQueueService.cs
@@ -0,0 +1,42 @@
+using System.Collections.Generic;
+using System.Security.Claims;
+using Microsoft.Extensions.Logging;
+using Microsoft.Extensions.Options;
+using Pims.Core.Api.Services;
+using Pims.Core.Extensions;
+using Pims.Core.Http.Configuration;
+using Pims.Core.Security;
+using Pims.Dal.Entities;
+using Pims.Dal.Entities.Models;
+using Pims.Dal.Repositories;
+
+namespace Pims.Api.Services
+{
+ /// <summary>
+ /// DocumentQueueService implementation provides document queue managing capabilities.
+ /// </summary>
+ public class DocumentQueueService : BaseService, IDocumentQueueService
+ {
+ private readonly IDocumentQueueRepository documentQueueRepository;
+ private readonly IOptionsMonitor<AuthClientOptions> keycloakOptions;
+
+ public DocumentQueueService(
+ ClaimsPrincipal user,
+ ILogger<DocumentQueueService> logger,
+ IDocumentQueueRepository documentQueueRepository,
+ IOptionsMonitor<AuthClientOptions> options)
+ : base(user, logger)
+ {
+ this.documentQueueRepository = documentQueueRepository;
+ this.keycloakOptions = options;
+ }
+
+ public IEnumerable<PimsDocumentQueue> SearchDocumentQueue(DocumentQueueFilter filter)
+ {
+ this.Logger.LogInformation("Retrieving queued PIMS documents using filter {filter}", filter);
+ this.User.ThrowIfNotAuthorizedOrServiceAccount(Permissions.SystemAdmin, this.keycloakOptions);
+
+ return documentQueueRepository.GetAllByFilter(filter);
+ }
+ }
+}
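A hypothetical caller sketch (not included in the PR) showing how the new service is expected to be consumed once registered in DI; the DocumentQueueFilter, the PENDING status value and the PimsDocumentQueue return type come from this changeset, while the reader class itself is assumed.

    using System.Collections.Generic;
    using Pims.Api.Models.CodeTypes;
    using Pims.Api.Services;
    using Pims.Dal.Entities;
    using Pims.Dal.Entities.Models;

    public class PendingDocumentQueueReader
    {
        private readonly IDocumentQueueService documentQueueService;

        public PendingDocumentQueueReader(IDocumentQueueService documentQueueService)
        {
            this.documentQueueService = documentQueueService;
        }

        public IEnumerable<PimsDocumentQueue> GetPending()
        {
            var filter = new DocumentQueueFilter
            {
                DocumentQueueStatusTypeCode = DocumentQueueStatusTypes.PENDING.ToString(),
            };

            // Throws unless the caller is a system administrator or the service account.
            return documentQueueService.SearchDocumentQueue(filter);
        }
    }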
diff --git a/source/backend/api/Services/DocumentService.cs b/source/backend/api/Services/DocumentService.cs
index 72cf9b7008..333fdb1c2b 100644
--- a/source/backend/api/Services/DocumentService.cs
+++ b/source/backend/api/Services/DocumentService.cs
@@ -11,7 +11,6 @@
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
-using Pims.Core.Api.Exceptions;
using Pims.Api.Models;
using Pims.Api.Models.CodeTypes;
using Pims.Api.Models.Concepts.Document;
@@ -23,6 +22,8 @@
using Pims.Api.Models.Requests.Http;
using Pims.Api.Repositories.Mayan;
using Pims.Av;
+using Pims.Core.Api.Exceptions;
+using Pims.Core.Api.Services;
using Pims.Core.Exceptions;
using Pims.Core.Extensions;
using Pims.Core.Http.Configuration;
diff --git a/source/backend/api/Services/DocumentSyncService.cs b/source/backend/api/Services/DocumentSyncService.cs
index ae7030ef0a..fca48277a9 100644
--- a/source/backend/api/Services/DocumentSyncService.cs
+++ b/source/backend/api/Services/DocumentSyncService.cs
@@ -12,6 +12,7 @@
using Pims.Api.Models.PimsSync;
using Pims.Api.Models.Requests.Http;
using Pims.Api.Repositories.Mayan;
+using Pims.Core.Api.Services;
using Pims.Core.Extensions;
using Pims.Core.Http.Configuration;
using Pims.Core.Security;
diff --git a/source/backend/api/Services/FinancialCodeService.cs b/source/backend/api/Services/FinancialCodeService.cs
index 01ca8f84b0..d8ebe83ff0 100644
--- a/source/backend/api/Services/FinancialCodeService.cs
+++ b/source/backend/api/Services/FinancialCodeService.cs
@@ -3,13 +3,13 @@
using MapsterMapper;
using Microsoft.EntityFrameworkCore;
using Microsoft.Extensions.Logging;
-using Pims.Core.Api.Exceptions;
using Pims.Api.Models.Concepts.FinancialCode;
+using Pims.Core.Api.Exceptions;
+using Pims.Core.Api.Services;
using Pims.Core.Extensions;
+using Pims.Core.Security;
using Pims.Dal.Entities;
-using Pims.Dal.Helpers.Extensions;
using Pims.Dal.Repositories;
-using Pims.Core.Security;
namespace Pims.Api.Services
{
diff --git a/source/backend/api/Services/FormDocumentService.cs b/source/backend/api/Services/FormDocumentService.cs
index dc56329c7e..00cab505bd 100644
--- a/source/backend/api/Services/FormDocumentService.cs
+++ b/source/backend/api/Services/FormDocumentService.cs
@@ -8,6 +8,7 @@
using Pims.Api.Models.Concepts.Document;
using Pims.Api.Models.Requests.Document.Upload;
using Pims.Api.Models.Requests.Http;
+using Pims.Core.Api.Services;
using Pims.Core.Extensions;
using Pims.Core.Security;
using Pims.Dal.Entities;
diff --git a/source/backend/api/Services/IDocumentQueueService.cs b/source/backend/api/Services/IDocumentQueueService.cs
new file mode 100644
index 0000000000..b1f3c09206
--- /dev/null
+++ b/source/backend/api/Services/IDocumentQueueService.cs
@@ -0,0 +1,14 @@
+using System.Collections.Generic;
+using Pims.Dal.Entities;
+using Pims.Dal.Entities.Models;
+
+namespace Pims.Api.Services
+{
+ /// <summary>
+ /// IDocumentQueueService interface, defines the functionality for document queue services.
+ /// </summary>
+ public interface IDocumentQueueService
+ {
+ public IEnumerable<PimsDocumentQueue> SearchDocumentQueue(DocumentQueueFilter filter);
+ }
+}
diff --git a/source/backend/api/Services/IPropertyService.cs b/source/backend/api/Services/IPropertyService.cs
index 18a52c564b..cd7c81ab22 100644
--- a/source/backend/api/Services/IPropertyService.cs
+++ b/source/backend/api/Services/IPropertyService.cs
@@ -45,7 +45,7 @@ public interface IPropertyService
PimsProperty PopulateNewProperty(PimsProperty property, bool isOwned = false, bool isPropertyOfInterest = true);
- void UpdateLocation(PimsProperty incomingProperty, ref PimsProperty propertyToUpdate, IEnumerable overrideCodes);
+ void UpdateLocation(PimsProperty incomingProperty, ref PimsProperty propertyToUpdate, IEnumerable overrideCodes, bool allowRetired = false);
T PopulateNewFileProperty(T fileProperty)
where T : IFilePropertyEntity;
diff --git a/source/backend/api/Services/LeaseService.cs b/source/backend/api/Services/LeaseService.cs
index 2a4e7a3fce..14d550a1b7 100644
--- a/source/backend/api/Services/LeaseService.cs
+++ b/source/backend/api/Services/LeaseService.cs
@@ -5,17 +5,17 @@
using System.Security.Claims;
using System.Text;
using Microsoft.Extensions.Logging;
-using Pims.Core.Api.Exceptions;
using Pims.Api.Models.CodeTypes;
+using Pims.Core.Api.Exceptions;
+using Pims.Core.Api.Services;
using Pims.Core.Exceptions;
using Pims.Core.Extensions;
+using Pims.Core.Security;
using Pims.Dal.Entities;
using Pims.Dal.Entities.Extensions;
using Pims.Dal.Entities.Models;
using Pims.Dal.Exceptions;
-using Pims.Dal.Helpers.Extensions;
using Pims.Dal.Repositories;
-using Pims.Core.Security;
using static Pims.Dal.Entities.PimsLeaseStatusType;
namespace Pims.Api.Services
@@ -232,12 +232,6 @@ public PimsLease Update(PimsLease lease, IEnumerable userOverr
pimsUser.ThrowInvalidAccessToLeaseFile(lease.RegionCode);
var currentFileProperties = _propertyLeaseRepository.GetAllByLeaseId(lease.LeaseId);
- var newPropertiesAdded = lease.PimsPropertyLeases.Where(x => !currentFileProperties.Any(y => y.Internal_Id == x.Internal_Id)).ToList();
-
- if (newPropertiesAdded.Any(x => x.Property.IsRetired.HasValue && x.Property.IsRetired.Value))
- {
- throw new BusinessRuleViolationException("Retired property can not be selected.");
- }
if (currentLease.LeaseStatusTypeCode != lease.LeaseStatusTypeCode)
{
@@ -256,6 +250,11 @@ public PimsLease Update(PimsLease lease, IEnumerable userOverr
// Update marker locations in the context of this file
foreach (var incomingLeaseProperty in leaseWithProperties.PimsPropertyLeases)
{
+ var matchingProperty = currentFileProperties.FirstOrDefault(c => c.PropertyId == incomingLeaseProperty.PropertyId);
+ if (matchingProperty is not null && incomingLeaseProperty.Internal_Id == 0)
+ {
+ incomingLeaseProperty.Internal_Id = matchingProperty.Internal_Id;
+ }
// If the property is not new, check if the marker location has been updated.
if (incomingLeaseProperty.Internal_Id != 0)
{
@@ -548,8 +547,11 @@ private PimsLease AssociatePropertyLeases(PimsLease lease, IEnumerable p.LeaseId != lease.Internal_Id);
var isPropertyOnThisLease = existingPropertyLeases.Any(p => p.LeaseId == lease.Internal_Id);
+ var isDisposedOrRetired = existingPropertyLeases.Any(p => p.Property.IsRetired.HasValue && p.Property.IsRetired.Value)
+ || propertyWithAssociations?.PimsDispositionFileProperties?.Any(d => d.DispositionFile.DispositionFileStatusTypeCode == DispositionFileStatusTypes.COMPLETE.ToString()) == true;
if (isPropertyOnOtherLease && !isPropertyOnThisLease && !userOverrides.Contains(UserOverrideCode.AddPropertyToInventory))
{
@@ -568,6 +570,11 @@ private PimsLease AssociatePropertyLeases(PimsLease lease, IEnumerable overrideCodes)
+ public void UpdateLocation(PimsProperty incomingProperty, ref PimsProperty propertyToUpdate, IEnumerable overrideCodes, bool allowRetired = false)
{
if (propertyToUpdate.Location == null || propertyToUpdate.Boundary == null)
{
@@ -397,7 +397,7 @@ public void UpdateLocation(PimsProperty incomingProperty, ref PimsProperty prope
// convert spatial location from lat/long (4326) to BC Albers (3005) for database storage
var geom = incomingProperty.Location;
- if (geom.SRID != SpatialReference.BCALBERS)
+ if (geom != null && geom.SRID != SpatialReference.BCALBERS)
{
var newCoords = _coordinateService.TransformCoordinates(geom.SRID, SpatialReference.BCALBERS, geom.Coordinate);
propertyToUpdate.Location = GeometryHelper.CreatePoint(newCoords, SpatialReference.BCALBERS);
@@ -415,7 +415,7 @@ public void UpdateLocation(PimsProperty incomingProperty, ref PimsProperty prope
if (needsUpdate)
{
- _propertyRepository.Update(propertyToUpdate, overrideLocation: true);
+ _propertyRepository.Update(propertyToUpdate, overrideLocation: true, allowRetired: allowRetired);
}
}
else
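For context, a minimal sketch (assumed helper and namespace, not part of the PR) of the guard behaviour the new allowRetired flag relaxes: UpdateLocation now passes the flag through to PropertyRepository.Update, which keeps rejecting edits to retired properties unless the caller explicitly opts in.

    using Pims.Core.Exceptions; // assumed namespace for BusinessRuleViolationException
    using Pims.Dal.Entities;

    public static class RetiredPropertyGuard
    {
        // Mirrors the repository check: retired properties stay read-only unless the
        // caller explicitly opts in (allowRetired defaults to false everywhere else).
        public static void ThrowIfRetired(PimsProperty existingProperty, bool allowRetired)
        {
            if (existingProperty.IsRetired == true && !allowRetired)
            {
                throw new BusinessRuleViolationException("Retired records are referenced for historical purposes only and cannot be edited or deleted.");
            }
        }
    }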
diff --git a/source/backend/api/Services/ResearchFileService.cs b/source/backend/api/Services/ResearchFileService.cs
index ab8e10e58a..f28025f757 100644
--- a/source/backend/api/Services/ResearchFileService.cs
+++ b/source/backend/api/Services/ResearchFileService.cs
@@ -115,6 +115,11 @@ public PimsResearchFile UpdateProperties(PimsResearchFile researchFile, IEnumera
// Check if the property is new or if it is being updated
foreach (var incomingResearchProperty in researchFile.PimsPropertyResearchFiles)
{
+ var matchingProperty = currentFileProperties.FirstOrDefault(c => c.PropertyId == incomingResearchProperty.PropertyId);
+ if (matchingProperty is not null && incomingResearchProperty.Internal_Id == 0)
+ {
+ incomingResearchProperty.Internal_Id = matchingProperty.Internal_Id;
+ }
// If the property is not new, check if the name has been updated.
if (incomingResearchProperty.Internal_Id != 0)
{
@@ -204,7 +209,7 @@ private void MatchProperties(PimsResearchFile researchFile, IEnumerable();
services.AddScoped();
services.AddScoped();
+ services.AddScoped<IDocumentQueueService, DocumentQueueService>();
services.AddScoped();
}
diff --git a/source/backend/apimodels/CodeTypes/DocumentQueueStatusTypes.cs b/source/backend/apimodels/CodeTypes/DocumentQueueStatusTypes.cs
new file mode 100644
index 0000000000..3b38b2899d
--- /dev/null
+++ b/source/backend/apimodels/CodeTypes/DocumentQueueStatusTypes.cs
@@ -0,0 +1,25 @@
+using System.Runtime.Serialization;
+using System.Text.Json.Serialization;
+
+namespace Pims.Api.Models.CodeTypes
+{
+ [JsonConverter(typeof(JsonStringEnumMemberConverter))]
+ public enum DocumentQueueStatusTypes
+ {
+
+ [EnumMember(Value = "MAYAN_ERROR")]
+ MAYAN_ERROR,
+
+ [EnumMember(Value = "PENDING")]
+ PENDING,
+
+ [EnumMember(Value = "PIMS_ERROR")]
+ PIMS_ERROR,
+
+ [EnumMember(Value = "PROCESSING")]
+ PROCESSING,
+
+ [EnumMember(Value = "SUCCESS")]
+ SUCCESS,
+ }
+}
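A small round-trip sketch (assumes the JsonStringEnumMemberConverter referenced by the attribute above is available at runtime): the enum values serialize to their EnumMember strings rather than integers.

    using System.Text.Json;
    using Pims.Api.Models.CodeTypes;

    public static class DocumentQueueStatusSerializationExample
    {
        public static void Run()
        {
            // Serializes to "\"PIMS_ERROR\"" because the converter emits the EnumMember value.
            string json = JsonSerializer.Serialize(DocumentQueueStatusTypes.PIMS_ERROR);

            // Round-trips back to DocumentQueueStatusTypes.PENDING.
            var status = JsonSerializer.Deserialize<DocumentQueueStatusTypes>("\"PENDING\"");
        }
    }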
diff --git a/source/backend/apimodels/Models/Concepts/CompensationRequisition/CompensationRequisitionMap.cs b/source/backend/apimodels/Models/Concepts/CompensationRequisition/CompensationRequisitionMap.cs
index c54bc85949..d87ffd2158 100644
--- a/source/backend/apimodels/Models/Concepts/CompensationRequisition/CompensationRequisitionMap.cs
+++ b/source/backend/apimodels/Models/Concepts/CompensationRequisition/CompensationRequisitionMap.cs
@@ -37,7 +37,6 @@ public void Register(TypeAdapterConfig config)
.Map(dest => dest.IsPaymentInTrust, src => src.IsPaymentInTrust)
.Map(dest => dest.GstNumber, src => src.GstNumber)
.Map(dest => dest.FinalizedDate, src => src.FinalizedDate)
- .Map(dest => dest.AdvancedPaymentServedDate, src => src.AdvPmtServedDt)
.Map(dest => dest.SpecialInstruction, src => src.SpecialInstruction)
.Map(dest => dest.DetailedRemarks, src => src.DetailedRemarks)
.Map(dest => dest.AlternateProjectId, src => src.AlternateProjectId)
@@ -60,7 +59,6 @@ public void Register(TypeAdapterConfig config)
.Map(dest => dest.AgreementDt, src => src.AgreementDate)
.Map(dest => dest.ExpropNoticeServedDt, src => src.ExpropriationNoticeServedDate)
.Map(dest => dest.ExpropVestingDt, src => src.ExpropriationVestingDate)
- .Map(dest => dest.AdvPmtServedDt, src => src.AdvancedPaymentServedDate)
.Map(dest => dest.GenerationDt, src => src.GenerationDate)
.Map(dest => dest.PimsCompReqFinancials, src => src.Financials)
.Map(dest => dest.AcquisitionOwnerId, src => src.AcquisitionOwnerId)
diff --git a/source/backend/apimodels/Models/Concepts/CompensationRequisition/CompensationRequisitionModel.cs b/source/backend/apimodels/Models/Concepts/CompensationRequisition/CompensationRequisitionModel.cs
index e0c1d05290..4a8f276ce7 100644
--- a/source/backend/apimodels/Models/Concepts/CompensationRequisition/CompensationRequisitionModel.cs
+++ b/source/backend/apimodels/Models/Concepts/CompensationRequisition/CompensationRequisitionModel.cs
@@ -42,8 +42,6 @@ public class CompensationRequisitionModel : BaseAuditModel
public DateOnly? ExpropriationVestingDate { get; set; }
- public DateOnly? AdvancedPaymentServedDate { get; set; }
-
public DateOnly? GenerationDate { get; set; }
public List Financials { get; set; }
diff --git a/source/backend/apimodels/Models/Concepts/DocumentQueue/DocumentQueueMap.cs b/source/backend/apimodels/Models/Concepts/DocumentQueue/DocumentQueueMap.cs
new file mode 100644
index 0000000000..1a98ba0d07
--- /dev/null
+++ b/source/backend/apimodels/Models/Concepts/DocumentQueue/DocumentQueueMap.cs
@@ -0,0 +1,36 @@
+using Mapster;
+using Pims.Api.Models.Base;
+using Entity = Pims.Dal.Entities;
+
+namespace Pims.Api.Models.Concepts.Document
+{
+ public class DocumentQueueMap : IRegister
+ {
+ public void Register(TypeAdapterConfig config)
+ {
+ config.NewConfig<Entity.PimsDocumentQueue, DocumentQueueModel>()
+ .Map(dest => dest.Id, src => src.DocumentQueueId)
+ .Map(dest => dest.DocumentExternalId, src => src.DocumentExternalId)
+ .Map(dest => dest.DocumentId, src => src.DocumentId)
+ .Map(dest => dest.DocumentQueueStatusType, src => src.DocumentQueueStatusTypeCodeNavigation)
+ .Map(dest => dest.DataSourceTypeCode, src => src.DataSourceTypeCodeNavigation)
+ .Map(dest => dest.DocumentProcessStartTimestamp, src => src.DocProcessStartDt)
+ .Map(dest => dest.DocumentProcessEndTimestamp, src => src.DocProcessEndDt)
+ .Map(dest => dest.DocumentProcessRetries, src => src.DocProcessRetries)
+ .Map(dest => dest.Document, src => src.Document)
+ .Inherits<Entity.IBaseAppEntity, BaseAuditModel>();
+
+ config.NewConfig<DocumentQueueModel, Entity.PimsDocumentQueue>()
+ .Map(dest => dest.DocumentQueueId, src => src.Id)
+ .Map(dest => dest.DocumentExternalId, src => src.DocumentExternalId)
+ .Map(dest => dest.DocumentId, src => src.DocumentId)
+ .Map(dest => dest.DocumentQueueStatusTypeCode, src => src.DocumentQueueStatusType.Id)
+ .Map(dest => dest.DataSourceTypeCode, src => src.DataSourceTypeCode.Id)
+ .Map(dest => dest.DocProcessStartDt, src => src.DocumentProcessStartTimestamp)
+ .Map(dest => dest.DocProcessEndDt, src => src.DocumentProcessEndTimestamp)
+ .Map(dest => dest.DocProcessRetries, src => src.DocumentProcessRetries)
+ .Map(dest => dest.Document, src => src.Document)
+ .Inherits<BaseAuditModel, Entity.IBaseAppEntity>();
+ }
+ }
+}
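A hypothetical consumer of the new map (not part of the PR): once DocumentQueueMap is registered with the shared TypeAdapterConfig, an injected MapsterMapper.IMapper performs the renames declared above in both directions.

    using MapsterMapper;
    using Pims.Api.Models.Concepts.Document;
    using Entity = Pims.Dal.Entities;

    public class DocumentQueueMappingExample
    {
        private readonly IMapper mapper;

        public DocumentQueueMappingExample(IMapper mapper)
        {
            this.mapper = mapper;
        }

        public DocumentQueueModel ToModel(Entity.PimsDocumentQueue entity)
        {
            // DocumentQueueId -> Id, DocProcessStartDt -> DocumentProcessStartTimestamp, etc.
            return mapper.Map<DocumentQueueModel>(entity);
        }
    }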
diff --git a/source/backend/apimodels/Models/Concepts/DocumentQueue/DocumentQueueModel.cs b/source/backend/apimodels/Models/Concepts/DocumentQueue/DocumentQueueModel.cs
new file mode 100644
index 0000000000..73d553f74d
--- /dev/null
+++ b/source/backend/apimodels/Models/Concepts/DocumentQueue/DocumentQueueModel.cs
@@ -0,0 +1,67 @@
+using System;
+using Pims.Api.Models.Base;
+
+namespace Pims.Api.Models.Concepts.Document
+{
+ /// <summary>
+ /// DocumentQueueModel class, provides a model to represent a document in the processing queue.
+ /// </summary>
+ public class DocumentQueueModel : BaseAuditModel
+ {
+
+ #region Properties
+
+ /// <summary>
+ /// get/set - Document Queue Id.
+ /// </summary>
+ public long Id { get; set; }
+
+ /// <summary>
+ /// get/set - The document id within PIMS.
+ /// </summary>
+ public long? DocumentId { get; set; }
+
+ /// <summary>
+ /// get/set - The original identifier in the source system.
+ /// </summary>
+ public long? DocumentExternalId { get; set; }
+
+ /// <summary>
+ /// get/set - The document queue status type.
+ /// </summary>
+ public CodeTypeModel<string> DocumentQueueStatusType { get; set; }
+
+ /// <summary>
+ /// get/set - The document source type.
+ /// </summary>
+ public CodeTypeModel<string> DataSourceTypeCode { get; set; }
+
+ /// <summary>
+ /// get/set - When the processing of the document began.
+ /// </summary>
+ public DateTime? DocumentProcessStartTimestamp { get; set; }
+
+ /// <summary>
+ /// get/set - When the processing of the document ended.
+ /// </summary>
+ public DateTime? DocumentProcessEndTimestamp { get; set; }
+
+ /// <summary>
+ /// get/set - The number of times an attempt was made to process this document.
+ /// </summary>
+ public int? DocumentProcessRetries { get; set; }
+
+ /// <summary>
+ /// get/set - The latest error from mayan, if processing failed.
+ /// </summary>
+ public string MayanError { get; set; }
+
+ /// <summary>
+ /// get/set - The actual document, represented as a byte[].
+ /// </summary>
+ public byte[] Document { get; set; }
+
+
+ #endregion
+ }
+}
diff --git a/source/backend/core.api/Pims.Core.Api.csproj b/source/backend/core.api/Pims.Core.Api.csproj
index 2e34c312af..70c947b9d1 100644
--- a/source/backend/core.api/Pims.Core.Api.csproj
+++ b/source/backend/core.api/Pims.Core.Api.csproj
@@ -22,6 +22,7 @@
+
diff --git a/source/backend/api/Repositories/RestCommon/BaseRestRepository.cs b/source/backend/core.api/Repositories/RestCommon/BaseRestRepository.cs
similarity index 99%
rename from source/backend/api/Repositories/RestCommon/BaseRestRepository.cs
rename to source/backend/core.api/Repositories/RestCommon/BaseRestRepository.cs
index 2b5559b8fe..214d359f9b 100644
--- a/source/backend/api/Repositories/RestCommon/BaseRestRepository.cs
+++ b/source/backend/core.api/Repositories/RestCommon/BaseRestRepository.cs
@@ -14,7 +14,7 @@
using Pims.Api.Models.CodeTypes;
using Pims.Api.Models.Requests.Http;
-namespace Pims.Api.Repositories.Rest
+namespace Pims.Core.Api.Repositories.Rest
{
/// <summary>
/// BaseRestRepository provides common methods to interact with Rest-ful external interfaces.
diff --git a/source/backend/api/Repositories/RestCommon/IRestRespository.cs b/source/backend/core.api/Repositories/RestCommon/IRestRespository.cs
similarity index 95%
rename from source/backend/api/Repositories/RestCommon/IRestRespository.cs
rename to source/backend/core.api/Repositories/RestCommon/IRestRespository.cs
index 3229499989..da802e813e 100644
--- a/source/backend/api/Repositories/RestCommon/IRestRespository.cs
+++ b/source/backend/core.api/Repositories/RestCommon/IRestRespository.cs
@@ -3,7 +3,7 @@
using System.Threading.Tasks;
using Pims.Api.Models.Requests.Http;
-namespace Pims.Api.Repositories.Rest
+namespace Pims.Core.Api.Repositories.Rest
{
/// <summary>
/// IRestRespository interface, defines common functionality among Rest-ful Interfaces.
diff --git a/source/backend/api/Services/BaseService.cs b/source/backend/core.api/Services/BaseService.cs
similarity index 97%
rename from source/backend/api/Services/BaseService.cs
rename to source/backend/core.api/Services/BaseService.cs
index 6a83bb3d8b..10174a96ec 100644
--- a/source/backend/api/Services/BaseService.cs
+++ b/source/backend/core.api/Services/BaseService.cs
@@ -1,7 +1,7 @@
using System.Security.Claims;
using Microsoft.Extensions.Logging;
-namespace Pims.Api.Services
+namespace Pims.Core.Api.Services
{
/// <summary>
/// BaseService abstract class, provides a generic service layer to perform business logic.
diff --git a/source/backend/dal/Helpers/Extensions/DbContextExtensions.cs b/source/backend/dal/Helpers/Extensions/DbContextExtensions.cs
index aa8cc965b5..7e4ef89f23 100644
--- a/source/backend/dal/Helpers/Extensions/DbContextExtensions.cs
+++ b/source/backend/dal/Helpers/Extensions/DbContextExtensions.cs
@@ -8,18 +8,6 @@ namespace Pims.Dal.Helpers.Extensions
/// </summary>
public static class DbContextExtensions
{
- /// <summary>
- /// When manipulating entities it is necessary to reset the original value for 'ConcurrencyControlNumber' so that concurrency checking can occur.
- /// </summary>
- /// <typeparam name="T"></typeparam>
- /// <param name="context"></param>
- /// <param name="source">The original source entity from the database.</param>
- public static void SetOriginalConcurrencyControlNumber<T>(this DbContext context, T source)
- where T : IBaseEntity
- {
- context.Entry(source).OriginalValues[nameof(IBaseEntity.ConcurrencyControlNumber)] = source.ConcurrencyControlNumber;
- }
-
/// <summary>
/// Detach the entity from the context.
/// </summary>
diff --git a/source/backend/dal/Helpers/Extensions/ServiceCollectionExtensions.cs b/source/backend/dal/Helpers/Extensions/ServiceCollectionExtensions.cs
index 75511b9f23..267afba4f5 100644
--- a/source/backend/dal/Helpers/Extensions/ServiceCollectionExtensions.cs
+++ b/source/backend/dal/Helpers/Extensions/ServiceCollectionExtensions.cs
@@ -84,6 +84,7 @@ public static IServiceCollection AddPimsDalRepositories(this IServiceCollection
repositories.AddScoped();
repositories.AddScoped();
repositories.AddScoped();
+ repositories.AddScoped<IDocumentQueueRepository, DocumentQueueRepository>();
return repositories;
}
diff --git a/source/backend/dal/Repositories/AcquisitionFileRepository.cs b/source/backend/dal/Repositories/AcquisitionFileRepository.cs
index d04b8bfdfe..fbac91b873 100644
--- a/source/backend/dal/Repositories/AcquisitionFileRepository.cs
+++ b/source/backend/dal/Repositories/AcquisitionFileRepository.cs
@@ -690,9 +690,6 @@ public PimsAcquisitionFile Add(PimsAcquisitionFile acquisitionFile)
acquisitionFile.FileNoSuffix = nextSuffix;
}
- // TODO: Remove this once the FILE_NUMBER column gets removed. We need this here because it is not nullable
- acquisitionFile.FileNumber = string.Empty;
-
Context.PimsAcquisitionFiles.Add(acquisitionFile);
return acquisitionFile;
}
@@ -713,7 +710,6 @@ public PimsAcquisitionFile Update(PimsAcquisitionFile acquisitionFile)
// Make sure the frontend cannot override these auto-generated fields
acquisitionFile.FileNo = existingAcqFile.FileNo;
acquisitionFile.FileNoSuffix = existingAcqFile.FileNoSuffix;
- acquisitionFile.FileNumber = existingAcqFile.FileNumber; // TODO: Remove this once the FILE_NUMBER column gets removed. We need this here because it is not nullable
// PSP-9268 Changes to Project/Product on the main file need to be propagated to all sub-files
if (existingAcqFile.ProjectId != acquisitionFile.ProjectId || existingAcqFile.ProductId != acquisitionFile.ProductId)
@@ -804,7 +800,7 @@ public List GetAcquisitionSubFiles(long acquisitionFileId,
return Context.PimsAcquisitionFiles.AsNoTracking()
.Include(s => s.AcquisitionFileStatusTypeCodeNavigation)
- .Where(predicate).OrderBy(x => x.FileNumber).ToList();
+ .Where(predicate).OrderBy(x => x.FileNoSuffix).ToList();
}
///
diff --git a/source/backend/dal/Repositories/DocumentQueueRepository.cs b/source/backend/dal/Repositories/DocumentQueueRepository.cs
new file mode 100644
index 0000000000..fa2472427a
--- /dev/null
+++ b/source/backend/dal/Repositories/DocumentQueueRepository.cs
@@ -0,0 +1,100 @@
+using System.Collections.Generic;
+using System.Linq;
+using System.Security.Claims;
+using Microsoft.Extensions.Logging;
+using Pims.Core.Extensions;
+using Pims.Dal.Entities;
+using Pims.Dal.Entities.Models;
+
+namespace Pims.Dal.Repositories
+{
+ /// <summary>
+ /// DocumentQueueRepository class, provides a repository to interact with queued documents within the datasource.
+ /// </summary>
+ public class DocumentQueueRepository : BaseRepository, IDocumentQueueRepository
+ {
+ #region Constructors
+
+ /// <summary>
+ /// Creates a new instance of a DocumentQueueRepository, and initializes it with the specified arguments.
+ /// </summary>
+ /// <param name="dbContext"></param>
+ /// <param name="user"></param>
+ /// <param name="logger"></param>
+ public DocumentQueueRepository(
+ PimsContext dbContext,
+ ClaimsPrincipal user,
+ ILogger logger)
+ : base(dbContext, user, logger)
+ {
+ }
+ #endregion
+
+ #region Methods
+
+ /// <summary>
+ /// Updates the queued document in the database.
+ /// </summary>
+ /// <param name="queuedDocument"></param>
+ /// <returns></returns>
+ public PimsDocumentQueue Update(PimsDocumentQueue queuedDocument)
+ {
+ queuedDocument.ThrowIfNull(nameof(queuedDocument));
+
+ queuedDocument = Context.Update(queuedDocument).Entity;
+ return queuedDocument;
+ }
+
+ /// <summary>
+ /// Deletes the passed queued document from the database. Note, removing a queued document does not delete the imported document.
+ /// </summary>
+ /// <param name="queuedDocument"></param>
+ /// <returns></returns>
+ public bool Delete(PimsDocumentQueue queuedDocument)
+ {
+ queuedDocument.ThrowIfNull(nameof(queuedDocument));
+
+ Context.Remove(queuedDocument);
+ return true;
+ }
+
+ /// <summary>
+ /// Return a list of documents, filtered by the specified arguments.
+ /// </summary>
+ /// <param name="filter"></param>
+ /// <returns></returns>
+ public IEnumerable<PimsDocumentQueue> GetAllByFilter(DocumentQueueFilter filter)
+ {
+ var query = Context.PimsDocumentQueues.Where(q => true);
+
+ if (filter.DataSourceTypeCode != null)
+ {
+ query = query.Where(d => d.DataSourceTypeCode == filter.DataSourceTypeCode);
+ }
+ if (filter.DocumentQueueStatusTypeCode != null)
+ {
+ query = query.Where(d => d.DocumentQueueStatusTypeCode == filter.DocumentQueueStatusTypeCode);
+ }
+ if (filter.DocProcessStartDate != null)
+ {
+ query = query.Where(d => d.DocProcessStartDt >= filter.DocProcessStartDate);
+ }
+ if (filter.DocProcessEndDate != null)
+ {
+ query = query.Where(d => d.DocProcessEndDt <= filter.DocProcessEndDate);
+ }
+ return query.ToList();
+ }
+
+ public int DocumentQueueCount(PimsDocumentQueueStatusType pimsDocumentQueueStatusType)
+ {
+ if (pimsDocumentQueueStatusType == null)
+ {
+ return Context.PimsDocumentQueues.Count();
+ }
+ return Context.PimsDocumentQueues.Count(d => d.DocumentQueueStatusTypeCode == pimsDocumentQueueStatusType.DocumentQueueStatusTypeCode);
+ }
+
+ #endregion
+ }
+}
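A usage sketch under assumptions (the reporting class and the seven-day window are illustrative, not from the PR): the repository composes whichever criteria are set on the DocumentQueueFilter introduced later in this changeset.

    using System;
    using System.Linq;
    using Pims.Dal.Entities.Models;
    using Pims.Dal.Repositories;

    public class FailedDocumentReport
    {
        private readonly IDocumentQueueRepository documentQueueRepository;

        public FailedDocumentReport(IDocumentQueueRepository documentQueueRepository)
        {
            this.documentQueueRepository = documentQueueRepository;
        }

        public int CountRecentMayanErrors()
        {
            var filter = new DocumentQueueFilter
            {
                DocumentQueueStatusTypeCode = "MAYAN_ERROR",
                DocProcessStartDate = DateTime.UtcNow.AddDays(-7),
            };

            return documentQueueRepository.GetAllByFilter(filter).Count();
        }
    }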
diff --git a/source/backend/dal/Repositories/Interfaces/IDocumentQueueRepository.cs b/source/backend/dal/Repositories/Interfaces/IDocumentQueueRepository.cs
new file mode 100644
index 0000000000..45c14d19ac
--- /dev/null
+++ b/source/backend/dal/Repositories/Interfaces/IDocumentQueueRepository.cs
@@ -0,0 +1,20 @@
+using System.Collections.Generic;
+using Pims.Dal.Entities;
+using Pims.Dal.Entities.Models;
+
+namespace Pims.Dal.Repositories
+{
+ /// <summary>
+ /// IDocumentQueueRepository interface, provides functions to interact with queued documents within the datasource.
+ /// </summary>
+ public interface IDocumentQueueRepository : IRepository
+ {
+ IEnumerable<PimsDocumentQueue> GetAllByFilter(DocumentQueueFilter filter);
+
+ PimsDocumentQueue Update(PimsDocumentQueue queuedDocument);
+
+ bool Delete(PimsDocumentQueue queuedDocument);
+
+ int DocumentQueueCount(PimsDocumentQueueStatusType pimsDocumentQueueStatusType);
+ }
+}
diff --git a/source/backend/dal/Repositories/Interfaces/IPropertyRepository.cs b/source/backend/dal/Repositories/Interfaces/IPropertyRepository.cs
index 94bc79172b..7a021d3760 100644
--- a/source/backend/dal/Repositories/Interfaces/IPropertyRepository.cs
+++ b/source/backend/dal/Repositories/Interfaces/IPropertyRepository.cs
@@ -27,7 +27,7 @@ public interface IPropertyRepository : IRepository
long GetAllAssociationsCountById(long id);
- PimsProperty Update(PimsProperty property, bool overrideLocation = false);
+ PimsProperty Update(PimsProperty property, bool overrideLocation = false, bool allowRetired = false);
PimsProperty UpdatePropertyManagement(PimsProperty property);
diff --git a/source/backend/dal/Repositories/PropertyRepository.cs b/source/backend/dal/Repositories/PropertyRepository.cs
index 4b75be1251..50ec958dcb 100644
--- a/source/backend/dal/Repositories/PropertyRepository.cs
+++ b/source/backend/dal/Repositories/PropertyRepository.cs
@@ -310,7 +310,7 @@ public long GetAllAssociationsCountById(long id)
/// The property to update.
/// Whether to update the property spatial location with the incoming value. Defaults to false.
/// The updated property.
- public PimsProperty Update(PimsProperty property, bool overrideLocation = false)
+ public PimsProperty Update(PimsProperty property, bool overrideLocation = false, bool allowRetired = false)
{
property.ThrowIfNull(nameof(property));
@@ -320,7 +320,7 @@ public PimsProperty Update(PimsProperty property, bool overrideLocation = false)
.FirstOrDefault(p => p.PropertyId == propertyId) ?? throw new KeyNotFoundException();
// prevent editing on retired properties
- if (existingProperty.IsRetired.HasValue && existingProperty.IsRetired.Value)
+ if (existingProperty.IsRetired.HasValue && existingProperty.IsRetired.Value && !allowRetired)
{
throw new BusinessRuleViolationException("Retired records are referenced for historical purposes only and cannot be edited or deleted.");
}
diff --git a/source/backend/dal/Repositories/ResearchFilePropertyRepository.cs b/source/backend/dal/Repositories/ResearchFilePropertyRepository.cs
index ddee4e3d41..826356f900 100644
--- a/source/backend/dal/Repositories/ResearchFilePropertyRepository.cs
+++ b/source/backend/dal/Repositories/ResearchFilePropertyRepository.cs
@@ -73,11 +73,8 @@ public void Delete(PimsPropertyResearchFile propertyResearchFile)
public PimsPropertyResearchFile Update(PimsPropertyResearchFile propertyResearchFile)
{
- // Mark the property not to be changed it was being tracked.
- if (propertyResearchFile.Property != null)
- {
- Context.Entry(propertyResearchFile.Property).State = EntityState.Unchanged;
- }
+ // Do not allow this method to make any updates to the related property entity.
+ propertyResearchFile.Property = null;
// Retrieve the existing property research purpose types for the property
// Note: This is needed given the research file properties purpose types may not have the corresponding id, but the corresponding code.
diff --git a/source/backend/dal/Repositories/UserRepository.cs b/source/backend/dal/Repositories/UserRepository.cs
index a280f14964..4e4902ffd1 100644
--- a/source/backend/dal/Repositories/UserRepository.cs
+++ b/source/backend/dal/Repositories/UserRepository.cs
@@ -458,9 +458,6 @@ public PimsUser UpdateWithoutSave(PimsUser update)
user.IssueDate = DateTime.UtcNow;
}
- user.ConcurrencyControlNumber = update.ConcurrencyControlNumber;
- this.Context.SetOriginalConcurrencyControlNumber(user);
-
var addRoles = update.PimsUserRoles.Except(user.PimsUserRoles, new UserRoleRoleIdComparer());
addRoles.ForEach(r => user.PimsUserRoles.Add(new PimsUserRole() { UserId = user.UserId, RoleId = r.RoleId }));
var removeRoles = user.PimsUserRoles.Except(update.PimsUserRoles, new UserRoleRoleIdComparer());
@@ -511,7 +508,6 @@ public void Delete(PimsUser delete)
.FirstOrDefault(u => u.UserId == delete.UserId) ?? throw new KeyNotFoundException();
user.ConcurrencyControlNumber = delete.ConcurrencyControlNumber;
- this.Context.SetOriginalConcurrencyControlNumber(user);
user.PimsUserRoles.ForEach(ur => this.Context.Remove(ur));
user.PimsUserRoles.Clear();
diff --git a/source/backend/dal/Services/BaseService.cs b/source/backend/dal/Services/BaseService.cs
deleted file mode 100644
index 920366878a..0000000000
--- a/source/backend/dal/Services/BaseService.cs
+++ /dev/null
@@ -1,52 +0,0 @@
-using System.Security.Claims;
-using Microsoft.Extensions.Logging;
-
-namespace Pims.Dal.Services
-{
- ///
- /// BaseService abstract class, provides a generic service layer to perform business logic.
- /// It can access the datastore via available repositories.
- ///
- public abstract class BaseService
- {
- #region Properties
-
- ///
- /// get - The current user accessing the service.
- ///
- protected ClaimsPrincipal User { get; }
-
- ///
- /// get - The logger.
- ///
- protected ILogger Logger { get; }
-
- #endregion
-
- #region Constructors
-
- ///
- /// Creates a new instance of a BaseService class, and initializes it with the specified arguments.
- ///
- ///
- ///
- protected BaseService(ClaimsPrincipal user, ILogger logger)
- {
- this.User = user;
- this.Logger = logger;
- }
- #endregion
-
- #region Methods
-
- ///
- /// Provides a way to fetch the user within the assembly.
- ///
- ///
- internal ClaimsPrincipal GetUser()
- {
- return this.User;
- }
- #endregion
- }
-}
diff --git a/source/backend/dal/Models/AccessRequestFilter.cs b/source/backend/entities/Models/AccessRequestFilter.cs
similarity index 100%
rename from source/backend/dal/Models/AccessRequestFilter.cs
rename to source/backend/entities/Models/AccessRequestFilter.cs
diff --git a/source/backend/dal/Models/AcquisitionFileExportModel.cs b/source/backend/entities/Models/AcquisitionFileExportModel.cs
similarity index 100%
rename from source/backend/dal/Models/AcquisitionFileExportModel.cs
rename to source/backend/entities/Models/AcquisitionFileExportModel.cs
diff --git a/source/backend/dal/Models/AcquisitionFilter.cs b/source/backend/entities/Models/AcquisitionFilter.cs
similarity index 100%
rename from source/backend/dal/Models/AcquisitionFilter.cs
rename to source/backend/entities/Models/AcquisitionFilter.cs
diff --git a/source/backend/dal/Models/AcquisitionReportFilterModel.cs b/source/backend/entities/Models/AcquisitionReportFilterModel.cs
similarity index 100%
rename from source/backend/dal/Models/AcquisitionReportFilterModel.cs
rename to source/backend/entities/Models/AcquisitionReportFilterModel.cs
diff --git a/source/backend/dal/Models/AutocompletionRequestModel.cs b/source/backend/entities/Models/AutocompletionRequestModel.cs
similarity index 100%
rename from source/backend/dal/Models/AutocompletionRequestModel.cs
rename to source/backend/entities/Models/AutocompletionRequestModel.cs
diff --git a/source/backend/dal/Models/ContactFilter.cs b/source/backend/entities/Models/ContactFilter.cs
similarity index 100%
rename from source/backend/dal/Models/ContactFilter.cs
rename to source/backend/entities/Models/ContactFilter.cs
diff --git a/source/backend/dal/Models/DispositionFileExportModel.cs b/source/backend/entities/Models/DispositionFileExportModel.cs
similarity index 100%
rename from source/backend/dal/Models/DispositionFileExportModel.cs
rename to source/backend/entities/Models/DispositionFileExportModel.cs
diff --git a/source/backend/dal/Models/DispositionFilter.cs b/source/backend/entities/Models/DispositionFilter.cs
similarity index 100%
rename from source/backend/dal/Models/DispositionFilter.cs
rename to source/backend/entities/Models/DispositionFilter.cs
diff --git a/source/backend/entities/Models/DocumentQueueFilter.cs b/source/backend/entities/Models/DocumentQueueFilter.cs
new file mode 100644
index 0000000000..c9d43c5e61
--- /dev/null
+++ b/source/backend/entities/Models/DocumentQueueFilter.cs
@@ -0,0 +1,42 @@
+using System;
+
+namespace Pims.Dal.Entities.Models
+{
+ public class DocumentQueueFilter : PageFilter
+ {
+ #region Properties
+
+ /// <summary>
+ /// get/set - The source system for this document.
+ /// </summary>
+ public string DataSourceTypeCode { get; set; }
+
+ /// <summary>
+ /// get/set - The status of the document in the queue, such as 'Pending'.
+ /// </summary>
+ public string DocumentQueueStatusTypeCode { get; set; }
+
+ /// <summary>
+ /// get/set - The date/time that processing of the document started.
+ /// </summary>
+ public DateTime? DocProcessStartDate { get; set; }
+
+ /// <summary>
+ /// get/set - The date/time that processing of the document ended.
+ /// </summary>
+ public DateTime? DocProcessEndDate { get; set; }
+
+ #endregion
+
+ #region Constructors
+
+ /// <summary>
+ /// Creates a new instance of a DocumentQueueFilter class.
+ /// </summary>
+ public DocumentQueueFilter()
+ {
+ }
+
+ #endregion
+ }
+}
diff --git a/source/backend/dal/Models/EnvironmentModel.cs b/source/backend/entities/Models/EnvironmentModel.cs
similarity index 100%
rename from source/backend/dal/Models/EnvironmentModel.cs
rename to source/backend/entities/Models/EnvironmentModel.cs
diff --git a/source/backend/dal/Models/LastUpdateBy.cs b/source/backend/entities/Models/LastUpdateBy.cs
similarity index 100%
rename from source/backend/dal/Models/LastUpdateBy.cs
rename to source/backend/entities/Models/LastUpdateBy.cs
diff --git a/source/backend/dal/Models/LeaseFilter.cs b/source/backend/entities/Models/LeaseFilter.cs
similarity index 100%
rename from source/backend/dal/Models/LeaseFilter.cs
rename to source/backend/entities/Models/LeaseFilter.cs
diff --git a/source/backend/dal/Models/OrganizationFilter.cs b/source/backend/entities/Models/OrganizationFilter.cs
similarity index 100%
rename from source/backend/dal/Models/OrganizationFilter.cs
rename to source/backend/entities/Models/OrganizationFilter.cs
diff --git a/source/backend/dal/Models/PageFilter.cs b/source/backend/entities/Models/PageFilter.cs
similarity index 100%
rename from source/backend/dal/Models/PageFilter.cs
rename to source/backend/entities/Models/PageFilter.cs
diff --git a/source/backend/dal/Models/Paged{TModel}.cs b/source/backend/entities/Models/Paged{TModel}.cs
similarity index 100%
rename from source/backend/dal/Models/Paged{TModel}.cs
rename to source/backend/entities/Models/Paged{TModel}.cs
diff --git a/source/backend/dal/Models/ProjectFilter.cs b/source/backend/entities/Models/ProjectFilter.cs
similarity index 100%
rename from source/backend/dal/Models/ProjectFilter.cs
rename to source/backend/entities/Models/ProjectFilter.cs
diff --git a/source/backend/dal/Models/PropertyFilter.cs b/source/backend/entities/Models/PropertyFilter.cs
similarity index 100%
rename from source/backend/dal/Models/PropertyFilter.cs
rename to source/backend/entities/Models/PropertyFilter.cs
diff --git a/source/backend/dal/Models/PropertyFilterCriteria.cs b/source/backend/entities/Models/PropertyFilterCriteria.cs
similarity index 100%
rename from source/backend/dal/Models/PropertyFilterCriteria.cs
rename to source/backend/entities/Models/PropertyFilterCriteria.cs
diff --git a/source/backend/dal/Models/PropertyOwnershipState.cs b/source/backend/entities/Models/PropertyOwnershipState.cs
similarity index 100%
rename from source/backend/dal/Models/PropertyOwnershipState.cs
rename to source/backend/entities/Models/PropertyOwnershipState.cs
diff --git a/source/backend/dal/Models/ResearchFilter.cs b/source/backend/entities/Models/ResearchFilter.cs
similarity index 100%
rename from source/backend/dal/Models/ResearchFilter.cs
rename to source/backend/entities/Models/ResearchFilter.cs
diff --git a/source/backend/dal/Models/UserFilter.cs b/source/backend/entities/Models/UserFilter.cs
similarity index 100%
rename from source/backend/dal/Models/UserFilter.cs
rename to source/backend/entities/Models/UserFilter.cs
diff --git a/source/backend/entities/Partials/DocumentQueue.cs b/source/backend/entities/Partials/DocumentQueue.cs
new file mode 100644
index 0000000000..5ddae49be5
--- /dev/null
+++ b/source/backend/entities/Partials/DocumentQueue.cs
@@ -0,0 +1,15 @@
+using System.ComponentModel.DataAnnotations.Schema;
+
+namespace Pims.Dal.Entities
+{
+ /// <summary>
+ /// PimsDocumentQueue class, provides an entity for the datamodel to manage document queue entities.
+ /// </summary>
+ public partial class PimsDocumentQueue : StandardIdentityBaseAppEntity<long>, IBaseAppEntity
+ {
+ #region Properties
+ [NotMapped]
+ public override long Internal_Id { get => this.DocumentQueueId; set => this.DocumentQueueId = value; }
+ #endregion
+ }
+}
diff --git a/source/backend/entities/PimsBaseContext.cs b/source/backend/entities/PimsBaseContext.cs
index e410ff3a52..75a183c72b 100644
--- a/source/backend/entities/PimsBaseContext.cs
+++ b/source/backend/entities/PimsBaseContext.cs
@@ -949,7 +949,6 @@ protected override void OnModelCreating(ModelBuilder modelBuilder)
entity.Property(e => e.FileNoSuffix)
.HasDefaultValue((short)1)
.HasComment("Acquisition file number suffix");
- entity.Property(e => e.FileNumber).HasComment("Formatted file number assigned to the acquisition file. Format follows YY-XXXXXX-ZZ where YY = MoTI region number, XXXXXX = generated integer sequence number, and ZZ = file suffix number (defaulting to '01')");
entity.Property(e => e.FundingOther).HasComment("Description of other funding type.");
entity.Property(e => e.LegacyFileNumber).HasComment("Legacy formatted file number assigned to the acquisition file. Format follows YY-XXXXXX-ZZ where YY = MoTI region number, XXXXXX = generated integer sequence number, and ZZ = file suffix number (defaulting to '01'). Required due to some files having t");
entity.Property(e => e.LegacyStakeholder).HasComment("Legacy stakeholders imported from PAIMS.");
@@ -1691,22 +1690,50 @@ protected override void OnModelCreating(ModelBuilder modelBuilder)
tb.HasTrigger("PIMS_CMPREQ_I_S_U_TR");
});
- entity.Property(e => e.CompensationRequisitionId).HasDefaultValueSql("(NEXT VALUE FOR [PIMS_COMPENSATION_REQUISITION_ID_SEQ])");
+ entity.Property(e => e.CompensationRequisitionId)
+ .HasDefaultValueSql("(NEXT VALUE FOR [PIMS_COMPENSATION_REQUISITION_ID_SEQ])")
+ .HasComment("Generated surrogate primary key.");
entity.Property(e => e.AcquisitionFileId).HasComment("Foreign key to the PIMS_ACQUISITION_FILE table.");
- entity.Property(e => e.AdvPmtServedDt).HasComment("Date that the advanced payment was made.");
+ entity.Property(e => e.AcquisitionFileTeamId).HasComment("Foreign key to the PIMS_ACQUISITION_FILE_TEAM table.");
+ entity.Property(e => e.AcquisitionOwnerId).HasComment("Foreign key to the PIMS_ACQUISITION_OWNER table.");
entity.Property(e => e.AgreementDt).HasComment("Agreement date.");
entity.Property(e => e.AlternateProjectId).HasComment("Link a file to an \"Alternate Project\", so the user can make alternate payments that may be due after the original file's project closes.");
- entity.Property(e => e.AppCreateTimestamp).HasDefaultValueSql("(getutcdate())");
- entity.Property(e => e.AppCreateUserDirectory).HasDefaultValueSql("(user_name())");
- entity.Property(e => e.AppCreateUserid).HasDefaultValueSql("(user_name())");
- entity.Property(e => e.AppLastUpdateTimestamp).HasDefaultValueSql("(getutcdate())");
- entity.Property(e => e.AppLastUpdateUserDirectory).HasDefaultValueSql("(user_name())");
- entity.Property(e => e.AppLastUpdateUserid).HasDefaultValueSql("(user_name())");
- entity.Property(e => e.ConcurrencyControlNumber).HasDefaultValue(1L);
- entity.Property(e => e.DbCreateTimestamp).HasDefaultValueSql("(getutcdate())");
- entity.Property(e => e.DbCreateUserid).HasDefaultValueSql("(user_name())");
- entity.Property(e => e.DbLastUpdateTimestamp).HasDefaultValueSql("(getutcdate())");
- entity.Property(e => e.DbLastUpdateUserid).HasDefaultValueSql("(user_name())");
+ entity.Property(e => e.AppCreateTimestamp)
+ .HasDefaultValueSql("(getutcdate())")
+ .HasComment("The date and time the user created the record.");
+ entity.Property(e => e.AppCreateUserDirectory)
+ .HasDefaultValueSql("(user_name())")
+ .HasComment("The directory of the user account that created the record.");
+ entity.Property(e => e.AppCreateUserGuid).HasComment("The GUID of the user account that created the record.");
+ entity.Property(e => e.AppCreateUserid)
+ .HasDefaultValueSql("(user_name())")
+ .HasComment("The user account that created the record.");
+ entity.Property(e => e.AppLastUpdateTimestamp)
+ .HasDefaultValueSql("(getutcdate())")
+ .HasComment("The date and time the user updated the record.");
+ entity.Property(e => e.AppLastUpdateUserDirectory)
+ .HasDefaultValueSql("(user_name())")
+ .HasComment("The directory of the user account that updated the record.");
+ entity.Property(e => e.AppLastUpdateUserGuid).HasComment("The GUID of the user account that updated the record.");
+ entity.Property(e => e.AppLastUpdateUserid)
+ .HasDefaultValueSql("(user_name())")
+ .HasComment("The user account that updated the record.");
+ entity.Property(e => e.ChartOfAccountsId).HasComment("Foreign key to the PIMS_CHART_OF_ACCOUNTS table.");
+ entity.Property(e => e.ConcurrencyControlNumber)
+ .HasDefaultValue(1L)
+ .HasComment("Application code is responsible for retrieving the row and then incrementing the value of the CONCURRENCY_CONTROL_NUMBER column by one prior to issuing an update. If this is done then the update will succeed, provided that the row was not updated by any o");
+ entity.Property(e => e.DbCreateTimestamp)
+ .HasDefaultValueSql("(getutcdate())")
+ .HasComment("The date and time the record was created.");
+ entity.Property(e => e.DbCreateUserid)
+ .HasDefaultValueSql("(user_name())")
+ .HasComment("The user or proxy account that created the record.");
+ entity.Property(e => e.DbLastUpdateTimestamp)
+ .HasDefaultValueSql("(getutcdate())")
+ .HasComment("The date and time the record was created or last updated.");
+ entity.Property(e => e.DbLastUpdateUserid)
+ .HasDefaultValueSql("(user_name())")
+ .HasComment("The user or proxy account that created or last updated the record.");
entity.Property(e => e.DetailedRemarks).HasComment("Detailed remarks for the compensation requisition.");
entity.Property(e => e.ExpropNoticeServedDt).HasComment("Expropriation notice served date.");
entity.Property(e => e.ExpropVestingDt).HasComment("Expropriation vesting date.");
@@ -1714,6 +1741,7 @@ protected override void OnModelCreating(ModelBuilder modelBuilder)
entity.Property(e => e.FiscalYear).HasComment("Fiscal year of the compensation requisition.");
entity.Property(e => e.GenerationDt).HasComment("Document generation date.");
entity.Property(e => e.GstNumber).HasComment("GST number of the organization receiving the payment.");
+ entity.Property(e => e.InterestHolderId).HasComment("Foreign key to the PIMS_INTEREST_HOLDER table.");
entity.Property(e => e.IsDisabled)
.HasDefaultValue(false)
.HasComment("Indicates if the requisition is inactive.");
@@ -1725,7 +1753,9 @@ protected override void OnModelCreating(ModelBuilder modelBuilder)
.HasComment("Indicates if the payment was made in trust.");
entity.Property(e => e.LeaseId).HasComment("Foreign key to the PIMS_LEASE table.");
entity.Property(e => e.LegacyPayee).HasComment("Payee where only the name is known from the PAIMS system,");
+ entity.Property(e => e.ResponsibilityId).HasComment("Foreign key to the PIMS_RESPONSIBILITY table.");
entity.Property(e => e.SpecialInstruction).HasComment("Special instructions for the compensation requisition.");
+ entity.Property(e => e.YearlyFinancialId).HasComment("Foreign key to the PIMS_YEARLY_FINANCIAL table.");
entity.HasOne(d => d.AcquisitionFile).WithMany(p => p.PimsCompensationRequisitions).HasConstraintName("PIM_ACQNFL_PIM_CMPREQ_FK");
@@ -3668,19 +3698,50 @@ protected override void OnModelCreating(ModelBuilder modelBuilder)
tb.HasTrigger("PIMS_EXPPMT_I_S_U_TR");
});
- entity.Property(e => e.ExpropriationPaymentId).HasDefaultValueSql("(NEXT VALUE FOR [PIMS_EXPROPRIATION_PAYMENT_ID_SEQ])");
- entity.Property(e => e.AppCreateTimestamp).HasDefaultValueSql("(getutcdate())");
- entity.Property(e => e.AppCreateUserDirectory).HasDefaultValueSql("(user_name())");
- entity.Property(e => e.AppCreateUserid).HasDefaultValueSql("(user_name())");
- entity.Property(e => e.AppLastUpdateTimestamp).HasDefaultValueSql("(getutcdate())");
- entity.Property(e => e.AppLastUpdateUserDirectory).HasDefaultValueSql("(user_name())");
- entity.Property(e => e.AppLastUpdateUserid).HasDefaultValueSql("(user_name())");
- entity.Property(e => e.ConcurrencyControlNumber).HasDefaultValue(1L);
- entity.Property(e => e.DbCreateTimestamp).HasDefaultValueSql("(getutcdate())");
- entity.Property(e => e.DbCreateUserid).HasDefaultValueSql("(user_name())");
- entity.Property(e => e.DbLastUpdateTimestamp).HasDefaultValueSql("(getutcdate())");
- entity.Property(e => e.DbLastUpdateUserid).HasDefaultValueSql("(user_name())");
+ entity.Property(e => e.ExpropriationPaymentId)
+ .HasDefaultValueSql("(NEXT VALUE FOR [PIMS_EXPROPRIATION_PAYMENT_ID_SEQ])")
+ .HasComment("Unique auto-generated surrogate primary key");
+ entity.Property(e => e.AcquisitionFileId).HasComment("Foreign key of the acquisition file.");
+ entity.Property(e => e.AcquisitionOwnerId).HasComment("Foreign key of the acquisition owner.");
+ entity.Property(e => e.AdvPmtServedDt).HasComment("Date that the advanced payment was made.");
+ entity.Property(e => e.AppCreateTimestamp)
+ .HasDefaultValueSql("(getutcdate())")
+ .HasComment("The date and time the user created the record.");
+ entity.Property(e => e.AppCreateUserDirectory)
+ .HasDefaultValueSql("(user_name())")
+ .HasComment("The directory of the user account that created the record.");
+ entity.Property(e => e.AppCreateUserGuid).HasComment("The GUID of the user account that created the record.");
+ entity.Property(e => e.AppCreateUserid)
+ .HasDefaultValueSql("(user_name())")
+ .HasComment("The user account that created the record.");
+ entity.Property(e => e.AppLastUpdateTimestamp)
+ .HasDefaultValueSql("(getutcdate())")
+ .HasComment("The date and time the user updated the record.");
+ entity.Property(e => e.AppLastUpdateUserDirectory)
+ .HasDefaultValueSql("(user_name())")
+ .HasComment("The directory of the user account that updated the record.");
+ entity.Property(e => e.AppLastUpdateUserGuid).HasComment("The GUID of the user account that updated the record.");
+ entity.Property(e => e.AppLastUpdateUserid)
+ .HasDefaultValueSql("(user_name())")
+ .HasComment("The user account that updated the record.");
+ entity.Property(e => e.ConcurrencyControlNumber)
+ .HasDefaultValue(1L)
+ .HasComment("Application code is responsible for retrieving the row and then incrementing the value of the CONCURRENCY_CONTROL_NUMBER column by one prior to issuing an update. If this is done then the update will succeed, provided that the row was not updated by any o");
+ entity.Property(e => e.DbCreateTimestamp)
+ .HasDefaultValueSql("(getutcdate())")
+ .HasComment("The date and time the record was created.");
+ entity.Property(e => e.DbCreateUserid)
+ .HasDefaultValueSql("(user_name())")
+ .HasComment("The user or proxy account that created the record.");
+ entity.Property(e => e.DbLastUpdateTimestamp)
+ .HasDefaultValueSql("(getutcdate())")
+ .HasComment("The date and time the record was created or last updated.");
+ entity.Property(e => e.DbLastUpdateUserid)
+ .HasDefaultValueSql("(user_name())")
+ .HasComment("The user or proxy account that created or last updated the record.");
entity.Property(e => e.Description).HasComment("Form 8 description field. There are lawyer remarks pending. This field could be used for: - providing remarks particular to an expropriation form, and /or - for any ETL descriptive fields as well as - a place-holder forfields that do not have a mapping");
+ entity.Property(e => e.ExpropriatingAuthority).HasComment("Foreign key of the expropriating authoritry.");
+ entity.Property(e => e.InterestHolderId).HasComment("Foreign key of the acquisition interest holder.");
entity.Property(e => e.IsDisabled)
.HasDefaultValue(false)
.HasComment("Indicates if the Form 8 payment is inactive.");
diff --git a/source/backend/entities/ef/PimsAcquisitionFile.cs b/source/backend/entities/ef/PimsAcquisitionFile.cs
index 816a8034e8..cc2ce826af 100644
--- a/source/backend/entities/ef/PimsAcquisitionFile.cs
+++ b/source/backend/entities/ef/PimsAcquisitionFile.cs
@@ -111,14 +111,6 @@ public partial class PimsAcquisitionFile
[Column("FILE_NO_SUFFIX")]
public short FileNoSuffix { get; set; }
- ///
- /// Formatted file number assigned to the acquisition file. Format follows YY-XXXXXX-ZZ where YY = MoTI region number, XXXXXX = generated integer sequence number, and ZZ = file suffix number (defaulting to '01')
- ///
- [Required]
- [Column("FILE_NUMBER")]
- [StringLength(18)]
- public string FileNumber { get; set; }
-
///
/// Legacy formatted file number assigned to the acquisition file. Format follows YY-XXXXXX-ZZ where YY = MoTI region number, XXXXXX = generated integer sequence number, and ZZ = file suffix number (defaulting to '01'). Required due to some files having t
///
diff --git a/source/backend/entities/ef/PimsAcquisitionFileHist.cs b/source/backend/entities/ef/PimsAcquisitionFileHist.cs
index 53106922fd..4240521d63 100644
--- a/source/backend/entities/ef/PimsAcquisitionFileHist.cs
+++ b/source/backend/entities/ef/PimsAcquisitionFileHist.cs
@@ -68,11 +68,6 @@ public partial class PimsAcquisitionFileHist
[Column("FILE_NO_SUFFIX")]
public short FileNoSuffix { get; set; }
- [Required]
- [Column("FILE_NUMBER")]
- [StringLength(18)]
- public string FileNumber { get; set; }
-
[Column("LEGACY_FILE_NUMBER")]
[StringLength(18)]
public string LegacyFileNumber { get; set; }
@@ -160,4 +155,8 @@ public partial class PimsAcquisitionFileHist
[Column("COMPLETION_DATE", TypeName = "datetime")]
public DateTime? CompletionDate { get; set; }
+
+ [Column("FILE_NUMBER")]
+ [StringLength(18)]
+ public string FileNumber { get; set; }
}
diff --git a/source/backend/entities/ef/PimsCompensationRequisition.cs b/source/backend/entities/ef/PimsCompensationRequisition.cs
index f836085244..82b6d786c1 100644
--- a/source/backend/entities/ef/PimsCompensationRequisition.cs
+++ b/source/backend/entities/ef/PimsCompensationRequisition.cs
@@ -21,6 +21,9 @@ namespace Pims.Dal.Entities;
[Index("YearlyFinancialId", Name = "CMPREQ_YEARLY_FINANCIAL_ID_IDX")]
public partial class PimsCompensationRequisition
{
+ ///
+ /// Generated surrogate primary key.
+ ///
[Key]
[Column("COMPENSATION_REQUISITION_ID")]
public long CompensationRequisitionId { get; set; }
@@ -37,21 +40,39 @@ public partial class PimsCompensationRequisition
[Column("LEASE_ID")]
public long? LeaseId { get; set; }
+ ///
+ /// Foreign key to the PIMS_ACQUISITION_OWNER table.
+ ///
[Column("ACQUISITION_OWNER_ID")]
public long? AcquisitionOwnerId { get; set; }
+ ///
+ /// Foreign key to the PIMS_INTEREST_HOLDER table.
+ ///
[Column("INTEREST_HOLDER_ID")]
public long? InterestHolderId { get; set; }
+ ///
+ /// Foreign key to the PIMS_ACQUISITION_FILE_TEAM table.
+ ///
[Column("ACQUISITION_FILE_TEAM_ID")]
public long? AcquisitionFileTeamId { get; set; }
+ ///
+ /// Foreign key to the PIMS_CHART_OF_ACCOUNTS table.
+ ///
[Column("CHART_OF_ACCOUNTS_ID")]
public long? ChartOfAccountsId { get; set; }
+ ///
+ /// Foreign key to the PIMS_RESPONSIBILITY table.
+ ///
[Column("RESPONSIBILITY_ID")]
public long? ResponsibilityId { get; set; }
+ ///
+ /// Foreign key to the PIMS_YEARLY_FINANCIAL table.
+ ///
[Column("YEARLY_FINANCIAL_ID")]
public long? YearlyFinancialId { get; set; }
@@ -124,12 +145,6 @@ public partial class PimsCompensationRequisition
[Column("FINALIZED_DATE")]
public DateOnly? FinalizedDate { get; set; }
- ///
- /// Date that the advanced payment was made.
- ///
- [Column("ADV_PMT_SERVED_DT")]
- public DateOnly? AdvPmtServedDt { get; set; }
-
///
/// Special instructions for the compensation requisition.
///
@@ -150,52 +165,91 @@ public partial class PimsCompensationRequisition
[Column("IS_DISABLED")]
public bool? IsDisabled { get; set; }
+ ///
+ /// Application code is responsible for retrieving the row and then incrementing the value of the CONCURRENCY_CONTROL_NUMBER column by one prior to issuing an update. If this is done then the update will succeed, provided that the row was not updated by any o
+ ///
[Column("CONCURRENCY_CONTROL_NUMBER")]
public long ConcurrencyControlNumber { get; set; }
+ ///
+ /// The date and time the user created the record.
+ ///
[Column("APP_CREATE_TIMESTAMP", TypeName = "datetime")]
public DateTime AppCreateTimestamp { get; set; }
+ ///
+ /// The user account that created the record.
+ ///
[Required]
[Column("APP_CREATE_USERID")]
[StringLength(30)]
public string AppCreateUserid { get; set; }
+ ///
+ /// The GUID of the user account that created the record.
+ ///
[Column("APP_CREATE_USER_GUID")]
public Guid? AppCreateUserGuid { get; set; }
+ ///
+ /// The directory of the user account that created the record.
+ ///
[Required]
[Column("APP_CREATE_USER_DIRECTORY")]
[StringLength(30)]
public string AppCreateUserDirectory { get; set; }
+ ///
+ /// The date and time the user updated the record.
+ ///
[Column("APP_LAST_UPDATE_TIMESTAMP", TypeName = "datetime")]
public DateTime AppLastUpdateTimestamp { get; set; }
+ ///
+ /// The user account that updated the record.
+ ///
[Required]
[Column("APP_LAST_UPDATE_USERID")]
[StringLength(30)]
public string AppLastUpdateUserid { get; set; }
+ ///
+ /// The GUID of the user account that updated the record.
+ ///
[Column("APP_LAST_UPDATE_USER_GUID")]
public Guid? AppLastUpdateUserGuid { get; set; }
+ ///
+ /// The directory of the user account that updated the record.
+ ///
[Required]
[Column("APP_LAST_UPDATE_USER_DIRECTORY")]
[StringLength(30)]
public string AppLastUpdateUserDirectory { get; set; }
+ ///
+ /// The date and time the record was created.
+ ///
[Column("DB_CREATE_TIMESTAMP", TypeName = "datetime")]
public DateTime DbCreateTimestamp { get; set; }
+ ///
+ /// The user or proxy account that created the record.
+ ///
[Required]
[Column("DB_CREATE_USERID")]
[StringLength(30)]
public string DbCreateUserid { get; set; }
+ ///
+ /// The date and time the record was created or last updated.
+ ///
[Column("DB_LAST_UPDATE_TIMESTAMP", TypeName = "datetime")]
public DateTime DbLastUpdateTimestamp { get; set; }
+ ///
+ /// The user or proxy account that created or last updated the record.
+ ///
[Required]
[Column("DB_LAST_UPDATE_USERID")]
[StringLength(30)]
diff --git a/source/backend/entities/ef/PimsCompensationRequisitionHist.cs b/source/backend/entities/ef/PimsCompensationRequisitionHist.cs
index 1f4ff9a63b..062edaade3 100644
--- a/source/backend/entities/ef/PimsCompensationRequisitionHist.cs
+++ b/source/backend/entities/ef/PimsCompensationRequisitionHist.cs
@@ -83,9 +83,6 @@ public partial class PimsCompensationRequisitionHist
[Column("FINALIZED_DATE")]
public DateOnly? FinalizedDate { get; set; }
- [Column("ADV_PMT_SERVED_DT")]
- public DateOnly? AdvPmtServedDt { get; set; }
-
[Column("SPECIAL_INSTRUCTION")]
[StringLength(2000)]
public string SpecialInstruction { get; set; }
@@ -147,4 +144,7 @@ public partial class PimsCompensationRequisitionHist
[Column("DB_LAST_UPDATE_USERID")]
[StringLength(30)]
public string DbLastUpdateUserid { get; set; }
+
+ [Column("ADV_PMT_SERVED_DT")]
+ public DateOnly? AdvPmtServedDt { get; set; }
}
diff --git a/source/backend/entities/ef/PimsExpropriationPayment.cs b/source/backend/entities/ef/PimsExpropriationPayment.cs
index 6ed1ee32fe..bd5fd68ca3 100644
--- a/source/backend/entities/ef/PimsExpropriationPayment.cs
+++ b/source/backend/entities/ef/PimsExpropriationPayment.cs
@@ -16,19 +16,34 @@ namespace Pims.Dal.Entities;
[Index("InterestHolderId", Name = "EXPPMT_INTEREST_HOLDER_ID_IDX")]
public partial class PimsExpropriationPayment
{
+ ///
+ /// Unique auto-generated surrogate primary key
+ ///
[Key]
[Column("EXPROPRIATION_PAYMENT_ID")]
public long ExpropriationPaymentId { get; set; }
+ ///
+ /// Foreign key of the acquisition file.
+ ///
[Column("ACQUISITION_FILE_ID")]
public long AcquisitionFileId { get; set; }
+ ///
+ /// Foreign key of the acquisition owner.
+ ///
[Column("ACQUISITION_OWNER_ID")]
public long? AcquisitionOwnerId { get; set; }
+ ///
+ /// Foreign key of the acquisition interest holder.
+ ///
[Column("INTEREST_HOLDER_ID")]
public long? InterestHolderId { get; set; }
+ ///
+ /// Foreign key of the expropriating authority.
+ ///
[Column("EXPROPRIATING_AUTHORITY")]
public long? ExpropriatingAuthority { get; set; }
@@ -39,58 +54,103 @@ public partial class PimsExpropriationPayment
[StringLength(2000)]
public string Description { get; set; }
+ ///
+ /// Date that the advanced payment was made.
+ ///
+ [Column("ADV_PMT_SERVED_DT")]
+ public DateOnly? AdvPmtServedDt { get; set; }
+
///
/// Indicates if the Form 8 payment is inactive.
///
[Column("IS_DISABLED")]
public bool? IsDisabled { get; set; }
+ ///
+ /// Application code is responsible for retrieving the row and then incrementing the value of the CONCURRENCY_CONTROL_NUMBER column by one prior to issuing an update. If this is done then the update will succeed, provided that the row was not updated by any other transaction in the intervening period.
+ ///
[Column("CONCURRENCY_CONTROL_NUMBER")]
public long ConcurrencyControlNumber { get; set; }
+ ///
+ /// The date and time the user created the record.
+ ///
[Column("APP_CREATE_TIMESTAMP", TypeName = "datetime")]
public DateTime AppCreateTimestamp { get; set; }
+ ///
+ /// The user account that created the record.
+ ///
[Required]
[Column("APP_CREATE_USERID")]
[StringLength(30)]
public string AppCreateUserid { get; set; }
+ ///
+ /// The GUID of the user account that created the record.
+ ///
[Column("APP_CREATE_USER_GUID")]
public Guid? AppCreateUserGuid { get; set; }
+ ///
+ /// The directory of the user account that created the record.
+ ///
[Required]
[Column("APP_CREATE_USER_DIRECTORY")]
[StringLength(30)]
public string AppCreateUserDirectory { get; set; }
+ ///
+ /// The date and time the user updated the record.
+ ///
[Column("APP_LAST_UPDATE_TIMESTAMP", TypeName = "datetime")]
public DateTime AppLastUpdateTimestamp { get; set; }
+ ///
+ /// The user account that updated the record.
+ ///
[Required]
[Column("APP_LAST_UPDATE_USERID")]
[StringLength(30)]
public string AppLastUpdateUserid { get; set; }
+ ///
+ /// The GUID of the user account that updated the record.
+ ///
[Column("APP_LAST_UPDATE_USER_GUID")]
public Guid? AppLastUpdateUserGuid { get; set; }
+ ///
+ /// The directory of the user account that updated the record.
+ ///
[Required]
[Column("APP_LAST_UPDATE_USER_DIRECTORY")]
[StringLength(30)]
public string AppLastUpdateUserDirectory { get; set; }
+ ///
+ /// The date and time the record was created.
+ ///
[Column("DB_CREATE_TIMESTAMP", TypeName = "datetime")]
public DateTime DbCreateTimestamp { get; set; }
+ ///
+ /// The user or proxy account that created the record.
+ ///
[Required]
[Column("DB_CREATE_USERID")]
[StringLength(30)]
public string DbCreateUserid { get; set; }
+ ///
+ /// The date and time the record was created or last updated.
+ ///
[Column("DB_LAST_UPDATE_TIMESTAMP", TypeName = "datetime")]
public DateTime DbLastUpdateTimestamp { get; set; }
+ ///
+ /// The user or proxy account that created or last updated the record.
+ ///
[Required]
[Column("DB_LAST_UPDATE_USERID")]
[StringLength(30)]
diff --git a/source/backend/entities/ef/PimsExpropriationPaymentHist.cs b/source/backend/entities/ef/PimsExpropriationPaymentHist.cs
index 876b927188..b0d204b4c3 100644
--- a/source/backend/entities/ef/PimsExpropriationPaymentHist.cs
+++ b/source/backend/entities/ef/PimsExpropriationPaymentHist.cs
@@ -39,6 +39,9 @@ public partial class PimsExpropriationPaymentHist
[StringLength(2000)]
public string Description { get; set; }
+ [Column("ADV_PMT_SERVED_DT")]
+ public DateOnly? AdvPmtServedDt { get; set; }
+
[Column("IS_DISABLED")]
public bool? IsDisabled { get; set; }
diff --git a/source/backend/entrypoint.scheduler.sh b/source/backend/entrypoint.scheduler.sh
new file mode 100644
index 0000000000..21a3da7bef
--- /dev/null
+++ b/source/backend/entrypoint.scheduler.sh
@@ -0,0 +1,4 @@
+#!/bin/bash
+# export MAIL_SERVER_URL=`route -n | grep "UG" | grep -v "UGH" | cut -f 10 -d " "`
+# export MAIL_SERVER_PORT=1025
+dotnet Pims.Scheduler.dll
diff --git a/source/backend/proxy/README.md b/source/backend/proxy/README.md
index 3875eae4b6..5146d9580a 100644
--- a/source/backend/proxy/README.md
+++ b/source/backend/proxy/README.md
@@ -1,10 +1,6 @@
-# PIMS RESTful API - .NET CORE
+# PIMS PROXY API
-The PIMS API provides an RESTful interface to interact with the configured data-source.
-
-The API is configured to run in a Docker container and has the following dependencies with other containers; database, keycloak.
-
-For more information refer to documentation [here](https://github.com/bcgov/PSP/wiki/api/API.md).
+The PROXY API provides a keycloak-authenticated passthrough to the PIMS geoserver instance (via service account credentials).
To run the API locally you will need to create the appropriate environment variable `.env` files. You can do this through using the prebuilt scripts [here](../../scripts/README.md).
@@ -28,27 +24,3 @@ ASPNETCORE_ENVIRONMENT=Development
ASPNETCORE_URLS=http://*:8080
ASPNETCORE_FORWARDEDHEADERS_ENABLED=true
```
-
-## Running Locally
-
-to run the API locally with vscode, comment out the following lines, and add the `ConnectionStrings__PIMS` value in your `.env` file;
-
-```conf
-# ASPNETCORE_ENVIRONMENT=Development
-# ASPNETCORE_URLS=http://*:8080
-```
-
-This is so that the `/.vscode/launch.json` configured environment variables are used instead. Specifically it will run with the following;
-
-```json
-{
- "configurations": [{
- ...
- "env": {
- "ASPNETCORE_ENVIRONMENT": "Local",
- "ASPNETCORE_URLS": "http://*:5002"
- }
- ...
- }]
-}
-```
diff --git a/source/backend/scheduler/.dockerignore b/source/backend/scheduler/.dockerignore
new file mode 100644
index 0000000000..7ed9d732a6
--- /dev/null
+++ b/source/backend/scheduler/.dockerignore
@@ -0,0 +1,18 @@
+.vs/
+.env
+
+# Build results
+[Dd]ebug/
+[Dd]ebugPublic/
+[Rr]elease/
+[Rr]eleases/
+x64/
+x86/
+build/
+bld/
+**/[Bb]in/
+**/[Oo]bj/
+**/[Oo]ut/
+msbuild.log
+msbuild.err
+msbuild.wrn
diff --git a/source/backend/scheduler/.editorconfig b/source/backend/scheduler/.editorconfig
new file mode 100644
index 0000000000..01b3182133
--- /dev/null
+++ b/source/backend/scheduler/.editorconfig
@@ -0,0 +1,257 @@
+# Editor configuration, see https://editorconfig.org
+root = true
+
+[*]
+end_of_line = lf
+charset = utf-8
+indent_style = space
+insert_final_newline = true
+trim_trailing_whitespace = true
+
+[*.ts]
+indent_size = 2
+
+[*.md]
+max_line_length = off
+trim_trailing_whitespace = false
+
+[*.env]
+insert_final_newline = false
+
+[{Makefile,**.mk}]
+# Use tabs for indentation (Makefiles require tabs)
+indent_style = tab
+
+[*.cs]
+indent_size = 4
+
+# IDE0005: Using directive is unnecessary.
+dotnet_diagnostic.IDE0005.severity = warning
+# CA1032: Implement standard exception constructors
+dotnet_diagnostic.CA1032.severity = warning
+# CA1816: Call GC.SuppressFinalize correctly
+dotnet_diagnostic.CA1816.severity = error
+# CA1063: Implement IDisposable correctly
+dotnet_diagnostic.CA1063.severity = error
+# CA2007: Do not directly await a Task
+dotnet_diagnostic.CA2007.severity = none
+# CA1062: Validate arguments of public methods
+dotnet_diagnostic.CA1062.severity = none
+# CA1031: Do not catch general exception types
+dotnet_diagnostic.CA1031.severity = none
+# CA2000: Dispose objects before losing scope
+dotnet_diagnostic.CA2000.severity = error
+# CA1508: Avoid dead conditional code
+dotnet_diagnostic.CA1508.severity = none # TODO: Requires further analysis
+# CA1305: Specify IFormatProvider
+dotnet_diagnostic.CA1305.severity = none # TODO: Requires further analysis
+# CA1801: Review unused parameters
+dotnet_diagnostic.CA1801.severity = none # TODO: Requires further analysis
+# CA1822: Mark members as static
+dotnet_diagnostic.CA1822.severity = warning
+# CS1591: Missing XML comment
+dotnet_diagnostic.CS1591.severity = none
+# CS0108 X member hides inherited member Y. Use the new keyword if hiding was intended.
+dotnet_diagnostic.CS0108.severity = error
+# SA1400 Element X should declare an access modifier
+dotnet_diagnostic.SA1400.severity = error
+# SA1314 Type parameter names should begin with T
+dotnet_diagnostic.SA1314.severity = error
+
+# SonarQube
+# S4487 Unread "private" fields should be removed.
+dotnet_diagnostic.S4487.severity = warning
+# S927 Parameter names should match base declaration and other partial definitions
+dotnet_diagnostic.S927.severity = error
+# S1006 Remove the default parameter value to match the signature of overridden method
+dotnet_diagnostic.S1006.severity = error
+# S3928 The parameter name '{param}' is not declared in the argument list
+dotnet_diagnostic.S3928.severity = error
+# S1481 Remove the unused local variable
+dotnet_diagnostic.S1481.severity = warning
+# S1116 Remove empty statement
+dotnet_diagnostic.S1116.severity = warning
+# S1118 Utility classes should not have public constructors
+dotnet_diagnostic.S1118.severity = error
+# S3925 "ISerializable" should be implemented correctly
+dotnet_diagnostic.S3925.severity = error
+# S4457 Parameter validation in "async"/"await" methods should be wrapped
+dotnet_diagnostic.S4457.severity = error
+# S3442 "abstract" classes should not have "public" constructors
+dotnet_diagnostic.S3442.severity = error
+# S125 Remove commented out code
+dotnet_diagnostic.S125.severity = warning
+# S4136 Method overloads should be adjacent
+dotnet_diagnostic.S4136.severity = none
+# S1128 Remove this unnecessary 'using'
+dotnet_diagnostic.S1128.severity = warning
+# S3358 Extract this nested ternary operation into an independent statement.
+dotnet_diagnostic.S3358.severity = warning
+# SA1617 Void return value should not be documented
+dotnet_diagnostic.SA1617.severity = error
+# SA1122 Use string.Empty for empty strings
+dotnet_diagnostic.SA1122.severity = error
+
+# StyleCop
+# SA1600 Elements should be documented
+dotnet_diagnostic.SA1600.severity = none
+# SA1200 Using directive should appear within a namespace declaration
+dotnet_diagnostic.SA1200.severity = none
+# SA1633 The file header is missing or not located at the top of the file.
+dotnet_diagnostic.SA1633.severity = none
+# SA1642 Constructor summary documentation should begin with standard text
+dotnet_diagnostic.SA1642.severity = none
+# SA1614 Element parameter documentation should have text
+dotnet_diagnostic.SA1614.severity = none
+# SA1101 Prefix local calls with this
+dotnet_diagnostic.SA1101.severity = none
+# SA1616 Element return value documentation should have text
+dotnet_diagnostic.SA1616.severity = none
+# SA1623 The property's documentation summary text should begin with: 'Gets or sets'
+dotnet_diagnostic.SA1623.severity = none
+# SA1309 Field should not begin with an underscore
+dotnet_diagnostic.SA1309.severity = none
+# SA1505 An opening brace should not be followed by a blank line
+dotnet_diagnostic.SA1505.severity = none
+# SA1513 Closing brace should be followed by a blank line
+dotnet_diagnostic.SA1513.severity = none
+# SA1124 Do not use regions
+dotnet_diagnostic.SA1124.severity = none
+# SA1139 Use literal suffix notation
+dotnet_diagnostic.SA1139.severity = none
+# SA1501 Statement should not be on a single line.
+dotnet_diagnostic.SA1501.severity = none
+# SA1000 The keyword 'new' should be followed by a space
+dotnet_diagnostic.SA1000.severity = none
+
+# TODO: Fix these
+# SA1622 Generic type parameter documentation should have text. // TODO: This should get added
+dotnet_diagnostic.SA1622.severity = none
+# SA1618 The documentation for type parameter 'T' is missing. // TODO: This should be an error
+dotnet_diagnostic.SA1618.severity = none
+# SA1402 File may only contain a single type. // TODO: This should be enabled
+dotnet_diagnostic.SA1402.severity = none
+# SA1649 File name should match first type name. // TODO: This should be an error || LTSA needs to be skipped
+dotnet_diagnostic.SA1649.severity = none
+# CA2254 The logging message template should not vary between calls. // TODO: requires code changes
+dotnet_diagnostic.CA2254.severity = none
+# SA1129 Do not use default value type constructor.
+dotnet_diagnostic.SA1129.severity = none
+# S1135 Complete the task associated to this 'TODO' comment
+dotnet_diagnostic.S1135.severity = none
+##
+
+# SA1611: The documentation for parameter X is missing. // Not necessary unless it proves value
+dotnet_diagnostic.SA1611.severity = none
+# SA1615 Element return value should be documented // Not necessary unless it proves value
+dotnet_diagnostic.SA1615.severity = none
+# SA1201 A constructor should not follow a property
+dotnet_diagnostic.SA1201.severity = none
+# SA1602 Enumeration items should be documented. // Not necessary unless it proves value
+dotnet_diagnostic.SA1602.severity = none
+
+# SA0001 XML comment analysis is disabled due to project configuration
+dotnet_diagnostic.SA0001.severity = none
+
+# --- Set to error before running formatter ---
+# dotnet format --severity error --exclude entities/ef/** --exclude entities/PimsBaseContext.cs
+# SA1515 Single-line comment should be preceded by blank line.
+dotnet_diagnostic.SA1515.severity = warning
+# SA1516 Elements should be separated by blank line
+dotnet_diagnostic.SA1516.severity = warning
+# SA1208 Using directive for 'X' should appear before directive 'Y'
+dotnet_diagnostic.SA1208.severity = warning
+# SA1507 Code should not contain multiple blank lines in a row
+dotnet_diagnostic.SA1507.severity = warning
+# SA1629 Documentation text should end with a period
+dotnet_diagnostic.SA1629.severity = warning
+# SA1121 Use built-in type alias
+dotnet_diagnostic.SA1121.severity = warning
+# SA1413 Use trailing comma in multi-line initializers
+dotnet_diagnostic.SA1413.severity = warning
+# SA1128 Put constructor initializers on their own line
+dotnet_diagnostic.SA1128.severity = warning
+# SA1514 Element documentation header should be preceded by blank line
+dotnet_diagnostic.SA1514.severity = warning
+# SA1502 Element should not be on a single line
+dotnet_diagnostic.SA1502.severity = warning
+# S1128 Remove this unnecessary 'using'
+dotnet_diagnostic.S1128.severity = warning
+# SA1517 Code should not contain blank lines at start of file
+dotnet_diagnostic.SA1517.severity = warning
+# SA1508 A closing brace should not be preceded by a blank line.
+dotnet_diagnostic.SA1508.severity = warning
+# SA1009 Closing parenthesis should be followed by a space
+dotnet_diagnostic.SA1009.severity = warning
+# SA1210 Using directives should be ordered alphabetically by the namespaces
+dotnet_diagnostic.SA1210.severity = warning
+# SA1610 Property documentation should have value text
+dotnet_diagnostic.SA1610.severity = warning
+# CS8019: Using directive is unnecessary.
+dotnet_diagnostic.CS8019.severity = warning
+# SA1005 Single line comment should begin with a space.
+dotnet_diagnostic.SA1005.severity = warning
+# SA1411 Attribute constructor should not use unnecessary parenthesis
+dotnet_diagnostic.SA1411.severity = warning
+# SA1519 Braces should not be omitted from multi-line child statement
+dotnet_diagnostic.SA1519.severity = warning
+# SA1202 'public' members should come before 'private' members
+dotnet_diagnostic.SA1202.severity = warning
+# SA1204 Static members should appear before non-static members.
+dotnet_diagnostic.SA1204.severity = warning
+# SA1127 Generic type constraints should be on their own line
+dotnet_diagnostic.SA1127.severity = warning
+# SA1520 Use braces consistently
+dotnet_diagnostic.SA1520.severity = warning
+# SA1028 Code should not contain trailing whitespace.
+dotnet_diagnostic.SA1028.severity = warning
+# SA1512 Single-line comments should not be followed by blank line.
+dotnet_diagnostic.SA1512.severity = warning
+# SA1500 Braces for multi-line statements should not share line
+dotnet_diagnostic.SA1500.severity = warning
+# SA1119 Statement should not use unnecessary parenthesis.
+dotnet_diagnostic.SA1119.severity = warning
+# CA1847 Use 'string.Contains(char)' instead of 'string.Contains(string)' when searching for a single character
+dotnet_diagnostic.CA1847.severity = warning
+# SA1107 Code should not contain multiple statements on one line
+dotnet_diagnostic.SA1107.severity = warning
+# IDE0005 Using directive is unnecessary.
+dotnet_diagnostic.IDE0005.severity = warning
+# SA1518 Code should not contain blank lines at the end of the file
+dotnet_diagnostic.SA1518.severity = warning
+# SA1510 'else' statement should not be preceded by a blank line.
+dotnet_diagnostic.SA1510.severity = warning
+# SA1116 The parameters should begin on the line after the declaration.
+dotnet_diagnostic.SA1116.severity = warning
+# SA1137 Elements should have the same indentation
+dotnet_diagnostic.SA1137.severity = warning
+# SA1117 The parameters should all be placed on the same line or each parameter should be placed on its own line.
+dotnet_diagnostic.SA1117.severity = warning
+# SA1115 The parameter should begin on the line after the previous parameter.
+dotnet_diagnostic.SA1115.severity = warning
+# SA1612 The parameter 'id' does not exist.
+dotnet_diagnostic.SA1612.severity = warning
+# SA1025 Code should not contain multiple whitespace characters in a row.
+dotnet_diagnostic.SA1025.severity = warning
+# IDE0052 Private member X can be removed as the value assigned to it is never read.
+dotnet_diagnostic.IDE0052.severity = warning
+# SA1214 Readonly fields should appear before non-readonly fields.
+dotnet_diagnostic.SA1214.severity = warning
+# SA1300 Element X should begin with an uppercase letter
+dotnet_diagnostic.SA1300.severity = warning
+
+# Entity Framework files
+[**{entities/ef/*,PIMSContext,PimsBaseContext}.cs]
+# CS8019: Using directive is unnecessary.
+dotnet_diagnostic.CS8019.severity = none
+# SA1502 Element should not be on a single line
+dotnet_diagnostic.SA1502.severity = none
+# SA1516 Elements should be separated by blank line
+dotnet_diagnostic.SA1516.severity = none
+# SonarQube
+# S1128 Remove this unnecessary 'using'
+dotnet_diagnostic.S1128.severity = none
+# S3251 Supply an implementation for this partial method.
+dotnet_diagnostic.S3251.severity = none
\ No newline at end of file
diff --git a/source/backend/scheduler/.gitignore b/source/backend/scheduler/.gitignore
new file mode 100644
index 0000000000..7f9566aaa2
--- /dev/null
+++ b/source/backend/scheduler/.gitignore
@@ -0,0 +1,55 @@
+# Environment variables
+.env
+connectionstrings.*json
+.vscode/settings.json
+
+*.swp
+*.*~
+project.lock.json
+.DS_Store
+*.pyc
+nupkg/
+
+# IDE - VSCode
+.vscode/*
+!.vscode/settings.json
+!.vscode/tasks.json
+!.vscode/launch.json
+!.vscode/extensions.json
+TestResults/
+
+# Rider
+.idea
+
+# User-specific files
+*.suo
+*.user
+*.userosscache
+*.sln.docstates
+
+# Build results
+**/[Dd]ebug/
+**/[Dd]ebugPublic/
+**/[Rr]elease/
+**/[Rr]eleases/
+**/x64/
+**/x86/
+**/build/
+**/bld/
+**/[Bb]in/
+**/[Oo]bj/
+**/[Oo]ut/
+**/msbuild.log
+**/msbuild.err
+**/msbuild.wrn
+
+# Visual Studio 2015
+.vs/
+
+# SonarQube
+.sonarqube/
+
+# NET Core Healthchecks UI
+healthchecksdb
+healthchecksdb-shm
+healthchecksdb-wal
diff --git a/source/backend/scheduler/Configuration/PimsOptions.cs b/source/backend/scheduler/Configuration/PimsOptions.cs
new file mode 100644
index 0000000000..eeedc9d212
--- /dev/null
+++ b/source/backend/scheduler/Configuration/PimsOptions.cs
@@ -0,0 +1,21 @@
+using System.ComponentModel.DataAnnotations;
+
+namespace Pims.Scheduler.Http.Configuration
+{
+ ///
+ /// PimsOptions class, provides a way to store connection information for the PIMS application.
+ ///
+ public class PimsOptions
+ {
+ #region Properties
+
+ ///
+ /// get/set - the internal Uri of the pims server.
+ ///
+ [Required(ErrorMessage = "Configuration 'Uri' is required.")]
+ public string Uri { get; set; }
+
+ public string Environment { get; set; }
+ #endregion
+ }
+}
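
PimsOptions above is a plain options class with a `[Required]` Uri; the Startup code later in this diff registers it through `services.Configure(...)`, but the generic arguments were lost in this excerpt. The block below is only a minimal sketch of how such an options class is commonly bound and validated; the `"Pims"` section name, the extension method, and the use of `ValidateDataAnnotations`/`ValidateOnStart` (which need the Microsoft.Extensions.Options.DataAnnotations package and .NET 6+) are assumptions, not code from this PR.

```csharp
// Hedged sketch only -- not code from this PR. Binds PimsOptions from configuration
// and validates its data annotations so a missing 'Uri' fails at startup.
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
using Pims.Scheduler.Http.Configuration;

public static class PimsOptionsSetup
{
    public static IServiceCollection AddPimsOptions(this IServiceCollection services, IConfiguration configuration)
    {
        services.AddOptions<PimsOptions>()
            .Bind(configuration.GetSection("Pims")) // hypothetical section name
            .ValidateDataAnnotations()              // enforces the [Required] Uri attribute
            .ValidateOnStart();                     // eager validation at host startup
        return services;
    }
}
```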
diff --git a/source/backend/scheduler/Configuration/ProgramOptions.cs b/source/backend/scheduler/Configuration/ProgramOptions.cs
new file mode 100644
index 0000000000..ce2c0d06c0
--- /dev/null
+++ b/source/backend/scheduler/Configuration/ProgramOptions.cs
@@ -0,0 +1,51 @@
+using System.Globalization;
+using System.Linq;
+using CommandLine;
+using Pims.Core.Extensions;
+
+namespace Pims.Api.Configuration
+{
+ ///
+ /// ProgramOptions class, provides a way to parse command line arguments for the PIMS API program.
+ ///
+ public class ProgramOptions
+ {
+ #region Properties
+
+ ///
+ /// get/set - ASP NET Core environment.
+ ///
+ [Option('e', "environment", Required = false, HelpText = "ASPNETCORE_ENVIRONMENT")]
+ public string Environment { get; set; }
+
+ ///
+ /// get/set - ASP NET Core URLs.
+ ///
+ [Option('u', "urls", Required = false, HelpText = "ASPNETCORE_URLS")]
+ public string Urls { get; set; }
+
+ ///
+ /// get/set - ASP NET Core HTTPS port.
+ ///
+ [Option('p', "port", Required = false, HelpText = "ASPNETCORE_HTTPS_PORT")]
+ public int? HttpsPort { get; set; }
+ #endregion
+
+ #region Methods
+
+ ///
+ /// Return an array of arguments of valid option values.
+ ///
+ ///
+ public string[] ToArgs()
+ {
+ return new[]
+ {
+ this.Urls,
+ this.Environment,
+ this.HttpsPort?.ToString(CultureInfo.InvariantCulture),
+ }.NotNull().ToArray();
+ }
+ #endregion
+ }
+}
diff --git a/source/backend/scheduler/Controllers/PimsSchedulerController.cs b/source/backend/scheduler/Controllers/PimsSchedulerController.cs
new file mode 100644
index 0000000000..8074e9a6d5
--- /dev/null
+++ b/source/backend/scheduler/Controllers/PimsSchedulerController.cs
@@ -0,0 +1,64 @@
+using System;
+using System.Threading.Tasks;
+using Hangfire;
+using Microsoft.AspNetCore.Authorization;
+using Microsoft.AspNetCore.Mvc;
+using Microsoft.Extensions.Logging;
+using Pims.Core.Api.Policies;
+using Pims.Core.Extensions;
+using Pims.Core.Security;
+using Pims.Scheduler.Services;
+
+namespace Pims.Scheduler.Controllers
+{
+ ///
+ /// DocumentSchedulerController class, allows a caller to create a job to process pending PIMS documents.
+ ///
+ [Authorize]
+ [ApiController]
+ [Route("v{version:apiVersion}/documents")]
+ [Route("/documents")]
+ public class DocumentSchedulerController : ControllerBase
+ {
+ #region Variables
+
+ private readonly ILogger _logger;
+ private readonly IDocumentQueueService _documentQueueService;
+ private readonly IBackgroundJobClient _backgroundJobClient;
+ #endregion
+
+ #region Constructors
+
+ ///
+ /// Creates a new instance of a DocumentSchedulerController class.
+ ///
+ public DocumentSchedulerController(ILogger logger, IDocumentQueueService documentQueueService, IBackgroundJobClient backgroundJobClient)
+ {
+ _logger = logger;
+ _documentQueueService = documentQueueService;
+ _backgroundJobClient = backgroundJobClient;
+ }
+ #endregion
+
+ #region Endpoints
+
+ [Route("queued")]
+ [HasPermission(Permissions.SystemAdmin)]
+ public Task ProcessQueuedDocuments()
+ {
+ _logger.LogInformation(
+ "Request received by Controller: {Controller}, Action: {ControllerAction}, User: {User}, DateTime: {DateTime}",
+ nameof(DocumentSchedulerController),
+ nameof(ProcessQueuedDocuments),
+ User.GetUsername(),
+ DateTime.Now);
+
+ // TODO: this is a placeholder only
+ _backgroundJobClient.Enqueue(() => _documentQueueService.UploadQueuedDocuments());
+
+ return Task.CompletedTask;
+ }
+
+ #endregion
+ }
+}
diff --git a/source/backend/scheduler/Directory.Build.props b/source/backend/scheduler/Directory.Build.props
new file mode 100644
index 0000000000..d9a8909ec7
--- /dev/null
+++ b/source/backend/scheduler/Directory.Build.props
@@ -0,0 +1,40 @@
+
+
+ net8.0
+ 11.0
+ Minimum
+ 4
+ true
+ true
+
+
+
+ runtime; build; native; contentfiles; analyzers; buildtransitive
+ all
+
+
+
+
+
+
+
+
+
+ runtime; build; native; contentfiles; analyzers; buildtransitive
+ all
+
+
+ runtime; build; native; contentfiles; analyzers; buildtransitive
+ all
+
+
+
+
+ all
+ runtime; build; native; contentfiles; analyzers;
+
+
+
+
+
+
diff --git a/source/backend/scheduler/Pims.Scheduler.csproj b/source/backend/scheduler/Pims.Scheduler.csproj
new file mode 100644
index 0000000000..e4f4993886
--- /dev/null
+++ b/source/backend/scheduler/Pims.Scheduler.csproj
@@ -0,0 +1,63 @@
+
+
+
+ true
+ 16BC0468-78F6-4C91-87DA-7403C919E646
+ net8.0
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ all
+ runtime; build; native; contentfiles; analyzers; buildtransitive
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/source/backend/scheduler/Policies/HangfireDashboardAuthorizationFilter.cs b/source/backend/scheduler/Policies/HangfireDashboardAuthorizationFilter.cs
new file mode 100644
index 0000000000..2117b5f9e0
--- /dev/null
+++ b/source/backend/scheduler/Policies/HangfireDashboardAuthorizationFilter.cs
@@ -0,0 +1,40 @@
+using System.Linq;
+using Hangfire.Dashboard;
+using Microsoft.Extensions.Options;
+using Pims.Core.Extensions;
+using Pims.Core.Security;
+using Pims.Keycloak.Configuration;
+
+namespace Pims.Scheduler.Policies
+{
+ public class HangfireDashboardAuthorizationFilter : IDashboardAuthorizationFilter
+ {
+ #region Variables
+ private readonly Permissions[] _permissions;
+ private readonly IOptionsMonitor _keycloakOptions;
+ #endregion
+
+ #region Constructors
+
+ ///
+ /// Creates a new instance of a HangfireDashboardAuthorizationFilter class, initialized with the specified permission.
+ /// This will ensure the user has the specified permission.
+ ///
+ ///
+ ///
+ public HangfireDashboardAuthorizationFilter(IOptionsMonitor options, Permissions permission)
+ {
+ _permissions = new[] { permission };
+ _keycloakOptions = options;
+ }
+ #endregion
+
+ public bool Authorize(DashboardContext context)
+ {
+ var httpContext = context.GetHttpContext();
+ var hasRole = httpContext.User.HasPermission(_permissions.ToArray());
+ var isServiceAccount = httpContext.User.IsServiceAccount(_keycloakOptions);
+ return hasRole || isServiceAccount;
+ }
+ }
+}
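
HangfireDashboardAuthorizationFilter implements Hangfire's `IDashboardAuthorizationFilter`, which the dashboard consults on every request. The Startup excerpt at the end of this diff is cut off at `app.UseHangfireDashboard(options: new DashboardOptions`, so the actual wiring is not visible; below is a hedged sketch of a typical attachment. The `KeycloakOptions` type argument, the `/hangfire` path, and the helper class are assumptions; `Permissions.SystemAdmin` is taken from the controller earlier in this diff.

```csharp
// Hedged sketch only -- not the PR's Startup code. Attaches the dashboard
// authorization filter so only privileged users (or the service account) get access.
using Hangfire;
using Microsoft.AspNetCore.Builder;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Options;
using Pims.Core.Security;
using Pims.Keycloak.Configuration;
using Pims.Scheduler.Policies;

public static class HangfireDashboardSetup
{
    public static IApplicationBuilder UsePimsHangfireDashboard(this IApplicationBuilder app)
    {
        // The filter needs the Keycloak options to recognize the service account.
        var keycloakOptions = app.ApplicationServices.GetRequiredService<IOptionsMonitor<KeycloakOptions>>();

        return app.UseHangfireDashboard("/hangfire", new DashboardOptions
        {
            Authorization = new[] { new HangfireDashboardAuthorizationFilter(keycloakOptions, Permissions.SystemAdmin) },
        });
    }
}
```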
diff --git a/source/backend/scheduler/Program.cs b/source/backend/scheduler/Program.cs
new file mode 100644
index 0000000000..485337ce60
--- /dev/null
+++ b/source/backend/scheduler/Program.cs
@@ -0,0 +1,84 @@
+using System;
+using System.Diagnostics.CodeAnalysis;
+using CommandLine;
+using CommandLine.Text;
+using Microsoft.AspNetCore.Hosting;
+using Microsoft.Extensions.Configuration;
+using Microsoft.Extensions.Hosting;
+using Pims.Api.Configuration;
+using Serilog;
+
+namespace Pims.Scheduler
+{
+ ///
+ /// Program class, provides the main program starting point for the scheduling application.
+ ///
+ [ExcludeFromCodeCoverage]
+ public static class Program
+ {
+ ///
+ /// The primary entry point for the application.
+ ///
+ ///
+ public static void Main(string[] args)
+ {
+ var results = Parser.Default.ParseArguments(args);
+
+ results.WithParsed((options) =>
+ {
+ var builder = CreateWebHostBuilder(options);
+ builder.Build().Run();
+ })
+ .WithNotParsed((errors) =>
+ {
+ var helpText = HelpText.AutoBuild(
+ results,
+ h => { return HelpText.DefaultParsingErrorsHandler(results, h); },
+ e => e);
+ Console.WriteLine(helpText);
+ });
+ }
+
+ public static IHostBuilder CreateHostBuilder(string[] args)
+ {
+ DotNetEnv.Env.Load();
+ return Host.CreateDefaultBuilder(args).ConfigureAppConfiguration((hostingContext, config) =>
+ {
+ config.AddJsonFile("appsettings.json", optional: false, reloadOnChange: true);
+ config.AddEnvironmentVariables();
+ config.AddCommandLine(args);
+ });
+ }
+
+ ///
+ /// Create a default configuration and setup for a web application.
+ ///
+ ///
+ ///
+ private static IHostBuilder CreateWebHostBuilder(ProgramOptions options)
+ {
+ var args = options.ToArgs();
+ DotNetEnv.Env.Load();
+ var env = Environment.GetEnvironmentVariable("ASPNETCORE_ENVIRONMENT");
+ var config = new ConfigurationBuilder()
+ .AddEnvironmentVariables()
+ .AddCommandLine(args)
+ .Build();
+
+ return Host.CreateDefaultBuilder(args).ConfigureWebHostDefaults(webHostBuilder =>
+ webHostBuilder.ConfigureAppConfiguration((hostingContext, config) =>
+ {
+ config.AddJsonFile("appsettings.json", optional: false, reloadOnChange: true);
+ config.AddJsonFile($"appsettings.{env}.json", optional: true, reloadOnChange: true);
+ config.AddEnvironmentVariables();
+ config.AddCommandLine(args);
+ })
+ .UseUrls(config.GetValue("ASPNETCORE_URLS"))
+ .UseStartup()
+ .UseKestrel(options =>
+ {
+ options.Limits.MaxRequestBodySize = 524288000; // 500MB
+ })).UseSerilog();
+ }
+ }
+}
diff --git a/source/backend/scheduler/Properties/launchSettings.json b/source/backend/scheduler/Properties/launchSettings.json
new file mode 100644
index 0000000000..129dea4ff0
--- /dev/null
+++ b/source/backend/scheduler/Properties/launchSettings.json
@@ -0,0 +1,27 @@
+{
+ "iisSettings": {
+ "windowsAuthentication": false,
+ "anonymousAuthentication": true,
+ "iisExpress": {
+ "applicationUrl": "http://localhost:8057;https://localhost:8058",
+ "sslPort": 44342
+ }
+ },
+ "profiles": {
+ "IIS Express": {
+ "commandName": "IISExpress",
+ "launchBrowser": false,
+ "environmentVariables": {
+ "ASPNETCORE_ENVIRONMENT": "Local"
+ }
+ },
+ "Pims.Scheduler": {
+ "commandName": "Project",
+ "launchBrowser": false,
+ "environmentVariables": {
+ "ASPNETCORE_ENVIRONMENT": "Local",
+ "URLS": "http://localhost:8057;https://localhost:8058"
+ }
+ }
+ }
+}
diff --git a/source/backend/scheduler/README.md b/source/backend/scheduler/README.md
new file mode 100644
index 0000000000..94532a28c9
--- /dev/null
+++ b/source/backend/scheduler/README.md
@@ -0,0 +1,33 @@
+# PIMS scheduler
+
+The PIMS scheduler provides a keycloak-authenticated Hangfire instance.
+
+To run the scheduler locally you will need to create the appropriate environment variable `.env` files. You can do this by using the prebuilt scripts [here](../../scripts/README.md).
+
+## Hangfire
+
+1. All Serilog logging is piped into Hangfire and is visible in the job dashboard.
+2. All recurring Hangfire jobs must be registered in code (currently within Startup.cs). They can then be controlled via the appsettings files or by environment variables (a registration sketch follows below).
+3. Hangfire depends on Redis to store job state. Redis runs as a separate container, with storage in local Docker and in OpenShift.
+4. Follow Hangfire best practices: all jobs should be written so that they can resume from any point if they are interrupted.
+
+## API Environment Variables
+
+The current environment is initialized through the environment variable `ASPNETCORE_ENVIRONMENT`.
+
+When running the solution it applies the configuration setting in the following order;
+
+> NOTE: When the environment is Development it will look for your _User Secrets_ file.
+
+1. appsettings.json
+2. appsettings.`[environment]`.json
+3. User Secrets `(if environment=Development)`
+4. Environment Variables
+
+To run the solution with docker-compose, create a `.env` file within the `/scheduler` directory and populate it with the following;
+
+```conf
+ASPNETCORE_ENVIRONMENT=Development
+ASPNETCORE_URLS=http://*:8080
+ASPNETCORE_FORWARDEDHEADERS_ENABLED=true
+```
diff --git a/source/backend/scheduler/Repositories/Interfaces/IPimsDocumentRepository.cs b/source/backend/scheduler/Repositories/Interfaces/IPimsDocumentRepository.cs
new file mode 100644
index 0000000000..7505e3576d
--- /dev/null
+++ b/source/backend/scheduler/Repositories/Interfaces/IPimsDocumentRepository.cs
@@ -0,0 +1,16 @@
+using System.Collections.Generic;
+using System.Threading.Tasks;
+using Pims.Api.Models.Concepts.Document;
+using Pims.Api.Models.Requests.Http;
+using Pims.Dal.Entities.Models;
+
+namespace Pims.Scheduler.Repositories.Pims
+{
+ ///
+ /// IPimsDocumentQueueRepository interface, defines the functionality for a pims repository.
+ ///
+ public interface IPimsDocumentQueueRepository
+ {
+ Task>> SearchQueuedDocumentsAsync(DocumentQueueFilter filter);
+ }
+}
diff --git a/source/backend/scheduler/Repositories/PimsBaseRepository.cs b/source/backend/scheduler/Repositories/PimsBaseRepository.cs
new file mode 100644
index 0000000000..6cdccd99cc
--- /dev/null
+++ b/source/backend/scheduler/Repositories/PimsBaseRepository.cs
@@ -0,0 +1,52 @@
+using System.Collections.Generic;
+using System.Net.Http;
+using System.Net.Http.Headers;
+using Microsoft.Extensions.Logging;
+using Pims.Core.Api.Repositories.Rest;
+
+namespace Pims.Scheduler.Repositories
+{
+ public abstract class PimsBaseRepository : BaseRestRepository
+ {
+
+ ///
+ /// Initializes a new instance of the class.
+ ///
+ /// Injected Logger Provider.
+ /// Injected Httpclient factory.
+ protected PimsBaseRepository(
+ ILogger logger,
+ IHttpClientFactory httpClientFactory)
+ : base(logger, httpClientFactory)
+ {
+ }
+
+ public override void AddAuthentication(HttpClient client, string authenticationToken = null)
+ {
+ if (!string.IsNullOrEmpty(authenticationToken))
+ {
+ client.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("Bearer", authenticationToken.Split(" ")[1]);
+ }
+ }
+
+ protected Dictionary GenerateQueryParams(string ordering = "", int? page = null, int? pageSize = null)
+ {
+ Dictionary queryParams = new();
+
+ if (!string.IsNullOrEmpty(ordering))
+ {
+ queryParams["ordering"] = ordering;
+ }
+ if (page.HasValue)
+ {
+ queryParams["page"] = page.ToString();
+ }
+ if (pageSize.HasValue)
+ {
+ queryParams["page_size"] = pageSize.ToString();
+ }
+
+ return queryParams;
+ }
+ }
+}
diff --git a/source/backend/scheduler/Repositories/PimsRepository.cs b/source/backend/scheduler/Repositories/PimsRepository.cs
new file mode 100644
index 0000000000..f6b2b061c6
--- /dev/null
+++ b/source/backend/scheduler/Repositories/PimsRepository.cs
@@ -0,0 +1,55 @@
+using System;
+using System.Collections.Generic;
+using System.Net.Http;
+using System.Threading.Tasks;
+using Microsoft.Extensions.Logging;
+using Microsoft.Extensions.Options;
+using Pims.Api.Models.Concepts.Document;
+using Pims.Api.Models.Requests.Http;
+using Pims.Core.Http;
+using Pims.Dal.Entities.Models;
+using Pims.Scheduler.Http.Configuration;
+
+namespace Pims.Scheduler.Repositories.Pims
+{
+ ///
+ /// PimsDocumentQueueRepository provides document access from the PIMS document queue api.
+ ///
+ public class PimsDocumentQueueRepository : PimsBaseRepository, IPimsDocumentQueueRepository
+ {
+ private readonly IOpenIdConnectRequestClient _authRepository;
+ private readonly IOptionsMonitor _configuration;
+
+ ///
+ /// Initializes a new instance of the class.
+ ///
+ /// Injected Logger Provider.
+ /// Injected Httpclient factory.
+ /// Injected repository that handles authentication.
+ /// The injected configuration provider.
+ public PimsDocumentQueueRepository(
+ ILogger logger,
+ IHttpClientFactory httpClientFactory,
+ IOpenIdConnectRequestClient authRepository,
+ IOptionsMonitor configuration)
+ : base(logger, httpClientFactory)
+ {
+ _authRepository = authRepository;
+ _configuration = configuration;
+ }
+
+ public async Task>> SearchQueuedDocumentsAsync(DocumentQueueFilter filter)
+ {
+ _logger.LogDebug("Getting filtered list of queued documents by {filter}", filter);
+
+ string authenticationToken = await _authRepository.RequestAccessToken();
+
+ Uri endpoint = new($"{_configuration.CurrentValue.Uri}/documents/queue/search");
+
+ var response = await GetAsync>(endpoint, authenticationToken);
+ _logger.LogDebug($"Retrieved list of queued documents based on {filter} ", filter);
+
+ return response;
+ }
+ }
+}
diff --git a/source/backend/scheduler/Scheduler.sln b/source/backend/scheduler/Scheduler.sln
new file mode 100644
index 0000000000..9175bf6124
--- /dev/null
+++ b/source/backend/scheduler/Scheduler.sln
@@ -0,0 +1,132 @@
+
+Microsoft Visual Studio Solution File, Format Version 12.00
+# Visual Studio Version 17
+VisualStudioVersion = 17.4.33110.190
+MinimumVisualStudioVersion = 15.0.26124.0
+Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Pims.Scheduler", "Pims.Scheduler.csproj", "{16BC0468-78F6-4C91-87DA-7403C919E646}"
+EndProject
+Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "root", "root", "{8D3E4CB2-41D6-4AA6-B9E4-CFCAB0E2F5BC}"
+ ProjectSection(SolutionItems) = preProject
+ .dockerignore = .dockerignore
+ .editorconfig = .editorconfig
+ .gitignore = .gitignore
+ Dockerfile = Dockerfile
+ Dockerfile.bak = Dockerfile.bak
+ entrypoint.sh = entrypoint.sh
+ README.md = README.md
+ EndProjectSection
+EndProject
+Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "libraries", "libraries", "{5237F8A4-67F5-4751-B8B2-B93A06791480}"
+EndProject
+Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "tests", "tests", "{F256F2A5-0DBF-4137-A7D6-21F08111BD4A}"
+EndProject
+Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "unit", "unit", "{3D70B211-74A8-484C-9B86-B0A2835C71E7}"
+EndProject
+Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Pims.Core.Api", "..\core.api\Pims.Core.Api.csproj", "{89A99CC5-ADFB-4FC2-9136-7B0029EEA2D8}"
+EndProject
+Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Pims.Core", "..\core\Pims.Core.csproj", "{AC8F04FF-3164-41FB-9EDF-E468B8B77837}"
+EndProject
+Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Pims.Keycloak", "..\keycloak\Pims.Keycloak.csproj", "{970903E9-BC53-436F-BA77-C62349546425}"
+EndProject
+Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Pims.Api.Models", "..\apimodels\Pims.Api.Models.csproj", "{58C42283-68DA-477F-915D-C67597543546}"
+EndProject
+Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Pims.Dal.Entities", "..\entities\Pims.Dal.Entities.csproj", "{1C724CD5-CD24-46CD-835A-A83F673F97B5}"
+EndProject
+Global
+ GlobalSection(SolutionConfigurationPlatforms) = preSolution
+ Debug|Any CPU = Debug|Any CPU
+ Debug|x64 = Debug|x64
+ Debug|x86 = Debug|x86
+ Release|Any CPU = Release|Any CPU
+ Release|x64 = Release|x64
+ Release|x86 = Release|x86
+ EndGlobalSection
+ GlobalSection(ProjectConfigurationPlatforms) = postSolution
+ {16BC0468-78F6-4C91-87DA-7403C919E646}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {16BC0468-78F6-4C91-87DA-7403C919E646}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {16BC0468-78F6-4C91-87DA-7403C919E646}.Debug|x64.ActiveCfg = Debug|Any CPU
+ {16BC0468-78F6-4C91-87DA-7403C919E646}.Debug|x64.Build.0 = Debug|Any CPU
+ {16BC0468-78F6-4C91-87DA-7403C919E646}.Debug|x86.ActiveCfg = Debug|Any CPU
+ {16BC0468-78F6-4C91-87DA-7403C919E646}.Debug|x86.Build.0 = Debug|Any CPU
+ {16BC0468-78F6-4C91-87DA-7403C919E646}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {16BC0468-78F6-4C91-87DA-7403C919E646}.Release|Any CPU.Build.0 = Release|Any CPU
+ {16BC0468-78F6-4C91-87DA-7403C919E646}.Release|x64.ActiveCfg = Release|Any CPU
+ {16BC0468-78F6-4C91-87DA-7403C919E646}.Release|x64.Build.0 = Release|Any CPU
+ {16BC0468-78F6-4C91-87DA-7403C919E646}.Release|x86.ActiveCfg = Release|Any CPU
+ {16BC0468-78F6-4C91-87DA-7403C919E646}.Release|x86.Build.0 = Release|Any CPU
+ {89A99CC5-ADFB-4FC2-9136-7B0029EEA2D8}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {89A99CC5-ADFB-4FC2-9136-7B0029EEA2D8}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {89A99CC5-ADFB-4FC2-9136-7B0029EEA2D8}.Debug|x64.ActiveCfg = Debug|Any CPU
+ {89A99CC5-ADFB-4FC2-9136-7B0029EEA2D8}.Debug|x64.Build.0 = Debug|Any CPU
+ {89A99CC5-ADFB-4FC2-9136-7B0029EEA2D8}.Debug|x86.ActiveCfg = Debug|Any CPU
+ {89A99CC5-ADFB-4FC2-9136-7B0029EEA2D8}.Debug|x86.Build.0 = Debug|Any CPU
+ {89A99CC5-ADFB-4FC2-9136-7B0029EEA2D8}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {89A99CC5-ADFB-4FC2-9136-7B0029EEA2D8}.Release|Any CPU.Build.0 = Release|Any CPU
+ {89A99CC5-ADFB-4FC2-9136-7B0029EEA2D8}.Release|x64.ActiveCfg = Release|Any CPU
+ {89A99CC5-ADFB-4FC2-9136-7B0029EEA2D8}.Release|x64.Build.0 = Release|Any CPU
+ {89A99CC5-ADFB-4FC2-9136-7B0029EEA2D8}.Release|x86.ActiveCfg = Release|Any CPU
+ {89A99CC5-ADFB-4FC2-9136-7B0029EEA2D8}.Release|x86.Build.0 = Release|Any CPU
+ {AC8F04FF-3164-41FB-9EDF-E468B8B77837}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {AC8F04FF-3164-41FB-9EDF-E468B8B77837}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {AC8F04FF-3164-41FB-9EDF-E468B8B77837}.Debug|x64.ActiveCfg = Debug|Any CPU
+ {AC8F04FF-3164-41FB-9EDF-E468B8B77837}.Debug|x64.Build.0 = Debug|Any CPU
+ {AC8F04FF-3164-41FB-9EDF-E468B8B77837}.Debug|x86.ActiveCfg = Debug|Any CPU
+ {AC8F04FF-3164-41FB-9EDF-E468B8B77837}.Debug|x86.Build.0 = Debug|Any CPU
+ {AC8F04FF-3164-41FB-9EDF-E468B8B77837}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {AC8F04FF-3164-41FB-9EDF-E468B8B77837}.Release|Any CPU.Build.0 = Release|Any CPU
+ {AC8F04FF-3164-41FB-9EDF-E468B8B77837}.Release|x64.ActiveCfg = Release|Any CPU
+ {AC8F04FF-3164-41FB-9EDF-E468B8B77837}.Release|x64.Build.0 = Release|Any CPU
+ {AC8F04FF-3164-41FB-9EDF-E468B8B77837}.Release|x86.ActiveCfg = Release|Any CPU
+ {AC8F04FF-3164-41FB-9EDF-E468B8B77837}.Release|x86.Build.0 = Release|Any CPU
+ {970903E9-BC53-436F-BA77-C62349546425}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {970903E9-BC53-436F-BA77-C62349546425}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {970903E9-BC53-436F-BA77-C62349546425}.Debug|x64.ActiveCfg = Debug|Any CPU
+ {970903E9-BC53-436F-BA77-C62349546425}.Debug|x64.Build.0 = Debug|Any CPU
+ {970903E9-BC53-436F-BA77-C62349546425}.Debug|x86.ActiveCfg = Debug|Any CPU
+ {970903E9-BC53-436F-BA77-C62349546425}.Debug|x86.Build.0 = Debug|Any CPU
+ {970903E9-BC53-436F-BA77-C62349546425}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {970903E9-BC53-436F-BA77-C62349546425}.Release|Any CPU.Build.0 = Release|Any CPU
+ {970903E9-BC53-436F-BA77-C62349546425}.Release|x64.ActiveCfg = Release|Any CPU
+ {970903E9-BC53-436F-BA77-C62349546425}.Release|x64.Build.0 = Release|Any CPU
+ {970903E9-BC53-436F-BA77-C62349546425}.Release|x86.ActiveCfg = Release|Any CPU
+ {970903E9-BC53-436F-BA77-C62349546425}.Release|x86.Build.0 = Release|Any CPU
+ {58C42283-68DA-477F-915D-C67597543546}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {58C42283-68DA-477F-915D-C67597543546}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {58C42283-68DA-477F-915D-C67597543546}.Debug|x64.ActiveCfg = Debug|Any CPU
+ {58C42283-68DA-477F-915D-C67597543546}.Debug|x64.Build.0 = Debug|Any CPU
+ {58C42283-68DA-477F-915D-C67597543546}.Debug|x86.ActiveCfg = Debug|Any CPU
+ {58C42283-68DA-477F-915D-C67597543546}.Debug|x86.Build.0 = Debug|Any CPU
+ {58C42283-68DA-477F-915D-C67597543546}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {58C42283-68DA-477F-915D-C67597543546}.Release|Any CPU.Build.0 = Release|Any CPU
+ {58C42283-68DA-477F-915D-C67597543546}.Release|x64.ActiveCfg = Release|Any CPU
+ {58C42283-68DA-477F-915D-C67597543546}.Release|x64.Build.0 = Release|Any CPU
+ {58C42283-68DA-477F-915D-C67597543546}.Release|x86.ActiveCfg = Release|Any CPU
+ {58C42283-68DA-477F-915D-C67597543546}.Release|x86.Build.0 = Release|Any CPU
+ {1C724CD5-CD24-46CD-835A-A83F673F97B5}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {1C724CD5-CD24-46CD-835A-A83F673F97B5}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {1C724CD5-CD24-46CD-835A-A83F673F97B5}.Debug|x64.ActiveCfg = Debug|Any CPU
+ {1C724CD5-CD24-46CD-835A-A83F673F97B5}.Debug|x64.Build.0 = Debug|Any CPU
+ {1C724CD5-CD24-46CD-835A-A83F673F97B5}.Debug|x86.ActiveCfg = Debug|Any CPU
+ {1C724CD5-CD24-46CD-835A-A83F673F97B5}.Debug|x86.Build.0 = Debug|Any CPU
+ {1C724CD5-CD24-46CD-835A-A83F673F97B5}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {1C724CD5-CD24-46CD-835A-A83F673F97B5}.Release|Any CPU.Build.0 = Release|Any CPU
+ {1C724CD5-CD24-46CD-835A-A83F673F97B5}.Release|x64.ActiveCfg = Release|Any CPU
+ {1C724CD5-CD24-46CD-835A-A83F673F97B5}.Release|x64.Build.0 = Release|Any CPU
+ {1C724CD5-CD24-46CD-835A-A83F673F97B5}.Release|x86.ActiveCfg = Release|Any CPU
+ {1C724CD5-CD24-46CD-835A-A83F673F97B5}.Release|x86.Build.0 = Release|Any CPU
+ EndGlobalSection
+ GlobalSection(SolutionProperties) = preSolution
+ HideSolutionNode = FALSE
+ EndGlobalSection
+ GlobalSection(NestedProjects) = preSolution
+ {3D70B211-74A8-484C-9B86-B0A2835C71E7} = {F256F2A5-0DBF-4137-A7D6-21F08111BD4A}
+ {89A99CC5-ADFB-4FC2-9136-7B0029EEA2D8} = {5237F8A4-67F5-4751-B8B2-B93A06791480}
+ {AC8F04FF-3164-41FB-9EDF-E468B8B77837} = {5237F8A4-67F5-4751-B8B2-B93A06791480}
+ {970903E9-BC53-436F-BA77-C62349546425} = {5237F8A4-67F5-4751-B8B2-B93A06791480}
+ {58C42283-68DA-477F-915D-C67597543546} = {5237F8A4-67F5-4751-B8B2-B93A06791480}
+ {1C724CD5-CD24-46CD-835A-A83F673F97B5} = {5237F8A4-67F5-4751-B8B2-B93A06791480}
+ EndGlobalSection
+ GlobalSection(ExtensibilityGlobals) = postSolution
+ SolutionGuid = {3433C5DD-DC49-4A96-A1AE-90C1A1EBA87C}
+ EndGlobalSection
+EndGlobal
diff --git a/source/backend/scheduler/Scheduler/IJobRescheduler.cs b/source/backend/scheduler/Scheduler/IJobRescheduler.cs
new file mode 100644
index 0000000000..f1acaa8866
--- /dev/null
+++ b/source/backend/scheduler/Scheduler/IJobRescheduler.cs
@@ -0,0 +1,11 @@
+namespace Pims.Scheduler.Rescheduler
+{
+ public interface IJobRescheduler
+ {
+ ///
+ /// Load from configuration and override the schedule set via code.
+ /// If the configuration is null, the schedule is taken from the singleton instance of JobConfiguration.
+ ///
+ void LoadSchedules(JobScheduleOptions options);
+ }
+}
diff --git a/source/backend/scheduler/Scheduler/JobRescheduler.cs b/source/backend/scheduler/Scheduler/JobRescheduler.cs
new file mode 100644
index 0000000000..791942711c
--- /dev/null
+++ b/source/backend/scheduler/Scheduler/JobRescheduler.cs
@@ -0,0 +1,59 @@
+using System;
+using System.Linq;
+using CsvHelper.Configuration;
+using Hangfire;
+using Hangfire.Storage;
+
+namespace Pims.Scheduler.Rescheduler
+{
+ public class JobRescheduler : IJobRescheduler
+ {
+ private readonly JobStorage _jobStorage;
+ private readonly IRecurringJobManager _recurringJobManager;
+
+ public JobRescheduler(JobStorage jobStorage, IRecurringJobManager recurringJobManager)
+ {
+ _jobStorage = jobStorage;
+ _recurringJobManager = recurringJobManager;
+ }
+
+ public void LoadSchedules(JobScheduleOptions options)
+ {
+ var storageConnection = _jobStorage.GetConnection();
+
+ foreach (var scheduling in options.Schedules)
+ {
+ var recurringJob = storageConnection.GetRecurringJobs().FirstOrDefault(j => j.Id == scheduling.JobId);
+ if (recurringJob == null)
+ {
+ continue;
+ }
+
+ if (!scheduling.IsEnabled)
+ {
+ _recurringJobManager.RemoveIfExists(scheduling.JobId);
+ continue;
+ }
+
+ var timezoneId = scheduling.TimeZoneId ?? recurringJob.TimeZoneId ?? TimeZoneInfo.Local.Id;
+ var timezone = TimeZoneInfo.FindSystemTimeZoneById(timezoneId);
+ if (timezone == null)
+ {
+ throw new ConfigurationException($"Unable to find TimeZoneInfo : {timezoneId}");
+ }
+
+ var cron = scheduling.Cron;
+ if (cron == null)
+ {
+ throw new ConfigurationException($"Cron is required");
+ }
+
+ _recurringJobManager.AddOrUpdate(
+ recurringJob.Id,
+ recurringJob.Job,
+ scheduling.Cron,
+ new RecurringJobOptions() { TimeZone = timezone });
+ }
+ }
+ }
+}
diff --git a/source/backend/scheduler/Scheduler/JobScheduleOption.cs b/source/backend/scheduler/Scheduler/JobScheduleOption.cs
new file mode 100644
index 0000000000..07bb74c470
--- /dev/null
+++ b/source/backend/scheduler/Scheduler/JobScheduleOption.cs
@@ -0,0 +1,28 @@
+namespace Pims.Scheduler.Rescheduler
+{
+ ///
+ /// JobSchedule.
+ ///
+ public class JobScheduleOption
+ {
+ ///
+ /// Recurring JobId.
+ ///
+ public string JobId { get; set; } = default!;
+
+ ///
+ /// Cron Expression.
+ ///
+ public string Cron { get; set; } = default!;
+
+ ///
+ /// If enabled == false, the job will be removed.
+ ///
+ public bool IsEnabled { get; set; } = true;
+
+ ///
+ /// TimezoneId.
+ ///
+ public string TimeZoneId { get; set; }
+ }
+}
diff --git a/source/backend/scheduler/Scheduler/JobScheduleOptions.cs b/source/backend/scheduler/Scheduler/JobScheduleOptions.cs
new file mode 100644
index 0000000000..726e40462d
--- /dev/null
+++ b/source/backend/scheduler/Scheduler/JobScheduleOptions.cs
@@ -0,0 +1,12 @@
+using System.Collections.Generic;
+
+namespace Pims.Scheduler.Rescheduler
+{
+ public class JobScheduleOptions
+ {
+ ///
+ /// Schedules. This will override the runtime registration defined in Jobs.Registry module.
+ ///
+ public List Schedules { get; set; } = new List();
+ }
+}
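
`JobScheduleOption`/`JobScheduleOptions` define the configuration shape that `IJobRescheduler.LoadSchedules` consumes to re-time or remove recurring jobs that were registered in code. Where the options are bound and where `LoadSchedules` is invoked is not visible in this excerpt, so the block below is a hedged sketch under stated assumptions: the `"JobScheduleOptions"` section name, the job id, the cron value, and the registration of `IJobRescheduler` in DI are all assumptions.

```csharp
// Hedged sketch only -- binding JobScheduleOptions from configuration and applying
// the overrides. Section name, job id, and cron are assumptions, not PR values.
using System;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
using Pims.Scheduler.Rescheduler;

public static class JobScheduleSetup
{
    // Equivalent JSON (hypothetical):
    // { "JobScheduleOptions": { "Schedules": [
    //     { "JobId": "upload-queued-documents", "Cron": "*/15 * * * *", "IsEnabled": true } ] } }
    public static void ApplyScheduleOverrides(IServiceProvider services, IConfiguration configuration)
    {
        var options = configuration.GetSection("JobScheduleOptions").Get<JobScheduleOptions>()
                      ?? new JobScheduleOptions();

        // JobRescheduler removes disabled jobs and re-applies cron/timezone for the rest.
        var rescheduler = services.GetRequiredService<IJobRescheduler>();
        rescheduler.LoadSchedules(options);
    }
}
```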
diff --git a/source/backend/scheduler/Services/DocumentQueueService.cs b/source/backend/scheduler/Services/DocumentQueueService.cs
new file mode 100644
index 0000000000..6e2c2a01c3
--- /dev/null
+++ b/source/backend/scheduler/Services/DocumentQueueService.cs
@@ -0,0 +1,29 @@
+using System.Threading.Tasks;
+using Microsoft.Extensions.Logging;
+using Pims.Api.Models.CodeTypes;
+using Pims.Core.Api.Services;
+using Pims.Scheduler.Repositories.Pims;
+
+namespace Pims.Scheduler.Services
+{
+ public class DocumentQueueService : BaseService, IDocumentQueueService
+ {
+ private readonly ILogger _logger;
+ private readonly IPimsDocumentQueueRepository _pimsDocumentQueueRepository;
+
+ public DocumentQueueService(
+ ILogger logger,
+ IPimsDocumentQueueRepository pimsDocumentQueueRepository)
+ : base(null, logger)
+ {
+ _logger = logger;
+ _pimsDocumentQueueRepository = pimsDocumentQueueRepository;
+ }
+
+ public async Task UploadQueuedDocuments()
+ {
+ var queuedDocuments = await _pimsDocumentQueueRepository.SearchQueuedDocumentsAsync(new Dal.Entities.Models.DocumentQueueFilter() { Quantity = 50, DocumentQueueStatusTypeCode = DocumentQueueStatusTypes.PENDING.ToString() });
+ _logger.LogInformation("retrieved {queuedDocuments} documents", queuedDocuments?.Payload?.Count);
+ }
+ }
+}
diff --git a/source/backend/scheduler/Services/Interfaces/IDocumentQueueService.cs b/source/backend/scheduler/Services/Interfaces/IDocumentQueueService.cs
new file mode 100644
index 0000000000..8820b3913b
--- /dev/null
+++ b/source/backend/scheduler/Services/Interfaces/IDocumentQueueService.cs
@@ -0,0 +1,9 @@
+using System.Threading.Tasks;
+
+namespace Pims.Scheduler.Services
+{
+ public interface IDocumentQueueService
+ {
+ public Task UploadQueuedDocuments();
+ }
+}
diff --git a/source/backend/scheduler/Startup.cs b/source/backend/scheduler/Startup.cs
new file mode 100644
index 0000000000..86b192e7c7
--- /dev/null
+++ b/source/backend/scheduler/Startup.cs
@@ -0,0 +1,350 @@
+using System;
+using System.Collections.Generic;
+using System.Diagnostics.CodeAnalysis;
+using System.IdentityModel.Tokens.Jwt;
+using System.IO;
+using System.Linq;
+using System.Reflection;
+using System.Security.Claims;
+using System.Text;
+using System.Text.Json;
+using System.Text.Json.Serialization;
+using System.Threading.Tasks;
+using Hangfire;
+using Hangfire.Console;
+using Hangfire.Console.Extensions;
+using Hangfire.Redis.StackExchange;
+using HealthChecks.UI.Client;
+using Microsoft.AspNetCore.Authentication.JwtBearer;
+using Microsoft.AspNetCore.Builder;
+using Microsoft.AspNetCore.Diagnostics.HealthChecks;
+using Microsoft.AspNetCore.Hosting;
+using Microsoft.AspNetCore.Http;
+using Microsoft.AspNetCore.HttpOverrides;
+using Microsoft.AspNetCore.Mvc.Versioning;
+using Microsoft.Extensions.Configuration;
+using Microsoft.Extensions.DependencyInjection;
+using Microsoft.Extensions.Hosting;
+using Microsoft.Extensions.Options;
+using Microsoft.IdentityModel.Tokens;
+using Microsoft.OpenApi.Models;
+using Pims.Core.Api.Exceptions;
+using Pims.Core.Api.Handlers;
+using Pims.Core.Api.Helpers;
+using Pims.Core.Api.Middleware;
+using Pims.Core.Converters;
+using Pims.Core.Http;
+using Pims.Core.Json;
+using Pims.Core.Security;
+using Pims.Keycloak.Configuration;
+using Pims.Scheduler.Http.Configuration;
+using Pims.Scheduler.Policies;
+using Pims.Scheduler.Repositories.Pims;
+using Pims.Scheduler.Rescheduler;
+using Pims.Scheduler.Services;
+using Prometheus;
+using StackExchange.Redis;
+
+namespace Pims.Scheduler
+{
+ ///
+ /// Startup class, provides a way to start up the .NET Core scheduling application and configure it.
+ ///
+ [ExcludeFromCodeCoverage]
+ public class Startup
+ {
+ #region Properties
+
+ ///
+ /// get - The application configuration settings.
+ ///
+ public IConfiguration Configuration { get; }
+
+ ///
+ /// get/set - The environment settings for the application.
+ ///
+ public IWebHostEnvironment Environment { get; }
+ #endregion
+
+ #region Constructors
+
+ ///
+ /// Creates a new instance of a Startup class.
+ ///
+ ///
+ ///
+ public Startup(IConfiguration configuration, IWebHostEnvironment env)
+ {
+ this.Configuration = configuration;
+ this.Environment = env;
+ }
+ #endregion
+
+ #region Methods
+
+ ///
+ /// This method gets called by the runtime. Use this method to add services to the container.
+ ///
+ ///
+ public void ConfigureServices(IServiceCollection services)
+ {
+ services.AddScoped();
+ services.AddScoped();
+ services.AddSingleton();
+ services.AddScoped();
+ services.AddSingleton();
+
+ services.AddSerilogging(this.Configuration);
+ var jsonSerializerOptions = this.Configuration.GenerateJsonSerializerOptions();
+ services.Configure(options =>
+ {
+ options.ReferenceHandler = ReferenceHandler.IgnoreCycles;
+ options.DefaultIgnoreCondition = jsonSerializerOptions.DefaultIgnoreCondition;
+ options.PropertyNameCaseInsensitive = jsonSerializerOptions.PropertyNameCaseInsensitive;
+ options.PropertyNamingPolicy = jsonSerializerOptions.PropertyNamingPolicy;
+ options.WriteIndented = jsonSerializerOptions.WriteIndented;
+ options.Converters.Add(new JsonStringEnumMemberConverter());
+ options.Converters.Add(new Int32ToStringJsonConverter());
+ });
+ services.Configure(this.Configuration.GetSection("Keycloak"));
+ services.Configure(this.Configuration.GetSection("OpenIdConnect"));
+ services.Configure(this.Configuration.GetSection("Keycloak"));
+ services.Configure(this.Configuration.GetSection("Pims:Environment"));
+ services.AddOptions();
+ services.AddApiVersioning(options =>
+ {
+ options.ReportApiVersions = true;
+ options.AssumeDefaultVersionWhenUnspecified = true;
+ options.ApiVersionReader = new HeaderApiVersionReader("api-version");
+ });
+
+ services.AddControllers()
+ .AddJsonOptions(options =>
+ {
+ options.JsonSerializerOptions.ReferenceHandler = ReferenceHandler.IgnoreCycles;
+ options.JsonSerializerOptions.DefaultIgnoreCondition = jsonSerializerOptions.DefaultIgnoreCondition;
+ options.JsonSerializerOptions.PropertyNameCaseInsensitive = jsonSerializerOptions.PropertyNameCaseInsensitive;
+ options.JsonSerializerOptions.PropertyNamingPolicy = jsonSerializerOptions.PropertyNamingPolicy;
+ options.JsonSerializerOptions.WriteIndented = jsonSerializerOptions.WriteIndented;
+ options.JsonSerializerOptions.Converters.Add(new JsonStringEnumConverter());
+ options.JsonSerializerOptions.Converters.Add(new Int32ToStringJsonConverter());
+ options.JsonSerializerOptions.Converters.Add(new DateOnlyJsonConverter());
+ });
+
+ services.AddMvcCore()
+ .AddJsonOptions(options =>
+ {
+ options.JsonSerializerOptions.DefaultIgnoreCondition = jsonSerializerOptions.DefaultIgnoreCondition;
+ options.JsonSerializerOptions.PropertyNameCaseInsensitive = jsonSerializerOptions.PropertyNameCaseInsensitive;
+ options.JsonSerializerOptions.PropertyNamingPolicy = jsonSerializerOptions.PropertyNamingPolicy;
+ options.JsonSerializerOptions.WriteIndented = jsonSerializerOptions.WriteIndented;
+ options.JsonSerializerOptions.Converters.Add(new JsonStringEnumConverter());
+ options.JsonSerializerOptions.Converters.Add(new Int32ToStringJsonConverter());
+ options.JsonSerializerOptions.Converters.Add(new DateOnlyJsonConverter());
+ });
+
+ services.AddAuthentication(options =>
+ {
+ options.DefaultAuthenticateScheme = JwtBearerDefaults.AuthenticationScheme;
+ options.DefaultChallengeScheme = JwtBearerDefaults.AuthenticationScheme;
+ })
+ .AddJwtBearer(options =>
+ {
+ var key = Encoding.ASCII.GetBytes(Configuration["Keycloak:Secret"]);
+ options.RequireHttpsMetadata = false;
+ options.Authority = Configuration["OpenIdConnect:Authority"];
+ options.Audience = Configuration["Keycloak:Audience"];
+ options.SaveToken = true;
+ options.TokenValidationParameters = new Microsoft.IdentityModel.Tokens.TokenValidationParameters()
+ {
+ ValidateIssuerSigningKey = true,
+ ValidateIssuer = false,
+ ValidateAudience = false,
+ ValidAlgorithms = new List() { "RS256" },
+ };
+ if (key.Length > 0)
+ {
+ options.TokenValidationParameters.IssuerSigningKey = new SymmetricSecurityKey(key);
+ }
+
+ options.Events = new JwtBearerEvents()
+ {
+ OnTokenValidated = context =>
+ {
+ return Task.CompletedTask;
+ },
+ OnAuthenticationFailed = context =>
+ {
+ context.NoResult();
+ context.Response.StatusCode = StatusCodes.Status401Unauthorized;
+ throw new AuthenticationException("Failed to authenticate", context.Exception);
+ },
+ OnForbidden = context =>
+ {
+ return Task.CompletedTask;
+ },
+ };
+ });
+
+ services.AddHttpClient();
+ services.AddTransient();
+ services.AddHttpClient("Pims.Api.Logging").AddHttpMessageHandler();
+ services.AddHttpContextAccessor();
+
+ services.AddTransient<ClaimsPrincipal>(s => s.GetService<IHttpContextAccessor>()?.HttpContext?.User);
+ services.AddResponseCaching();
+ services.AddMemoryCache();
+
+ // Export metrics from all HTTP clients registered in services
+ services.UseHttpClientMetrics();
+
+ services.AddHealthChecks().ForwardToPrometheus();
+
+ services.Configure<OpenApiInfo>(Configuration.GetSection(nameof(OpenApiInfo)));
+ services.AddSwaggerGen(options =>
+ {
+ options.EnableAnnotations(false, true);
+ options.CustomSchemaIds(o => o.FullName);
+ options.OperationFilter();
+ options.DocumentFilter();
+ options.AddSecurityDefinition("Bearer", new OpenApiSecurityScheme
+ {
+ Name = "Authorization",
+ In = ParameterLocation.Header,
+ Description = "Please enter into field the word 'Bearer' following by space and JWT",
+ Type = SecuritySchemeType.ApiKey,
+ });
+ options.AddSecurityRequirement(new OpenApiSecurityRequirement()
+ {
+ {
+ new OpenApiSecurityScheme
+ {
+ Reference = new OpenApiReference
+ {
+ Type = ReferenceType.SecurityScheme,
+ Id = "Bearer",
+ },
+ Scheme = "oauth2",
+ Name = "Bearer",
+ In = ParameterLocation.Header,
+ },
+ new List<string>()
+ },
+ });
+
+ var xmlFile = $"{Assembly.GetExecutingAssembly().GetName().Name}.xml";
+ var xmlPath = Path.Combine(AppContext.BaseDirectory, xmlFile);
+ options.IncludeXmlComments(xmlPath);
+ });
+
+ ConnectionMultiplexer redisConnection = ConnectionMultiplexer.Connect(Configuration.GetConnectionString("Redis"));
+ services.AddHangfire(options =>
+ {
+ options.UseSimpleAssemblyNameTypeSerializer().UseRecommendedSerializerSettings().UseRedisStorage(redisConnection).UseSerilogLogProvider().UseConsole();
+ });
+ services.AddHangfireServer();
+ services.AddHangfireConsoleExtensions();
+
+ services.Configure(options =>
+ {
+ options.ForwardedHeaders = ForwardedHeaders.All;
+ options.AllowedHosts = this.Configuration.GetValue<string>("AllowedHosts")?.Split(';').ToList();
+ });
+ }
+
+ /// <summary>
+ /// This method gets called by the runtime. Use this method to configure the HTTP request pipeline.
+ /// </summary>
+ /// <param name="app"></param>
+ /// <param name="env"></param>
+ public void Configure(IApplicationBuilder app, IWebHostEnvironment env)
+ {
+ app.UseMetricServer();
+ app.UseHttpMetrics();
+
+ if (!env.IsProduction())
+ {
+ app.UseDeveloperExceptionPage();
+ }
+
+ var baseUrl = this.Configuration.GetValue<string>("BaseUrl");
+ app.UsePathBase(baseUrl);
+ app.UseForwardedHeaders();
+
+ app.UseMiddleware();
+ app.UseMiddleware();
+ app.UseMiddleware();
+
+ app.UseRouting();
+ app.UseCors();
+
+ // Exception handler middleware that changes HTTP response codes must be registered after UseHttpMetrics()
+ // in order to ensure that prometheus-net reports the correct HTTP response status code.
+ app.UseHttpMetrics();
+
+ // Set responses secure headers.
+ ConfigureSecureHeaders(app);
+
+ app.UseResponseCaching();
+
+ app.UseAuthentication();
+ app.UseAuthorization();
+
+ app.UseHangfireDashboard(options: new DashboardOptions
+ {
+ Authorization = new[] { new HangfireDashboardAuthorizationFilter(app.ApplicationServices.GetRequiredService>(), Permissions.SystemAdmin) },
+ });
+
+ var healthPort = this.Configuration.GetValue<int>("HealthChecks:Port");
+ app.UseHealthChecks(this.Configuration.GetValue<string>("HealthChecks:LivePath"), healthPort, new HealthCheckOptions
+ {
+ Predicate = r => r.Name.Contains("SqlServer"),
+ ResponseWriter = UIResponseWriter.WriteHealthCheckUIResponse,
+ });
+ app.UseHealthChecks(this.Configuration.GetValue<string>("HealthChecks:ReadyPath"), healthPort, new HealthCheckOptions
+ {
+ Predicate = r => r.Tags.Contains("services") && !r.Tags.Contains("external"),
+ ResponseWriter = UIResponseWriter.WriteHealthCheckUIResponse,
+ });
+
+ app.UseEndpoints(config =>
+ {
+ config.MapControllers();
+ config.MapHangfireDashboard();
+
+ // Enable the /metrics page to export Prometheus metrics
+ config.MapMetrics();
+ });
+
+ ScheduleHangfireJobs(app.ApplicationServices);
+ }
+
+ private void ScheduleHangfireJobs(IServiceProvider services)
+ {
+ // provide default definition of all jobs.
+ RecurringJob.AddOrUpdate(nameof(DocumentQueueService.UploadQueuedDocuments), x => x.UploadQueuedDocuments(), Cron.Hourly);
+
+ // override scheduled jobs with configuration.
+ JobScheduleOptions jobOptions = this.Configuration.GetSection("JobOptions").Get<JobScheduleOptions>();
+ services.GetService().LoadSchedules(jobOptions);
+ }
+
+ /// <summary>
+ /// Configures the app to use content security policies.
+ /// </summary>
+ /// <param name="app">The application builder provider.</param>
+ private static void ConfigureSecureHeaders(IApplicationBuilder app)
+ {
+ app.Use(
+ async (context, next) =>
+ {
+ context.Response.Headers.Add("Strict-Transport-Security", "max-age=86400; includeSubDomains");
+ context.Response.Headers.Add("X-Content-Type-Options", "nosniff");
+ context.Response.Headers.Add("X-XSS-Protection", "1");
+ context.Response.Headers.Add("X-Frame-Options", " DENY");
+ await next().ConfigureAwait(true);
+ });
+ }
+ #endregion
+ }
+}
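
A note on the job-scheduling wiring above: the options type bound from the "JobOptions" section and the LoadSchedules call that overrides the hourly default are referenced by Startup but not included in this diff. The following is a minimal sketch of how those pieces could fit together, assuming hypothetical JobScheduleOptions/JobSchedule shapes that mirror the "JobOptions" section of the scheduler appsettings.json further down, and an assumed IDocumentQueueService behind the queued-document job; none of these names are taken from the repo.

using System.Collections.Generic;
using Hangfire;

// Assumed service interface for the queued-document job; the diff only shows the job name.
public interface IDocumentQueueService
{
    void UploadQueuedDocuments();
}

// Assumed options shape mirroring "JobOptions": { "Schedules": [ { "JobId", "IsEnabled", "Cron" } ] }.
public class JobScheduleOptions
{
    public IList<JobSchedule> Schedules { get; set; } = new List<JobSchedule>();
}

public class JobSchedule
{
    public string JobId { get; set; }
    public bool IsEnabled { get; set; }
    public string Cron { get; set; }
}

public class JobSchedulerService
{
    // Re-register enabled jobs with the configured cron and drop disabled ones,
    // overriding the default hourly schedule registered in ScheduleHangfireJobs.
    public void LoadSchedules(JobScheduleOptions options)
    {
        foreach (var schedule in options?.Schedules ?? new List<JobSchedule>())
        {
            if (!schedule.IsEnabled)
            {
                RecurringJob.RemoveIfExists(schedule.JobId);
            }
            else if (schedule.JobId == nameof(IDocumentQueueService.UploadQueuedDocuments))
            {
                RecurringJob.AddOrUpdate<IDocumentQueueService>(schedule.JobId, x => x.UploadQueuedDocuments(), schedule.Cron);
            }
        }
    }
}
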
diff --git a/source/backend/scheduler/appsettings.Development.json b/source/backend/scheduler/appsettings.Development.json
new file mode 100644
index 0000000000..7b96433f86
--- /dev/null
+++ b/source/backend/scheduler/appsettings.Development.json
@@ -0,0 +1,38 @@
+{
+ "Serilog": {
+ "MinimumLevel": {
+ "Default": "Debug",
+ "Override": {
+ "Microsoft": "Warning",
+ "System": "Warning",
+ "Microsoft.Hosting.Lifetime": "Information",
+ "System.Net.Http.HttpClient": "Error",
+ "HealthChecks.UI": "Error"
+ }
+ }
+ },
+ "Pims": {
+ "Environment": {
+ "Uri": "http://pims-api-dev:8080",
+ "Name": "Development"
+ },
+ "Notifications": {
+ "ThrowExceptions": true
+ }
+ },
+ "OpenIdConnect": {
+ "Authority": "https://dev.loginproxy.gov.bc.ca/auth/realms/standard"
+ },
+ "Keycloak": {
+ "Authority": "https://dev.loginproxy.gov.bc.ca/auth/realms/standard",
+ "Audience": "property-services-project-api-dev-only-4700",
+ "Client": "property-services-project-api-dev-only-4700",
+ "ServiceAccount": {
+ "Environment": "dev",
+ "Integration": "4699"
+ }
+ },
+ "ConnectionStrings": {
+ "Redis": "scheduler-redis:6379"
+ }
+}
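
For orientation only (standard ASP.NET Core behaviour, not something introduced by this diff): environment-specific files like the one above are layered over the base appsettings.json by the default host builder according to ASPNETCORE_ENVIRONMENT, so each file only needs to declare the keys it overrides. Roughly the equivalent configuration pipeline, spelled out explicitly:

using System;
using Microsoft.Extensions.Configuration;

// Approximation of the configuration stack the default host builder sets up;
// later sources override earlier ones key-by-key.
var environment = Environment.GetEnvironmentVariable("ASPNETCORE_ENVIRONMENT") ?? "Production";
var configuration = new ConfigurationBuilder()
    .AddJsonFile("appsettings.json", optional: false, reloadOnChange: true)
    .AddJsonFile($"appsettings.{environment}.json", optional: true, reloadOnChange: true)
    .AddEnvironmentVariables()
    .Build();

// With ASPNETCORE_ENVIRONMENT=Development, "ConnectionStrings:Redis" resolves to
// "scheduler-redis:6379" from the file above.
var redis = configuration.GetConnectionString("Redis");
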
diff --git a/source/backend/scheduler/appsettings.Docker.json b/source/backend/scheduler/appsettings.Docker.json
new file mode 100644
index 0000000000..b14ef17c6e
--- /dev/null
+++ b/source/backend/scheduler/appsettings.Docker.json
@@ -0,0 +1,51 @@
+{
+ "HealthChecks": {
+ "Port": "5004"
+ },
+ "Logging": {
+ "LogLevel": {
+ "Default": "Debug",
+ "Microsoft": "Warning",
+ "Microsoft.Hosting.Lifetime": "Information",
+ "System.Net.Http.HttpClient": "Error",
+ "HealthChecks.UI": "Error",
+ "Pims.Api.Handlers": "Trace"
+ }
+ },
+ "Serilog": {
+ "MinimumLevel": {
+ "Default": "Debug",
+ "Override": {
+ "Microsoft": "Warning",
+ "System": "Warning",
+ "Microsoft.Hosting.Lifetime": "Information",
+ "System.Net.Http.HttpClient": "Error",
+ "HealthChecks.UI": "Error"
+ }
+ }
+ },
+ "Pims": {
+ "Environment": {
+ "Uri": "http://pims-api:8080",
+ "Name": "Local"
+ },
+ "Notifications": {
+ "ThrowExceptions": true
+ }
+ },
+ "OpenIdConnect": {
+ "Authority": "https://dev.loginproxy.gov.bc.ca/auth/realms/standard"
+ },
+ "Keycloak": {
+ "Authority": "https://dev.loginproxy.gov.bc.ca/auth/realms/standard",
+ "Audience": "property-services-project-api-dev-only-4700",
+ "Client": "property-services-project-api-dev-only-4700",
+ "ServiceAccount": {
+ "Environment": "dev",
+ "Integration": "4699"
+ }
+ },
+ "ConnectionStrings": {
+ "Redis": "scheduler-redis:6379"
+ }
+}
diff --git a/source/backend/scheduler/appsettings.Local.json b/source/backend/scheduler/appsettings.Local.json
new file mode 100644
index 0000000000..eb5a828660
--- /dev/null
+++ b/source/backend/scheduler/appsettings.Local.json
@@ -0,0 +1,51 @@
+{
+ "HealthChecks": {
+ "Port": "5004"
+ },
+ "Logging": {
+ "LogLevel": {
+ "Default": "Debug",
+ "Microsoft": "Warning",
+ "Microsoft.Hosting.Lifetime": "Information",
+ "System.Net.Http.HttpClient": "Error",
+ "HealthChecks.UI": "Error",
+ "Pims.Api.Handlers": "Trace"
+ }
+ },
+ "Serilog": {
+ "MinimumLevel": {
+ "Default": "Debug",
+ "Override": {
+ "Microsoft": "Warning",
+ "System": "Warning",
+ "Microsoft.Hosting.Lifetime": "Information",
+ "System.Net.Http.HttpClient": "Error",
+ "HealthChecks.UI": "Error"
+ }
+ }
+ },
+ "Pims": {
+ "Environment": {
+ "Uri": "http://localhost:5000",
+ "Name": "Local"
+ },
+ "Notifications": {
+ "ThrowExceptions": true
+ }
+ },
+ "OpenIdConnect": {
+ "Authority": "https://dev.loginproxy.gov.bc.ca/auth/realms/standard"
+ },
+ "Keycloak": {
+ "Authority": "https://dev.loginproxy.gov.bc.ca/auth/realms/standard",
+ "Audience": "property-services-project-api-dev-only-4700",
+ "Client": "property-services-project-api-dev-only-4700",
+ "ServiceAccount": {
+ "Environment": "dev",
+ "Integration": "4699"
+ }
+ },
+ "ConnectionStrings": {
+ "Redis": "localhost:6379"
+ }
+}
diff --git a/source/backend/scheduler/appsettings.Test.json b/source/backend/scheduler/appsettings.Test.json
new file mode 100644
index 0000000000..736444393b
--- /dev/null
+++ b/source/backend/scheduler/appsettings.Test.json
@@ -0,0 +1,29 @@
+{
+ "Serilog": {
+ "MinimumLevel": {
+ "Default": "Information",
+ "Override": {
+ "Microsoft": "Warning",
+ "Microsoft.Hosting.Lifetime": "Information"
+ }
+ }
+ },
+ "OpenIdConnect": {
+ "Authority": "https://dev.loginproxy.gov.bc.ca/auth/realms/standard"
+ },
+ "Keycloak": {
+ "Authority": "https://dev.loginproxy.gov.bc.ca/auth/realms/standard",
+ "ServiceAccount": {
+ "Environment": "dev"
+ }
+ },
+ "Pims": {
+ "Environment": {
+ "Uri": "http://pims-api-test:8080",
+ "Name": "Testing"
+ }
+ },
+ "ConnectionStrings": {
+ "Redis": "scheduler-redis:6379"
+ }
+}
diff --git a/source/backend/scheduler/appsettings.Uat.json b/source/backend/scheduler/appsettings.Uat.json
new file mode 100644
index 0000000000..279ae8c1a7
--- /dev/null
+++ b/source/backend/scheduler/appsettings.Uat.json
@@ -0,0 +1,29 @@
+{
+ "Serilog": {
+ "MinimumLevel": {
+ "Default": "Information",
+ "Override": {
+ "Microsoft": "Warning",
+ "Microsoft.Hosting.Lifetime": "Information"
+ }
+ }
+ },
+ "OpenIdConnect": {
+ "Authority": "https://test.loginproxy.gov.bc.ca/auth/realms/standard"
+ },
+ "Keycloak": {
+ "Authority": "https://test.loginproxy.gov.bc.ca/auth/realms/standard",
+ "ServiceAccount": {
+ "Environment": "test"
+ }
+ },
+ "Pims": {
+ "Environment": {
+ "Uri": "http://pims-api-uat:8080",
+ "Name": "Testing"
+ }
+ },
+ "ConnectionStrings": {
+ "Redis": "scheduler-redis:6379"
+ }
+}
diff --git a/source/backend/scheduler/appsettings.json b/source/backend/scheduler/appsettings.json
new file mode 100644
index 0000000000..8bf86b8d30
--- /dev/null
+++ b/source/backend/scheduler/appsettings.json
@@ -0,0 +1,132 @@
+{
+ "BaseUrl": "/scheduler",
+ "HealthChecks": {
+ "Port": "8080",
+ "LivePath": "/health/live",
+ "ReadyPath": "/health/ready",
+ "ApiMetrics": {
+ "Enabled": true,
+ "Period": 1
+ }
+ },
+ "Swagger": {
+ "RoutePrefix": "scheduler-docs",
+ "RouteTemplate": "/scheduler-docs/swagger/{documentname}/swagger.json",
+ "EndpointPath": "/scheduler-docs/swagger/{0}/swagger.json"
+ },
+ "https_port": 443,
+ "Serilog": {
+ "MinimumLevel": {
+ "Default": "Information",
+ "Override": {
+ "Microsoft": "Warning",
+ "Microsoft.Hosting.Lifetime": "Information",
+ "System.Net.Http.HttpClient": "Error",
+ "HealthChecks.UI": "Error"
+ }
+ },
+ "WriteTo": [
+ {
+ "Name": "Hangfire",
+ "Args": {
+ "restrictedToMinimumLevel": "Debug"
+ }
+ }
+ ],
+ "Enrich": [
+ "FromLogContext",
+ "WithMachineName",
+ "WithThreadId",
+ "WithHangfireContext"
+ ]
+ },
+ "Hangfire": {
+ "Dashboard": {
+ "AppPath": "/",
+ "StatsPollingInterval": 2000
+ },
+ "Server": {
+ "HeartbeatInterval": "00:00:30",
+ "Queues": [ "default" ],
+ "SchedulePollingInterval": "00:00:15",
+ "ServerCheckInterval": "00:05:00",
+ "ServerName": null,
+ "ServerTimeout": "00:05:00",
+ "ShutdownTimeout": "00:00:15",
+ "WorkerCount": 20
+ }
+ },
+ "JobOptions": {
+ "Schedules": [
+ {
+ "JobId": "UploadQueuedDocuments",
+ "IsEnabled": true,
+ "Cron": "*/5 * * * *"
+ }
+ ]
+ },
+ "AllowedHosts": "*",
+ "ContentSecurityPolicy": {
+ "Base": "'none'",
+ "DefaultSource": "'none'",
+ "ScriptSource": "'none'",
+ "ConnectSource": "'none'",
+ "ImageSource": "'none'",
+ "StyleSource": "'none'",
+ "FormAction": "'none'",
+ "FontSource": "'none'",
+ "FrameSource": "'none'",
+ "FrameAncestors": "'none'"
+ },
+ "Keycloak": {
+ "Authority": "https://loginproxy.gov.bc.ca/auth/realms/standard",
+ "Audience": "property-services-project-api-4380",
+ "Secret": "[USE SECRETS]",
+ "Client": "property-services-project-api-4380",
+ "ServiceAccount": {
+ "Authority": "https://loginproxy.gov.bc.ca/auth/realms/standard",
+ "Audience": "service-account-team-1295-4381",
+ "Secret": "[USE SECRETS]",
+ "Client": "service-account-team-1295-4381",
+ "API": "https://api.loginproxy.gov.bc.ca/api/v1",
+ "Integration": "4379",
+ "Environment": "prod"
+ }
+ },
+ "OpenIdConnect": {
+ "Authority": "https://loginproxy.gov.bc.ca/auth/realms/standard",
+ "Login": "/protocol/openid-connect/auth",
+ "Logout": "/protocol/openid-connect/logout",
+ "Register": "/protocol/openid-connect/registrations",
+ "Token": "/protocol/openid-connect/token",
+ "TokenIntrospect": "/protocol/openid-connect/token/introspect",
+ "UserInfo": "/protocol/openid-connect/userinfo"
+ },
+ "OpenApiInfo": {
+ "Description": "PSP Scheduler service",
+ "Title": "PIMS scheduler",
+ "Contact": {
+ "Email": "support@pims.gov.bc.ca",
+ "Name": "Support"
+ },
+ "License": {
+ "Name": "APACHE",
+ "Url": "https://github.com/bcgov/PSP/blob/dev/LICENSE"
+ }
+ },
+ "Serialization": {
+ "Json": {
+ "PropertyNameCaseInsensitive": true,
+ "PropertyNamingPolicy": "CamelCase",
+ "IgnoreNullValues": true
+ }
+ },
+ "Pims": {
+ "Environment": {
+ "Uri": "http://pims-api-prod:8080"
+ }
+ },
+ "ConnectionStrings": {
+ "Redis": "scheduler-redis:6379"
+ }
+}
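
The Startup class earlier in this diff calls Configuration.GenerateJsonSerializerOptions(), a helper defined elsewhere in the repo and not shown here. Going only by the "Serialization:Json" keys above, a purely illustrative stand-in might look like the sketch below; the mapping of "IgnoreNullValues" to DefaultIgnoreCondition and of "CamelCase" to JsonNamingPolicy.CamelCase is an assumption, not the repo's actual implementation.

using System.Text.Json;
using System.Text.Json.Serialization;
using Microsoft.Extensions.Configuration;

public static class SerializationConfigSketch
{
    // Illustrative only: builds System.Text.Json options from the "Serialization:Json" section.
    public static JsonSerializerOptions BuildJsonOptions(IConfiguration configuration)
    {
        var section = configuration.GetSection("Serialization:Json");
        return new JsonSerializerOptions
        {
            PropertyNameCaseInsensitive = section.GetValue<bool>("PropertyNameCaseInsensitive"),
            PropertyNamingPolicy = section.GetValue<string>("PropertyNamingPolicy") == "CamelCase" ? JsonNamingPolicy.CamelCase : null,
            DefaultIgnoreCondition = section.GetValue<bool>("IgnoreNullValues") ? JsonIgnoreCondition.WhenWritingNull : JsonIgnoreCondition.Never,
        };
    }
}
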
diff --git a/source/backend/scheduler/omnisharp.json b/source/backend/scheduler/omnisharp.json
new file mode 100644
index 0000000000..8982c08dc5
--- /dev/null
+++ b/source/backend/scheduler/omnisharp.json
@@ -0,0 +1,5 @@
+{
+ "RoslynExtensionsOptions": {
+ "enableAnalyzersSupport": true
+ }
+}
diff --git a/source/backend/scheduler/tests/.editorconfig b/source/backend/scheduler/tests/.editorconfig
new file mode 100644
index 0000000000..3c1051119d
--- /dev/null
+++ b/source/backend/scheduler/tests/.editorconfig
@@ -0,0 +1,72 @@
+# Editor configuration, see https://editorconfig.org
+root = true
+
+[*]
+end_of_line = lf
+charset = utf-8
+indent_style = space
+insert_final_newline = true
+trim_trailing_whitespace = true
+
+[*.ts]
+indent_size = 2
+
+[*.md]
+max_line_length = off
+trim_trailing_whitespace = false
+
+[*.env]
+insert_final_newline = false
+
+[{Makefile,**.mk}]
+# Use tabs for indentation (Makefiles require tabs)
+indent_style = tab
+
+[*.cs]
+indent_size = 4
+
+
+# Test files
+# SA1515 Single-line comment should be preceded by blank line
+dotnet_diagnostic.SA1515.severity = none
+# SA1513: Closing brace should be followed by blank line
+dotnet_diagnostic.SA1513.severity = none
+# SA1633 The file header is missing or not located at the top of the file
+dotnet_diagnostic.SA1633.severity = none
+# SA1200 Using directive should appear within a namespace declaration
+dotnet_diagnostic.SA1200.severity = none
+# SA1124 Do not use regions
+dotnet_diagnostic.SA1124.severity = none
+# SA1201 A constructor should not follow a property
+dotnet_diagnostic.SA1201.severity = none
+# SA1309 Field 'X' should not begin with an underscore
+dotnet_diagnostic.SA1309.severity = none
+# SA1117 The parameters should all be placed on the same line or each parameter should be placed on its own line.
+dotnet_diagnostic.SA1117.severity = none
+
+# -- Set to 'error' before running formatter
+# dotnet format --severity error --exclude entities/ef/** --exclude entities/PimsBaseContext.cs
+# SA1208: Using directive for X should appear before directive for Y
+dotnet_diagnostic.SA1208.severity = warning
+# SA1121 Use built-in type alias
+dotnet_diagnostic.SA1121.severity = warning
+# SA1413 Use trailing comma in multi-line initializers.
+dotnet_diagnostic.SA1413.severity = warning
+# SA1122 Use string.Empty for empty strings
+dotnet_diagnostic.SA1122.severity = warning
+# SA1518 Code should not contain blank lines at the end of the file.
+dotnet_diagnostic.SA1518.severity = warning
+# SA1101 Prefix local calls with this
+dotnet_diagnostic.SA1101.severity = warning
+# SA1507 Code should not contain multiple blank lines in a row
+dotnet_diagnostic.SA1507.severity = warning
+# SA1127 Generic type constraints should be on their own line
+dotnet_diagnostic.SA1127.severity = warning
+# SA1002 Semicolons should be followed by a space.
+dotnet_diagnostic.SA1002.severity = warning
+# SA1009 Closing parenthesis should not be preceded by a space.
+dotnet_diagnostic.SA1009.severity = warning
+# SA1508 A closing brace should not be preceded by a blank line
+dotnet_diagnostic.SA1508.severity = warning
+# SA1005 Single line comment should begin with a space.
+dotnet_diagnostic.SA1005.severity = warning
\ No newline at end of file
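
As a small illustration of the StyleCop rules left at warning above (and of what the dotnet format run mentioned in the comment would change): SA1101 requires instance members to be accessed through this. The snippet is illustrative, not taken from the repo.

public class SchedulerExample
{
    private int counter;

    // Flagged by SA1101: instance members accessed without "this.".
    public int NextBefore() => Bump(counter);

    // Compliant after the formatter applies the SA1101 fix.
    public int NextAfter() => this.Bump(this.counter);

    private int Bump(int value) => value + 1;
}
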
diff --git a/source/backend/scheduler/tests/.gitignore b/source/backend/scheduler/tests/.gitignore
new file mode 100644
index 0000000000..5923334c0e
--- /dev/null
+++ b/source/backend/scheduler/tests/.gitignore
@@ -0,0 +1,49 @@
+# Environment variables
+.env
+# Build
+.obj
+
+*.swp
+*.*~
+project.lock.json
+.DS_Store
+*.pyc
+nupkg/
+
+# IDE - VSCode
+.vscode/*
+!.vscode/settings.json
+!.vscode/tasks.json
+!.vscode/launch.json
+!.vscode/extensions.json
+
+# Rider
+.idea
+
+# User-specific files
+*.suo
+*.user
+*.userosscache
+*.sln.docstates
+
+# Build results
+[Dd]ebug/
+[Dd]ebugPublic/
+[Rr]elease/
+[Rr]eleases/
+x64/
+x86/
+build/
+bld/
+[Bb]in/
+[Oo]bj/
+[Oo]ut/
+msbuild.log
+msbuild.err
+msbuild.wrn
+
+# Visual Studio 2015
+.vs/
+
+# SonarQube
+.sonarqube/
diff --git a/source/backend/scheduler/tests/Directory.Build.props b/source/backend/scheduler/tests/Directory.Build.props
new file mode 100644
index 0000000000..4357d5e289
--- /dev/null
+++ b/source/backend/scheduler/tests/Directory.Build.props
@@ -0,0 +1,9 @@
+
+
+ net8.0
+ 9.0
+
+
+
+
+
diff --git a/source/backend/tests/core/Entities/AcquisitionFileHelper.cs b/source/backend/tests/core/Entities/AcquisitionFileHelper.cs
index cf999a514b..6975e85891 100644
--- a/source/backend/tests/core/Entities/AcquisitionFileHelper.cs
+++ b/source/backend/tests/core/Entities/AcquisitionFileHelper.cs
@@ -30,7 +30,6 @@ public static Entity.PimsAcquisitionFile CreateAcquisitionFile(long? acqFileId =
acquisitionFile.AcquisitionTypeCodeNavigation = acquisitionType ?? new Entity.PimsAcquisitionType() { Id = "SECTN3", DbCreateUserid = "test", DbLastUpdateUserid = "test", Description = "test" };
acquisitionFile.RegionCodeNavigation = region ?? new Entity.PimsRegion("Northern") { RegionCode = 1, ConcurrencyControlNumber = 1, DbCreateUserid = "test", DbLastUpdateUserid = "test" };
acquisitionFile.RegionCode = acquisitionFile.RegionCodeNavigation.RegionCode;
- acquisitionFile.FileNumber = acquisitionFile.FileNumberFormatted; // TODO: Remove this once FILE_NUMBER column is removed from schema
return acquisitionFile;
}
diff --git a/source/backend/tests/unit/api/Services/AcquisitionFileServiceTest.cs b/source/backend/tests/unit/api/Services/AcquisitionFileServiceTest.cs
index e93f7aeb68..5369e25725 100644
--- a/source/backend/tests/unit/api/Services/AcquisitionFileServiceTest.cs
+++ b/source/backend/tests/unit/api/Services/AcquisitionFileServiceTest.cs
@@ -1369,7 +1369,7 @@ public void UpdateProperties_MatchProperties_Success()
userRepository.Setup(x => x.GetUserInfoByKeycloakUserId(It.IsAny())).Returns(EntityHelper.CreateUser(1, Guid.NewGuid(), "Test", regionCode: 1));
var propertyService = this._helper.GetService>();
- propertyService.Setup(x => x.UpdateLocation(It.IsAny(), ref It.Ref.IsAny, It.IsAny>()));
+ propertyService.Setup(x => x.UpdateLocation(It.IsAny(), ref It.Ref.IsAny, It.IsAny>(), false));
var solver = this._helper.GetService>();
solver.Setup(x => x.CanEditProperties(It.IsAny())).Returns(true);
@@ -1380,8 +1380,48 @@ public void UpdateProperties_MatchProperties_Success()
// Assert
filePropertyRepository.Verify(x => x.GetPropertiesByAcquisitionFileId(It.IsAny()), Times.Once);
filePropertyRepository.Verify(x => x.Update(It.IsAny()), Times.Once);
- propertyService.Verify(x => x.UpdateLocation(It.IsAny(), ref It.Ref.IsAny, It.IsAny>()), Times.Once);
+ propertyService.Verify(x => x.UpdateLocation(It.IsAny(), ref It.Ref.IsAny, It.IsAny>(), false), Times.Once);
propertyService.Verify(x => x.UpdateFilePropertyLocation(It.IsAny(), It.IsAny()), Times.Once);
+ }
+
+ [Fact]
+ public void UpdateProperties_MatchProperties_Success_NoInternalId()
+ {
+ // Arrange
+ var service = this.CreateAcquisitionServiceWithPermissions(Permissions.AcquisitionFileEdit, Permissions.PropertyAdd, Permissions.PropertyView);
+
+ var acqFile = EntityHelper.CreateAcquisitionFile();
+ acqFile.ConcurrencyControlNumber = 1;
+
+ var property = EntityHelper.CreateProperty(1, regionCode: 1);
+ acqFile.PimsPropertyAcquisitionFiles = new List<PimsPropertyAcquisitionFile>() { new PimsPropertyAcquisitionFile() { Internal_Id = 0, Property = property, PropertyId = 1 } };
+ var propertyAcquisitionFiles = new List<PimsPropertyAcquisitionFile>() { new PimsPropertyAcquisitionFile() { Internal_Id = 1, Property = property, PropertyId = 1 } };
+
+ var repository = this._helper.GetService>();
+ repository.Setup(x => x.GetRowVersion(It.IsAny())).Returns(1);
+ repository.Setup(x => x.GetById(It.IsAny())).Returns(acqFile);
+
+ var propertyRepository = this._helper.GetService>();
+ propertyRepository.Setup(x => x.GetByPid(It.IsAny(), true)).Returns(property);
+ propertyRepository.Setup(x => x.GetPropertyRegion(It.IsAny())).Returns(1);
+
+ var filePropertyRepository = this._helper.GetService>();
+ filePropertyRepository.Setup(x => x.GetPropertiesByAcquisitionFileId(It.IsAny())).Returns(propertyAcquisitionFiles);
+
+ var userRepository = this._helper.GetService>();
+ userRepository.Setup(x => x.GetUserInfoByKeycloakUserId(It.IsAny())).Returns(EntityHelper.CreateUser(1, Guid.NewGuid(), "Test", regionCode: 1));
+
+ var propertyService = this._helper.GetService>();
+ propertyService.Setup(x => x.UpdateLocation(It.IsAny(), ref It.Ref.IsAny, It.IsAny>(), false));
+
+ var solver = this._helper.GetService>();
+ solver.Setup(x => x.CanEditProperties(It.IsAny())).Returns(true);
+
+ // Act
+ var response = service.UpdateProperties(acqFile, new List<UserOverrideCode>() { UserOverrideCode.AddLocationToProperty });
+
+ // Assert
+ var updatedProperty = response.PimsPropertyAcquisitionFiles.FirstOrDefault().Internal_Id.Should().Be(1);
}
@@ -1848,7 +1888,7 @@ public void UpdateProperties_WithProperty_SelectedForCompensation_Should_Fail()
userRepository.Setup(x => x.GetUserInfoByKeycloakUserId(It.IsAny())).Returns(EntityHelper.CreateUser(1, Guid.NewGuid(), "Test", regionCode: 1));
var propertyService = this._helper.GetService>();
- propertyService.Setup(x => x.UpdateLocation(It.IsAny(), ref It.Ref.IsAny, It.IsAny>()));
+ propertyService.Setup(x => x.UpdateLocation(It.IsAny(), ref It.Ref.IsAny, It.IsAny>(), false));
var solver = this._helper.GetService>();
solver.Setup(x => x.CanEditProperties(It.IsAny())).Returns(true);
diff --git a/source/backend/tests/unit/api/Services/DispositionFileServiceTest.cs b/source/backend/tests/unit/api/Services/DispositionFileServiceTest.cs
index b51dcb1ab8..3672d5bf98 100644
--- a/source/backend/tests/unit/api/Services/DispositionFileServiceTest.cs
+++ b/source/backend/tests/unit/api/Services/DispositionFileServiceTest.cs
@@ -866,6 +866,7 @@ public void Update_Success()
repository.Verify(x => x.Update(It.IsAny(), It.IsAny()), Times.Once);
}
+
[Fact]
public void Update_Success_FinalButAdmin()
{
@@ -987,7 +988,7 @@ public void UpdateProperties_MatchProperties_Success()
userRepository.Setup(x => x.GetUserInfoByKeycloakUserId(It.IsAny())).Returns(EntityHelper.CreateUser(1, Guid.NewGuid(), "Test", regionCode: 1));
var propertyService = this._helper.GetService>();
- propertyService.Setup(x => x.UpdateLocation(It.IsAny(), ref It.Ref.IsAny, It.IsAny>()));
+ propertyService.Setup(x => x.UpdateLocation(It.IsAny(), ref It.Ref.IsAny, It.IsAny>(), false));
propertyService.Setup(x => x.UpdateFilePropertyLocation(It.IsAny(), It.IsAny()));
// Act
@@ -996,10 +997,48 @@ public void UpdateProperties_MatchProperties_Success()
// Assert
filePropertyRepository.Verify(x => x.GetPropertiesByDispositionFileId(It.IsAny()), Times.Once);
filePropertyRepository.Verify(x => x.Update(It.IsAny()), Times.Once);
- propertyService.Verify(x => x.UpdateLocation(It.IsAny(), ref It.Ref.IsAny, It.IsAny>()), Times.Once);
+ propertyService.Verify(x => x.UpdateLocation(It.IsAny(), ref It.Ref.IsAny, It.IsAny>(), false), Times.Once);
propertyService.Verify(x => x.UpdateFilePropertyLocation(It.IsAny(), It.IsAny()), Times.Once);
}
+ [Fact]
+ public void UpdateProperties_MatchProperties_Success_NoInternalId()
+ {
+ // Arrange
+ var service = this.CreateDispositionServiceWithPermissions(Permissions.DispositionEdit, Permissions.PropertyAdd, Permissions.PropertyView);
+
+ var dspFile = EntityHelper.CreateDispositionFile();
+ dspFile.ConcurrencyControlNumber = 1;
+
+ var property = EntityHelper.CreateProperty(1, regionCode: 1);
+ dspFile.PimsDispositionFileProperties = new List<PimsDispositionFileProperty>() { new PimsDispositionFileProperty() { Internal_Id = 0, Property = property, PropertyId = 1 } };
+ var dispositionFileProperties = new List<PimsDispositionFileProperty>() { new PimsDispositionFileProperty() { Internal_Id = 1, Property = property, PropertyId = 1 } };
+
+ var repository = this._helper.GetService>();
+ repository.Setup(x => x.GetRowVersion(It.IsAny())).Returns(1);
+ repository.Setup(x => x.GetById(It.IsAny())).Returns(dspFile);
+
+ var propertyRepository = this._helper.GetService>();
+ propertyRepository.Setup(x => x.GetByPid(It.IsAny(), true)).Returns(property);
+ propertyRepository.Setup(x => x.GetPropertyRegion(It.IsAny())).Returns(1);
+
+ var filePropertyRepository = this._helper.GetService>();
+ filePropertyRepository.Setup(x => x.GetPropertiesByDispositionFileId(It.IsAny())).Returns(dispositionFileProperties);
+
+ var userRepository = this._helper.GetService>();
+ userRepository.Setup(x => x.GetUserInfoByKeycloakUserId(It.IsAny())).Returns(EntityHelper.CreateUser(1, Guid.NewGuid(), "Test", regionCode: 1));
+
+ var propertyService = this._helper.GetService>();
+ propertyService.Setup(x => x.UpdateLocation(It.IsAny(), ref It.Ref.IsAny, It.IsAny>(), false));
+ propertyService.Setup(x => x.UpdateFilePropertyLocation(It.IsAny(), It.IsAny()));
+
+ // Act
+ var updatedDispositionFile = service.UpdateProperties(dspFile, new List<UserOverrideCode>() { UserOverrideCode.AddLocationToProperty });
+
+ // Assert
+ var updatedProperty = updatedDispositionFile.PimsDispositionFileProperties.FirstOrDefault().Internal_Id.Should().Be(1);
+ }
+
[Fact]
public void UpdateProperties_MatchProperties_NewProperty_UserOverride()
{
diff --git a/source/backend/tests/unit/api/Services/LeaseServiceTest.cs b/source/backend/tests/unit/api/Services/LeaseServiceTest.cs
index 206eedb102..fccfdbaf4e 100644
--- a/source/backend/tests/unit/api/Services/LeaseServiceTest.cs
+++ b/source/backend/tests/unit/api/Services/LeaseServiceTest.cs
@@ -184,6 +184,63 @@ public void Add_WithRetiredProperty_Should_Fail()
propertyService.Verify(x => x.PopulateNewFileProperty(It.IsAny()), Times.Never);
}
+ [Fact]
+ public void Add_WithDisposedProperty_Should_Fail()
+ {
+ // Arrange
+ var lease = EntityHelper.CreateLease(1);
+ lease.RegionCode = 1;
+ var user = EntityHelper.CreateUser("Test");
+ user.PimsRegionUsers.Add(new PimsRegionUser() { RegionCode = lease.RegionCode.Value });
+
+ PimsProperty newProperty = new PimsProperty()
+ {
+ PropertyId = 100,
+ Pid = 1000,
+ };
+
+ PimsProperty disposedProperty = new PimsProperty()
+ {
+ PropertyId = 100,
+ Pid = 1000,
+ PimsDispositionFileProperties = new List<PimsDispositionFileProperty>()
+ {
+ new PimsDispositionFileProperty()
+ {
+ DispositionFile = new PimsDispositionFile()
+ {
+ DispositionFileId = 1,
+ DispositionFileStatusTypeCode = DispositionFileStatusTypes.COMPLETE.ToString(),
+ },
+ },
+ },
+ };
+
+ var service = this.CreateLeaseService(Permissions.LeaseAdd);
+
+ var leaseRepository = this._helper.GetService>();
+ leaseRepository.Setup(x => x.Add(It.IsAny())).Returns(lease);
+
+ var propertyRepository = this._helper.GetService>();
+ propertyRepository.Setup(x => x.GetByPid(It.IsAny(), true)).Returns(newProperty);
+ propertyRepository.Setup(x => x.GetAllAssociationsById(It.IsAny())).Returns(disposedProperty);
+
+ var userRepository = this._helper.GetService>();
+ userRepository.Setup(x => x.GetByKeycloakUserId(It.IsAny())).Returns(user);
+
+ var propertyService = this._helper.GetService>();
+
+ // Act
+ Action act = () => service.Add(lease, new List<UserOverrideCode>());
+
+ // Assert
+ var ex = act.Should().Throw();
+ ex.WithMessage("Disposed or retired properties may not be added to a Lease. Remove any disposed or retired properties before continuing.");
+
+ leaseRepository.Verify(x => x.Add(It.IsAny()), Times.Never);
+ propertyService.Verify(x => x.PopulateNewFileProperty(It.IsAny()), Times.Never);
+ }
+
#endregion
#region Properties
@@ -453,18 +510,18 @@ public void UpdateProperties_Success()
userRepository.Setup(x => x.GetByKeycloakUserId(It.IsAny())).Returns(EntityHelper.CreateUser("Test"));
var propertyService = this._helper.GetService>();
- propertyService.Setup(x => x.UpdateLocation(It.IsAny(), ref It.Ref.IsAny, It.IsAny>()));
+ propertyService.Setup(x => x.UpdateLocation(It.IsAny(), ref It.Ref.IsAny, It.IsAny>(), false));
// Act
+ var updatedLease = service.Update(lease, new List<UserOverrideCode>() { UserOverrideCode.AddLocationToProperty });
// Assert
leaseRepository.Verify(x => x.Update(lease, false), Times.Once);
- propertyService.Verify(x => x.UpdateLocation(It.IsAny(), ref It.Ref.IsAny, It.IsAny>()), Times.Once);
+ propertyService.Verify(x => x.UpdateLocation(It.IsAny(), ref It.Ref.IsAny, It.IsAny>(), false), Times.Once);
}
[Fact]
- public void UpdateProperties_WithRetiredProperty_Should_Fail()
+ public void UpdateProperties_WithDisposedProperty_Should_Fail()
{
// Arrange
var lease = EntityHelper.CreateLease(1);
@@ -479,10 +536,114 @@ public void UpdateProperties_WithRetiredProperty_Should_Fail()
{
PropertyId = 100,
Pid = 1,
- IsRetired = true,
+ };
+
+ PimsProperty disposedProperty = new PimsProperty()
+ {
+ PropertyId = 100,
+ Pid = 1000,
+ PimsDispositionFileProperties = new List<PimsDispositionFileProperty>()
+ {
+ new PimsDispositionFileProperty()
+ {
+ DispositionFile = new PimsDispositionFile()
+ {
+ DispositionFileId = 1,
+ DispositionFileStatusTypeCode = DispositionFileStatusTypes.COMPLETE.ToString(),
+ },
+ },
+ },
};
propertyRepository.Setup(x => x.GetByPid(It.IsAny(), true)).Returns(property);
+ propertyRepository.Setup(x => x.GetAllAssociationsById(It.IsAny())).Returns(disposedProperty);
+ leaseRepository.Setup(x => x.GetNoTracking(It.IsAny())).Returns(lease);
+ leaseRepository.Setup(x => x.Get(It.IsAny())).Returns(EntityHelper.CreateLease(1));
+ userRepository.Setup(x => x.GetByKeycloakUserId(It.IsAny())).Returns(EntityHelper.CreateUser("Test"));
+
+ // Act
+ Action act = () => service.Update(lease, new List<UserOverrideCode>() { UserOverrideCode.AddLocationToProperty });
+
+ // Assert
+ var ex = act.Should().Throw();
+ ex.WithMessage("Disposed or retired properties may not be added to a Lease. Remove any disposed or retired properties before continuing.");
+ }
+
+ [Fact]
+ public void UpdateProperties_WithExistingDisposedProperty_Should_Pass()
+ {
+ // Arrange
+ var lease = EntityHelper.CreateLease(1);
+
+ var service = this.CreateLeaseService(Permissions.LeaseEdit, Permissions.PropertyAdd, Permissions.PropertyView);
+ var leaseRepository = this._helper.GetService>();
+ var propertyLeaseRepository = this._helper.GetService>();
+ var propertyRepository = this._helper.GetService>();
+ var userRepository = this._helper.GetService>();
+
+ PimsProperty property = new PimsProperty()
+ {
+ PropertyId = 100,
+ Pid = 1,
+ };
+
+ PimsProperty disposedProperty = new PimsProperty()
+ {
+ PropertyId = 100,
+ Pid = 1000,
+ PimsDispositionFileProperties = new List<PimsDispositionFileProperty>()
+ {
+ new PimsDispositionFileProperty()
+ {
+ DispositionFile = new PimsDispositionFile()
+ {
+ DispositionFileId = 1,
+ DispositionFileStatusTypeCode = DispositionFileStatusTypes.COMPLETE.ToString(),
+ },
+ },
+ },
+ };
+ PimsPropertyLease propertyLease = new PimsPropertyLease()
+ {
+ Property = disposedProperty,
+ LeaseId = lease.LeaseId
+ };
+
+ propertyLeaseRepository.Setup(x => x.GetAllByPropertyId(It.IsAny())).Returns(new List<PimsPropertyLease>() { propertyLease });
+ propertyRepository.Setup(x => x.GetByPid(It.IsAny(), true)).Returns(property);
+ propertyRepository.Setup(x => x.GetAllAssociationsById(It.IsAny())).Returns(disposedProperty);
+ leaseRepository.Setup(x => x.GetNoTracking(It.IsAny())).Returns(lease);
+ leaseRepository.Setup(x => x.Get(It.IsAny())).Returns(EntityHelper.CreateLease(1));
+ userRepository.Setup(x => x.GetByKeycloakUserId(It.IsAny())).Returns(EntityHelper.CreateUser("Test"));
+
+ // Act
+ Action act = () => service.Update(lease, new List<UserOverrideCode>() { UserOverrideCode.AddLocationToProperty });
+
+ // Assert
+ var ex = act.Should().NotThrow();
+ leaseRepository.Verify(x => x.Update(lease, false), Times.Once);
+ }
+
+ [Fact]
+ public void UpdateProperties_WithRetiredProperty_Should_Fail()
+ {
+ // Arrange
+ var lease = EntityHelper.CreateLease(1);
+
+ var service = this.CreateLeaseService(Permissions.LeaseEdit, Permissions.PropertyAdd, Permissions.PropertyView);
+ var leaseRepository = this._helper.GetService>();
+ var propertyLeaseRepository = this._helper.GetService>();
+ var propertyRepository = this._helper.GetService>();
+ var userRepository = this._helper.GetService>();
+
+ PimsProperty retiredProperty = new PimsProperty()
+ {
+ PropertyId = 100,
+ Pid = 1,
+ IsRetired = true,
+ };
+
+ propertyRepository.Setup(x => x.GetByPid(It.IsAny(), true)).Returns(retiredProperty);
leaseRepository.Setup(x => x.GetNoTracking(It.IsAny())).Returns(lease);
leaseRepository.Setup(x => x.Get(It.IsAny())).Returns(EntityHelper.CreateLease(1));
userRepository.Setup(x => x.GetByKeycloakUserId(It.IsAny())).Returns(EntityHelper.CreateUser("Test"));
@@ -513,14 +674,50 @@ public void UpdateProperties_MatchProperties_Success()
userRepository.Setup(x => x.GetByKeycloakUserId(It.IsAny())).Returns(EntityHelper.CreateUser("Test"));
var propertyService = this._helper.GetService>();
- propertyService.Setup(x => x.UpdateLocation(It.IsAny(), ref It.Ref.IsAny, It.IsAny