From b82d1b1b75d88e2ce87c1252915aebbbb5ad8c66 Mon Sep 17 00:00:00 2001 From: sax Date: Tue, 28 May 2024 17:08:58 +0200 Subject: [PATCH 01/57] updates CI --- .github/actions/hash/action.yml | 19 ++++++ .github/workflows/ci.yml | 107 +++++++------------------------- .github/workflows/debug.yml | 43 +++++++++++++ .github/workflows/test.yml | 62 ++++++++++++++++++ .gitignore | 3 +- 5 files changed, 148 insertions(+), 86 deletions(-) create mode 100644 .github/actions/hash/action.yml create mode 100644 .github/workflows/debug.yml create mode 100644 .github/workflows/test.yml diff --git a/.github/actions/hash/action.yml b/.github/actions/hash/action.yml new file mode 100644 index 00000000..85a6aee8 --- /dev/null +++ b/.github/actions/hash/action.yml @@ -0,0 +1,19 @@ +# ref: https://docs.github.com/en/actions/creating-actions/creating-a-docker-container-action +name: 'Calculate version hash' +description: 'Calculate deps and os hash' +inputs: + files: + description: 'Files to use to calculate the hash' + required: true + default: "pdm.lock docker/bin/* docker/conf/* docker/Dockerfile" +outputs: + hash: # id of output + description: 'The time we greeted you' +runs: + using: 'composite' +# args: +# - ${{ inputs.files }} + steps: + - run: | + LOCK_SHA=$(echo sha1sum ${{ inputs.files }} | sha1sum | awk '{print $1}' | cut -c 1-8) + echo "hash=$LOCK_SHA" >> "$GITHUB_OUTPUT" diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index f643f682..76f396d9 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -8,7 +8,7 @@ on: branches: - develop env: - HASH_SEEDS: pdm.lock docker/bin/* docker/conf/* docker/Dockerfile +# HASH_SEEDS: pdm.lock docker/bin/* docker/conf/* docker/Dockerfile DOCKER_CACHE_IMAGE: ${{ vars.DOCKERHUB_ORGANIZATION }}/hope-support-images BUILD_DATE: $(date +"%Y-%m-%d %H:%M") DOCKER_DEFAULT_PLATFORM: linux/amd64 @@ -17,43 +17,43 @@ jobs: build: runs-on: ubuntu-20.04 + outputs: + docker-image: ${{ steps.image_name.outputs.name }} 
steps: - name: Checkout code uses: actions/checkout@v4 - - name: DockerHub login - uses: docker/login-action@v3 + + - uses: ./.github/actions/hash + id: release_hash + + - name: Image name + id: image_name + run: | + image_name="$DOCKER_CACHE_IMAGE:hde-dev-${{ steps.release_hash.outputs.hash }}" + image_name_latest="$DOCKER_CACHE_IMAGE:hde-dev-latest" + echo "name=$image_name" >> $GITHUB_OUTPUT + echo "latest=$image_name_latest" >> $GITHUB_OUTPUT + - name: Check if image exists + uses: ./.github/actions/image_exists + id: image_exists with: + image: ${{ steps.image_name.outputs.name }} username: ${{ secrets.DOCKERHUB_USERNAME }} password: ${{ secrets.DOCKERHUB_TOKEN }} - - name: Check Dev Image - continue-on-error: true - id: image_exists - run: | - set +e - LOCK_SHA=$(echo sha1sum ${{env.HASH_SEEDS}}| sha1sum | awk '{print $1}' | cut -c 1-8) - exists=$(docker manifest inspect ${DOCKER_CACHE_IMAGE}:hde-dev-${LOCK_SHA} > /dev/null 2>&1 && echo "exists" || echo "not_found") - exists="exists" - echo "result=$exists" >> "$GITHUB_OUTPUT" - echo "SHA=$LOCK_SHA" >> "$GITHUB_OUTPUT" - if [ $exists = "exists" ];then - echo "Tag ${{ steps.image_exists.outputs.SHA }} found" - else - echo "Tag ${{ steps.image_exists.outputs.SHA }} does not exist" - fi - name: Build Dev Image - if: ${{ steps.image_exists.outputs.result == 'not_found' || contains(github.event.head_commit.message, 'rebuild') }} + if: ${{ !steps.image_exists.outputs.exists || contains(github.event.head_commit.message, 'rebuild') }} + id: docker_build run: | - LOCK_SHA=${{ steps.image_exists.outputs.SHA }} docker buildx create --use --platform x86_64 --name builder --driver docker-container - docker buildx build \ + docker buildx build \ --platform x86_64 \ --builder builder \ --build-arg BUILD_DATE="${BUILD_DATE}" \ --progress=plain \ --cache-to type=local,ref=${DOCKER_CACHE_IMAGE}:hde-dev-latest,dest=./.AAA \ --cache-from ${DOCKER_CACHE_IMAGE}:hde-dev-latest \ - -t ${DOCKER_CACHE_IMAGE}:hde-dev-${LOCK_SHA} \ - -t 
${DOCKER_CACHE_IMAGE}:hde-dev-latest \ + -t ${{ !steps.image_name.outputs.name }} \ + -t ${{ !steps.image_name.outputs.latest }} \ -f ./docker/Dockerfile \ --push \ --target python_dev_deps . @@ -73,64 +73,3 @@ jobs: run: black --check src/ - name: Flake8 run: flake8 src/ - - test: - runs-on: ubuntu-20.04 - needs: [build] - container: - image: unicef/hope-support-images:hde-dev-latest - credentials: - username: ${{ secrets.DOCKERHUB_USERNAME }} - password: ${{ secrets.DOCKERHUB_TOKEN }} - ports: - - 8000:8000 - services: - redis: - image: redis - db: - image: postgres:14 - env: - POSTGRES_DATABASE: dedupe - POSTGRES_PASSWORD: postgres - POSTGRES_USERNAME: postgres - options: >- - --health-cmd pg_isready - --health-interval 10s - --health-timeout 5s - --health-retries 5 - env: - DATABASE_URL: postgres://postgres:postgres@db:5432/dedupe - SECRET_KEY: secret_key - CACHE_URL: redis://redis:6379/0 - CELERY_BROKER_URL: redis://redis:6379/0 - PYTHONPATH: "/hde/code/src:/hde/__pypackages__/3.12/lib" - steps: - - name: Checkout code - uses: actions/checkout@v2 - - name: Run tests - run: | - pytest tests -# echo "===================================" -# ls -al -# docker run --rm \ -# -e PYTHONPATH=/hde/code/src:/hde/__pypackages__/3.12/lib \ -# -e CACHE_URL="${CACHE_URL}" \ -# -e DATABASE_URL="${DATABASE_URL}" \ -# -e SECRET_KEY="${SECRET_KEY}" \ -# -e CELERY_BROKER_URL="${CELERY_BROKER_URL}" \ -# -v ${PWD}:/hde/code/ \ -# -w /hde/code/ \ -# -t ${DOCKER_CACHE_IMAGE}:hde-dev-latest \ -# pytest tests/ --create-db -v --cov --cov-report xml:coverage.xml -# -# - name: Upload coverage to Codecov -# uses: codecov/codecov-action@v4 -# with: -# directory: ./coverage/reports/ -# env_vars: OS,PYTHON -# fail_ci_if_error: true -# files: /hde/code/coverage1.xml -# flags: unittests -# name: codecov-umbrella -# token: ${{ secrets.CODECOV_TOKEN }} -# verbose: true \ No newline at end of file diff --git a/.github/workflows/debug.yml b/.github/workflows/debug.yml new file mode 100644 index 
00000000..339f73fe --- /dev/null +++ b/.github/workflows/debug.yml @@ -0,0 +1,43 @@ +name: CI + +on: always + +env: + DOCKER_DEFAULT_PLATFORM: linux/amd64 + DOCKER_CACHE_IMAGE: ${{ vars.DOCKERHUB_ORGANIZATION }}/hope-support-images + +jobs: + + debug: + runs-on: ubuntu-20.04 + if: ${{ github.event.act }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + + - name: Calculate hash + uses: ./.github/actions/hash + id: release_hash + + - name: Image name + id: image_name + run: | + image_name="$DOCKER_CACHE_IMAGE:hde-dev-${{ steps.release_hash.outputs.hash }}" + image_name_latest="$DOCKER_CACHE_IMAGE:hde-dev-latest" + echo "name=$image_name" >> $GITHUB_OUTPUT + echo "latest=$image_name_latest" >> $GITHUB_OUTPUT + + - name: Check if image exists + uses: ./.github/actions/image_exists + id: image_exists + with: + username: ${{ secrets.DOCKERHUB_USERNAME }} + password: ${{ secrets.DOCKERHUB_TOKEN }} + image: ${{ vars.DOCKERHUB_ORGANIZATION }}/hope-support-images + - name: Dump info + run: | + echo "=======================================================" + echo "Name : ${{steps.image_name.outputs.name}}" + echo "Hash : ${{steps.release_hash.outputs.hash}}" + echo "Exists: ${{steps.image_exists.outputs.exists}}" + echo "=======================================================" diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml new file mode 100644 index 00000000..9f0178a2 --- /dev/null +++ b/.github/workflows/test.yml @@ -0,0 +1,62 @@ +name: CI + +on: + push: + branches: + - develop + pull_request: + branches: + - develop + workflow_run: + workflows: [ci] + types: + - completed + +jobs: + + test: + runs-on: ubuntu-20.04 + container: + image: unicef/hope-support-images:hde-dev-latest + credentials: + username: ${{ secrets.DOCKERHUB_USERNAME }} + password: ${{ secrets.DOCKERHUB_TOKEN }} + ports: + - 8000:8000 + services: + redis: + image: redis + db: + image: postgres:14 + env: + POSTGRES_DATABASE: dedupe + POSTGRES_PASSWORD: postgres + 
POSTGRES_USERNAME: postgres + options: >- + --health-cmd pg_isready + --health-interval 10s + --health-timeout 5s + --health-retries 5 + env: + DATABASE_URL: postgres://postgres:postgres@db:5432/dedupe + SECRET_KEY: secret_key + CACHE_URL: redis://redis:6379/0 + CELERY_BROKER_URL: redis://redis:6379/0 + PYTHONPATH: "/hde/code/src:/hde/__pypackages__/3.12/lib" + steps: + - name: Checkout code + uses: actions/checkout@v2 + - name: Run tests + run: | + pytest tests --create-db -v --cov --cov-report xml:coverage.xml + - name: Upload coverage to Codecov + uses: codecov/codecov-action@v4 + with: + directory: ./coverage/reports/ + env_vars: OS,PYTHON + fail_ci_if_error: true + files: /hde/code/coverage1.xml + flags: unittests + name: codecov-umbrella + token: ${{ secrets.CODECOV_TOKEN }} + verbose: true \ No newline at end of file diff --git a/.gitignore b/.gitignore index e72af819..72e9d96d 100644 --- a/.gitignore +++ b/.gitignore @@ -8,10 +8,9 @@ __pycache__/ !.dockerignore !.flake8 !.gitignore -!.gitlab/ +!.github/* !.tx/config !.mypy.ini -!.gitlab-ci.yml !.pre-commit-config.yaml !.bumpversion.cfg !.trivyignore From e0ab1c7f61aea108a7f23f39e9ec6c117a3731b8 Mon Sep 17 00:00:00 2001 From: sax Date: Tue, 28 May 2024 17:10:48 +0200 Subject: [PATCH 02/57] updates CI --- .github/workflows/test.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 9f0178a2..b2307f2c 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -1,4 +1,4 @@ -name: CI +name: Test on: push: From 1fd5e8762fbbc9b01426428480f57b46706c1c16 Mon Sep 17 00:00:00 2001 From: sax Date: Tue, 28 May 2024 17:11:39 +0200 Subject: [PATCH 03/57] updates CI --- .github/workflows/debug.yml | 43 ------------------------------------- 1 file changed, 43 deletions(-) delete mode 100644 .github/workflows/debug.yml diff --git a/.github/workflows/debug.yml b/.github/workflows/debug.yml deleted file mode 100644 index 
339f73fe..00000000 --- a/.github/workflows/debug.yml +++ /dev/null @@ -1,43 +0,0 @@ -name: CI - -on: always - -env: - DOCKER_DEFAULT_PLATFORM: linux/amd64 - DOCKER_CACHE_IMAGE: ${{ vars.DOCKERHUB_ORGANIZATION }}/hope-support-images - -jobs: - - debug: - runs-on: ubuntu-20.04 - if: ${{ github.event.act }} - steps: - - name: Checkout code - uses: actions/checkout@v4 - - - name: Calculate hash - uses: ./.github/actions/hash - id: release_hash - - - name: Image name - id: image_name - run: | - image_name="$DOCKER_CACHE_IMAGE:hde-dev-${{ steps.release_hash.outputs.hash }}" - image_name_latest="$DOCKER_CACHE_IMAGE:hde-dev-latest" - echo "name=$image_name" >> $GITHUB_OUTPUT - echo "latest=$image_name_latest" >> $GITHUB_OUTPUT - - - name: Check if image exists - uses: ./.github/actions/image_exists - id: image_exists - with: - username: ${{ secrets.DOCKERHUB_USERNAME }} - password: ${{ secrets.DOCKERHUB_TOKEN }} - image: ${{ vars.DOCKERHUB_ORGANIZATION }}/hope-support-images - - name: Dump info - run: | - echo "=======================================================" - echo "Name : ${{steps.image_name.outputs.name}}" - echo "Hash : ${{steps.release_hash.outputs.hash}}" - echo "Exists: ${{steps.image_exists.outputs.exists}}" - echo "=======================================================" From 3d9a12224df26c68d423f6642cfa2d3235a462fe Mon Sep 17 00:00:00 2001 From: sax Date: Tue, 28 May 2024 17:15:09 +0200 Subject: [PATCH 04/57] updates CI --- .github/workflows/test.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index b2307f2c..f7fa27e9 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -59,4 +59,4 @@ jobs: flags: unittests name: codecov-umbrella token: ${{ secrets.CODECOV_TOKEN }} - verbose: true \ No newline at end of file + verbose: true From 63f2f2296ded5157b107cae620fb59cfa3a7aa38 Mon Sep 17 00:00:00 2001 From: sax Date: Tue, 28 May 2024 17:17:36 +0200 Subject: [PATCH 
05/57] updates CI --- .github/workflows/ci.yml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 76f396d9..ee4aeb7f 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -8,9 +8,7 @@ on: branches: - develop env: -# HASH_SEEDS: pdm.lock docker/bin/* docker/conf/* docker/Dockerfile DOCKER_CACHE_IMAGE: ${{ vars.DOCKERHUB_ORGANIZATION }}/hope-support-images - BUILD_DATE: $(date +"%Y-%m-%d %H:%M") DOCKER_DEFAULT_PLATFORM: linux/amd64 jobs: @@ -44,6 +42,7 @@ jobs: if: ${{ !steps.image_exists.outputs.exists || contains(github.event.head_commit.message, 'rebuild') }} id: docker_build run: | + BUILD_DATE=$(date +"%Y-%m-%d %H:%M") docker buildx create --use --platform x86_64 --name builder --driver docker-container docker buildx build \ --platform x86_64 \ From b2da4af888eb285ed2918b942cfb22a70ed16f48 Mon Sep 17 00:00:00 2001 From: sax Date: Tue, 28 May 2024 17:36:57 +0200 Subject: [PATCH 06/57] updates CI --- .github/workflows/ci.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index ee4aeb7f..23c014f2 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -51,8 +51,8 @@ jobs: --progress=plain \ --cache-to type=local,ref=${DOCKER_CACHE_IMAGE}:hde-dev-latest,dest=./.AAA \ --cache-from ${DOCKER_CACHE_IMAGE}:hde-dev-latest \ - -t ${{ !steps.image_name.outputs.name }} \ - -t ${{ !steps.image_name.outputs.latest }} \ + -t ${{ steps.image_name.outputs.name }} \ + -t ${{ steps.image_name.outputs.latest }} \ -f ./docker/Dockerfile \ --push \ --target python_dev_deps . 
From 86bb8cd06e657588966518b8c2a89b074f5fdbe2 Mon Sep 17 00:00:00 2001 From: sax Date: Tue, 28 May 2024 17:40:26 +0200 Subject: [PATCH 07/57] updates CI --- .github/actions/hash/action.yml | 4 ++- .github/actions/image_exists/action.yml | 40 +++++++++++++++++++++++++ 2 files changed, 43 insertions(+), 1 deletion(-) create mode 100644 .github/actions/image_exists/action.yml diff --git a/.github/actions/hash/action.yml b/.github/actions/hash/action.yml index 85a6aee8..9f7d031f 100644 --- a/.github/actions/hash/action.yml +++ b/.github/actions/hash/action.yml @@ -14,6 +14,8 @@ runs: # args: # - ${{ inputs.files }} steps: - - run: | + - name: Calculate release hash + shell: bash --noprofile --norc -eo pipefail -ux {0} + run: | LOCK_SHA=$(echo sha1sum ${{ inputs.files }} | sha1sum | awk '{print $1}' | cut -c 1-8) echo "hash=$LOCK_SHA" >> "$GITHUB_OUTPUT" diff --git a/.github/actions/image_exists/action.yml b/.github/actions/image_exists/action.yml new file mode 100644 index 00000000..ee671b65 --- /dev/null +++ b/.github/actions/image_exists/action.yml @@ -0,0 +1,40 @@ +# ref: https://docs.github.com/en/actions/creating-actions/creating-a-docker-container-action +name: 'Che cif image exists in dockerhub' +description: 'Calculate deps and os hash' +inputs: + image: + description: 'Docker Image ' + required: true + username: + required: true + password: + required: true + +outputs: + exists: + description: 'The time we greeted you' + type: bool + +runs: + using: 'composite' +# args: +# - ${{ inputs.files }} + steps: + - name: DockerHub login + uses: docker/login-action@v3 + with: + username: ${{ inputs.username }} + password: ${{ inputs.password }} + - name: Check Image Exists + continue-on-error: true + shell: bash --noprofile --norc -eo pipefail -ux {0} + run: | + set +e + exists=$(docker manifest inspect ${{inputs.image}} > /dev/null 2>&1 && echo "exists" || echo "not_found") + if [ $exists = "exists" ];then + echo "exists=true" >> "$GITHUB_OUTPUT" + echo "Image 
${{inputs.image}} found" + else + echo "exists=false" >> "$GITHUB_OUTPUT" + echo "Image ${{inputs.image}} does not exist" + fi \ No newline at end of file From 3913d4819544739ab6813f5679104eadd511528b Mon Sep 17 00:00:00 2001 From: sax Date: Tue, 28 May 2024 17:43:31 +0200 Subject: [PATCH 08/57] updates CI --- .github/actions/hash/action.yml | 3 +++ .github/actions/image_exists/action.yml | 5 ++++- 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/.github/actions/hash/action.yml b/.github/actions/hash/action.yml index 9f7d031f..00da682f 100644 --- a/.github/actions/hash/action.yml +++ b/.github/actions/hash/action.yml @@ -9,12 +9,15 @@ inputs: outputs: hash: # id of output description: 'The time we greeted you' + value: ${{ steps.calc.outputs.hash }} + runs: using: 'composite' # args: # - ${{ inputs.files }} steps: - name: Calculate release hash + id: calc shell: bash --noprofile --norc -eo pipefail -ux {0} run: | LOCK_SHA=$(echo sha1sum ${{ inputs.files }} | sha1sum | awk '{print $1}' | cut -c 1-8) diff --git a/.github/actions/image_exists/action.yml b/.github/actions/image_exists/action.yml index ee671b65..9c0c528d 100644 --- a/.github/actions/image_exists/action.yml +++ b/.github/actions/image_exists/action.yml @@ -6,14 +6,16 @@ inputs: description: 'Docker Image ' required: true username: + description: 'DockerHub username ' required: true password: + description: 'DockerHub password ' required: true outputs: exists: description: 'The time we greeted you' - type: bool + value: ${{ steps.check.outputs.exixts }} runs: using: 'composite' @@ -26,6 +28,7 @@ runs: username: ${{ inputs.username }} password: ${{ inputs.password }} - name: Check Image Exists + id: check continue-on-error: true shell: bash --noprofile --norc -eo pipefail -ux {0} run: | From 1b6ce75446f5cb1c15e2d94d5dbf98bc7d431f06 Mon Sep 17 00:00:00 2001 From: sax Date: Tue, 28 May 2024 17:51:00 +0200 Subject: [PATCH 09/57] updates CI --- .github/workflows/test.yml | 22 +++++++++++----------- 
1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index f7fa27e9..44e92204 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -49,14 +49,14 @@ jobs: - name: Run tests run: | pytest tests --create-db -v --cov --cov-report xml:coverage.xml - - name: Upload coverage to Codecov - uses: codecov/codecov-action@v4 - with: - directory: ./coverage/reports/ - env_vars: OS,PYTHON - fail_ci_if_error: true - files: /hde/code/coverage1.xml - flags: unittests - name: codecov-umbrella - token: ${{ secrets.CODECOV_TOKEN }} - verbose: true +# - name: Upload coverage to Codecov +# uses: codecov/codecov-action@v4 +# with: +# directory: ./coverage/reports/ +# env_vars: OS,PYTHON +# fail_ci_if_error: true +# files: /hde/code/coverage1.xml +# flags: unittests +# name: codecov-umbrella +# token: ${{ secrets.CODECOV_TOKEN }} +# verbose: true From 327c80ce83d68b54cf917ded05a33689921ae6c9 Mon Sep 17 00:00:00 2001 From: sax Date: Tue, 11 Jun 2024 12:03:10 +0200 Subject: [PATCH 10/57] add ops workflows --- .bumpversion.cfg | 23 + .github/actions/image_exists/action.yml | 2 +- .github/file-filters.yml | 28 ++ .github/workflows/_build.yml | 136 ++++++ .github/workflows/add-reviewer.yml | 30 ++ .github/workflows/assign_to_project.yml | 24 + .github/workflows/ci.yml | 74 --- .github/workflows/delete_image.yml | 27 ++ .github/workflows/label-pullrequest.yml | 37 ++ .github/workflows/lint.yml | 101 +++++ .github/workflows/mypy.yml | 62 +++ .github/workflows/rc.ym_ | 6 + .github/workflows/test.ym_ | 84 ++++ .github/workflows/test.yml | 164 ++++--- .github/workflows/towncrier.yml | 21 + .gitignore | 32 +- .mypy.ini | 4 +- .pylintrc | 569 ++++++++++++++++++++++++ codecov.yml | 0 docker/Dockerfile | 264 +++++++---- docker/Makefile | 19 +- docker/bin/docker-entrypoint.sh | 97 ++-- docker/bin/release-info.sh | 5 +- docker/conf/circus.conf | 4 +- docker/conf/config.toml | 8 + docker/conf/uwsgi.ini | 8 +- 
docker/entrypoint.sh | 38 -- history/.gitignore | 0 pytest.ini | 12 +- 29 files changed, 1526 insertions(+), 353 deletions(-) create mode 100644 .bumpversion.cfg create mode 100644 .github/file-filters.yml create mode 100644 .github/workflows/_build.yml create mode 100644 .github/workflows/add-reviewer.yml create mode 100644 .github/workflows/assign_to_project.yml delete mode 100644 .github/workflows/ci.yml create mode 100644 .github/workflows/delete_image.yml create mode 100644 .github/workflows/label-pullrequest.yml create mode 100644 .github/workflows/lint.yml create mode 100644 .github/workflows/mypy.yml create mode 100644 .github/workflows/rc.ym_ create mode 100644 .github/workflows/test.ym_ create mode 100644 .github/workflows/towncrier.yml create mode 100644 .pylintrc create mode 100644 codecov.yml create mode 100644 docker/conf/config.toml delete mode 100755 docker/entrypoint.sh create mode 100644 history/.gitignore diff --git a/.bumpversion.cfg b/.bumpversion.cfg new file mode 100644 index 00000000..75e558fe --- /dev/null +++ b/.bumpversion.cfg @@ -0,0 +1,23 @@ +[bumpversion] +commit = False +tag = False +allow_dirty = True +tag_name = {new_version} +current_version = 1.3.1-rc3 +parse = ^ + (?P\d+)\.(?P\d+)\.(?P\d+) + (-?(?P(rc|final)) + (?P
\d+)  # pre-release version num
+	)?
+serialize = 
+	{major}.{minor}.{patch}-{prekind}{pre}
+	{major}.{minor}.{patch}
+
+[bumpversion:file:pyproject.toml]
+
+[bumpversion:part:prekind]
+optional_value = _
+values = 
+	_
+	rc
+	_
diff --git a/.github/actions/image_exists/action.yml b/.github/actions/image_exists/action.yml
index 9c0c528d..6457dfb3 100644
--- a/.github/actions/image_exists/action.yml
+++ b/.github/actions/image_exists/action.yml
@@ -40,4 +40,4 @@ runs:
         else
           echo "exists=false" >> "$GITHUB_OUTPUT"
           echo "Image ${{inputs.image}} does not exist"
-        fi
\ No newline at end of file
+        fi
diff --git a/.github/file-filters.yml b/.github/file-filters.yml
new file mode 100644
index 00000000..2a632551
--- /dev/null
+++ b/.github/file-filters.yml
@@ -0,0 +1,28 @@
+# This is used by the action https://github.com/dorny/paths-filter
+docker: &docker
+  - added|modified: './docker/**'
+
+dependencies: &dependencies
+  - 'pdm.lock'
+  - 'pyproject.toml'
+
+python: &python
+  - added|modified: 'src/**'
+  - added|modified: 'tests/**'
+  - 'manage.py'
+
+docker_base:
+  - *docker
+  - *dependencies
+
+run_tests:
+  -  *python
+  - 'pytest.ini'
+
+migrations:
+  - added|modified: 'src/**/migrations/*'
+
+lint:
+  -  *python
+  - '.flake8'
+  - 'pyproject.toml'
\ No newline at end of file
diff --git a/.github/workflows/_build.yml b/.github/workflows/_build.yml
new file mode 100644
index 00000000..c72a4957
--- /dev/null
+++ b/.github/workflows/_build.yml
@@ -0,0 +1,136 @@
+name: "[CALL] Build"
+
+on:
+  workflow_call:
+    inputs:
+      image:
+        description: "Image name to build and push"
+        required: true
+        type: string
+        default: false
+      cache-from:
+        description: "Image to use as build cache source"
+        required: false
+        type: string
+      checksum:
+        description: "Dependencies checksum used to decide whether a rebuild is needed"
+        required: false
+        type: string
+        default: false
+      target:
+        description: "Dockerfile stage to stop to"
+        required: true
+        type: string
+        default: "-"
+      force:
+        description: "Force rebuild even if the checksum matches"
+        required: false
+        type: boolean
+        default: false
+      dry_run:
+        description: "Dry run: evaluate the build without pushing the image"
+        required: false
+        type: boolean
+        default: false
+
+defaults:
+  run:
+    shell: bash
+
+jobs:
+  builder:
+    name: Build Image
+    runs-on: ubuntu-latest
+    outputs:
+      hash: ${{ steps.release_hash.outputs.hash }}
+      updated: ${{ steps.image_updated.outputs.exists }}
+      image: ${{ steps.setup.outputs.image }}
+      built: ${{ steps.release_hash.outputs.hash }}
+    steps:
+      - run: git config --global --add safe.directory "$GITHUB_WORKSPACE"
+      - name: Checkout code
+        uses: actions/checkout@v4
+        with:
+          fetch-depth: 0
+      - name: Calculate Release Hash
+        id: release_hash
+        uses: ./.github/actions/distro_hash
+      - id: image_updated
+        name: Check if image exists and updated
+        uses: ./.github/actions/image_updated
+        with:
+          image: ${{inputs.image}}
+          checksum: ${{ steps.release_hash.outputs.hash }}
+          username: ${{ secrets.username }}
+          password: ${{ secrets.password }}
+      - id: setup
+        name: setup
+        run: |
+          tag="${{ inputs.image }}"
+          build_date=$(date +"%Y-%m-%d %H:%M")
+          echo "date=$build_date" >> $GITHUB_OUTPUT
+          echo "image=${tag%:*}" >> $GITHUB_OUTPUT
+      - name: Setup Environment (PR)
+        if: ${{ github.event_name == 'pull_request' }}
+#        shell: bash
+        run: |
+          SHA=${{ github.event.pull_request.head.sha }}
+          echo "LAST_COMMIT_SHA=${SHA::7}" >> ${GITHUB_ENV}
+      - name: Setup Environment (Push)
+        if: ${{ github.event_name == 'push' }}
+#        shell: bash
+        run: |
+          echo "LAST_COMMIT_SHA=${GITHUB_SHA::7}" >> ${GITHUB_ENV}
+      - run: |
+          echo ""
+          echo date            : ${{steps.setup.outputs.date}}
+          echo image           : ${{steps.setup.outputs.image}}
+          echo tag             : ${{inputs.image}}
+          echo target          : ${{inputs.target}}
+          echo target-checksum : ${{inputs.checksum}}
+          echo docker-checksum : ${{steps.image_updated.outputs.checksum }}
+          echo image_exists    : ${{steps.image_updated.outputs.exists}}
+          echo image_updated   : ${{steps.image_updated.outputs.updated}}
+          echo build_number    : ${{steps.image_updated.outputs.build_number}}
+          echo build_date      : ${{steps.image_updated.outputs.build_date}}
+          echo force           : ${{inputs.force}}
+          echo build           : ${{steps.image_updated.outputs.updated != 'true' || inputs.force == 'true'}}
+          echo sha             : ${{env.LAST_COMMIT_SHA}}
+          echo "====================================="
+      - uses: docker/login-action@v3
+        with:
+          username: ${{ secrets.DOCKERHUB_USERNAME }}
+          password: ${{ secrets.DOCKERHUB_TOKEN }}
+      - name: Set up Docker Buildx
+        uses: docker/setup-buildx-action@v3
+        with:
+          platforms: linux/amd64
+          driver: docker-container
+          driver-opts: 'image=moby/buildkit:v0.13.2'
+      - name: Build and push
+        if: ${{ steps.image_updated.outputs.updated != 'true' || inputs.force == 'true' }}
+        uses: docker/build-push-action@v5
+        with:
+          context: .
+          tags: ${{ inputs.image }}
+          target: ${{ inputs.target }}
+          file: ./docker/Dockerfile
+          platforms: linux/amd64
+          outputs: type=registry
+          cache-from: "type=registry,ref=${{inputs.image}}-buildcache,ref=${{steps.setup.outputs.image}}:test-develop,mode=max"
+          cache-to: "type=registry,ref=${{inputs.image}}-buildcache,mode=max"
+          labels: BuildNumber=${{ steps.setup.outputs.build_number }},
+          build-args: |
+            BUILD_DATE=${{ steps.setup.outputs.date }}
+            CHECKSUM=${{ steps.release_hash.outputs.hash }}
+            VERSION=${{ steps.version.outputs.version }}
+            SOURCE_COMMIT=${{ env.LAST_COMMIT_SHA }}
+            GITHUB_SERVER_URL=${{ github.server_url }}
+            GITHUB_REPOSITORY=${{ github.repository }}
+      - name: Dump Image Info
+        run: |
+          echo "Pulling... ${{ inputs.image }}"
+          docker pull --platform linux/amd64 ${{ inputs.image }}
+          docker inspect --format='{{json .Config.Labels}}' ${{ inputs.image }}
+          docker run --platform linux/amd64 -t ${{ inputs.image }} release-info.sh
+          echo "----------"
diff --git a/.github/workflows/add-reviewer.yml b/.github/workflows/add-reviewer.yml
new file mode 100644
index 00000000..7b3834d7
--- /dev/null
+++ b/.github/workflows/add-reviewer.yml
@@ -0,0 +1,30 @@
+# Automatically requests reviewers on pull requests based on which files changed
+name: Adds Reviewers
+
+on:
+  pull_request:
+    types: [opened, synchronize, edited, ready_for_review]
+
+jobs:
+  add-reviewer:
+    steps:
+      - name: Check for file changes
+        uses: dorny/paths-filter@0bc4621a3135347011ad047f9ecf449bf72ce2bd # v3.0.0
+        id: changes
+        with:
+          token: ${{ github.token }}
+          filters: .github/file-filters.yml
+
+      - name: Add Pull Request Reviewer
+        if: steps.changes.outputs.migrations == 'true'
+        uses: AveryCameronUofR/add-reviewer-gh-action@1.0.3
+        with:
+          reviewers: "saxix"
+          token: ${{ secrets.GITHUB_TOKEN }}
+
+      - name: Add Pull Request Reviewer
+        if: steps.changes.outputs.dependencies == 'true'
+        uses: AveryCameronUofR/add-reviewer-gh-action@1.0.3
+        with:
+          reviewers: "saxix"
+          token: ${{ secrets.GITHUB_TOKEN }}
\ No newline at end of file
diff --git a/.github/workflows/assign_to_project.yml b/.github/workflows/assign_to_project.yml
new file mode 100644
index 00000000..803ad7bb
--- /dev/null
+++ b/.github/workflows/assign_to_project.yml
@@ -0,0 +1,24 @@
+name: Auto Assign to Project(s)
+
+on:
+  issues:
+    types: [opened, labeled]
+  pull_request:
+    types: [opened, labeled]
+  issue_comment:
+    types: [created]
+env:
+  GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+
+jobs:
+  assign_one_project:
+    if: vars.DEFAULT_PROJECT
+    runs-on: ubuntu-latest
+    name: Assign to Project
+    steps:
+    - name: Assign NEW issues and NEW pull requests to '${{ vars.DEFAULT_PROJECT }}'
+      uses: srggrs/assign-one-project-github-action@1.2.1
+      if: ${{ vars.DEFAULT_PROJECT }}
+      with:
+        project: ${{ vars.DEFAULT_PROJECT }}
+        column_name: 'New'
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
deleted file mode 100644
index 23c014f2..00000000
--- a/.github/workflows/ci.yml
+++ /dev/null
@@ -1,74 +0,0 @@
-name: CI
-
-on:
-  push:
-    branches:
-    - develop
-  pull_request:
-    branches:
-    - develop
-env:
-  DOCKER_CACHE_IMAGE: ${{ vars.DOCKERHUB_ORGANIZATION }}/hope-support-images
-  DOCKER_DEFAULT_PLATFORM: linux/amd64
-
-jobs:
-
-    build:
-      runs-on: ubuntu-20.04
-      outputs:
-        docker-image: ${{ steps.image_name.outputs.name }}
-      steps:
-        - name: Checkout code
-          uses: actions/checkout@v4
-
-        - uses: ./.github/actions/hash
-          id: release_hash
-
-        - name: Image name
-          id: image_name
-          run: |
-            image_name="$DOCKER_CACHE_IMAGE:hde-dev-${{ steps.release_hash.outputs.hash }}"
-            image_name_latest="$DOCKER_CACHE_IMAGE:hde-dev-latest"
-            echo "name=$image_name" >> $GITHUB_OUTPUT
-            echo "latest=$image_name_latest" >> $GITHUB_OUTPUT
-        - name: Check if image exists
-          uses: ./.github/actions/image_exists
-          id: image_exists
-          with:
-            image: ${{ steps.image_name.outputs.name }}
-            username: ${{ secrets.DOCKERHUB_USERNAME }}
-            password: ${{ secrets.DOCKERHUB_TOKEN }}
-        - name: Build Dev Image
-          if: ${{ !steps.image_exists.outputs.exists || contains(github.event.head_commit.message, 'rebuild') }}
-          id: docker_build
-          run: |
-            BUILD_DATE=$(date +"%Y-%m-%d %H:%M")
-            docker buildx create --use --platform x86_64 --name builder --driver docker-container
-            docker buildx build \
-              --platform x86_64 \
-              --builder builder \
-              --build-arg BUILD_DATE="${BUILD_DATE}" \
-              --progress=plain \
-              --cache-to type=local,ref=${DOCKER_CACHE_IMAGE}:hde-dev-latest,dest=./.AAA \
-              --cache-from ${DOCKER_CACHE_IMAGE}:hde-dev-latest \
-              -t ${{ steps.image_name.outputs.name }} \
-              -t ${{ steps.image_name.outputs.latest }} \
-              -f ./docker/Dockerfile \
-              --push \
-              --target python_dev_deps .
-
-    lint:
-      runs-on: ubuntu-20.04
-      needs: [build]
-      container:
-        image: unicef/hope-support-images:hde-dev-latest
-        credentials:
-          username: ${{ secrets.DOCKERHUB_USERNAME }}
-          password: ${{ secrets.DOCKERHUB_TOKEN }}
-      steps:
-        - name: Checkout code
-          uses: actions/checkout@v2
-        - name: Black
-          run: black --check src/
-        - name: Flake8
-          run: flake8 src/
diff --git a/.github/workflows/delete_image.yml b/.github/workflows/delete_image.yml
new file mode 100644
index 00000000..aa38b9b4
--- /dev/null
+++ b/.github/workflows/delete_image.yml
@@ -0,0 +1,27 @@
+name: Branch Deleted
+on: delete
+jobs:
+  delete:
+    if: github.event.ref_type == 'branch'
+    runs-on: ubuntu-latest
+    steps:
+      - name: Docker meta
+        id: meta
+        uses: docker/metadata-action@v5
+      - name: Define target image name
+        id: image_name
+        run: |
+          echo "name=${{vars.DOCKER_IMAGE}}:test-${{steps.meta.outputs.version}}" >> $GITHUB_OUTPUT
+      - name: Clean up
+        run: |
+          registry='https://registry-1.docker.io'
+          name=${{steps.image_name.outputs.name}}
+          curl -v -sSL -X DELETE "${registry}/v2/${name}/manifests/$(
+              curl -sSL -I \
+                  -H "Accept: application/vnd.docker.distribution.manifest.v2+json" \
+                  "${registry}/v2/${name}/manifests/$(
+                      curl -sSL "${registry}/v2/${name}/tags/list" | jq -r '.tags[0]'
+                  )" \
+              | awk '$1 == "Docker-Content-Digest:" { print $2 }' \
+              | tr -d $'\r' \
+          )"
diff --git a/.github/workflows/label-pullrequest.yml b/.github/workflows/label-pullrequest.yml
new file mode 100644
index 00000000..cb473a99
--- /dev/null
+++ b/.github/workflows/label-pullrequest.yml
@@ -0,0 +1,37 @@
+# Adds labels to pull requests for the type of change the PR makes
+name: Adds labels
+
+on:
+  pull_request:
+    types: [opened, synchronize, edited, ready_for_review]
+
+jobs:
+  label-pullrequest:
+    permissions:
+      contents: read
+      pull-requests: write
+    name: labels pull requests
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
+        with:
+          persist-credentials: false
+
+      - name: Check for file changes
+        uses: dorny/paths-filter@0bc4621a3135347011ad047f9ecf449bf72ce2bd # v3.0.0
+        id: changes
+        with:
+          token: ${{ github.token }}
+          filters: .github/file-filters.yml
+
+      - name: Add Migration label
+        uses: actions-ecosystem/action-add-labels@v1
+        if: steps.changes.outputs.migrations == 'true'
+        with:
+          labels: 'Contains new migration(s)'
+
+      - name: Add Dependencies label
+        uses: actions-ecosystem/action-add-labels@v1
+        if: steps.changes.outputs.dependencies == 'true'
+        with:
+          labels: 'Add/Change dependencies'
diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml
new file mode 100644
index 00000000..3d7a99b6
--- /dev/null
+++ b/.github/workflows/lint.yml
@@ -0,0 +1,101 @@
+name: Lint
+on:
+  push:
+    branches:
+      - develop
+#      - master
+#      - staging
+#      - releases/*
+  pull_request:
+    branches: [develop, master]
+    types: [synchronize, opened, reopened, ready_for_review]
+
+defaults:
+  run:
+    shell: bash
+
+
+concurrency:
+  group: ${{ github.workflow }}-${{ github.ref }}
+  cancel-in-progress: true
+
+
+permissions:
+  contents: read
+
+jobs:
+  changes:
+#    if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name != github.event.pull_request.base.repo.full_name
+    name: check files
+    runs-on: ubuntu-latest
+    timeout-minutes: 3
+    outputs:
+      lint: ${{ steps.changes.outputs.lint }}
+      docker: ${{ steps.changes.outputs.docker_base }}
+    steps:
+      - run: git config --global --add safe.directory $(realpath .)
+      - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
+      - id: changes
+        name: Check for backend file changes
+        uses: dorny/paths-filter@0bc4621a3135347011ad047f9ecf449bf72ce2bd # v3.0.0
+        with:
+          base: ${{ github.ref }}
+          token: ${{ github.token }}
+          filters: .github/file-filters.yml
+
+  flake8:
+    needs: changes
+    runs-on: ubuntu-latest
+#    if: github.event.pull_request.draft == false && steps.changes.outputs.lint
+    steps:
+      - uses: actions/checkout@v2
+      - uses: actions/setup-python@v2
+        with:
+          python-version: '3.9'
+      - name: Install requirements
+        run: pip install flake8 pycodestyle
+      - name: Check syntax
+        # Stop the build if there are Python syntax errors or undefined names
+        run: flake8 src/ --count --statistics --max-line-length=127
+
+      - name: Warnings
+        run: flake8 src --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics --extend-exclude=""
+  isort:
+    needs: changes
+    runs-on: ubuntu-latest
+#    if: github.event.pull_request.draft == false && steps.changes.outputs.lint
+    steps:
+      - uses: actions/checkout@v2
+      - uses: actions/setup-python@v2
+        with:
+          python-version: '3.9'
+      - name: Install requirements
+        run: pip install isort
+      - name: iSort
+        run: isort src/ --check-only
+  black:
+    needs: changes
+    runs-on: ubuntu-latest
+#    if: github.event.pull_request.draft == false && steps.changes.outputs.lint
+    steps:
+      - uses: actions/checkout@v2
+      - uses: actions/setup-python@v2
+        with:
+          python-version: '3.9'
+      - name: Install requirements
+        run: pip install black
+      - name: Black
+        run: black src/ --check
+  bandit:
+    needs: changes
+    runs-on: ubuntu-latest
+#    if: github.event.pull_request.draft == false && steps.changes.outputs.lint
+    steps:
+      - uses: actions/checkout@v2
+      - uses: actions/setup-python@v2
+        with:
+          python-version: '3.9'
+      - name: Install requirements
+        run: pip install bandit
+      - name: bandit
+        run: bandit -r src/
diff --git a/.github/workflows/mypy.yml b/.github/workflows/mypy.yml
new file mode 100644
index 00000000..aba253bc
--- /dev/null
+++ b/.github/workflows/mypy.yml
@@ -0,0 +1,62 @@
+name: MyPy
+
+on:
+  workflow_run:
+    workflows: [ 'Test' ]
+    types: [ completed ]
+    branches:
+      - develop
+      - master
+      - staging
+      - releases/*
+
+concurrency:
+  group: ${{ github.workflow }}-${{ github.ref }}
+  cancel-in-progress: true
+
+defaults:
+  run:
+    shell: bash
+
+
+jobs:
+  setup:
+    name: check files
+    runs-on: ubuntu-latest
+    timeout-minutes: 3
+    outputs:
+      python_files: ${{ steps.changes.outputs.python }}
+      image_name: ${{ steps.image_name.outputs.name }}
+    steps:
+#      - run: git config --global --add safe.directory $(realpath .)
+      - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
+      - id: changes
+        name: Check for backend file changes
+        uses: dorny/paths-filter@0bc4621a3135347011ad047f9ecf449bf72ce2bd # v3.0.0
+        with:
+          base: ${{ github.ref }}
+          token: ${{ github.token }}
+          filters: .github/file-filters.yml
+#      - id: release_hash
+#        uses: ./.github/actions/distro_hash
+      - name: Docker meta
+        id: meta
+        uses: docker/metadata-action@v5
+      - name: Define target image name
+        id: image_name
+        run: |
+          echo "name=${{vars.DOCKER_IMAGE}}:test-${{steps.meta.outputs.version}}" >> $GITHUB_OUTPUT
+
+  mypy:
+    needs: [setup]
+    if: needs.setup.outputs.python_files == 'true'
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v2
+      - name: Run check
+        run: |
+          docker run --rm \
+            -v $PWD:/code/app \
+            -w /code/app \
+            -t ${{needs.setup.outputs.image_name}} \
+            mypy src/
diff --git a/.github/workflows/rc.ym_ b/.github/workflows/rc.ym_
new file mode 100644
index 00000000..d0d16b33
--- /dev/null
+++ b/.github/workflows/rc.ym_
@@ -0,0 +1,6 @@
+name: Tag
+
+on:
+  push:
+    tags:
+      - '*'           # Push events to every tag not containing /
diff --git a/.github/workflows/test.ym_ b/.github/workflows/test.ym_
new file mode 100644
index 00000000..d253a90d
--- /dev/null
+++ b/.github/workflows/test.ym_
@@ -0,0 +1,84 @@
+name: Test
+
+on:
+  push:
+    branches:
+    - develop
+  pull_request:
+    branches:
+    - develop
+  workflow_run:
+    workflows: [ci]
+    types:
+      - completed
+
+env:
+  HASH_SEEDS: pdm.lock docker/bin/* docker/conf/* docker/Dockerfile
+  DOCKER_CACHE_IMAGE: ${{ vars.DOCKERHUB_ORGANIZATION }}/hope-support-images
+  BUILD_DATE: $(date +"%Y-%m-%d %H:%M")
+  DOCKER_DEFAULT_PLATFORM: linux/amd64
+
+jobs:
+    test:
+      runs-on: ubuntu-20.04
+      container:
+        image: ${DOCKER_CACHE_IMAGE}:hde-dev-latest
+        credentials:
+          username: ${{ secrets.DOCKERHUB_USERNAME }}
+          password: ${{ secrets.DOCKERHUB_TOKEN }}
+        ports:
+          - 8000:8000
+      services:
+        redis:
+          image: redis
+        db:
+          image: postgres:14
+          env:
+            POSTGRES_DATABASE: dedupe
+            POSTGRES_PASSWORD: postgres
+            POSTGRES_USERNAME: postgres
+          options: >-
+            --health-cmd pg_isready
+            --health-interval 10s
+            --health-timeout 5s
+            --health-retries 5
+      env:
+        DATABASE_URL: postgres://postgres:postgres@localhost:5432/dedupe
+        SECRET_KEY: secret_key
+        CACHE_URL: redis://redis:6379/0
+        CELERY_BROKER_URL: redis://redis:6379/0
+        PYTHONPATH: /hde/code/src
+      steps:
+        - name: Checkout code
+          uses: actions/checkout@v2
+        - name: DockerHub login
+          uses: docker/login-action@v3
+          with:
+            username: ${{ secrets.DOCKERHUB_USERNAME }}
+            password: ${{ secrets.DOCKERHUB_TOKEN }}
+        - name: Pull
+          run: docker pull ${DOCKER_CACHE_IMAGE}:hde-dev-latest
+        - name: Run tests
+          run: |
+                docker run --rm \
+                -e PYTHONPATH=/hde/code/src:/hde/__pypackages__/3.12/lib \
+                -e CACHE_URL="${CACHE_URL}" \
+                -e DATABASE_URL="${DATABASE_URL}" \
+                -e SECRET_KEY="${SECRET_KEY}" \
+                -e CELERY_BROKER_URL="${CELERY_BROKER_URL}" \
+                -v ${PWD}:/hde/code/ \
+                -w /hde/code/ \
+                -t ${DOCKER_CACHE_IMAGE}:hde-dev-latest \
+                pytest tests/ --create-db -v --cov --cov-report xml:coverage.xml
+
+        - name: Upload coverage to Codecov
+          uses: codecov/codecov-action@v4
+          with:
+            directory: ./coverage/reports/
+            env_vars: OS,PYTHON
+            fail_ci_if_error: true
+            files: /hde/code/coverage1.xml
+            flags: unittests
+            name: codecov-umbrella
+            token: ${{ secrets.CODECOV_TOKEN }}
+            verbose: true
\ No newline at end of file
diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index 44e92204..ebd0931b 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -3,60 +3,118 @@ name: Test
 on:
   push:
     branches:
-    - develop
+      - develop
+      - master
+      - staging
+      - release/*
+      - feature/*
+      - bugfix/*
+      - hotfix/*
   pull_request:
-    branches:
-    - develop
-  workflow_run:
-    workflows: [ci]
-    types:
-      - completed
+    branches: [develop, master]
+    types: [synchronize, opened, reopened, ready_for_review]
+
+concurrency:
+  group: ${{ github.workflow }}-${{ github.ref }}
+  cancel-in-progress: true
+
+defaults:
+  run:
+    shell: bash
+
 
 jobs:
+  setup:
+    if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name != github.event.pull_request.base.repo.full_name
+    name: check files
+    runs-on: ubuntu-latest
+    timeout-minutes: 3
+    outputs:
+      test_files: ${{ steps.changes.outputs.run_tests }}
+      docker: ${{ steps.changes.outputs.docker_base }}
+      python_files: ${{ steps.changes.outputs.python }}
+      branch: ${{ steps.extract_branch.outputs.branch }}
+      hash: ${{ steps.release_hash.outputs.hash }}
+      tags: ${{ steps.meta.outputs.tags }}
+      version: ${{ steps.meta.outputs.version }}
+      image_name: ${{ steps.image_name.outputs.name }}
+    steps:
+      - run: git config --global --add safe.directory $(realpath .)
+      - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
+      - id: changes
+        name: Check for file changes
+        uses: dorny/paths-filter@0bc4621a3135347011ad047f9ecf449bf72ce2bd # v3.0.0
+        with:
+          base: ${{ github.ref }}
+          token: ${{ github.token }}
+          filters: .github/file-filters.yml
+      - id: extract_branch
+        name: Extract branch name
+#        shell: bash
+        run: echo "branch=${GITHUB_HEAD_REF:-${GITHUB_REF#refs/heads/}}" >> $GITHUB_OUTPUT
+      - id: release_hash
+        uses: ./.github/actions/hash
+      - name: Docker meta
+        id: meta
+        uses: docker/metadata-action@v5
+      - name: Define target image name
+        id: image_name
+        run: |
+          echo "name=${{vars.DOCKER_IMAGE}}:test-${{steps.meta.outputs.version}}" >> $GITHUB_OUTPUT
+  build:
+    if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name != github.event.pull_request.base.repo.full_name
+    uses: ./.github/workflows/_build.yml
+    needs: [setup]
+    secrets: inherit
+    with:
+      image: ${{needs.setup.outputs.image_name}}
+      cache-from: ${{needs.setup.outputs.image_name}}
+      checksum: ${{needs.setup.outputs.hash}}
+      target: "python_dev_deps"
 
-    test:
-      runs-on: ubuntu-20.04
-      container:
-        image: unicef/hope-support-images:hde-dev-latest
-        credentials:
-          username: ${{ secrets.DOCKERHUB_USERNAME }}
-          password: ${{ secrets.DOCKERHUB_TOKEN }}
-        ports:
-          - 8000:8000
-      services:
-        redis:
-          image: redis
-        db:
-          image: postgres:14
-          env:
-            POSTGRES_DATABASE: dedupe
-            POSTGRES_PASSWORD: postgres
-            POSTGRES_USERNAME: postgres
-          options: >-
-            --health-cmd pg_isready
-            --health-interval 10s
-            --health-timeout 5s
-            --health-retries 5
-      env:
-        DATABASE_URL: postgres://postgres:postgres@db:5432/dedupe
-        SECRET_KEY: secret_key
-        CACHE_URL: redis://redis:6379/0
-        CELERY_BROKER_URL: redis://redis:6379/0
-        PYTHONPATH: "/hde/code/src:/hde/__pypackages__/3.12/lib"
-      steps:
-        - name: Checkout code
-          uses: actions/checkout@v2
-        - name: Run tests
-          run: |
-            pytest tests --create-db -v --cov --cov-report xml:coverage.xml
-#        - name: Upload coverage to Codecov
-#          uses: codecov/codecov-action@v4
-#          with:
-#            directory: ./coverage/reports/
-#            env_vars: OS,PYTHON
-#            fail_ci_if_error: true
-#            files: /hde/code/coverage1.xml
-#            flags: unittests
-#            name: codecov-umbrella
-#            token: ${{ secrets.CODECOV_TOKEN }}
-#            verbose: true
+  test:
+    if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name != github.event.pull_request.base.repo.full_name
+    needs: [build, setup]
+    runs-on: ubuntu-latest
+    services:
+      redis:
+        image: redis
+      db:
+        image: postgres:14
+        env:
+          POSTGRES_DATABASE: dedupe
+          POSTGRES_PASSWORD: postgres
+          POSTGRES_USERNAME: postgres
+        options: >-
+          --health-cmd pg_isready
+          --health-interval 10s
+          --health-timeout 5s
+          --health-retries 5
+    env:
+      DOCKER_DEFAULT_PLATFORM: linux/amd64
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v2
+      - name: Run tests
+        run: |
+          ls -al $PWD
+          touch __PIPPO__
+          docker run --rm \
+            -e DATABASE_URL=postgres://postgres:postgres@db:5432/dedupe \
+            -e SECRET_KEY=secret_key \
+            -e CACHE_URL=redis://redis:6379/0 \
+            -e CELERY_BROKER_URL=redis://redis:6379/0 \
+            -v $PWD:/code/app \
+            -w /code/app \
+            -t ${{needs.setup.outputs.image_name}} \
+            pytest tests -v --cov --cov-report xml:coverage.xml
+          ls -al $PWD
+      - name: Upload coverage to Codecov
+        uses: codecov/codecov-action@v4
+        with:
+          env_vars: OS,PYTHON
+          fail_ci_if_error: true
+          files: coverage.xml
+          token: ${{ secrets.CODECOV_TOKEN }}
+          verbose: false
+          name: codecov-${{ github.ref_name }}
diff --git a/.github/workflows/towncrier.yml b/.github/workflows/towncrier.yml
new file mode 100644
index 00000000..40b6f350
--- /dev/null
+++ b/.github/workflows/towncrier.yml
@@ -0,0 +1,21 @@
+name: Changelog entries
+on: [pull_request]
+
+permissions:
+  contents: read
+
+jobs:
+  towncrier:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v2
+        with:
+          fetch-depth: 0
+      - uses: actions/setup-python@v2
+        with:
+          python-version: '3.9'
+      - name: Install python deps
+        run: pip install towncrier
+
+      - name: Check that changelog is updated
+        run: towncrier check >> $GITHUB_STEP_SUMMARY
diff --git a/.gitignore b/.gitignore
index 72e9d96d..e107f5d6 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,27 +1,19 @@
 .*
 ~*
-*.sh
 *.egg-info
-__pycache__/
-*.py[cod]
-!tests/.coveragerc
-!.dockerignore
-!.flake8
+dist/
+__pycache__
+.tox
 !.gitignore
-!.github/*
-!.tx/config
+!.github
+!.pylintrc
+!.isort.cfg
+!.git
 !.mypy.ini
-!.pre-commit-config.yaml
+!.dockerignore
 !.bumpversion.cfg
-!.trivyignore
-!docker/bin/*.sh
-!bandit.yaml
-build
-dist
+!.flake8
+!tests/.coveragerc
+pdm.lock
 coverage.xml
-Makefile
-site
-black.txt
-flake8
-act.*
-
+.pdm-python
diff --git a/.mypy.ini b/.mypy.ini
index 30ef8c64..ada05c9d 100644
--- a/.mypy.ini
+++ b/.mypy.ini
@@ -14,8 +14,8 @@ mypy_path = "$MYPY_CONFIG_FILE_DIR/stubs/:$MYPY_CONFIG_FILE_DIR/src/:"
 strict = true
 ignore_missing_imports = True
 namespace_packages = true
-plugins =
-    mypy_django_plugin.main
+;plugins =
+;    mypy_django_plugin.main
 
 [mypy.plugins.django-stubs]
 django_settings_module = "hope_dedup_engine.config.settings"
diff --git a/.pylintrc b/.pylintrc
new file mode 100644
index 00000000..dbbee982
--- /dev/null
+++ b/.pylintrc
@@ -0,0 +1,569 @@
+[MAIN]
+
+# Specify a configuration file.
+#rcfile=
+
+# Python code to execute, usually for sys.path manipulation such as
+# pygtk.require().
+#init-hook=
+
+# Files or directories to be skipped. They should be base names, not
+# paths.
+ignore=CVS
+
+# Add files or directories matching the regex patterns to the ignore-list. The
+# regex matches against paths and can be in Posix or Windows format.
+ignore-paths=
+
+# Files or directories matching the regex patterns are skipped. The regex
+# matches against base names, not paths.
+ignore-patterns=^\.#
+
+# Pickle collected data for later comparisons.
+persistent=yes
+
+# List of plugins (as comma separated values of python modules names) to load,
+# usually to register additional checkers.
+load-plugins=
+    pylint.extensions.check_elif,
+    pylint.extensions.bad_builtin,
+    pylint.extensions.docparams,
+    pylint.extensions.for_any_all,
+    pylint.extensions.set_membership,
+    pylint.extensions.code_style,
+    pylint.extensions.overlapping_exceptions,
+    pylint.extensions.typing,
+    pylint.extensions.redefined_variable_type,
+    pylint.extensions.comparison_placement,
+    pylint.extensions.mccabe,
+
+# Use multiple processes to speed up Pylint. Specifying 0 will auto-detect the
+# number of processors available to use.
+jobs=0
+
+# When enabled, pylint would attempt to guess common misconfiguration and emit
+# user-friendly hints instead of false-positive error messages.
+suggestion-mode=yes
+
+# Allow loading of arbitrary C extensions. Extensions are imported into the
+# active Python interpreter and may run arbitrary code.
+unsafe-load-any-extension=no
+
+# A comma-separated list of package or module names from where C extensions may
+# be loaded. Extensions are loading into the active Python interpreter and may
+# run arbitrary code
+extension-pkg-allow-list=
+
+# Minimum supported python version
+py-version = 3.7.2
+
+# Control the amount of potential inferred values when inferring a single
+# object. This can help the performance when dealing with large functions or
+# complex, nested conditions.
+limit-inference-results=100
+
+# Specify a score threshold to be exceeded before program exits with error.
+fail-under=10.0
+
+# Return non-zero exit code if any of these messages/categories are detected,
+# even if score is above --fail-under value. Syntax same as enable. Messages
+# specified are enabled, while categories only check already-enabled messages.
+fail-on=
+
+
+[MESSAGES CONTROL]
+
+# Only show warnings with the listed confidence levels. Leave empty to show
+# all. Valid levels: HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED
+# confidence=
+
+# Enable the message, report, category or checker with the given id(s). You can
+# either give multiple identifier separated by comma (,) or put this option
+# multiple time (only on the command line, not in the configuration file where
+# it should appear only once). See also the "--disable" option for examples.
+enable=
+    use-symbolic-message-instead,
+    useless-suppression,
+
+# Disable the message, report, category or checker with the given id(s). You
+# can either give multiple identifiers separated by comma (,) or put this
+# option multiple times (only on the command line, not in the configuration
+# file where it should appear only once).You can also use "--disable=all" to
+# disable everything first and then re-enable specific checks. For example, if
+# you want to run only the similarities checker, you can use "--disable=all
+# --enable=similarities". If you want to run only the classes checker, but have
+# no Warning level messages displayed, use"--disable=all --enable=classes
+# --disable=W"
+
+disable=
+    attribute-defined-outside-init,
+    invalid-name,
+    missing-docstring,
+    protected-access,
+    too-few-public-methods,
+    # handled by black
+    format,
+    # We anticipate #3512 where it will become optional
+    fixme,
+    cyclic-import,
+    import-error,
+
+
+[REPORTS]
+
+# Set the output format. Available formats are text, parseable, colorized, msvs
+# (visual studio) and html. You can also give a reporter class, eg
+# mypackage.mymodule.MyReporterClass.
+output-format=text
+
+# Tells whether to display a full report or only the messages
+reports=no
+
+# Python expression which should return a note less than 10 (10 is the highest
+# note). You have access to the variables 'fatal', 'error', 'warning', 'refactor', 'convention'
+# and 'info', which contain the number of messages in each category, as
+# well as 'statement', which is the total number of statements analyzed. This
+# score is used by the global evaluation report (RP0004).
+evaluation=max(0, 0 if fatal else 10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10))
+
+# Template used to display messages. This is a python new-style format string
+# used to format the message information. See doc for all details
+#msg-template=
+
+# Activate the evaluation score.
+score=yes
+
+
+[LOGGING]
+
+# Logging modules to check that the string format arguments are in logging
+# function parameter format
+logging-modules=logging
+
+# The type of string formatting that logging methods do. `old` means using %
+# formatting, `new` is for `{}` formatting.
+logging-format-style=old
+
+
+[MISCELLANEOUS]
+
+# List of note tags to take in consideration, separated by a comma.
+notes=FIXME,XXX,TODO
+
+# Regular expression of note tags to take in consideration.
+#notes-rgx=
+
+
+[SIMILARITIES]
+
+# Minimum lines number of a similarity.
+min-similarity-lines=6
+
+# Ignore comments when computing similarities.
+ignore-comments=yes
+
+# Ignore docstrings when computing similarities.
+ignore-docstrings=yes
+
+# Ignore imports when computing similarities.
+ignore-imports=yes
+
+# Signatures are removed from the similarity computation
+ignore-signatures=yes
+
+
+[VARIABLES]
+
+# Tells whether we should check for unused import in __init__ files.
+init-import=no
+
+# A regular expression matching the name of dummy variables (i.e. expectedly
+# not used).
+dummy-variables-rgx=_$|dummy
+
+# List of additional names supposed to be defined in builtins. Remember that
+# you should avoid defining new builtins when possible.
+additional-builtins=
+
+# List of strings which can identify a callback function by name. A callback
+# name must start or end with one of those strings.
+callbacks=cb_,_cb
+
+# Tells whether unused global variables should be treated as a violation.
+allow-global-unused-variables=yes
+
+# List of names allowed to shadow builtins
+allowed-redefined-builtins=
+
+# Argument names that match this expression will be ignored. Default to name
+# with leading underscore.
+ignored-argument-names=_.*
+
+# List of qualified module names which can have objects that can redefine
+# builtins.
+redefining-builtins-modules=six.moves,past.builtins,future.builtins,builtins,io
+
+
+[FORMAT]
+
+# Maximum number of characters on a single line.
+max-line-length=120
+
+# Regexp for a line that is allowed to be longer than the limit.
+ignore-long-lines=^\s*(# )?<?https?://\S+>?$
+
+# Allow the body of an if to be on the same line as the test if there is no
+# else.
+single-line-if-stmt=no
+
+# Allow the body of a class to be on the same line as the declaration if body
+# contains single statement.
+single-line-class-stmt=no
+
+# Maximum number of lines in a module
+max-module-lines=1000
+
+# String used as indentation unit. This is usually "    " (4 spaces) or "\t" (1
+# tab).
+indent-string='    '
+
+# Number of spaces of indent required inside a hanging or continued line.
+indent-after-paren=4
+
+# Expected format of line ending, e.g. empty (any line ending), LF or CRLF.
+expected-line-ending-format=
+
+
+[BASIC]
+
+# Good variable names which should always be accepted, separated by a comma
+good-names=i,j,k,ex,Run,_
+
+# Good variable names regexes, separated by a comma. If names match any regex,
+# they will always be accepted
+good-names-rgxs=
+
+# Bad variable names which should always be refused, separated by a comma
+bad-names=foo,bar,baz,toto,tutu,tata
+
+# Bad variable names regexes, separated by a comma. If names match any regex,
+# they will always be refused
+bad-names-rgxs=
+
+# Colon-delimited sets of names that determine each other's naming style when
+# the name regexes allow several styles.
+name-group=
+
+# Include a hint for the correct naming format with invalid-name
+include-naming-hint=no
+
+# Naming style matching correct function names.
+function-naming-style=snake_case
+
+# Regular expression matching correct function names
+function-rgx=[a-z_][a-z0-9_]{2,30}$
+
+# Naming style matching correct variable names.
+variable-naming-style=snake_case
+
+# Regular expression matching correct variable names
+variable-rgx=[a-z_][a-z0-9_]{2,30}$
+
+# Naming style matching correct constant names.
+const-naming-style=UPPER_CASE
+
+# Regular expression matching correct constant names
+const-rgx=(([A-Z_][A-Z0-9_]*)|(__.*__))$
+
+# Naming style matching correct attribute names.
+attr-naming-style=snake_case
+
+# Regular expression matching correct attribute names
+attr-rgx=[a-z_][a-z0-9_]{2,}$
+
+# Naming style matching correct argument names.
+argument-naming-style=snake_case
+
+# Regular expression matching correct argument names
+argument-rgx=[a-z_][a-z0-9_]{2,30}$
+
+# Naming style matching correct class attribute names.
+class-attribute-naming-style=any
+
+# Regular expression matching correct class attribute names
+class-attribute-rgx=([A-Za-z_][A-Za-z0-9_]{2,30}|(__.*__))$
+
+# Naming style matching correct class constant names.
+class-const-naming-style=UPPER_CASE
+
+# Regular expression matching correct class constant names. Overrides class-
+# const-naming-style.
+#class-const-rgx=
+
+# Naming style matching correct inline iteration names.
+inlinevar-naming-style=any
+
+# Regular expression matching correct inline iteration names
+inlinevar-rgx=[A-Za-z_][A-Za-z0-9_]*$
+
+# Naming style matching correct class names.
+class-naming-style=PascalCase
+
+# Regular expression matching correct class names
+class-rgx=[A-Z_][a-zA-Z0-9]+$
+
+
+# Naming style matching correct module names.
+module-naming-style=snake_case
+
+# Regular expression matching correct module names
+module-rgx=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$
+
+
+# Naming style matching correct method names.
+method-naming-style=snake_case
+
+# Regular expression matching correct method names
+method-rgx=[a-z_][a-z0-9_]{2,}$
+
+# Regular expression which can overwrite the naming style set by typevar-naming-style.
+#typevar-rgx=
+
+# Regular expression which should only match function or class names that do
+# not require a docstring. Use ^(?!__init__$)_ to also check __init__.
+no-docstring-rgx=__.*__
+
+# Minimum line length for functions/classes that require docstrings, shorter
+# ones are exempt.
+docstring-min-length=-1
+
+# List of decorators that define properties, such as abc.abstractproperty.
+property-classes=abc.abstractproperty
+
+
+[TYPECHECK]
+
+# Regex pattern to define which classes are considered mixins if ignore-mixin-
+# members is set to 'yes'
+mixin-class-rgx=.*MixIn
+
+# List of module names for which member attributes should not be checked
+# (useful for modules/projects where namespaces are manipulated during runtime
+# and thus existing member attributes cannot be deduced by static analysis). It
+# supports qualified module names, as well as Unix pattern matching.
+ignored-modules=
+
+# List of class names for which member attributes should not be checked (useful
+# for classes with dynamically set attributes). This supports the use of
+# qualified names.
+ignored-classes=SQLObject, optparse.Values, thread._local, _thread._local
+
+# List of members which are set dynamically and missed by pylint inference
+# system, and so shouldn't trigger E1101 when accessed. Python regular
+# expressions are accepted.
+generated-members=REQUEST,acl_users,aq_parent,argparse.Namespace
+
+# List of decorators that create context managers from functions, such as
+# contextlib.contextmanager.
+contextmanager-decorators=contextlib.contextmanager
+
+# Tells whether to warn about missing members when the owner of the attribute
+# is inferred to be None.
+ignore-none=yes
+
+# This flag controls whether pylint should warn about no-member and similar
+# checks whenever an opaque object is returned when inferring. The inference
+# can return multiple potential results while evaluating a Python object, but
+# some branches might not be evaluated, which results in partial inference. In
+# that case, it might be useful to still emit no-member and other checks for
+# the rest of the inferred objects.
+ignore-on-opaque-inference=yes
+
+# Show a hint with possible names when a member name was not found. The aspect
+# of finding the hint is based on edit distance.
+missing-member-hint=yes
+
+# The minimum edit distance a name should have in order to be considered a
+# similar match for a missing member name.
+missing-member-hint-distance=1
+
+# The total number of similar names that should be taken in consideration when
+# showing a hint for a missing member.
+missing-member-max-choices=1
+
+[SPELLING]
+
+# Spelling dictionary name. Available dictionaries: none. To make it working
+# install python-enchant package.
+spelling-dict=
+
+# List of comma separated words that should not be checked.
+spelling-ignore-words=
+
+# List of comma separated words that should be considered directives if they
+# appear and the beginning of a comment and should not be checked.
+spelling-ignore-comment-directives=fmt: on,fmt: off,noqa:,noqa,nosec,isort:skip,mypy:,pragma:,# noinspection
+
+# A path to a file that contains private dictionary; one word per line.
+spelling-private-dict-file=.pyenchant_pylint_custom_dict.txt
+
+# Tells whether to store unknown words to indicated private dictionary in
+# --spelling-private-dict-file option instead of raising a message.
+spelling-store-unknown-words=no
+
+# Limits count of emitted suggestions for spelling mistakes.
+max-spelling-suggestions=2
+
+
+[DESIGN]
+
+# Maximum number of arguments for function / method
+max-args=10
+
+# Maximum number of locals for function / method body
+max-locals=25
+
+# Maximum number of return / yield for function / method body
+max-returns=11
+
+# Maximum number of branch for function / method body
+max-branches=27
+
+# Maximum number of statements in function / method body
+max-statements=100
+
+# Maximum number of parents for a class (see R0901).
+max-parents=7
+
+# List of qualified class names to ignore when counting class parents (see R0901).
+ignored-parents=
+
+# Maximum number of attributes for a class (see R0902).
+max-attributes=11
+
+# Minimum number of public methods for a class (see R0903).
+min-public-methods=2
+
+# Maximum number of public methods for a class (see R0904).
+max-public-methods=25
+
+# Maximum number of boolean expressions in an if statement (see R0916).
+max-bool-expr=5
+
+# List of regular expressions of class ancestor names to
+# ignore when counting public methods (see R0903).
+exclude-too-few-public-methods=
+
+max-complexity=10
+
+[CLASSES]
+
+# List of method names used to declare (i.e. assign) instance attributes.
+defining-attr-methods=__init__,__new__,setUp,__post_init__
+
+# List of valid names for the first argument in a class method.
+valid-classmethod-first-arg=cls
+
+# List of valid names for the first argument in a metaclass class method.
+valid-metaclass-classmethod-first-arg=mcs
+
+# List of member names, which should be excluded from the protected access
+# warning.
+exclude-protected=_asdict,_fields,_replace,_source,_make
+
+# Warn about protected attribute access inside special methods
+check-protected-access-in-special-methods=no
+
+[IMPORTS]
+
+# List of modules that can be imported at any level, not just the top level
+# one.
+allow-any-import-level=
+
+# Allow wildcard imports from modules that define __all__.
+allow-wildcard-with-all=no
+
+# Analyse import fallback blocks. This can be used to support both Python 2 and
+# 3 compatible code, which means that the block might have code that exists
+# only in one or another interpreter, leading to false positives when analysed.
+analyse-fallback-blocks=no
+
+# Deprecated modules which should not be used, separated by a comma
+deprecated-modules=regsub,TERMIOS,Bastion,rexec
+
+# Create a graph of every (i.e. internal and external) dependencies in the
+# given file (report RP0402 must not be disabled)
+import-graph=
+
+# Create a graph of external dependencies in the given file (report RP0402 must
+# not be disabled)
+ext-import-graph=
+
+# Create a graph of internal dependencies in the given file (report RP0402 must
+# not be disabled)
+int-import-graph=
+
+# Force import order to recognize a module as part of the standard
+# compatibility libraries.
+known-standard-library=
+
+# Force import order to recognize a module as part of a third party library.
+known-third-party=enchant
+
+# Couples of modules and preferred modules, separated by a comma.
+preferred-modules=
+
+
+[EXCEPTIONS]
+
+# Exceptions that will emit a warning when being caught. Defaults to
+# "Exception"
+overgeneral-exceptions=Exception
+
+
+[TYPING]
+
+# Set to ``no`` if the app / library does **NOT** need to support runtime
+# introspection of type annotations. If you use type annotations
+# **exclusively** for type checking of an application, you're probably fine.
+# For libraries, evaluate if some users want to access the type hints at
+# runtime first, e.g., through ``typing.get_type_hints``. Applies to Python
+# versions 3.7 - 3.9
+runtime-typing = no
+
+
+[DEPRECATED_BUILTINS]
+
+# List of builtins function names that should not be used, separated by a comma
+bad-functions=map,input
+
+
+[REFACTORING]
+
+# Maximum number of nested blocks for function / method body
+max-nested-blocks=5
+
+# Complete name of functions that never returns. When checking for
+# inconsistent-return-statements if a never returning function is called then
+# it will be considered as an explicit return statement and no message will be
+# printed.
+never-returning-functions=sys.exit,argparse.parse_error
+
+
+[STRING]
+
+# This flag controls whether inconsistent-quotes generates a warning when the
+# character used as a quote delimiter is used inconsistently within a module.
+check-quote-consistency=no
+
+# This flag controls whether the implicit-str-concat should generate a warning
+# on implicit string concatenation in sequences defined over several lines.
+check-str-concat-over-line-jumps=no
+
+
+[CODE_STYLE]
+
+# Max line length for which to still emit suggestions. Used to prevent optional
+# suggestions which would get split by a code formatter (e.g., black). Will
+# default to the setting for ``max-line-length``.
+#max-line-length-suggestions=
diff --git a/codecov.yml b/codecov.yml
new file mode 100644
index 00000000..e69de29b
diff --git a/docker/Dockerfile b/docker/Dockerfile
index 2bc0e571..04cc0aa0 100644
--- a/docker/Dockerfile
+++ b/docker/Dockerfile
@@ -1,46 +1,47 @@
+# syntax=docker/dockerfile:1
 ARG PYTHON_VER=3.12
-ARG BUILD_DATE=not_provided
-ARG VERSION=0.1.0
-ARG DEST_DIR=/hde
-ARG PKG_DIR=${DEST_DIR}/__pypackages__/${PYTHON_VER}
+ARG PKG_DIR=/code/__pypackages__/${PYTHON_VER}
 ARG CHECKSUM
+ARG VERSION=0.1.0
+ARG BUILD_DATE=not_provided
 
-FROM python:$PYTHON_VER-slim-bullseye AS python_base
-ARG DEST_DIR
+ARG APATH=${PKG_DIR}/bin
+ARG APYTHONPATH=${PKG_DIR}/lib/
+
+FROM python:${PYTHON_VER}-slim-bookworm AS python_base
+ARG APATH
+ENV APATH=$APATH
+ARG APYTHONPATH
+ENV APYTHONPATH=$APYTHONPATH
 ARG PKG_DIR
-ENV ADMINS="" \
-    BUILD_DATE=$BUILD_DATE \
-    CHECKSUM=$CHECKSUM \
-    DATABASE_URL="" \
-    DEST_DIR=${DEST_DIR} \
-    DJANGO_SETTINGS_MODULE="hope_dedup_engine.config.settings" \
-    INIT_RUN_CHECK=1 \
-    INIT_RUN_COLLECTSTATIC=1 \
-    INIT_RUN_MIGRATATIONS=1 \
-    INIT_RUN_UPGRADE=0 \
-    LOG_LEVEL="ERROR" \
-    MEDIA_ROOT="/var/hope_dedup_engine/media" \
-    MEDIA_URL="/media/" \
-    PATH=${PKG_DIR}/bin:${PATH}:/usr/local/bin/ \
-    PYTHONPATH=${PKG_DIR}/lib/:${PYTHONPATH} \
-    PYTHONDONTWRITEBYTECODE=1 \
-    PYTHONUNBUFFERED=1 \
-    SECRET_KEY="secret-key-just-for-build" \
-    SENTRY_DSN="" \
-    STATIC_ROOT="/var/hope_dedup_engine/static" \
-    STATIC_URL="/static/" \
-    UWSGI_PROCESSES=4 \
-    VERSION=$VERSION
+ENV PKG_DIR=$PKG_DIR
+
+ARG CHECKSUM
+ENV CHECKSUM=$CHECKSUM
+ARG VERSION
+ENV VERSION=$VERSION
+ARG BUILD_DATE
+ENV BUILD_DATE=$BUILD_DATE
+ARG SOURCE_COMMIT
+ENV SOURCE_COMMIT=$SOURCE_COMMIT
+ARG GITHUB_SERVER_URL
+ENV GITHUB_SERVER_URL=$GITHUB_SERVER_URL
+ARG GITHUB_REPOSITORY
+ENV GITHUB_REPOSITORY=$GITHUB_REPOSITORY
 
 ARG GOSU_VERSION=1.17
 ARG GOSU_SHA256=bbc4136d03ab138b1ad66fa4fc051bafc6cc7ffae632b069a53657279a450de3
 ARG TINI_VERSION=0.19.0
 ARG TINI_SHA256=93dcc18adc78c65a028a84799ecf8ad40c936fdfc5f2a57b1acda5a8117fa82c
+ARG WAITFOR_IT_VERSION=2.4.1
+ARG WAITFOR_IT_MD5=cd67c8e45436c4a7b2b707d7a5b15a66
+
+
 RUN set -x \
   && buildDeps=" \
   wget \
   " \
-  && apt-get update && apt-get install -y --no-install-recommends $buildDeps \
+  && apt-get update && apt-get install -y --no-install-recommends ${buildDeps} \
   && rm -rf /var/lib/apt/lists/* \
   && wget --quiet -O /usr/local/bin/gosu "https://github.com/tianon/gosu/releases/download/$GOSU_VERSION/gosu-amd64" \
   && echo "$GOSU_SHA256 /usr/local/bin/gosu" | sha256sum --check --status \
@@ -48,14 +49,18 @@ RUN set -x \
   && wget --quiet -O /usr/local/bin/tini "https://github.com/krallin/tini/releases/download/v$TINI_VERSION/tini-amd64" \
   && echo "$TINI_SHA256 /usr/local/bin/tini" | sha256sum --check --status \
   && chmod +x /usr/local/bin/tini \
+  && wget --quiet -O /usr/local/bin/waitforit "https://github.com/maxcnunes/waitforit/releases/download/v$WAITFOR_IT_VERSION/waitforit-linux_amd64" \
+  && echo "$WAITFOR_IT_MD5 /usr/local/bin/waitforit" | md5sum --check --status \
+  && chmod +x /usr/local/bin/waitforit  \
   && apt-get purge -y --auto-remove $buildDeps
-# sudo apt-get install libcairo2-dev libjpeg62-turbo-dev libpango1.0-dev libgif-dev build-essential g++
-# sudo apt-get install libopenblas-dev libwebp-dev
+
 
 RUN \
     --mount=type=cache,target=/var/cache/apt \
-    apt-get update  \
+    apt-get clean \
+    && apt-get update  \
     && apt-get install -y --no-install-recommends  \
+      postgresql-client \
       postgresql-client \
       libgl1  \
       libglib2.0-0 \
@@ -69,82 +74,157 @@ RUN \
       libwebp-dev \
       mime-support  \
     && apt-get clean \
-    && rm -rf /var/lib/apt/lists/* \
-    && ldconfig
-
-RUN ln -s -f /bin/true /usr/bin/chfn \
-    && groupadd --gid 1024 hope \
-    && adduser --disabled-login --disabled-password --no-create-home --ingroup hope -q hde \
-    && mkdir ${DEST_DIR} \
-    && chown hde:hope ${DEST_DIR} \
+    && rm -rf /var/lib/apt/lists/*
+
+RUN groupadd --gid 1024 app \
+    && adduser --disabled-login --disabled-password --no-create-home --ingroup app -q user \
     && echo $CHECKSUM > /CHECKSUM
 
+
 COPY docker/bin/* /usr/local/bin/
 COPY docker/conf/* /conf/
 
 FROM python_base AS build_deps
-ARG DEST_DIR
-ENV buildDeps="build-essential \
-                cmake \
-                curl \
-                gcc \
-                libgdal-dev \
-                liblapack-dev \
-                libpng-dev  \
-                libpq-dev \
-                libssl-dev \
-                python3-dev \
-                zlib1g-dev "
-
-RUN apt-get update  \
-    && apt-get install -y --no-install-recommends \
-    $buildDeps \
+
+RUN set -x \
+    && buildDeps="build-essential \
+        cmake \
+        curl \
+        gcc \
+        libgdal-dev \
+        libgif-dev \
+        libjpeg-dev \
+        liblapack-dev \
+        libopenblas-dev \
+        libpng-dev  \
+        libpq-dev \
+        libwebp-dev \
+        libssl-dev \
+        libxml2-dev  \
+        python3-dev \
+        zlib1g-dev  \
+    " \
+    && apt-get update \
+    && apt-get install -y --no-install-recommends $buildDeps \
     && apt-get clean \
     && rm -rf /var/lib/apt/lists/*
 
+ENV PATH=${APATH}:${PATH} \
+    PYTHONPATH=${APYTHONPATH}:/code/app/src \
+    PYTHONDONTWRITEBYTECODE=1
+
 
 FROM build_deps AS python_dev_deps
-ARG DEST_DIR
-WORKDIR ${DEST_DIR}
-COPY pyproject.toml .
-COPY pdm.lock .
-ENV PDM_CHECK_UPDATE=0 \
-    PDM_CACHE_DIR=/cache/ \
-    PDM_USE_VENV=0 \
-    PDM_INSTALL_PARALLEL=0
-RUN pip install -U pip setuptools pdm \
-    && mkdir -p ${DEST_DIR}/__pypackages__ \
+ARG CHECKSUM
+ENV CHECKSUM=$CHECKSUM
+ARG VERSION
+ENV VERSION=$VERSION
+ARG BUILD_DATE
+ENV BUILD_DATE=$BUILD_DATE
+ARG SOURCE_COMMIT
+ENV SOURCE_COMMIT=$SOURCE_COMMIT
+ARG GITHUB_SERVER_URL
+ENV GITHUB_SERVER_URL=$GITHUB_SERVER_URL
+ARG GITHUB_REPOSITORY
+ENV GITHUB_REPOSITORY=$GITHUB_REPOSITORY
+
+
+LABEL date=$BUILD_DATE
+LABEL version=$VERSION
+LABEL checksum=$CHECKSUM
+
+WORKDIR /code
+COPY pyproject.toml pdm.lock ./
+COPY docker/conf/config.toml /etc/xdg/pdm/config.toml
+RUN cat <<EOF > /RELEASE
+{"version": "$VERSION",
+ "commit": "$SOURCE_COMMIT",
+ "date": "$BUILD_DATE",
+ "checksum": "$CHECKSUM",
+ "source": "${GITHUB_SERVER_URL}/$${GITHUB_REPOSITORY}/tree/${SOURCE_COMMIT:-master}/"
+}
+EOF
+
+RUN set -x \
+    && pip install -U pip pdm \
+    && mkdir -p $PKG_DIR \
     && pdm sync --no-editable -v --no-self
 
 
-FROM build_deps AS python_prod_deps
-ARG DEST_DIR
-WORKDIR ${DEST_DIR}
-COPY pyproject.toml .
-COPY pdm.lock .
-COPY ./src ${DEST_DIR}/src
-ENV PDM_CHECK_UPDATE=0 \
-    PDM_CACHE_DIR=/cache/ \
-    PDM_USE_VENV=0 \
-    PDM_INSTALL_PARALLEL=0
-RUN pip install -U pip setuptools pdm \
-    && mkdir -p ${DEST_DIR}/__pypackages__ \
-    && pdm sync --no-editable -v --prod \
-    && apt-get clean \
-    && rm -rf /var/lib/apt/lists/*
+FROM python_dev_deps AS python_prod_deps
+ARG PKG_DIR
+ARG CHECKSUM
+ENV CHECKSUM=$CHECKSUM
+ARG VERSION
+ENV VERSION=$VERSION
+ARG BUILD_DATE
+ENV BUILD_DATE=$BUILD_DATE
+ARG SOURCE_COMMIT
+ENV SOURCE_COMMIT=$SOURCE_COMMIT
+ARG GITHUB_SERVER_URL
+ENV GITHUB_SERVER_URL=$GITHUB_SERVER_URL
+ARG GITHUB_REPOSITORY
+ENV GITHUB_REPOSITORY=$GITHUB_REPOSITORY
 
-FROM python_base AS dev
-COPY --chown=hde:hope --from=python_dev_deps ${DEST_DIR}/__pypackages__ ${DEST_DIR}/__pypackages__
-COPY ./src ${DEST_DIR}/src
-ENV PYTHONPATH=${DEST_DIR}/src/:${PYTHONPATH}
+LABEL date=$BUILD_DATE
+LABEL version=$VERSION
+LABEL checksum=$CHECKSUM
+
+WORKDIR /code
+COPY docker/conf/config.toml /etc/xdg/pdm/config.toml
+COPY pyproject.toml pdm.lock ./
+COPY ./src /code/src
+
+
+RUN mkdir -p $PKG_DIR \
+    && pip install -U pdm \
+    && pdm sync --no-editable -v --prod
 
 
 FROM python_base AS dist
-ENV PYTHONPATH=${DEST_DIR}/src/:${PYTHONPATH}
-WORKDIR ${DEST_DIR}
-COPY --chown=hde:hope --from=python_prod_deps ${DEST_DIR}/__pypackages__ ${DEST_DIR}/__pypackages__
-USER hde
-CMD "run"
-ENTRYPOINT exec docker-entrypoint.sh "$0" "$@"
 
+ARG PKG_DIR
+ARG CHECKSUM
+ENV CHECKSUM=$CHECKSUM
+ARG VERSION
+ENV VERSION=$VERSION
+ARG BUILD_DATE
+ENV BUILD_DATE=$BUILD_DATE
+ARG SOURCE_COMMIT
+ENV SOURCE_COMMIT=$SOURCE_COMMIT
+ARG GITHUB_SERVER_URL
+ENV GITHUB_SERVER_URL=$GITHUB_SERVER_URL
+ARG GITHUB_REPOSITORY
+ENV GITHUB_REPOSITORY=$GITHUB_REPOSITORY
+
+RUN cat <<EOF > /RELEASE
+{"version": "$VERSION",
+ "commit": "$SOURCE_COMMIT",
+ "date": "$BUILD_DATE",
+ "checksum": "$CHECKSUM",
+ "source": "${GITHUB_SERVER_URL}/$${GITHUB_REPOSITORY}/tree/${SOURCE_COMMIT:-master}/"
+}
+EOF
+
+WORKDIR /code
+COPY --chown=user:app --from=python_prod_deps /code /code
+COPY --chown=user:app --from=python_prod_deps /RELEASE /RELEASE
+
+VOLUME /var/run/app/
+EXPOSE 8000
+ENTRYPOINT exec docker-entrypoint.sh "$0" "$@"
+CMD ["run"]
+
+LABEL maintainer="mnt@app.io"
+LABEL org.opencontainers.image.authors="author@app.io"
+LABEL org.opencontainers.image.created="$BUILD_DATE"
+LABEL org.opencontainers.image.description="App runtime image"
+LABEL org.opencontainers.image.documentation="https://github.com/saxix/trash"
+LABEL org.opencontainers.image.licenses="${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}/blob/${SOURCE_COMMIT:-master}/LICENSE"
+LABEL org.opencontainers.image.revision=$SOURCE_COMMIT
+LABEL org.opencontainers.image.source="${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}/tree/${SOURCE_COMMIT:-master}/"
+LABEL org.opencontainers.image.title="App"
+LABEL org.opencontainers.image.url="https://app.io/"
+LABEL org.opencontainers.image.vendor="App ltd"
+LABEL org.opencontainers.image.version="$VERSION"
 
diff --git a/docker/Makefile b/docker/Makefile
index 55ec3542..0140b900 100644
--- a/docker/Makefile
+++ b/docker/Makefile
@@ -35,7 +35,7 @@ sha:
 	@echo ${LOCK_SHA}
 
 .build:
-	docker login -u saxix -p ${DOCKERHUB_TOKEN}
+	@echo  ${DOCKERHUB_TOKEN} | docker login -u saxix --password-stdin
 	cd .. && docker buildx build \
 			--progress=plain \
 			--build-arg BUILDKIT_INLINE_CACHE=1 \
@@ -43,7 +43,6 @@ sha:
 			--build-arg BUILD_DATE="${BUILD_DATE}" \
 			--build-arg CHECKSUM="${LOCK_SHA}" \
 			--build-arg COMMIT="${COMMIT_SHA}" \
-		    --cache-from ${DOCKER_CACHE_IMAGE}:hde-${STAGE}-latest \
 			${EXTRA} \
 			-t "${DOCKER_IMAGE}:${TAG}" \
 			--push \
@@ -67,9 +66,9 @@ buildDeps:  ## build 'builder' image
 
 pythonDevDeps:  ## build 'builder' image
 	STAGE="python_dev_deps" \
-	EXTRA='--cache-from "${CI_REGISTRY_IMAGE}:deps-${LOCK_SHA}" --target python_dev_deps' \
+	EXTRA='--cache-from "${CI_REGISTRY_IMAGE}:dev-${LOCK_SHA}" --target python_dev_deps' \
 	DOCKER_IMAGE="${CI_REGISTRY_IMAGE}" \
-	TAG="python_dev_deps-${LOCK_SHA}" \
+	TAG="dev-${LOCK_SHA}" \
 	$(MAKE) .build
 
 pythonProdDeps:  ## build 'builder' image
@@ -79,12 +78,12 @@ pythonProdDeps:  ## build 'builder' image
 	TAG="python_prod_deps-${LOCK_SHA}" \
 	$(MAKE) .build
 
-dev:  ## build dev image
-	STAGE='dev' \
-	EXTRA='--cache-from "${CI_REGISTRY_IMAGE}:python_dev_deps-${LOCK_SHA}" --target dev' \
-	DOCKER_IMAGE="${CI_REGISTRY_IMAGE}" \
-	TAG="dev" \
-	$(MAKE) .build
+#dev:  ## build dev image
+#	STAGE='dev' \
+#	EXTRA='--cache-from "${CI_REGISTRY_IMAGE}:python_dev_deps-${LOCK_SHA}" --target dev' \
+#	DOCKER_IMAGE="${CI_REGISTRY_IMAGE}" \
+#	TAG="dev" \
+#	$(MAKE) .build
 
 
 #test:  ## build test image
diff --git a/docker/bin/docker-entrypoint.sh b/docker/bin/docker-entrypoint.sh
index 0ce09c78..ddbc467e 100755
--- a/docker/bin/docker-entrypoint.sh
+++ b/docker/bin/docker-entrypoint.sh
@@ -1,55 +1,60 @@
 #!/bin/sh -e
 
-alias env='env|sort'
 
-export MEDIA_ROOT="${MEDIA_ROOT:-/var/media}"
-export STATIC_ROOT="${STATIC_ROOT:-/var/static}"
-export DJANGO_SETTINGS_MODULE="${DJANGO_SETTINGS_MODULE:-"hope_dedup_engine.config.settings"}"
+export MEDIA_ROOT="${MEDIA_ROOT:-/var/run/app/media}"
+export STATIC_ROOT="${STATIC_ROOT:-/var/run/app/static}"
+export UWSGI_PROCESSES="${UWSGI_PROCESSES:-"4"}"
+mkdir -p "${MEDIA_ROOT}" "${STATIC_ROOT}" || echo "Cannot create dirs ${MEDIA_ROOT} ${STATIC_ROOT}"
 
+echo 111, $1
+echo 222, "$@"
 
-mkdir -p /var/run "${MEDIA_ROOT}" "${STATIC_ROOT}" || echo "Cannot create dir"
-
-echo "Executing '$1'..."
-echo "INIT_RUN_UPGRADE         '$INIT_RUN_UPGRADE'"
-echo "  INIT_RUN_CHECK         '$INIT_RUN_CHECK'"
-echo "  INIT_RUN_COLLECTSTATIC '$INIT_RUN_COLLECTSTATIC'"
-echo "  INIT_RUN_MIGRATATIONS  '$INIT_RUN_MIGRATATIONS'"
 
 case "$1" in
     run)
-      if [ "$INIT_RUN_CHECK" = "1" ];then
-        echo "Running Django checks..."
-        django-admin check --deploy
-      fi
-      OPTS="--no-check -v 1"
-      if [ "$INIT_RUN_UPGRADE" = "1" ];then
-        if [ "$INIT_RUN_COLLECTSTATIC" != "1" ];then
-          OPTS="$OPTS --no-static"
-        fi
-        if [ "$INIT_RUN_MIGRATATIONS" != "1" ];then
-          OPTS="$OPTS --no-migrate"
-        fi
-        echo "Running 'upgrade $OPTS'"
-        django-admin upgrade $OPTS
-      fi
-      set -- tini -- "$@"
-      echo "Starting uwsgi..."
-      exec uwsgi --ini /conf/uwsgi.ini
-      ;;
-    worker)
-      exec celery -A hope_dedup_engine.celery worker -E --loglevel=ERROR --concurrency=4
-      ;;
-    beat)
-      exec celery -A hope_dedup_engine.celery beat -E --loglevel=ERROR ---scheduler django_celery_beat.schedulers:DatabaseScheduler
-      ;;
-    dev)
-      until pg_isready -h db -p 5432;
-        do echo "waiting for database"; sleep 2; done;
-      django-admin collectstatic --no-input
-      django-admin migrate
-      django-admin runserver 0.0.0.0:8000
-      ;;
-    *)
-      exec "$@"
-      ;;
+	    set -- tini -- "$@"
+  		set -- gosu user:app uwsgi --ini /conf/uwsgi.ini
+	    ;;
 esac
+
+exec "$@"
+
+#
+#case "$1" in
+#    run)
+#      if [ "$INIT_RUN_CHECK" = "1" ];then
+#        echo "Running Django checks..."
+#        django-admin check --deploy
+#      fi
+#      OPTS="--no-check -v 1"
+#      if [ "$INIT_RUN_UPGRADE" = "1" ];then
+#        if [ "$INIT_RUN_COLLECTSTATIC" != "1" ];then
+#          OPTS="$OPTS --no-static"
+#        fi
+#        if [ "$INIT_RUN_MIGRATATIONS" != "1" ];then
+#          OPTS="$OPTS --no-migrate"
+#        fi
+#        echo "Running 'upgrade $OPTS'"
+#        django-admin upgrade $OPTS
+#      fi
+#      set -- tini -- "$@"
+#      echo "Starting uwsgi..."
+#      exec uwsgi --ini /conf/uwsgi.ini
+#      ;;
+#    worker)
+#      exec celery -A hope_dedup_engine.celery worker -E --loglevel=ERROR --concurrency=4
+#      ;;
+#    beat)
+#      exec celery -A hope_dedup_engine.celery beat -E --loglevel=ERROR ---scheduler django_celery_beat.schedulers:DatabaseScheduler
+#      ;;
+#    dev)
+#      until pg_isready -h db -p 5432;
+#        do echo "waiting for database"; sleep 2; done;
+#      django-admin collectstatic --no-input
+#      django-admin migrate
+#      django-admin runserver 0.0.0.0:8000
+#      ;;
+#    *)
+#      exec "$@"
+#      ;;
+#esac
diff --git a/docker/bin/release-info.sh b/docker/bin/release-info.sh
index 734e2a4c..caf4d5f8 100755
--- a/docker/bin/release-info.sh
+++ b/docker/bin/release-info.sh
@@ -1,4 +1,5 @@
 #!/bin/bash
 
-echo "CHECKSUM ${CHECKSUM}"
-echo "uwsgi   " `uwsgi --version`
+cat /RELEASE
+uwsgi --version
+django-admin --version
diff --git a/docker/conf/circus.conf b/docker/conf/circus.conf
index 33972ce9..8d595440 100644
--- a/docker/conf/circus.conf
+++ b/docker/conf/circus.conf
@@ -17,7 +17,7 @@ stderr_stream.class = StdoutStream
 # copy_env = true
 # autostart = $(CIRCUS.ENV.INIT_START_WEB)
 
-[watcher:bob]
+[watcher:app]
 cmd = uwsgi
 args = --ini /conf/uwsgi.ini
 user = www
@@ -25,7 +25,7 @@ group = bitcaster
 use_sockets = True
 copy_env = true
 autostart = $(CIRCUS.ENV.INIT_START_BOB)
-numprocesses = 1
+numprocesses =  1
 send_hup = True
 stop_signal = QUIT
 warmup_delay = 0
diff --git a/docker/conf/config.toml b/docker/conf/config.toml
new file mode 100644
index 00000000..299f66f8
--- /dev/null
+++ b/docker/conf/config.toml
@@ -0,0 +1,8 @@
+check_update = false
+#cache_dir = "/pdm_cache/"
+install.parallel = false
+install.cache = false
+install.cache_method='symlinks'
+
+[python]
+use_venv = false
diff --git a/docker/conf/uwsgi.ini b/docker/conf/uwsgi.ini
index d28c0972..919740de 100644
--- a/docker/conf/uwsgi.ini
+++ b/docker/conf/uwsgi.ini
@@ -3,14 +3,14 @@ http=0.0.0.0:8000
 enable-threads=0
 honour-range=1
 master=1
-module=hope_dedup_engine.config.wsgi
-processes=4
+module=trash.wsgi
+processes=$(UWSGI_PROCESSES)
 ;virtualenv=/code/.venv/
 ;virtualenv=%(_)
 ;venv=%(_)
 ;chdir=code/
-username = hde
-gropuname = hope
+username = user
+groupname = app
 ;offload-threads=%k
 ;static-gzip-all=true
 route = /static/(.*) static:$(STATIC_ROOT)/$1
diff --git a/docker/entrypoint.sh b/docker/entrypoint.sh
deleted file mode 100755
index 88299244..00000000
--- a/docker/entrypoint.sh
+++ /dev/null
@@ -1,38 +0,0 @@
-#!/bin/bash
-
-set -eou pipefail
-
-production() {
-    uwsgi \
-        --http :8000 \
-        --master \
-        --module=hope_dedup_engine.config.wsgi \
-        --processes=2 \
-        --buffer-size=8192
-}
-
-if [ $# -eq 0 ]; then
-    production
-fi
-
-case "$1" in
-    dev)
-        wait-for-it.sh db:5432
-        ./manage.py upgrade
-        ./manage.py runserver 0.0.0.0:8000
-    ;;
-    prd)
-        tail -f /dev/null
-        production
-    ;;
-    celery_worker)
-        export C_FORCE_ROOT=1
-        celery -A hope_dedup_engine.config.celery worker -l info
-    ;;
-    celery_beat)
-        celery -A hope_dedup_engine.config.celery beat -l info
-    ;;
-    *)
-        exec "$@"
-    ;;
-esac
\ No newline at end of file
diff --git a/history/.gitignore b/history/.gitignore
new file mode 100644
index 00000000..e69de29b
diff --git a/pytest.ini b/pytest.ini
index d0f60bde..4e18ed08 100644
--- a/pytest.ini
+++ b/pytest.ini
@@ -10,21 +10,15 @@ pythonpath=src
 testpaths=tests
 tmp_path_retention_policy=all
 tmp_path_retention_count=0
-;log_cli = 0
-;log_cli_level = CRITICAL
-;log_cli_format = [%(levelname)-8s] %(message)s (%(filename)s:%(lineno)s)
-;log_cli_date_format=%Y-%m-%d %H:%M:%S
-; Show extra test summary info as specified by chars: (f)ailed, (E)rror, (s)kipped, (x)failed, (X)passed, (p)assed, (P)assed with output, (a)ll except passed (p/P), or (A)ll. (w)arnings are
-;                        enabled by default (see --disable-warnings), 'N' can be used to reset the list. (default: 'fE').
+
 addopts =
         -rs
-        --reuse-db
         --tb=short
         --capture=sys
+        --cov trash
         --cov-config=tests/.coveragerc
         --cov-report html
-        --cov-report xml
-        --cov-append
+        --cov-report xml:coverage.xml
 
 
 markers =

From 948b950f6cc11b4abea8c03d8ca1daa29417e70a Mon Sep 17 00:00:00 2001
From: sax 
Date: Tue, 11 Jun 2024 12:08:26 +0200
Subject: [PATCH 11/57] updates lint config

---
 pyproject.toml | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/pyproject.toml b/pyproject.toml
index dca935bb..8b2748a3 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -82,7 +82,7 @@ dev = [
     "watchdog",
 ]
 [tool.black]
-line-length = 120
+line-length = 88
 include = '\.pyi?$'
 exclude = '''
 /(
@@ -99,7 +99,7 @@ exclude = '''
 
 [tool.isort]
 profile = "black"
-line_length = 120
+line_length = 88
 default_section = "THIRDPARTY"
 known_first_party = []
 known_django = "django"

From e1e1dfcc086dffdcb40d5cf52295cf13b7f7aa66 Mon Sep 17 00:00:00 2001
From: sax 
Date: Tue, 11 Jun 2024 12:12:12 +0200
Subject: [PATCH 12/57] updates add ops actions

---
 .github/.gitignore                       |   1 +
 .github/actions/distro_hash/action.yml   |  21 ++
 .github/actions/docker_build/action.yml  |  54 ++++++
 .github/actions/docker_info/action.yml   |  34 ++++
 .github/actions/env/action.yml           |  51 +++++
 .github/actions/image_updated/action.yml | 233 +++++++++++++++++++++++
 .github/actions/last_commit/action.yml   |  15 ++
 .github/actions/version/action.xyml      |  99 ++++++++++
 .github/actions/version/action.yml       |  82 ++++++++
 .gitignore                               |   2 +
 10 files changed, 592 insertions(+)
 create mode 100644 .github/.gitignore
 create mode 100644 .github/actions/distro_hash/action.yml
 create mode 100644 .github/actions/docker_build/action.yml
 create mode 100644 .github/actions/docker_info/action.yml
 create mode 100644 .github/actions/env/action.yml
 create mode 100644 .github/actions/image_updated/action.yml
 create mode 100644 .github/actions/last_commit/action.yml
 create mode 100644 .github/actions/version/action.xyml
 create mode 100644 .github/actions/version/action.yml

diff --git a/.github/.gitignore b/.github/.gitignore
new file mode 100644
index 00000000..51c73eb0
--- /dev/null
+++ b/.github/.gitignore
@@ -0,0 +1 @@
+_workflows/*
\ No newline at end of file
diff --git a/.github/actions/distro_hash/action.yml b/.github/actions/distro_hash/action.yml
new file mode 100644
index 00000000..53fc160d
--- /dev/null
+++ b/.github/actions/distro_hash/action.yml
@@ -0,0 +1,21 @@
+# ref: https://docs.github.com/en/actions/creating-actions/creating-a-docker-container-action
+name: 'Calculate Release Hash'
+description: 'Calculate deps and os hash'
+inputs:
+  files:
+    description: 'Files to use to calculate the hash'
+    required: true
+    default: "pdm.lock docker/bin/* docker/conf/* docker/Dockerfile"
+outputs:
+  hash: # id of output
+    description: 'Short SHA-1 hash of the dependency and Docker files'
+    value: ${{ steps.calc.outputs.hash }}
+
+runs:
+  using: 'composite'
+  steps:
+    - id: calc
+      shell: bash --noprofile --norc -eo pipefail -ux {0}
+      run: |
+        LOCK_SHA=$(sha1sum ${{ inputs.files }} | sha1sum | awk '{print $1}' | cut -c 1-8)
+        echo "hash=$LOCK_SHA" >> "$GITHUB_OUTPUT"
diff --git a/.github/actions/docker_build/action.yml b/.github/actions/docker_build/action.yml
new file mode 100644
index 00000000..69fea0a1
--- /dev/null
+++ b/.github/actions/docker_build/action.yml
@@ -0,0 +1,54 @@
+# ref: https://docs.github.com/en/actions/creating-actions/creating-a-docker-container-action
+name: 'Docker build and push'
+description: 'Build the Docker image, push it, and print its release info'
+inputs:
+  username:
+    description: ''
+    required: true
+  password:
+    description: ''
+    required: true
+  image:
+    description: ''
+    required: true
+  target:
+    description: ''
+    required: true
+
+
+runs:
+  using: 'composite'
+  steps:
+    - name: Checkout code
+      uses: actions/checkout@v4
+      with:
+        fetch-depth: 0
+    - uses: docker/login-action@v3
+      with:
+        username: ${{ inputs.username }}
+        password: ${{ inputs.password }}
+    - name: Build and push
+      uses: docker/build-push-action@v5
+      with:
+        context: .
+        tags: ${{vars.DOCKER_IMAGE}}:dev-latest
+        file: ./docker/Dockerfile
+        platforms: linux/amd64
+        cache-from: type=registry,ref=${{vars.DOCKER_IMAGE}}:dev-latest
+        cache-to: type=inline
+        outputs: type=registry
+        target: python_dev_deps
+        build-args: |
+          BUILD_DATE="${{ steps.build_date.outputs.date }}"
+          CHECKSUM="${{ steps.release_hash.outputs.hash }}"
+          VERSION="${{ steps.version.outputs.version }}"
+
+    - name: Calculate Release Hash
+      id: calc
+      shell: bash --noprofile --norc -eo pipefail {0}
+      run: |
+          docker pull ${{inputs.image}}
+          echo "----------"
+          docker inspect --format='{{json .Config.Labels}}' ${{inputs.image}}
+          docker run -t ${{inputs.image}} release-info.sh
+          echo "----------"
diff --git a/.github/actions/docker_info/action.yml b/.github/actions/docker_info/action.yml
new file mode 100644
index 00000000..a450f404
--- /dev/null
+++ b/.github/actions/docker_info/action.yml
@@ -0,0 +1,34 @@
+# ref: https://docs.github.com/en/actions/creating-actions/creating-a-docker-container-action
+name: 'Retrieve Docker Image Information'
+description: 'Pull a Docker image and print its labels and release info'
+inputs:
+  image:
+    description: 'Files to use to calculate the hash'
+    required: true
+  username:
+    description: ''
+    required: true
+  password:
+    description: ''
+    required: true
+
+runs:
+  using: 'composite'
+  steps:
+#    - name: Configure Git
+#      shell: bash
+#      run: git config --global --add safe.directory $(realpath .)
+#    - name: ch
+#      uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
+    - uses: docker/login-action@v3
+      with:
+        username: ${{ inputs.username }}
+        password: ${{ inputs.password }}
+    - name: Calculate Release Hash
+      shell: bash
+      run: |
+          docker pull ${{inputs.image}}
+          echo "----------"
+          docker inspect --format='{{json .Config.Labels}}' ${{inputs.image}}
+          docker run -t ${{inputs.image}} release-info.sh
+          echo "----------"
diff --git a/.github/actions/env/action.yml b/.github/actions/env/action.yml
new file mode 100644
index 00000000..d45fdb19
--- /dev/null
+++ b/.github/actions/env/action.yml
@@ -0,0 +1,51 @@
+name: 'version'
+description: ''
+
+outputs:
+  stage:
+    description: ''
+    value: ${{ steps.version.outputs.stage }}
+  version:
+    description: ''
+    value: ${{ steps.version.outputs.version }}
+  commit:
+    description: ''
+    value: ${{ steps.version.outputs.commit }}
+  release:
+    description: ''
+    value: ${{ steps.version.outputs.release }}
+  date:
+    description: ''
+    value: ${{ steps.build_date.outputs.date }}
+  today:
+    description: ''
+    value: ${{ steps.build_date.outputs.today }}
+  timestamp:
+    description: ''
+    value: ${{ steps.build_date.outputs.timestamp }}
+  branch:
+    description: ''
+    value: ${{ steps.extract_branch.outputs.branch }}
+
+
+runs:
+  using: 'composite'
+  steps:
+    - shell: bash --noprofile --norc -eo pipefail {0}
+      run: git config --global --add safe.directory $(realpath .)
+    - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
+    - id: version
+      uses: ./.github/actions/version
+    - id: extract_branch
+      name: Extract branch name
+      shell: bash
+      run: echo "branch=${GITHUB_HEAD_REF:-${GITHUB_REF#refs/heads/}}" >> $GITHUB_OUTPUT
+    - id: build_date
+      shell: bash --noprofile --norc -eo pipefail -ux {0}
+      run: |
+        d1=$(date +"%a,%e %b %Y %H:%M %Z")
+        d2=$(date -u +"%Y-%m-%dT%H:%M:%SZ")
+        d3=$(date -u +"%Y%m%d%H%M%S")
+        echo "today=$d1" >> $GITHUB_OUTPUT
+        echo "date=$d2" >> $GITHUB_OUTPUT
+        echo "timestamp=$d3" >> $GITHUB_OUTPUT
diff --git a/.github/actions/image_updated/action.yml b/.github/actions/image_updated/action.yml
new file mode 100644
index 00000000..a6908112
--- /dev/null
+++ b/.github/actions/image_updated/action.yml
@@ -0,0 +1,233 @@
+# ref: https://docs.github.com/en/actions/creating-actions/creating-a-docker-container-action
+name: 'Check if image with valid checksum exists in dockerhub'
+description: 'WARNING: Only works for single platform images'
+inputs:
+  image:
+    description: 'Docker image reference (repo:tag)'
+    required: true
+  checksum:
+    description: 'Expected checksum to compare against the image checksum label'
+    required: true
+  label:
+    description: 'Name of the image label holding the checksum'
+    default: 'checksum'
+    required: false
+  username:
+    description: 'DockerHub username'
+    required: false
+  password:
+    description: 'DockerHub password'
+    required: false
+  architecture:
+    description: 'Image architecture to inspect'
+    required: false
+    default: amd64
+
+outputs:
+  updated:
+    description: 'Returns true if the image exists and the checksum is valid'
+    value: ${{ steps.check.outputs.updated }}
+  exists:
+    description: 'Returns true if the image exists'
+    value: ${{ steps.check.outputs.exists }}
+  build_number:
+      description: 'Returns build number for the current branch'
+      value: ${{ steps.check.outputs.build_number }}
+  build_date:
+      description: 'Returns the image build date'
+      value: ${{ steps.check.outputs.build_date }}
+
+runs:
+  using: 'composite'
+  steps:
+    - name: Setup
+      id: setup
+      shell: bash
+      run: |
+        ref=${{ inputs.image }}
+        architecture=${{ inputs.architecture }}
+        repo="${ref%:*}"
+        tag="${ref##*:}"
+
+        res=$(curl -s "https://auth.docker.io/token?service=registry.docker.io&scope=repository:${repo}:pull")
+        token=$(echo $res | jq -r '.token')
+        if [[ -z "$token" ]];then
+          echo "::error title=â›” error hint::Unable to get valid token"
+          exit 1
+        fi
+        echo "token=$token" >> $GITHUB_OUTPUT
+        echo "repo=$repo" >> $GITHUB_OUTPUT
+        echo "tag=$tag" >> $GITHUB_OUTPUT
+        echo "architecture=$architecture" >> $GITHUB_OUTPUT
+    - name: Check Checksum
+      id: check
+      shell: bash
+      run: |
+        url="https://registry-1.docker.io/v2/${{steps.setup.outputs.repo}}/manifests/${{steps.setup.outputs.tag}}"        
+        manifest=$(curl -H "Accept: application/vnd.docker.distribution.manifest.v2+json" \
+                        -H 'Authorization: Bearer ${{steps.setup.outputs.token}}' \
+                        -s $url)
+
+        if [[ $manifest == *MANIFEST_UNKNOWN* ]];then
+          echo "exists=false" >> "$GITHUB_OUTPUT"
+          echo "updated=false" >> "$GITHUB_OUTPUT"
+          echo "build_number=1" >> "$GITHUB_OUTPUT"
+          echo "build_date=-" >> "$GITHUB_OUTPUT"
+          exit 0        
+        fi
+        if [[ $manifest == *errors\":* ]];then
+          code=$(echo $manifest | jq .errors[0].code)
+          message=$(echo $manifest | jq .errors[0].message)          
+          echo "::error title=$code error hint::$message https://registry-1.docker.io/v2/${repo}/manifests/${tag}"
+          exit 1
+        fi
+        echo "exists=true" >> $GITHUB_OUTPUT
+        
+        if [[ -z "$manifest" ]];then
+          echo "::error title=â›” error hint::Unable to get manifest from $url"
+          exit 1
+        fi
+        check1=$(echo $manifest | jq 'try(.manifests[])')        
+        check2=$(echo $manifest | jq 'try(.config.digest)')        
+        
+        if [[ -n "$check1" ]]; then
+          digest=$(echo $manifest | jq -r ".manifests| map(select(.platform.architecture | contains (\"${{steps.setup.outputs.architecture}}\"))) | .[].digest" 2>&1)
+        elif [[ -n "$check2" ]]; then
+          digest=$(echo $manifest | jq -r '.config.digest')
+        else
+          echo "::error title=â›” error hint::Unable to detect digest"
+          exit 1        
+        fi
+        if [[ $digest == null ]]; then
+          echo "::error title=â›” error hint::Digest is null"
+          exit 1
+        fi
+        if [[ -z "$digest" ]];then
+          echo "::error title=â›” error hint::Digest is empty"
+          exit 1
+        fi        
+        url=https://registry-1.docker.io/v2/${{steps.setup.outputs.repo}}/blobs/$digest      
+        blob=$(curl \
+              --silent \
+              --location \
+              -H "Accept: application/vnd.docker.distribution.manifest.v2+json" \
+              -H 'Authorization: Bearer ${{steps.setup.outputs.token}}' \
+              $url )        
+        if [[ -z "$blob" ]]; then
+          echo "Unable to get blob from: https://registry-1.docker.io/v2/$repo/blobs/$digest"
+          echo "updated=false" >> "$GITHUB_OUTPUT"
+          echo "build_number=1" >> "$GITHUB_OUTPUT"
+          echo "build_date=-" >> "$GITHUB_OUTPUT"        
+          echo "::error file={name},line={line},endLine={endLine},title={title}::{message}"
+          exit 1
+        fi        
+        build_number=$(echo $blob | jq -r '.config.Labels.BuildNumber')
+        checksum=$(echo $blob | jq -r '.config.Labels.checksum')
+        build_date=$(echo $blob | jq -r '.config.Labels.date')
+
+        if [[ $build_number =~ ^[0-9]+$ ]] ; then
+          build_number=$(( build_number + 1 ))
+        else
+          build_number=1
+        fi
+        if [[ $checksum == ${{inputs.checksum}} ]]; then
+          echo "updated=true" >> $GITHUB_OUTPUT
+        else
+          echo "updated=false" >> $GITHUB_OUTPUT
+        fi
+        echo "build_number=${build_number}" >> $GITHUB_OUTPUT
+        echo "build_date=${build_date}" >> $GITHUB_OUTPUT
+
+#
+#    - name: Check Image Updated
+#      id: check
+#      continue-on-error: true
+#      shell: bash --noprofile --norc -eo pipefail -ux {0}
+#      run: |
+#        trap 'echo "exitcode=$?" >> "$GITHUB_OUTPUT"' EXIT
+#
+#        ref=${{ inputs.image }}
+#        architecture=${{ inputs.architecture }}
+#        repo="${ref%:*}"
+#        tag="${ref##*:}"
+#
+#        echo $repo:$tag
+#
+#        res=$(curl -s "https://auth.docker.io/token?service=registry.docker.io&scope=repository:${repo}:pull")
+#        token=$(echo $res | jq -r '.token')
+#
+#        if [[ -z "$token" ]];then
+#          echo "Unable to get Auth Token"
+#          echo $res
+#          echo "::error file={name},line={line},endLine={endLine},title={title}::{message}"
+#          exit 1
+#        fi
+#
+#        manifest=$(curl -H "Accept: application/vnd.docker.distribution.manifest.v2+json" \
+#                      -H "Authorization: Bearer $token" \
+#                      -s "https://registry-1.docker.io/v2/${repo}/manifests/${tag}")
+#
+#        if [[ -z "$manifest" ]];then
+#          echo "Unable to get manifest "
+#          echo "updated=false" >> "$GITHUB_OUTPUT"
+#          echo "build_number=1" >> "$GITHUB_OUTPUT"
+#        fi
+#        check=$(echo $manifest | jq 'try(.manifests[])')
+#
+#        if [[ -n "$check" ]]; then
+#          echo "Multi platform manifest detected "
+#          digest=$(echo $manifest | jq -r ".manifests| map(select(.platform.architecture | contains (\"${architecture}\"))) | .[].digest" 2>&1)
+#        else
+#          echo "Single platform manifest found "
+#          digest=$(echo $manifest | jq -r '.config.digest' 2>&1)
+#        fi
+#        if [[ $digest == null ]]; then
+#          echo "Unable to get digest "
+#          echo $manifest | jq
+#          echo "updated=false" >> "$GITHUB_OUTPUT"
+#          echo "build_number=1" >> "$GITHUB_OUTPUT"
+#        fi
+#        if [[ -z "$digest" ]];then
+#          echo "Unable to get digest "
+#          echo $manifest | jq
+#          echo "updated=false" >> "$GITHUB_OUTPUT"
+#          echo "build_number=1" >> "$GITHUB_OUTPUT"
+#          echo "::error file={name},line={line},endLine={endLine},title={title}::{message}"
+#          exit 1
+#        fi
+#
+#        blob=$(curl \
+#              --silent \
+#              --location \
+#              --header "Accept: application/vnd.docker.distribution.manifest.v2+json" \
+#              --header "Authorization: Bearer $token" \
+#              "https://registry-1.docker.io/v2/$repo/blobs/$digest")
+#
+#        if [[ -z "$blob" ]]; then
+#          echo "Unable to get blob from: https://registry-1.docker.io/v2/$repo/blobs/$digest"
+#          echo "updated=false" >> "$GITHUB_OUTPUT"
+#          echo "build_number=1" >> "$GITHUB_OUTPUT"
+#          echo "::error file={name},line={line},endLine={endLine},title={title}::{message}"
+#          exit 1
+#        fi
+#        checksum=$(echo $blob | jq '.config.Labels.checksum')
+#        build_number=$(echo $blob | jq '.config.Labels.BuildNumber')
+#
+#        if [[ $build_number == null ]]; then
+#          build_number=$((build_number+1))
+#        else
+#          build_number = 1
+#        fi
+#
+#        ret='???'
+#        if [[ $checksum == null ]]; then
+#          echo "Invalid Blob from: https://registry-1.docker.io/v2/$repo/blobs/$digest"
+#          ret=false
+#        elif [[ $checksum == ${{inputs.checksum}} ]]; then
+#          ret=true
+#        else
+#          echo "Checksums do not match $checksum <> ${{inputs.checksum}}"
+#          ret=false
+#        fi
+#        echo "updated=${ret}" >> "$GITHUB_OUTPUT"
+#        echo "build_number=${build_number}" >> "$GITHUB_OUTPUT"
diff --git a/.github/actions/last_commit/action.yml b/.github/actions/last_commit/action.yml
new file mode 100644
index 00000000..63a7d91c
--- /dev/null
+++ b/.github/actions/last_commit/action.yml
@@ -0,0 +1,15 @@
+name: 'Get Last commit'
+description: ''
+runs:
+  using: "composite"
+  steps:
+    - name: Setup Environment (PR)  
+      if: ${{ github.event_name == 'pull_request' }}  
+      shell: bash  
+      run: |  
+        echo "last_commit_sha=${{ github.event.pull_request.head.sha }}" >> ${GITHUB_ENV}
+    - name: Setup Environment (Push)  
+      if: ${{ github.event_name == 'push' }}  
+      shell: bash
+      run: |  
+        echo "last_commit_sha=${GITHUB_SHA}" >> ${GITHUB_ENV}
\ No newline at end of file
diff --git a/.github/actions/version/action.xyml b/.github/actions/version/action.xyml
new file mode 100644
index 00000000..217adc62
--- /dev/null
+++ b/.github/actions/version/action.xyml
@@ -0,0 +1,99 @@
+name: 'version'
+description: ''
+
+outputs:
+  stage:
+    description: 'Release stage derived from the branch (dev/rc/final/a)'
+    value: ${{ steps.parser.outputs.stage }}
+  version:
+    description: 'Short version identifier (first 7 chars of the commit SHA)'
+    value: ${{ steps.parser.outputs.version }}
+  commit:
+    description: 'Full commit SHA of the build'
+    value: ${{ steps.parser.outputs.commit }}
+  sha:
+    description: 'SHA of the triggering commit (short rev for pull requests)'
+    value: ${{ steps.parser.outputs.sha }}
+
+
+runs:
+  using: 'composite'
+  steps:
+    - name: Configure Git
+      shell: bash --noprofile --norc -eo pipefail -ux {0}
+      run: |
+        git config --global --add safe.directory $(realpath .)
+    - name: ch
+      uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
+    - name: Parse branch
+      shell: bash --noprofile --norc -eo pipefail -ux {0}
+      id: parser
+      run: |
+        #        BASE=$(git describe --all --exact-match 2>/dev/null | sed 's=.*/==')
+        #        BASE="1.2.3-rc1"
+        #        VERSION="${BASE#[vV]}"
+        #        VERSION_MAJOR="${VERSION%%.*}"
+        #        VERSION_MINOR_PATCH="${VERSION#*.}"
+        #        VERSION_MINOR="${VERSION_MINOR_PATCH%%.*}"
+        #        VERSION_PATCH_PRE_RELEASE="${VERSION_MINOR_PATCH#*.}"
+        #        VERSION_PATCH="${VERSION_PATCH_PRE_RELEASE%%[-+]*}"
+        #        VERSION_PRE_RELEASE=""
+        #
+        #        case "$VERSION" in
+        #          *rc)
+        #            VERSION_PRE_RELEASE="${VERSION#rc}"
+        #            VERSION_PRE_RELEASE="${VERSION_PRE_RELEASE%%+*}"
+        #            ;;
+        #          *-*)
+        #            VERSION_PRE_RELEASE="${VERSION#*-}"
+        #            VERSION_PRE_RELEASE="${VERSION_PRE_RELEASE%%+*}"
+        #            ;;
+        #        esac
+        if [[ "${{ github.event_name }}" == pull_request ]]; then
+          sha=$(git rev-parse --short ${{ github.event.pull_request.head.sha }})
+        elif [[ "${{ github.event_name }}" == push ]]; then
+          sha="${GITHUB_SHA}"
+        else
+          sha="not-available"
+        fi
+        
+        commit="${{ github.sha }}"
+        
+        if [[ ${{github.ref_name}} == develop ]]; then
+          stage=dev
+          version="${GITHUB_SHA::7}"
+          release=snapshot
+        elif [[ ${{github.ref_name}} == master ]]; then
+          stage=final
+          version=11111
+        elif [[ ${{github.ref_name}} == releases/* ]]; then
+          stage=rc
+          version=${GITHUB_SHA::7}
+        elif [[ ${{github.ref_name}} == tags/* ]]; then
+          stage=final
+          version=${GITHUB_SHA::7}
+        else
+          stage=a
+          version=${GITHUB_SHA::7}
+        fi
+        
+        echo "version=$version" >> $GITHUB_OUTPUT
+        echo "stage=$stage" >> $GITHUB_OUTPUT
+        echo "release=$release" >> $GITHUB_OUTPUT
+        echo "sha=$sha" >> $GITHUB_OUTPUT
+        echo "commit=$commit" >> $GITHUB_OUTPUT
+#
+#    - name: Describe
+#      id: describe
+#      shell: bash --noprofile --norc -eo pipefail -ux {0}
+#      run: |
+#        echo ${{ steps.branch.version }}
+#        ver=$(`git symbolic-ref HEAD 2> /dev/null | cut -b 12-`-`git log --pretty=format:"%h" -1`)
+#        echo "version=$ver" >> $GITHUB_OUTPUT
diff --git a/.github/actions/version/action.yml b/.github/actions/version/action.yml
new file mode 100644
index 00000000..5b8628a9
--- /dev/null
+++ b/.github/actions/version/action.yml
@@ -0,0 +1,82 @@
+name: 'version'
+description: ''
+
+outputs:
+  stage:
+    description: 'Docker stage to use'
+    value: ${{ steps.parser.outputs.stage }}
+  version:
+    description: 'Short version identifier (first 7 chars of the commit SHA)'
+    value: ${{ steps.parser.outputs.version }}
+  commit:
+    description: 'Full commit SHA of the build'
+    value: ${{ steps.parser.outputs.commit }}
+  sha:
+    description: 'SHA of the triggering commit (short rev for pull requests)'
+    value: ${{ steps.parser.outputs.sha }}
+
+
+runs:
+  using: 'composite'
+  steps:
+    - name: Configure Git
+      shell: bash --noprofile --norc -eo pipefail {0}
+      run: git config --global --add safe.directory $(realpath .)
+    - name: ch
+      uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
+    - name: Parse branch
+      shell: bash --noprofile --norc -eo pipefail -ux {0}
+      id: parser
+      run: |
+        release="not-available"
+        sha="not-available"
+        
+        if [[ ${{ github.event_name }} == pull_request ]]; then
+          sha=$(git rev-parse --short ${{ github.event.pull_request.head.sha }})
+        elif [[ "${{ github.event_name }}" == push ]]; then
+          sha="${GITHUB_SHA}"
+        else
+          sha="not-available"
+        fi
+        
+        commit="${{ github.sha }}"
+        
+        if [[ ${{github.ref_name}} == develop ]]; then
+          stage=dev
+          version="${GITHUB_SHA::7}"
+          release=snapshot
+        elif [[ ${{github.ref_name}} == master ]]; then
+          stage=final
+          version=11111
+        elif [[ ${{github.ref_name}} == releases/* ]]; then
+          stage=rc
+          version=${GITHUB_SHA::7}
+        elif [[ ${{github.ref_name}} == tags/* ]]; then
+          stage=final
+          version=${GITHUB_SHA::7}
+        else
+          stage=a
+          version=${GITHUB_SHA::7}
+        fi
+        
+        # publish the parsed values as step outputs
+        echo "version=$version" >> $GITHUB_OUTPUT
+        echo "stage=$stage" >> $GITHUB_OUTPUT
+        echo "release=$release" >> $GITHUB_OUTPUT
+        echo "sha=$sha" >> $GITHUB_OUTPUT
+        echo "commit=$commit" >> $GITHUB_OUTPUT
+#
+#    - name: Describe
+#      id: describe
+#      shell: bash --noprofile --norc -eo pipefail -ux {0}
+#      run: |
+#        echo ${{ steps.branch.version }}
+#        ver=$(`git symbolic-ref HEAD 2> /dev/null | cut -b 12-`-`git log --pretty=format:"%h" -1`)
+#        echo "version=$ver" >> $GITHUB_OUTPUT
diff --git a/.gitignore b/.gitignore
index 2eaca4fe..50d6b3af 100644
--- a/.gitignore
+++ b/.gitignore
@@ -4,9 +4,11 @@
 *.egg-info
 __pycache__/
 *.py[cod]
+
 !tests/.coveragerc
 !.dockerignore
 !.flake8
+!.github
 !.gitignore
 !.gitlab/
 !.tx/config

From 9051f54660f6d1cd8885b75bd2650b3b4f5dfd8a Mon Sep 17 00:00:00 2001
From: sax 
Date: Tue, 11 Jun 2024 12:27:30 +0200
Subject: [PATCH 13/57] updates add ops actions

---
 .github/actions/image_updated/action.yml | 11 +++++++----
 1 file changed, 7 insertions(+), 4 deletions(-)

diff --git a/.github/actions/image_updated/action.yml b/.github/actions/image_updated/action.yml
index a6908112..925396b4 100644
--- a/.github/actions/image_updated/action.yml
+++ b/.github/actions/image_updated/action.yml
@@ -48,17 +48,20 @@ runs:
         architecture=${{ inputs.architecture }}
         repo="${ref%:*}"
         tag="${ref##*:}"
+        echo "repo=$repo" >> $GITHUB_OUTPUT
+        echo "tag=$tag" >> $GITHUB_OUTPUT
+        echo "architecture=$architecture" >> $GITHUB_OUTPUT
 
-        res=$(curl -s "https://auth.docker.io/token?service=registry.docker.io&scope=repository:${repo}:pull")
+        res=$(curl \
+                -H 'Authorization: Bearer ${{steps.setup.outputs.token}}'
+                -s "https://auth.docker.io/token?service=registry.docker.io&scope=repository:${repo}:pull")
+        
         token=$(echo $res | jq -r '.token')
         if [[ -z "$token" ]];then
           echo "::error title=â›” error hint::Unable to get valid token"
           exit 1
         fi
         echo "token=$token" >> $GITHUB_OUTPUT
-        echo "repo=$repo" >> $GITHUB_OUTPUT
-        echo "tag=$tag" >> $GITHUB_OUTPUT
-        echo "architecture=$architecture" >> $GITHUB_OUTPUT
     - name: Check Checksum
       id: check
       shell: bash

From a85e61b1cf3e77108e1fd4be34edf479170b165c Mon Sep 17 00:00:00 2001
From: sax 
Date: Tue, 11 Jun 2024 12:32:12 +0200
Subject: [PATCH 14/57] updates add ops actions

---
 .github/actions/image_updated/action.yml | 4 +---
 1 file changed, 1 insertion(+), 3 deletions(-)

diff --git a/.github/actions/image_updated/action.yml b/.github/actions/image_updated/action.yml
index 925396b4..6b7a3dc2 100644
--- a/.github/actions/image_updated/action.yml
+++ b/.github/actions/image_updated/action.yml
@@ -52,9 +52,7 @@ runs:
         echo "tag=$tag" >> $GITHUB_OUTPUT
         echo "architecture=$architecture" >> $GITHUB_OUTPUT
 
-        res=$(curl \
-                -H 'Authorization: Bearer ${{steps.setup.outputs.token}}'
-                -s "https://auth.docker.io/token?service=registry.docker.io&scope=repository:${repo}:pull")
+        res=$(curl -s "https://auth.docker.io/token?service=registry.docker.io&scope=repository:${repo}:pull")
         
         token=$(echo $res | jq -r '.token')
         if [[ -z "$token" ]];then

From da3746a37287659438bff98c48bb99a894634403 Mon Sep 17 00:00:00 2001
From: sax 
Date: Tue, 11 Jun 2024 12:45:45 +0200
Subject: [PATCH 15/57] add codeql

---
 .github/codeql/codeql-config.yml | 4 ++++
 1 file changed, 4 insertions(+)
 create mode 100644 .github/codeql/codeql-config.yml

diff --git a/.github/codeql/codeql-config.yml b/.github/codeql/codeql-config.yml
new file mode 100644
index 00000000..979cf373
--- /dev/null
+++ b/.github/codeql/codeql-config.yml
@@ -0,0 +1,4 @@
+name: 'App CodeQL Config'
+
+paths-ignore:
+  - '**/tests/**'
\ No newline at end of file

From fb279023516b1a8d21cf0817b039a94f897dec9b Mon Sep 17 00:00:00 2001
From: sax 
Date: Tue, 11 Jun 2024 13:03:59 +0200
Subject: [PATCH 16/57] updates ops

---
 .github/workflows/lint.yml                    | 30 +++----
 src/hope_dedup_engine/__init__.py             |  1 -
 src/hope_dedup_engine/apps/api/admin.py       |  7 +-
 src/hope_dedup_engine/apps/api/auth.py        |  4 +-
 .../apps/api/models/__init__.py               |  6 +-
 src/hope_dedup_engine/apps/api/models/auth.py |  4 +-
 .../apps/api/models/deduplication.py          | 23 ++++-
 src/hope_dedup_engine/apps/api/serializers.py |  9 +-
 src/hope_dedup_engine/apps/api/urls.py        | 28 ++++--
 src/hope_dedup_engine/apps/api/views.py       | 69 +++++++++++---
 .../core/management/commands/createsystem.py  |  4 +-
 .../apps/core/management/commands/env.py      | 24 +++--
 .../apps/core/management/commands/upgrade.py  | 13 ++-
 .../apps/faces/celery_tasks.py                |  9 +-
 .../apps/faces/utils/celery_utils.py          |  8 +-
 .../apps/faces/utils/duplication_detector.py  | 90 +++++++++++++++----
 .../apps/faces/validators.py                  |  4 +-
 src/hope_dedup_engine/apps/security/models.py |  4 +-
 src/hope_dedup_engine/apps/social/pipeline.py |  4 +-
 src/hope_dedup_engine/config/__init__.py      | 76 +++++++++++++---
 .../config/fragments/constance.py             |  6 +-
 src/hope_dedup_engine/config/fragments/csp.py | 26 +++++-
 src/hope_dedup_engine/state.py                | 11 ++-
 src/hope_dedup_engine/utils/http.py           |  4 +-
 src/hope_dedup_engine/utils/security.py       |  5 +-
 25 files changed, 370 insertions(+), 99 deletions(-)

diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml
index 3d7a99b6..5cefaa6f 100644
--- a/.github/workflows/lint.yml
+++ b/.github/workflows/lint.yml
@@ -68,7 +68,7 @@ jobs:
       - uses: actions/checkout@v2
       - uses: actions/setup-python@v2
         with:
-          python-version: '3.9'
+          python-version: '3.11'
       - name: Install requirements
         run: pip install isort
       - name: iSort
@@ -81,21 +81,21 @@ jobs:
       - uses: actions/checkout@v2
       - uses: actions/setup-python@v2
         with:
-          python-version: '3.9'
+          python-version: '3.11'
       - name: Install requirements
         run: pip install black
       - name: Black
         run: black src/ --check
-  bandit:
-    needs: changes
-    runs-on: ubuntu-latest
-#    if: github.event.pull_request.draft == false && steps.changes.outputs.lint
-    steps:
-      - uses: actions/checkout@v2
-      - uses: actions/setup-python@v2
-        with:
-          python-version: '3.9'
-      - name: Install requirements
-        run: pip install bandit
-      - name: bandit
-        run: bandit src/
+#  bandit:
+#    needs: changes
+#    runs-on: ubuntu-latest
+##    if: github.event.pull_request.draft == false && steps.changes.outputs.lint
+#    steps:
+#      - uses: actions/checkout@v2
+#      - uses: actions/setup-python@v2
+#        with:
+#          python-version: '3.11'
+#      - name: Install requirements
+#        run: pip install bandit
+#      - name: bandit
+#        run: bandit src/
diff --git a/src/hope_dedup_engine/__init__.py b/src/hope_dedup_engine/__init__.py
index 0e2df472..42920d47 100644
--- a/src/hope_dedup_engine/__init__.py
+++ b/src/hope_dedup_engine/__init__.py
@@ -1,6 +1,5 @@
 from hope_dedup_engine.config.celery import app as celery_app
 
-
 VERSION = __version__ = "0.1.0"
 
 __all__ = ("celery_app",)
diff --git a/src/hope_dedup_engine/apps/api/admin.py b/src/hope_dedup_engine/apps/api/admin.py
index 9f384e0a..7753cfe2 100644
--- a/src/hope_dedup_engine/apps/api/admin.py
+++ b/src/hope_dedup_engine/apps/api/admin.py
@@ -1,6 +1,11 @@
 from django.contrib import admin
 
-from hope_dedup_engine.apps.api.models import DeduplicationSet, Duplicate, HDEToken, Image
+from hope_dedup_engine.apps.api.models import (
+    DeduplicationSet,
+    Duplicate,
+    HDEToken,
+    Image,
+)
 
 admin.site.register(DeduplicationSet)
 admin.site.register(Duplicate)
diff --git a/src/hope_dedup_engine/apps/api/auth.py b/src/hope_dedup_engine/apps/api/auth.py
index 4a78ffcc..a63dd6c2 100644
--- a/src/hope_dedup_engine/apps/api/auth.py
+++ b/src/hope_dedup_engine/apps/api/auth.py
@@ -14,7 +14,9 @@ def has_permission(self, request: Request, view: View) -> bool:
 
 class UserAndDeduplicationSetAreOfTheSameSystem(BasePermission):
     def has_permission(self, request: Request, view: View) -> bool:
-        if deduplication_set_pk := view.kwargs.get("deduplication_set_pk") or view.kwargs.get("pk"):
+        if deduplication_set_pk := view.kwargs.get(
+            "deduplication_set_pk"
+        ) or view.kwargs.get("pk"):
             return DeduplicationSet.objects.filter(
                 external_system=request.user.external_system, pk=deduplication_set_pk
             ).exists()
diff --git a/src/hope_dedup_engine/apps/api/models/__init__.py b/src/hope_dedup_engine/apps/api/models/__init__.py
index 571a4bfd..40bdb2fa 100644
--- a/src/hope_dedup_engine/apps/api/models/__init__.py
+++ b/src/hope_dedup_engine/apps/api/models/__init__.py
@@ -1,2 +1,6 @@
 from hope_dedup_engine.apps.api.models.auth import HDEToken  # noqa: F401
-from hope_dedup_engine.apps.api.models.deduplication import DeduplicationSet, Duplicate, Image  # noqa: F401
+from hope_dedup_engine.apps.api.models.deduplication import (  # noqa: F401
+    DeduplicationSet,
+    Duplicate,
+    Image,
+)
diff --git a/src/hope_dedup_engine/apps/api/models/auth.py b/src/hope_dedup_engine/apps/api/models/auth.py
index 025370bd..050a852b 100644
--- a/src/hope_dedup_engine/apps/api/models/auth.py
+++ b/src/hope_dedup_engine/apps/api/models/auth.py
@@ -5,4 +5,6 @@
 
 
 class HDEToken(Token):
-    user = models.ForeignKey(settings.AUTH_USER_MODEL, related_name="auth_tokens", on_delete=models.CASCADE)
+    user = models.ForeignKey(
+        settings.AUTH_USER_MODEL, related_name="auth_tokens", on_delete=models.CASCADE
+    )
diff --git a/src/hope_dedup_engine/apps/api/models/deduplication.py b/src/hope_dedup_engine/apps/api/models/deduplication.py
index 77da90e4..83020756 100644
--- a/src/hope_dedup_engine/apps/api/models/deduplication.py
+++ b/src/hope_dedup_engine/apps/api/models/deduplication.py
@@ -11,7 +11,10 @@
 class DeduplicationSet(models.Model):
     class State(models.IntegerChoices):
         CLEAN = 0, "Clean"  # Deduplication set is created or already processed
-        DIRTY = 1, "Dirty"  # Images are added to deduplication set, but not yet processed
+        DIRTY = (
+            1,
+            "Dirty",
+        )  # Images are added to deduplication set, but not yet processed
         PROCESSING = 2, "Processing"  # Images are being processed
         ERROR = 3, "Error"  # Error occurred
 
@@ -26,11 +29,19 @@ class State(models.IntegerChoices):
     external_system = models.ForeignKey(ExternalSystem, on_delete=models.CASCADE)
     error = models.CharField(max_length=255, null=True, blank=True)
     created_by = models.ForeignKey(
-        settings.AUTH_USER_MODEL, on_delete=models.CASCADE, null=True, blank=True, related_name="+"
+        settings.AUTH_USER_MODEL,
+        on_delete=models.CASCADE,
+        null=True,
+        blank=True,
+        related_name="+",
     )
     created_at = models.DateTimeField(auto_now_add=True)
     updated_by = models.ForeignKey(
-        settings.AUTH_USER_MODEL, on_delete=models.CASCADE, null=True, blank=True, related_name="+"
+        settings.AUTH_USER_MODEL,
+        on_delete=models.CASCADE,
+        null=True,
+        blank=True,
+        related_name="+",
     )
     updated_at = models.DateTimeField(auto_now=True)
     notification_url = models.CharField(max_length=255, null=True, blank=True)
@@ -42,7 +53,11 @@ class Image(models.Model):
     reference_pk = models.CharField(max_length=REFERENCE_PK_LENGTH)
     filename = models.CharField(max_length=255)
     created_by = models.ForeignKey(
-        settings.AUTH_USER_MODEL, on_delete=models.CASCADE, null=True, blank=True, related_name="+"
+        settings.AUTH_USER_MODEL,
+        on_delete=models.CASCADE,
+        null=True,
+        blank=True,
+        related_name="+",
     )
     created_at = models.DateTimeField(auto_now_add=True)
 
diff --git a/src/hope_dedup_engine/apps/api/serializers.py b/src/hope_dedup_engine/apps/api/serializers.py
index ba5ac220..205532e3 100644
--- a/src/hope_dedup_engine/apps/api/serializers.py
+++ b/src/hope_dedup_engine/apps/api/serializers.py
@@ -10,7 +10,14 @@ class DeduplicationSetSerializer(serializers.ModelSerializer):
     class Meta:
         model = DeduplicationSet
         exclude = ("deleted",)
-        read_only_fields = "external_system", "created_at", "created_by", "deleted", "updated_at", "updated_by"
+        read_only_fields = (
+            "external_system",
+            "created_at",
+            "created_by",
+            "deleted",
+            "updated_at",
+            "updated_by",
+        )
 
 
 class ImageSerializer(serializers.ModelSerializer):
diff --git a/src/hope_dedup_engine/apps/api/urls.py b/src/hope_dedup_engine/apps/api/urls.py
index 78b1ff4f..b238604b 100644
--- a/src/hope_dedup_engine/apps/api/urls.py
+++ b/src/hope_dedup_engine/apps/api/urls.py
@@ -10,14 +10,30 @@
     DUPLICATE_LIST,
     IMAGE_LIST,
 )
-from hope_dedup_engine.apps.api.views import BulkImageViewSet, DeduplicationSetViewSet, DuplicateViewSet, ImageViewSet
+from hope_dedup_engine.apps.api.views import (
+    BulkImageViewSet,
+    DeduplicationSetViewSet,
+    DuplicateViewSet,
+    ImageViewSet,
+)
 
 router = routers.SimpleRouter()
-router.register(DEDUPLICATION_SET_LIST, DeduplicationSetViewSet, basename=DEDUPLICATION_SET_LIST)
+router.register(
+    DEDUPLICATION_SET_LIST, DeduplicationSetViewSet, basename=DEDUPLICATION_SET_LIST
+)
 
-deduplication_sets_router = nested_routers.NestedSimpleRouter(router, DEDUPLICATION_SET_LIST, lookup=DEDUPLICATION_SET)
+deduplication_sets_router = nested_routers.NestedSimpleRouter(
+    router, DEDUPLICATION_SET_LIST, lookup=DEDUPLICATION_SET
+)
 deduplication_sets_router.register(IMAGE_LIST, ImageViewSet, basename=IMAGE_LIST)
-deduplication_sets_router.register(BULK_IMAGE_LIST, BulkImageViewSet, basename=BULK_IMAGE_LIST)
-deduplication_sets_router.register(DUPLICATE_LIST, DuplicateViewSet, basename=DUPLICATE_LIST)
+deduplication_sets_router.register(
+    BULK_IMAGE_LIST, BulkImageViewSet, basename=BULK_IMAGE_LIST
+)
+deduplication_sets_router.register(
+    DUPLICATE_LIST, DuplicateViewSet, basename=DUPLICATE_LIST
+)
 
-urlpatterns = [path("", include(router.urls)), path("", include(deduplication_sets_router.urls))]
+urlpatterns = [
+    path("", include(router.urls)),
+    path("", include(deduplication_sets_router.urls)),
+]
diff --git a/src/hope_dedup_engine/apps/api/views.py b/src/hope_dedup_engine/apps/api/views.py
index 9afb7f70..622f3549 100644
--- a/src/hope_dedup_engine/apps/api/views.py
+++ b/src/hope_dedup_engine/apps/api/views.py
@@ -18,10 +18,17 @@
     HDETokenAuthentication,
     UserAndDeduplicationSetAreOfTheSameSystem,
 )
-from hope_dedup_engine.apps.api.const import DEDUPLICATION_SET_FILTER, DEDUPLICATION_SET_PARAM
+from hope_dedup_engine.apps.api.const import (
+    DEDUPLICATION_SET_FILTER,
+    DEDUPLICATION_SET_PARAM,
+)
 from hope_dedup_engine.apps.api.models import DeduplicationSet
 from hope_dedup_engine.apps.api.models.deduplication import Duplicate, Image
-from hope_dedup_engine.apps.api.serializers import DeduplicationSetSerializer, DuplicateSerializer, ImageSerializer
+from hope_dedup_engine.apps.api.serializers import (
+    DeduplicationSetSerializer,
+    DuplicateSerializer,
+    ImageSerializer,
+)
 from hope_dedup_engine.apps.api.utils import delete_model_data, start_processing
 
 MESSAGE = "message"
@@ -31,17 +38,29 @@
 
 
 class DeduplicationSetViewSet(
-    mixins.ListModelMixin, mixins.CreateModelMixin, mixins.DestroyModelMixin, viewsets.GenericViewSet
+    mixins.ListModelMixin,
+    mixins.CreateModelMixin,
+    mixins.DestroyModelMixin,
+    viewsets.GenericViewSet,
 ):
     authentication_classes = (HDETokenAuthentication,)
-    permission_classes = IsAuthenticated, AssignedToExternalSystem, UserAndDeduplicationSetAreOfTheSameSystem
+    permission_classes = (
+        IsAuthenticated,
+        AssignedToExternalSystem,
+        UserAndDeduplicationSetAreOfTheSameSystem,
+    )
     serializer_class = DeduplicationSetSerializer
 
     def get_queryset(self) -> QuerySet:
-        return DeduplicationSet.objects.filter(external_system=self.request.user.external_system, deleted=False)
+        return DeduplicationSet.objects.filter(
+            external_system=self.request.user.external_system, deleted=False
+        )
 
     def perform_create(self, serializer: Serializer) -> None:
-        serializer.save(created_by=self.request.user, external_system=self.request.user.external_system)
+        serializer.save(
+            created_by=self.request.user,
+            external_system=self.request.user.external_system,
+        )
 
     def perform_destroy(self, instance: DeduplicationSet) -> None:
         instance.updated_by = self.request.user
@@ -65,7 +84,9 @@ def process(self, request: Request, pk: UUID | None = None) -> Response:
                 self._start_processing(deduplication_set)
                 return Response({MESSAGE: STARTED})
             case DeduplicationSet.State.PROCESSING:
-                return Response({MESSAGE: ALREADY_PROCESSING}, status=status.HTTP_400_BAD_REQUEST)
+                return Response(
+                    {MESSAGE: ALREADY_PROCESSING}, status=status.HTTP_400_BAD_REQUEST
+                )
 
 
 class ImageViewSet(
@@ -76,7 +97,11 @@ class ImageViewSet(
     viewsets.GenericViewSet,
 ):
     authentication_classes = (HDETokenAuthentication,)
-    permission_classes = IsAuthenticated, AssignedToExternalSystem, UserAndDeduplicationSetAreOfTheSameSystem
+    permission_classes = (
+        IsAuthenticated,
+        AssignedToExternalSystem,
+        UserAndDeduplicationSetAreOfTheSameSystem,
+    )
     serializer_class = ImageSerializer
     queryset = Image.objects.all()
     parent_lookup_kwargs = {
@@ -108,14 +133,18 @@ def __setitem__(self, key: str, value: Any) -> None:
 
 
 class WrapRequestDataMixin:
-    def initialize_request(self, request: Request, *args: Any, **kwargs: Any) -> Request:
+    def initialize_request(
+        self, request: Request, *args: Any, **kwargs: Any
+    ) -> Request:
         request = super().initialize_request(request, *args, **kwargs)
         request._full_data = ListDataWrapper(request.data)
         return request
 
 
 class UnwrapRequestDataMixin:
-    def initialize_request(self, request: Request, *args: Any, **kwargs: Any) -> Request:
+    def initialize_request(
+        self, request: Request, *args: Any, **kwargs: Any
+    ) -> Request:
         request = super().initialize_request(request, *args, **kwargs)
         request._full_data = request._full_data.data
         return request
@@ -131,7 +160,11 @@ class BulkImageViewSet(
     viewsets.GenericViewSet,
 ):
     authentication_classes = (HDETokenAuthentication,)
-    permission_classes = IsAuthenticated, AssignedToExternalSystem, UserAndDeduplicationSetAreOfTheSameSystem
+    permission_classes = (
+        IsAuthenticated,
+        AssignedToExternalSystem,
+        UserAndDeduplicationSetAreOfTheSameSystem,
+    )
     serializer_class = ImageSerializer
     queryset = Image.objects.all()
     parent_lookup_kwargs = {
@@ -143,7 +176,9 @@ def get_serializer(self, *args: Any, **kwargs: Any) -> Serializer:
 
     def perform_create(self, serializer: Serializer) -> None:
         super().perform_create(serializer)
-        if deduplication_set := serializer.instance[0].deduplication_set if serializer.instance else None:
+        if deduplication_set := (
+            serializer.instance[0].deduplication_set if serializer.instance else None
+        ):
             deduplication_set.updated_by = self.request.user
             deduplication_set.save()
 
@@ -156,9 +191,15 @@ def clear(self, request: Request, deduplication_set_pk: str) -> Response:
         return Response(status=status.HTTP_204_NO_CONTENT)
 
 
-class DuplicateViewSet(nested_viewsets.NestedViewSetMixin, mixins.ListModelMixin, viewsets.GenericViewSet):
+class DuplicateViewSet(
+    nested_viewsets.NestedViewSetMixin, mixins.ListModelMixin, viewsets.GenericViewSet
+):
     authentication_classes = (HDETokenAuthentication,)
-    permission_classes = IsAuthenticated, AssignedToExternalSystem, UserAndDeduplicationSetAreOfTheSameSystem
+    permission_classes = (
+        IsAuthenticated,
+        AssignedToExternalSystem,
+        UserAndDeduplicationSetAreOfTheSameSystem,
+    )
     serializer_class = DuplicateSerializer
     queryset = Duplicate.objects.all()
     parent_lookup_kwargs = {
diff --git a/src/hope_dedup_engine/apps/core/management/commands/createsystem.py b/src/hope_dedup_engine/apps/core/management/commands/createsystem.py
index fbe2707d..f9dafbac 100644
--- a/src/hope_dedup_engine/apps/core/management/commands/createsystem.py
+++ b/src/hope_dedup_engine/apps/core/management/commands/createsystem.py
@@ -10,7 +10,9 @@ def add_arguments(self, parser):
         parser.add_argument("name")
 
     def handle(self, *args, **options):
-        system, created = ExternalSystem.objects.get_or_create(name=(name := options["name"]))
+        system, created = ExternalSystem.objects.get_or_create(
+            name=(name := options["name"])
+        )
         if created:
             self.stdout.write(self.style.SUCCESS(f'"{name}" system created.'))
         else:
diff --git a/src/hope_dedup_engine/apps/core/management/commands/env.py b/src/hope_dedup_engine/apps/core/management/commands/env.py
index 782c94f4..a1bfff67 100644
--- a/src/hope_dedup_engine/apps/core/management/commands/env.py
+++ b/src/hope_dedup_engine/apps/core/management/commands/env.py
@@ -33,14 +33,26 @@ def add_arguments(self, parser: "CommandParser") -> None:
             default="export {key}={value}",
             help="Check env for variable availability (default: 'export {key}=\"{value}\"')",
         )
-        parser.add_argument("--develop", action="store_true", help="Display development values")
-        parser.add_argument("--config", action="store_true", help="Only list changed values")
+        parser.add_argument(
+            "--develop", action="store_true", help="Display development values"
+        )
+        parser.add_argument(
+            "--config", action="store_true", help="Only list changed values"
+        )
         parser.add_argument("--diff", action="store_true", help="Mark changed values")
         parser.add_argument(
-            "--check", action="store_true", dest="check", default=False, help="Check env for variable availability"
+            "--check",
+            action="store_true",
+            dest="check",
+            default=False,
+            help="Check env for variable availability",
         )
         parser.add_argument(
-            "--ignore-errors", action="store_true", dest="ignore_errors", default=False, help="Do not fail"
+            "--ignore-errors",
+            action="store_true",
+            dest="ignore_errors",
+            default=False,
+            help="Do not fail",
         )
 
     def handle(self, *args: "Any", **options: "Any") -> None:
@@ -62,7 +74,9 @@ def handle(self, *args: "Any", **options: "Any") -> None:
                 else:
                     value: Any = env.get_value(k)
 
-                line: str = pattern.format(key=k, value=clean(value), help=help, default=default)
+                line: str = pattern.format(
+                    key=k, value=clean(value), help=help, default=default
+                )
                 if options["diff"]:
                     if value != default:
                         line = self.style.SUCCESS(line)
diff --git a/src/hope_dedup_engine/apps/core/management/commands/upgrade.py b/src/hope_dedup_engine/apps/core/management/commands/upgrade.py
index a6e09ff0..513e2f01 100644
--- a/src/hope_dedup_engine/apps/core/management/commands/upgrade.py
+++ b/src/hope_dedup_engine/apps/core/management/commands/upgrade.py
@@ -89,7 +89,9 @@ def get_options(self, options: dict[str, Any]) -> None:
         self.debug = options["debug"]
 
         self.admin_email = str(options["admin_email"] or env("ADMIN_EMAIL", ""))
-        self.admin_password = str(options["admin_password"] or env("ADMIN_PASSWORD", ""))
+        self.admin_password = str(
+            options["admin_password"] or env("ADMIN_PASSWORD", "")
+        )
 
     def halt(self, e: Exception) -> None:
         self.stdout.write(str(e), style_func=self.style.ERROR)
@@ -123,7 +125,9 @@ def handle(self, *args: Any, **options: Any) -> None:  # noqa: C901
                 call_command("check", deploy=True, verbosity=self.verbosity - 1)
             if self.static:
                 static_root = Path(env("STATIC_ROOT"))
-                echo(f"Run collectstatic to: '{static_root}' - '{static_root.absolute()}")
+                echo(
+                    f"Run collectstatic to: '{static_root}' - '{static_root.absolute()}"
+                )
                 if not static_root.exists():
                     static_root.mkdir(parents=True)
                 call_command("collectstatic", **extra)
@@ -144,7 +148,10 @@ def handle(self, *args: Any, **options: Any) -> None:  # noqa: C901
                         style_func=self.style.WARNING,
                     )
                 else:
-                    echo(f"Creating superuser: {self.admin_email}", style_func=self.style.WARNING)
+                    echo(
+                        f"Creating superuser: {self.admin_email}",
+                        style_func=self.style.WARNING,
+                    )
                     validate_email(self.admin_email)
                     os.environ["DJANGO_SUPERUSER_USERNAME"] = self.admin_email
                     os.environ["DJANGO_SUPERUSER_EMAIL"] = self.admin_email
diff --git a/src/hope_dedup_engine/apps/faces/celery_tasks.py b/src/hope_dedup_engine/apps/faces/celery_tasks.py
index 2c156cfb..2fec0d72 100644
--- a/src/hope_dedup_engine/apps/faces/celery_tasks.py
+++ b/src/hope_dedup_engine/apps/faces/celery_tasks.py
@@ -9,7 +9,9 @@
 @shared_task(bind=True, soft_time_limit=0.5 * 60 * 60, time_limit=1 * 60 * 60)
 @task_lifecycle(name="Deduplicate", ttl=1 * 60 * 60)
 # TODO: Use DeduplicationSet objects as input to deduplication pipeline
-def deduplicate(self, filenames: tuple[str], ignore_pairs: tuple[tuple[str, str]] = tuple()) -> tuple[tuple[str]]:
+def deduplicate(
+    self, filenames: tuple[str], ignore_pairs: tuple[tuple[str, str]] = tuple()
+) -> tuple[tuple[str]]:
     """
     Deduplicate a set of filenames, ignoring any specified pairs of filenames.
 
@@ -25,5 +27,8 @@ def deduplicate(self, filenames: tuple[str], ignore_pairs: tuple[tuple[str, str]
         dd = DuplicationDetector(filenames, ignore_pairs)
         return dd.find_duplicates()
     except Exception as e:
-        self.update_state(state=states.FAILURE, meta={"exc_message": str(e), "traceback": traceback.format_exc()})
+        self.update_state(
+            state=states.FAILURE,
+            meta={"exc_message": str(e), "traceback": traceback.format_exc()},
+        )
         raise e
diff --git a/src/hope_dedup_engine/apps/faces/utils/celery_utils.py b/src/hope_dedup_engine/apps/faces/utils/celery_utils.py
index eec34e9a..6aba6ced 100644
--- a/src/hope_dedup_engine/apps/faces/utils/celery_utils.py
+++ b/src/hope_dedup_engine/apps/faces/utils/celery_utils.py
@@ -21,7 +21,9 @@ def wrapper(self, *args, **kwargs) -> any:
             ignore_pairs = args[1] if args else kwargs.get("ignore_pairs")
             lock_name: str = f"{name}_{_get_hash(filenames, ignore_pairs)}"
             if not _acquire_lock(lock_name, ttl):
-                logger.info(f"Task {name} with brocker lock {lock_name} is already running.")
+                logger.info(
+                    f"Task {name} with broker lock {lock_name} is already running."
+                )
                 return None
 
             try:
@@ -49,6 +51,8 @@ def _release_lock(lock_name: str) -> None:
 
 def _get_hash(filenames: tuple[str], ignore_pairs: tuple[tuple[str, str]]) -> str:
     fn_str: str = ",".join(sorted(filenames))
-    ip_sorted = sorted((min(item1, item2), max(item1, item2)) for item1, item2 in ignore_pairs)
+    ip_sorted = sorted(
+        (min(item1, item2), max(item1, item2)) for item1, item2 in ignore_pairs
+    )
     ip_str = ",".join(f"{item1},{item2}" for item1, item2 in ip_sorted)
     return hashlib.sha256(f"{fn_str}{ip_str}".encode()).hexdigest()
diff --git a/src/hope_dedup_engine/apps/faces/utils/duplication_detector.py b/src/hope_dedup_engine/apps/faces/utils/duplication_detector.py
index c0683943..5b9257e7 100644
--- a/src/hope_dedup_engine/apps/faces/utils/duplication_detector.py
+++ b/src/hope_dedup_engine/apps/faces/utils/duplication_detector.py
@@ -11,7 +11,11 @@
 import numpy as np
 from constance import config
 
-from hope_dedup_engine.apps.core.storage import CV2DNNStorage, HDEAzureStorage, HOPEAzureStorage
+from hope_dedup_engine.apps.core.storage import (
+    CV2DNNStorage,
+    HDEAzureStorage,
+    HOPEAzureStorage,
+)
 
 
 class DuplicationDetector:
@@ -32,7 +36,9 @@ class FaceEncodingsConfig:
 
     logger: logging.Logger = logging.getLogger(__name__)
 
-    def __init__(self, filenames: tuple[str], ignore_pairs: tuple[str, str] = tuple()) -> None:
+    def __init__(
+        self, filenames: tuple[str], ignore_pairs: tuple[str, str] = tuple()
+    ) -> None:
         """
         Initialize the DuplicationDetector with the given filenames.
 
@@ -83,7 +89,13 @@ def _set_net(self, storage: CV2DNNStorage) -> cv2.dnn_Net:
         return net
 
     def _get_shape(self) -> dict[str, int]:
-        pattern = r"input_shape\s*\{\s*" r"dim:\s*(\d+)\s*" r"dim:\s*(\d+)\s*" r"dim:\s*(\d+)\s*" r"dim:\s*(\d+)\s*\}"
+        pattern = (
+            r"input_shape\s*\{\s*"
+            r"dim:\s*(\d+)\s*"
+            r"dim:\s*(\d+)\s*"
+            r"dim:\s*(\d+)\s*"
+            r"dim:\s*(\d+)\s*\}"
+        )
         with open(settings.PROTOTXT_FILE, "r") as file:
             if match := re.search(pattern, file.read()):
                 return {
@@ -95,15 +107,21 @@ def _get_shape(self) -> dict[str, int]:
             else:
                 raise ValueError("Could not find input_shape in prototxt file.")
 
-    def _get_pairs_to_ignore(self, ignore: tuple[tuple[str, str]]) -> set[tuple[str, str]]:
+    def _get_pairs_to_ignore(
+        self, ignore: tuple[tuple[str, str]]
+    ) -> set[tuple[str, str]]:
         ignore = tuple(tuple(pair) for pair in ignore)
         if not ignore:
             return set()
         if all(
-            isinstance(pair, tuple) and len(pair) == 2 and all(isinstance(item, str) and item for item in pair)
+            isinstance(pair, tuple)
+            and len(pair) == 2
+            and all(isinstance(item, str) and item for item in pair)
             for pair in ignore
         ):
-            return {(item1, item2) for item1, item2 in ignore} | {(item2, item1) for item1, item2 in ignore}
+            return {(item1, item2) for item1, item2 in ignore} | {
+                (item2, item1) for item1, item2 in ignore
+            }
         elif len(ignore) == 2 and all(isinstance(item, str) for item in ignore):
             return {(ignore[0], ignore[1]), (ignore[1], ignore[0])}
         else:
@@ -117,7 +135,9 @@ def _encodings_filename(self, filename: str) -> str:
     def _has_encodings(self, filename: str) -> bool:
         return self.storages["encoded"].exists(self._encodings_filename(filename))
 
-    def _get_face_detections_dnn(self, filename: str) -> list[tuple[int, int, int, int]]:
+    def _get_face_detections_dnn(
+        self, filename: str
+    ) -> list[tuple[int, int, int, int]]:
         face_regions: list[tuple[int, int, int, int]] = []
         try:
             with self.storages["images"].open(filename, "rb") as img_file:
@@ -128,9 +148,16 @@ def _get_face_detections_dnn(self, filename: str) -> list[tuple[int, int, int, i
             # Create a blob (4D tensor) from the image
             blob = cv2.dnn.blobFromImage(
                 image=cv2.resize(
-                    image, dsize=(self.blob_from_image_cfg.shape["height"], self.blob_from_image_cfg.shape["width"])
+                    image,
+                    dsize=(
+                        self.blob_from_image_cfg.shape["height"],
+                        self.blob_from_image_cfg.shape["width"],
+                    ),
+                ),
+                size=(
+                    self.blob_from_image_cfg.shape["height"],
+                    self.blob_from_image_cfg.shape["width"],
                 ),
-                size=(self.blob_from_image_cfg.shape["height"], self.blob_from_image_cfg.shape["width"]),
                 scalefactor=self.blob_from_image_cfg.scale_factor,
                 mean=self.blob_from_image_cfg.mean_values,
             )
@@ -145,17 +172,26 @@ def _get_face_detections_dnn(self, filename: str) -> list[tuple[int, int, int, i
                 confidence = detections[0, 0, i, 2]
                 # Filter out weak detections by ensuring the confidence is greater than the minimum confidence
                 if confidence > self.face_detection_confidence:
-                    box = (detections[0, 0, i, 3:7] * np.array([w, h, w, h])).astype("int")
+                    box = (detections[0, 0, i, 3:7] * np.array([w, h, w, h])).astype(
+                        "int"
+                    )
                     boxes.append(box)
                     confidences.append(confidence)
             if boxes:
                 # Apply non-maxima suppression to suppress weak, overlapping bounding boxes
-                indices = cv2.dnn.NMSBoxes(boxes, confidences, self.face_detection_confidence, self.nms_threshold)
+                indices = cv2.dnn.NMSBoxes(
+                    boxes,
+                    confidences,
+                    self.face_detection_confidence,
+                    self.nms_threshold,
+                )
                 if indices is not None:
                     for i in indices:
                         face_regions.append(tuple(boxes[i]))
         except Exception as e:
-            self.logger.exception("Error processing face detection for image %s", filename)
+            self.logger.exception(
+                "Error processing face detection for image %s", filename
+            )
             raise e
         return face_regions
 
@@ -193,13 +229,19 @@ def _encode_face(self, filename: str) -> None:
                         encodings.extend(face_encodings)
                     else:
                         self.logger.error("Invalid face region %s", region)
-                with self.storages["encoded"].open(self._encodings_filename(filename), "wb") as f:
+                with self.storages["encoded"].open(
+                    self._encodings_filename(filename), "wb"
+                ) as f:
                     np.save(f, encodings)
         except Exception as e:
-            self.logger.exception("Error processing face encodings for image %s", filename)
+            self.logger.exception(
+                "Error processing face encodings for image %s", filename
+            )
             raise e
 
-    def _get_duplicated_groups(self, checked: set[tuple[str, str, float]]) -> tuple[tuple[str]]:
+    def _get_duplicated_groups(
+        self, checked: set[tuple[str, str, float]]
+    ) -> tuple[tuple[str]]:
         # Dictionary to store connections between paths where distances are less than the threshold
         groups = []
         connections = defaultdict(set)
@@ -216,10 +258,14 @@ def _get_duplicated_groups(self, checked: set[tuple[str, str, float]]) -> tuple[
                 # Try to expand the group ensuring each new path is duplicated to all in the group
                 while queue:
                     neighbor = queue.pop(0)
-                    if neighbor not in new_group and all(neighbor in connections[member] for member in new_group):
+                    if neighbor not in new_group and all(
+                        neighbor in connections[member] for member in new_group
+                    ):
                         new_group.add(neighbor)
                         # Add neighbors of the current neighbor, excluding those already in the group
-                        queue.extend([n for n in connections[neighbor] if n not in new_group])
+                        queue.extend(
+                            [n for n in connections[neighbor] if n not in new_group]
+                        )
                 # Add the newly formed group to the list of groups
                 groups.append(new_group)
         return tuple(map(tuple, groups))
@@ -244,12 +290,18 @@ def find_duplicates(self) -> tuple[tuple[str]]:
                         min_distance = float("inf")
                         for encoding1 in encodings1:
                             if (
-                                current_min := min(face_recognition.face_distance(encodings2, encoding1))
+                                current_min := min(
+                                    face_recognition.face_distance(
+                                        encodings2, encoding1
+                                    )
+                                )
                             ) < min_distance:
                                 min_distance = current_min
                         checked.add((path1, path2, min_distance))
 
             return self._get_duplicated_groups(checked)
         except Exception as e:
-            self.logger.exception("Error finding duplicates for images %s", self.filenames)
+            self.logger.exception(
+                "Error finding duplicates for images %s", self.filenames
+            )
             raise e
diff --git a/src/hope_dedup_engine/apps/faces/validators.py b/src/hope_dedup_engine/apps/faces/validators.py
index 1b8288f4..d3f3f5bd 100644
--- a/src/hope_dedup_engine/apps/faces/validators.py
+++ b/src/hope_dedup_engine/apps/faces/validators.py
@@ -8,7 +8,9 @@ def to_python(self, value):
             if len(values) != 3:
                 raise ValueError("The tuple must have exactly three elements.")
             if not all(-255 <= v <= 255 for v in values):
-                raise ValueError("Each value in the tuple must be between -255 and 255.")
+                raise ValueError(
+                    "Each value in the tuple must be between -255 and 255."
+                )
             return values
         except Exception as e:
             raise ValidationError(
diff --git a/src/hope_dedup_engine/apps/security/models.py b/src/hope_dedup_engine/apps/security/models.py
index 8ed7506a..044c1daa 100644
--- a/src/hope_dedup_engine/apps/security/models.py
+++ b/src/hope_dedup_engine/apps/security/models.py
@@ -13,7 +13,9 @@ class ExternalSystem(models.Model):
 
 
 class User(SecurityMixin, AbstractUser):
-    external_system = models.ForeignKey(ExternalSystem, on_delete=models.SET_NULL, null=True, blank=True)
+    external_system = models.ForeignKey(
+        ExternalSystem, on_delete=models.SET_NULL, null=True, blank=True
+    )
 
     class Meta:
         abstract = False
diff --git a/src/hope_dedup_engine/apps/social/pipeline.py b/src/hope_dedup_engine/apps/social/pipeline.py
index aea7c84c..51610b3d 100644
--- a/src/hope_dedup_engine/apps/social/pipeline.py
+++ b/src/hope_dedup_engine/apps/social/pipeline.py
@@ -6,7 +6,9 @@
 from social_core.backends.base import BaseAuth
 
 
-def save_to_group(backend: BaseAuth, user: Optional[User] = None, **kwargs: Any) -> dict[str, Any]:
+def save_to_group(
+    backend: BaseAuth, user: Optional[User] = None, **kwargs: Any
+) -> dict[str, Any]:
     if user:
         grp = Group.objects.get(name=config.NEW_USER_DEFAULT_GROUP)
         user.groups.add(grp)
diff --git a/src/hope_dedup_engine/config/__init__.py b/src/hope_dedup_engine/config/__init__.py
index 4cd90b32..ccd74314 100644
--- a/src/hope_dedup_engine/config/__init__.py
+++ b/src/hope_dedup_engine/config/__init__.py
@@ -5,7 +5,9 @@
 from environ import Env
 
 if TYPE_CHECKING:
-    ConfigItem: TypeAlias = Union[Tuple[type, Any, str, Any], Tuple[type, Any, str], Tuple[type, Any]]
+    ConfigItem: TypeAlias = Union[
+        Tuple[type, Any, str, Any], Tuple[type, Any, str], Tuple[type, Any]
+    ]
 
 
 DJANGO_HELP_BASE = "https://docs.djangoproject.com/en/5.0/ref/settings"
@@ -20,7 +22,14 @@ class Group(Enum):
 
 
 NOT_SET = "<- not set ->"
-EXPLICIT_SET = ["DATABASE_URL", "SECRET_KEY", "CACHE_URL", "CELERY_BROKER_URL", "MEDIA_ROOT", "STATIC_ROOT"]
+EXPLICIT_SET = [
+    "DATABASE_URL",
+    "SECRET_KEY",
+    "CACHE_URL",
+    "CELERY_BROKER_URL",
+    "MEDIA_ROOT",
+    "STATIC_ROOT",
+]
 
 CONFIG: "Dict[str, ConfigItem]" = {
     "ADMIN_EMAIL": (str, "", "Initial user created at first deploy"),
@@ -29,7 +38,11 @@ class Group(Enum):
     "AUTHENTICATION_BACKENDS": (list, [], setting("authentication-backends")),
     "CACHE_URL": (str, "redis://localhost:6379/0"),
     "CATCH_ALL_EMAIL": (str, "If set all the emails will be sent to this address"),
-    "CELERY_BROKER_URL": (str, NOT_SET, "https://docs.celeryq.dev/en/stable/django/first-steps-with-django.html"),
+    "CELERY_BROKER_URL": (
+        str,
+        NOT_SET,
+        "https://docs.celeryq.dev/en/stable/django/first-steps-with-django.html",
+    ),
     "CELERY_TASK_ALWAYS_EAGER": (
         bool,
         False,
@@ -54,21 +67,47 @@ class Group(Enum):
         "postgres://127.0.0.1:5432/dedupe",
     ),
     "DEBUG": (bool, False, setting("debug"), True),
-    "EMAIL_BACKEND": (str, "django.core.mail.backends.smtp.EmailBackend", setting("email-backend"), True),
+    "EMAIL_BACKEND": (
+        str,
+        "django.core.mail.backends.smtp.EmailBackend",
+        setting("email-backend"),
+        True,
+    ),
     "EMAIL_HOST": (str, "localhost", setting("email-host"), True),
     "EMAIL_HOST_USER": (str, "", setting("email-host-user"), True),
     "EMAIL_HOST_PASSWORD": (str, "", setting("email-host-password"), True),
     "EMAIL_PORT": (int, "25", setting("email-port"), True),
-    "EMAIL_SUBJECT_PREFIX": (str, "[Hope-dedupe]", setting("email-subject-prefix"), True),
+    "EMAIL_SUBJECT_PREFIX": (
+        str,
+        "[Hope-dedupe]",
+        setting("email-subject-prefix"),
+        True,
+    ),
     "EMAIL_USE_LOCALTIME": (bool, False, setting("email-use-localtime"), True),
     "EMAIL_USE_TLS": (bool, False, setting("email-use-tls"), True),
     "EMAIL_USE_SSL": (bool, False, setting("email-use-ssl"), True),
     "EMAIL_TIMEOUT": (str, None, setting("email-timeout"), True),
     "LOGGING_LEVEL": (str, "CRITICAL", setting("logging-level")),
-    "FILE_STORAGE_DEFAULT": (str, "django.core.files.storage.FileSystemStorage", setting("storages")),
-    "FILE_STORAGE_MEDIA": (str, "django.core.files.storage.FileSystemStorage", setting("storages")),
-    "FILE_STORAGE_STATIC": (str, "django.contrib.staticfiles.storage.StaticFilesStorage", setting("storages")),
-    "FILE_STORAGE_HOPE": (str, "django.core.files.storage.FileSystemStorage", setting("storages")),
+    "FILE_STORAGE_DEFAULT": (
+        str,
+        "django.core.files.storage.FileSystemStorage",
+        setting("storages"),
+    ),
+    "FILE_STORAGE_MEDIA": (
+        str,
+        "django.core.files.storage.FileSystemStorage",
+        setting("storages"),
+    ),
+    "FILE_STORAGE_STATIC": (
+        str,
+        "django.contrib.staticfiles.storage.StaticFilesStorage",
+        setting("storages"),
+    ),
+    "FILE_STORAGE_HOPE": (
+        str,
+        "django.core.files.storage.FileSystemStorage",
+        setting("storages"),
+    ),
     "MEDIA_ROOT": (str, None, setting("media-root")),
     "MEDIA_URL": (str, "/media/", setting("media-url")),
     "ROOT_TOKEN": (str, "", ""),
@@ -79,16 +118,29 @@ class Group(Enum):
     "SENTRY_DSN": (str, "", "Sentry DSN"),
     "SENTRY_ENVIRONMENT": (str, "production", "Sentry Environment"),
     "SENTRY_URL": (str, "", "Sentry server url"),
-    "SESSION_COOKIE_DOMAIN": (str, "", setting("std-setting-SESSION_COOKIE_DOMAIN"), "localhost"),
+    "SESSION_COOKIE_DOMAIN": (
+        str,
+        "",
+        setting("std-setting-SESSION_COOKIE_DOMAIN"),
+        "localhost",
+    ),
     "SESSION_COOKIE_HTTPONLY": (bool, True, setting("session-cookie-httponly"), False),
     "SESSION_COOKIE_NAME": (str, "dedupe_session", setting("session-cookie-name")),
     "SESSION_COOKIE_PATH": (str, "/", setting("session-cookie-path")),
     "SESSION_COOKIE_SECURE": (bool, True, setting("session-cookie-secure"), False),
-    "SIGNING_BACKEND": (str, "django.core.signing.TimestampSigner", setting("signing-backend")),
+    "SIGNING_BACKEND": (
+        str,
+        "django.core.signing.TimestampSigner",
+        setting("signing-backend"),
+    ),
     "SOCIAL_AUTH_LOGIN_URL": (str, "/login/", "", ""),
     "SOCIAL_AUTH_RAISE_EXCEPTIONS": (bool, False, "", True),
     "SOCIAL_AUTH_REDIRECT_IS_HTTPS": (bool, True, "", False),
-    "STATIC_FILE_STORAGE": (str, "django.core.files.storage.FileSystemStorage", setting("storages")),
+    "STATIC_FILE_STORAGE": (
+        str,
+        "django.core.files.storage.FileSystemStorage",
+        setting("storages"),
+    ),
     "STATIC_ROOT": (str, None, setting("static-root")),
     "STATIC_URL": (str, "/static/", setting("static-url")),
     "TIME_ZONE": (str, "UTC", setting("std-setting-TIME_ZONE")),
diff --git a/src/hope_dedup_engine/config/fragments/constance.py b/src/hope_dedup_engine/config/fragments/constance.py
index 555dbc49..e6b7146b 100644
--- a/src/hope_dedup_engine/config/fragments/constance.py
+++ b/src/hope_dedup_engine/config/fragments/constance.py
@@ -6,7 +6,11 @@
 
 CONSTANCE_CONFIG = {
     "NEW_USER_IS_STAFF": (False, "Set any new user as staff", bool),
-    "NEW_USER_DEFAULT_GROUP": (DEFAULT_GROUP_NAME, "Group to assign to any new user", str),
+    "NEW_USER_DEFAULT_GROUP": (
+        DEFAULT_GROUP_NAME,
+        "Group to assign to any new user",
+        str,
+    ),
     "DNN_BACKEND": (
         cv2.dnn.DNN_BACKEND_OPENCV,
         "Specifies the computation backend to be used by OpenCV for deep learning inference.",
diff --git a/src/hope_dedup_engine/config/fragments/csp.py b/src/hope_dedup_engine/config/fragments/csp.py
index 3070bdaa..a0e02fbd 100644
--- a/src/hope_dedup_engine/config/fragments/csp.py
+++ b/src/hope_dedup_engine/config/fragments/csp.py
@@ -1,7 +1,27 @@
 # CSP_DEFAULT_SRC = ["'self'", "'unsafe-inline'", "'same-origin'", "fonts.googleapis.com", 'fonts.gstatic.com', 'data:',
 #                    'blob:', "cdn.redoc.ly"]
 CSP_DEFAULT_SRC = ["'self'", "'unsafe-inline'"]
-CSP_STYLE_SRC = ["'self'", "'unsafe-inline'", "same-origin", "fonts.googleapis.com", "fonts.gstatic.com"]
+CSP_STYLE_SRC = [
+    "'self'",
+    "'unsafe-inline'",
+    "same-origin",
+    "fonts.googleapis.com",
+    "fonts.gstatic.com",
+]
 CSP_SCRIPT_SRC = ["'self'", "'unsafe-inline'", "same-origin", "blob:"]
-CSP_IMG_SRC = ["'self'", "'unsafe-inline'", "same-origin", "blob:", "data:", "cdn.redoc.ly"]
-CSP_FONT_SRC = ["'self'", "fonts.googleapis.com", "same-origin", "fonts.googleapis.com", "fonts.gstatic.com", "blob:"]
+CSP_IMG_SRC = [
+    "'self'",
+    "'unsafe-inline'",
+    "same-origin",
+    "blob:",
+    "data:",
+    "cdn.redoc.ly",
+]
+CSP_FONT_SRC = [
+    "'self'",
+    "fonts.googleapis.com",
+    "same-origin",
+    "fonts.googleapis.com",
+    "fonts.gstatic.com",
+    "blob:",
+]
diff --git a/src/hope_dedup_engine/state.py b/src/hope_dedup_engine/state.py
index 0973df8f..28253cbf 100644
--- a/src/hope_dedup_engine/state.py
+++ b/src/hope_dedup_engine/state.py
@@ -38,7 +38,16 @@ def add_cookie(
         samesite: str | None = None,
     ) -> None:
         value = json.dumps(value)
-        self.cookies[key] = [value, max_age, expires, path, domain, secure, httponly, samesite]
+        self.cookies[key] = [
+            value,
+            max_age,
+            expires,
+            path,
+            domain,
+            secure,
+            httponly,
+            samesite,
+        ]
 
     def get_cookie(self, name: str) -> Optional[str]:
         return self.request.COOKIES.get(name)
diff --git a/src/hope_dedup_engine/utils/http.py b/src/hope_dedup_engine/utils/http.py
index f4300e8d..236b1a9b 100644
--- a/src/hope_dedup_engine/utils/http.py
+++ b/src/hope_dedup_engine/utils/http.py
@@ -39,5 +39,7 @@ def absolute_uri(url: str | None = None) -> str:
     return uri
 
 
-def absolute_reverse(name: str, args: Tuple[Any] | None = None, kwargs: Dict[str, Any] | None = None) -> str:
+def absolute_reverse(
+    name: str, args: Tuple[Any] | None = None, kwargs: Dict[str, Any] | None = None
+) -> str:
     return absolute_uri(reverse(name, args=args, kwargs=kwargs))
diff --git a/src/hope_dedup_engine/utils/security.py b/src/hope_dedup_engine/utils/security.py
index 4ed19bd0..9ee29f33 100644
--- a/src/hope_dedup_engine/utils/security.py
+++ b/src/hope_dedup_engine/utils/security.py
@@ -4,4 +4,7 @@
 
 
 def is_root(request: Any, *args: Any, **kwargs: Any) -> bool:
-    return request.user.is_superuser and request.headers.get(settings.ROOT_TOKEN_HEADER) == settings.ROOT_TOKEN != ""
+    return (
+        request.user.is_superuser
+        and request.headers.get(settings.ROOT_TOKEN_HEADER) == settings.ROOT_TOKEN != ""
+    )

From 057e1894d2c8e0a89c0b5ccf88065377acde6f0e Mon Sep 17 00:00:00 2001
From: sax 
Date: Tue, 11 Jun 2024 17:44:03 +0200
Subject: [PATCH 17/57] updates lint workflow

---
 .github/workflows/lint.yml | 34 +++++++++++++++++-----------------
 1 file changed, 17 insertions(+), 17 deletions(-)

diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml
index 5cefaa6f..7fe4c1e8 100644
--- a/.github/workflows/lint.yml
+++ b/.github/workflows/lint.yml
@@ -25,7 +25,7 @@ permissions:
 
 jobs:
   changes:
-#    if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name != github.event.pull_request.base.repo.full_name
+    if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name != github.event.pull_request.base.repo.full_name
     name: check files
     runs-on: ubuntu-latest
     timeout-minutes: 3
@@ -46,7 +46,7 @@ jobs:
   flake8:
     needs: changes
     runs-on: ubuntu-latest
-#    if: github.event.pull_request.draft == false && steps.changes.outputs.lint
+    if: github.event.pull_request.draft == false && steps.changes.outputs.lint
     steps:
       - uses: actions/checkout@v2
       - uses: actions/setup-python@v2
@@ -63,7 +63,7 @@ jobs:
   isort:
     needs: changes
     runs-on: ubuntu-latest
-#    if: github.event.pull_request.draft == false && steps.changes.outputs.lint
+    if: github.event.pull_request.draft == false && steps.changes.outputs.lint
     steps:
       - uses: actions/checkout@v2
       - uses: actions/setup-python@v2
@@ -76,7 +76,7 @@ jobs:
   black:
     needs: changes
     runs-on: ubuntu-latest
-#    if: github.event.pull_request.draft == false && steps.changes.outputs.lint
+    if: github.event.pull_request.draft == false && steps.changes.outputs.lint
     steps:
       - uses: actions/checkout@v2
       - uses: actions/setup-python@v2
@@ -86,16 +86,16 @@ jobs:
         run: pip install black
       - name: Black
         run: black src/ --check
-#  bandit:
-#    needs: changes
-#    runs-on: ubuntu-latest
-##    if: github.event.pull_request.draft == false && steps.changes.outputs.lint
-#    steps:
-#      - uses: actions/checkout@v2
-#      - uses: actions/setup-python@v2
-#        with:
-#          python-version: '3.11'
-#      - name: Install requirements
-#        run: pip install bandit
-#      - name: bandit
-#        run: bandit src/
+  bandit:
+    needs: changes
+    runs-on: ubuntu-latest
+    if: github.event.pull_request.draft == false && steps.changes.outputs.lint
+    steps:
+      - uses: actions/checkout@v2
+      - uses: actions/setup-python@v2
+        with:
+          python-version: '3.11'
+      - name: Install requirements
+        run: pip install bandit
+      - name: bandit
+        run: bandit src/

From d7c781eb0fea6b864f23a7dbcd7df8f3902a14a1 Mon Sep 17 00:00:00 2001
From: sax 
Date: Tue, 11 Jun 2024 17:54:22 +0200
Subject: [PATCH 18/57] updates image_updated action

---
 .github/actions/image_updated/action.yml | 1 +
 1 file changed, 1 insertion(+)

diff --git a/.github/actions/image_updated/action.yml b/.github/actions/image_updated/action.yml
index 6b7a3dc2..5aafe226 100644
--- a/.github/actions/image_updated/action.yml
+++ b/.github/actions/image_updated/action.yml
@@ -64,6 +64,7 @@ runs:
       id: check
       shell: bash
       run: |
+        echo "Repo: ${{steps.setup.outputs.repo}}"
         url="https://registry-1.docker.io/v2/${{steps.setup.outputs.repo}}/manifests/${{steps.setup.outputs.tag}}"        
         manifest=$(curl -H "Accept: application/vnd.docker.distribution.manifest.v2+json" \
                         -H 'Authorization: Bearer ${{steps.setup.outputs.token}}' \

From 193b83d63ab5dd308ccda98606e21ac72f3c45b6 Mon Sep 17 00:00:00 2001
From: sax 
Date: Tue, 11 Jun 2024 18:08:56 +0200
Subject: [PATCH 19/57] updates Dockerfile

---
 docker/Dockerfile | 2 +-
 docker/Makefile   | 3 +++
 2 files changed, 4 insertions(+), 1 deletion(-)

diff --git a/docker/Dockerfile b/docker/Dockerfile
index 04cc0aa0..0844267a 100644
--- a/docker/Dockerfile
+++ b/docker/Dockerfile
@@ -64,7 +64,7 @@ RUN \
       postgresql-client \
       libgl1  \
       libglib2.0-0 \
-      libffi7 \
+      libffi8 \
       libgif-dev \
       libjpeg-dev \
       libmagic1 \
diff --git a/docker/Makefile b/docker/Makefile
index 0140b900..d013ae68 100644
--- a/docker/Makefile
+++ b/docker/Makefile
@@ -33,6 +33,9 @@ help:
 sha:
 #	@cd .. && sha1sum ${HASH_SEEDS} | sha1sum | awk '{print $1}' | cut -c 1-8
 	@echo ${LOCK_SHA}
+xxx:
+	docker run -it --rm -t python:3.12-slim-bookworm /bin/bash
+
 
 .build:
 	@echo  ${DOCKERHUB_TOKEN} | docker login -u saxix --password-stdin

From 447f581b28f411988fab47302b532b91bc0c8b06 Mon Sep 17 00:00:00 2001
From: sax 
Date: Tue, 11 Jun 2024 19:38:56 +0200
Subject: [PATCH 20/57] updates CI ci:debug

---
 .github/actions/image_updated/action.yml | 19 ++++--
 .github/workflows/dump.yml               | 87 ++++++++++++++++++++++++
 .github/workflows/test.yml               | 12 +++-
 .gitignore                               |  4 +-
 4 files changed, 113 insertions(+), 9 deletions(-)
 create mode 100644 .github/workflows/dump.yml

diff --git a/.github/actions/image_updated/action.yml b/.github/actions/image_updated/action.yml
index 5aafe226..aa8b0789 100644
--- a/.github/actions/image_updated/action.yml
+++ b/.github/actions/image_updated/action.yml
@@ -48,11 +48,14 @@ runs:
         architecture=${{ inputs.architecture }}
         repo="${ref%:*}"
         tag="${ref##*:}"
+        
+        echo "image=$ref" >> $GITHUB_OUTPUT
         echo "repo=$repo" >> $GITHUB_OUTPUT
         echo "tag=$tag" >> $GITHUB_OUTPUT
         echo "architecture=$architecture" >> $GITHUB_OUTPUT
 
-        res=$(curl -s "https://auth.docker.io/token?service=registry.docker.io&scope=repository:${repo}:pull")
+        res=$(curl -u "${{ inputs.username }}:${{ inputs.password }}" \
+                   -s "https://auth.docker.io/token?service=registry.docker.io&scope=repository:${repo}:pull")
         
         token=$(echo $res | jq -r '.token')
         if [[ -z "$token" ]];then
@@ -64,12 +67,11 @@ runs:
       id: check
       shell: bash
       run: |
-        echo "Repo: ${{steps.setup.outputs.repo}}"
         url="https://registry-1.docker.io/v2/${{steps.setup.outputs.repo}}/manifests/${{steps.setup.outputs.tag}}"        
         manifest=$(curl -H "Accept: application/vnd.docker.distribution.manifest.v2+json" \
                         -H 'Authorization: Bearer ${{steps.setup.outputs.token}}' \
                         -s $url)
-
+                
         if [[ $manifest == *MANIFEST_UNKNOWN* ]];then
           echo "exists=false" >> "$GITHUB_OUTPUT"
           echo "updated=false" >> "$GITHUB_OUTPUT"
@@ -108,13 +110,22 @@ runs:
           echo "::error title=â›” error hint::Digest is empty"
           exit 1
         fi        
-        url=https://registry-1.docker.io/v2/${{steps.setup.outputs.repo}}/blobs/$digest      
+        url=https://registry-1.docker.io/v2/${{steps.setup.outputs.repo}}/blobs/$digest
+        
         blob=$(curl \
               --silent \
               --location \
               -H "Accept: application/vnd.docker.disribution.manifest.v2+json" \
               -H 'Authorization: Bearer ${{steps.setup.outputs.token}}' \
               $url )        
+        
+        if [[ $blob == *BLOB_UNKNOWN* ]];then
+          code=$(echo $blob | jq .errors[0].code)
+          message=$(echo $blob | jq .errors[0].message)          
+          echo "::error title=$code error hint::$message $url"
+          exit 1        
+        fi
+        
         if [[ -z "$blob" ]]; then
           echo "Unable to get blob from: https://registry-1.docker.io/v2/$repo/blobs/$digest"
           echo "updated=false" >> "$GITHUB_OUTPUT"
diff --git a/.github/workflows/dump.yml b/.github/workflows/dump.yml
new file mode 100644
index 00000000..cf13699b
--- /dev/null
+++ b/.github/workflows/dump.yml
@@ -0,0 +1,87 @@
+name: "[DEBUG] Dump"
+
+on:
+  check_run:
+  create:
+  delete:
+  discussion:
+  discussion_comment:
+  fork:
+  issues:
+  issue_comment:
+  milestone:
+  pull_request:
+  pull_request_review_comment:
+  pull_request_review:
+  push:
+  release:
+  workflow_dispatch:
+
+
+jobs:
+  dump:
+    name: "[DEBUG] Echo Full Context"
+    if: ${{ contains(github.event.head_commit.message, 'ci:debug') }}
+    runs-on: [ubuntu-latest, self-hosted]
+    steps:
+      - name: Dump Env vars
+        run: |
+            echo "====== ENVIRONMENT ================="
+            env | sort
+            echo "===================================="
+      - name: Dump GitHub context
+        env:
+          GITHUB_CONTEXT: ${{ toJSON(github) }}
+        run: |
+            echo "====== GITHUB_CONTEXT =============="
+            echo "$GITHUB_CONTEXT"
+            echo "===================================="
+      - name: Dump job context
+        env:
+          JOB_CONTEXT: ${{ toJSON(job) }}
+        run: |
+          echo "====== JOB_CONTEXT =============="
+          echo "$JOB_CONTEXT"
+          echo "===================================="
+      - name: Dump steps context
+        env:
+          STEPS_CONTEXT: ${{ toJSON(steps) }}
+        run: |
+          echo "====== STEPS_CONTEXT =============="
+          echo "$STEPS_CONTEXT"
+          echo "===================================="
+      - name: Dump runner context
+        env:
+          RUNNER_CONTEXT: ${{ toJSON(runner) }}
+        run: |
+          echo "====== RUNNER_CONTEXT =============="
+          echo "$RUNNER_CONTEXT"
+          echo "===================================="
+      - name: Dump strategy context
+        env:
+          STRATEGY_CONTEXT: ${{ toJSON(strategy) }}
+        run: |
+          echo "====== STRATEGY_CONTEXT =============="
+          echo "$STRATEGY_CONTEXT"
+          echo "===================================="
+      - name: Dump matrix context
+        env:
+          MATRIX_CONTEXT: ${{ toJSON(matrix) }}
+        run: |
+          echo "====== MATRIX_CONTEXT =============="
+          echo "$MATRIX_CONTEXT"
+          echo "===================================="
+      - name: Dump vars context
+        env:
+          VARS_CONTEXT: ${{ toJSON(vars) }}
+        run: |
+          echo "====== VARS =============="
+          echo "$VARS_CONTEXT"
+          echo "===================================="
+      - name: Dump env context
+        env:
+          ENV_CONTEXT: ${{ toJSON(env) }}
+        run: |
+          echo "====== ENV =============="
+          echo "$ENV_CONTEXT"
+          echo "===================================="
diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index ebd0931b..8d66304f 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -61,6 +61,15 @@ jobs:
         id: image_name
         run: |
           echo "name=${{vars.DOCKER_IMAGE}}:test-${{steps.meta.outputs.version}}" >> $GITHUB_OUTPUT
+      - id: image_updated
+        name: Check if image exists and updated
+        uses: ./.github/actions/image_updated
+        with:
+          image: unicef/hope-dedupe-engine:test-feature-ci_rc
+          checksum: 00f2f954
+          username: ${{ secrets.username }}
+          password: ${{ secrets.password }}
+
   build:
     if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name != github.event.pull_request.base.repo.full_name
     uses: ./.github/workflows/_build.yml
@@ -97,8 +106,6 @@ jobs:
         uses: actions/checkout@v2
       - name: Run tests
         run: |
-          ls -al $PWD
-          touch __PIPPO__
           docker run --rm \
             -e DATABASE_URL=postgres://postgres:postgres@db:5432/dedupe \
             -e SECRET_KEY=secret_key \
@@ -108,7 +115,6 @@ jobs:
             -w /code/app \
             -t ${{needs.setup.outputs.image_name}} \
             pytest tests -v 
-          ls -al $PWD
       - name: Upload coverage to Codecov
         uses: codecov/codecov-action@v4
         with:
diff --git a/.gitignore b/.gitignore
index 50d6b3af..b9fb62bc 100644
--- a/.gitignore
+++ b/.gitignore
@@ -19,8 +19,8 @@ __pycache__/
 !.trivyignore
 !docker/bin/*.sh
 !bandit.yaml
-build
-dist
+/build
+/dist
 coverage.xml
 Makefile
 site

From 5da53abb927204ba21562742cb0ad90694651ca4 Mon Sep 17 00:00:00 2001
From: sax 
Date: Wed, 12 Jun 2024 05:20:26 +0200
Subject: [PATCH 21/57] updates CI ci:debug

---
 .github/actions/image_updated/action.yml | 5 +++++
 .github/workflows/dump.yml               | 2 +-
 2 files changed, 6 insertions(+), 1 deletion(-)

diff --git a/.github/actions/image_updated/action.yml b/.github/actions/image_updated/action.yml
index aa8b0789..0b8d3a30 100644
--- a/.github/actions/image_updated/action.yml
+++ b/.github/actions/image_updated/action.yml
@@ -49,6 +49,11 @@ runs:
         repo="${ref%:*}"
         tag="${ref##*:}"
         
+        echo "DOCKER_IMAGE=$ref" >> $GITHUB_ENV
+        echo "DOCKER_TAG=$tag" >> $GITHUB_ENV
+        echo "DOCKER_REPO=$repo" >> $GITHUB_ENV
+        echo "DOCKER_ARCH=$architecture" >> $GITHUB_ENV
+        
         echo "image=$ref" >> $GITHUB_OUTPUT
         echo "repo=$repo" >> $GITHUB_OUTPUT
         echo "tag=$tag" >> $GITHUB_OUTPUT
diff --git a/.github/workflows/dump.yml b/.github/workflows/dump.yml
index cf13699b..461d80c9 100644
--- a/.github/workflows/dump.yml
+++ b/.github/workflows/dump.yml
@@ -22,7 +22,7 @@ jobs:
   dump:
     name: "[DEBUG] Echo Full Context"
     if: ${{ contains(github.event.head_commit.message, 'ci:debug') }}
-    runs-on: [ubuntu-latest, self-hosted]
+    runs-on: ubuntu-latest
     steps:
       - name: Dump Env vars
         run: |

From 7c8e14bd3a5d6d14fe5a7c435094a724117937f6 Mon Sep 17 00:00:00 2001
From: sax 
Date: Wed, 12 Jun 2024 05:33:56 +0200
Subject: [PATCH 22/57] updates CI ci:debug

---
 .github/actions/image_updated/action.yml | 12 +++++++-----
 1 file changed, 7 insertions(+), 5 deletions(-)

diff --git a/.github/actions/image_updated/action.yml b/.github/actions/image_updated/action.yml
index 0b8d3a30..facf4b83 100644
--- a/.github/actions/image_updated/action.yml
+++ b/.github/actions/image_updated/action.yml
@@ -58,13 +58,15 @@ runs:
         echo "repo=$repo" >> $GITHUB_OUTPUT
         echo "tag=$tag" >> $GITHUB_OUTPUT
         echo "architecture=$architecture" >> $GITHUB_OUTPUT
-
+        
+        url="https://auth.docker.io/token?service=registry.docker.io&scope=repository:${repo}:pull"
+        
         res=$(curl -u "${{ inputs.username }}:${{ inputs.password }}" \
-                   -s "https://auth.docker.io/token?service=registry.docker.io&scope=repository:${repo}:pull")
+                   -s $url)
         
         token=$(echo $res | jq -r '.token')
         if [[ -z "$token" ]];then
-          echo "::error title=â›” error hint::Unable to get valid token"
+          echo "::error title=â›” error hint::Unable to get valid token from $url"
           exit 1
         fi
         echo "token=$token" >> $GITHUB_OUTPUT
@@ -87,7 +89,7 @@ runs:
         if [[ $manifest == *errors\":* ]];then
           code=$(echo $manifest | jq .errors[0].code)
           message=$(echo $manifest | jq .errors[0].message)          
-          echo "::error title=$code error hint::$message https://registry-1.docker.io/v2/${repo}/manifests/${tag}"
+          echo "::error title=$code error hint::$message $url"
           exit 1
         fi
         echo "exists=true" >> $GITHUB_OUTPUT
@@ -132,7 +134,7 @@ runs:
         fi
         
         if [[ -z "$blob" ]]; then
-          echo "Unable to get blob from: https://registry-1.docker.io/v2/$repo/blobs/$digest"
+          echo "Unable to get blob from: $url"
           echo "updated=false" >> "$GITHUB_OUTPUT"
           echo "build_number=1" >> "$GITHUB_OUTPUT"
           echo "build_date=-" >> "$GITHUB_OUTPUT"        

From 54eb8de8bf127870aa1b1a5037d3efede3c1eac8 Mon Sep 17 00:00:00 2001
From: sax 
Date: Wed, 12 Jun 2024 06:03:00 +0200
Subject: [PATCH 23/57] updates CI ci:debug

---
 .github/actions/image_updated/action.yml | 27 ++++++++++++------------
 .github/workflows/test.yml               |  4 ++--
 2 files changed, 16 insertions(+), 15 deletions(-)

diff --git a/.github/actions/image_updated/action.yml b/.github/actions/image_updated/action.yml
index facf4b83..320c159d 100644
--- a/.github/actions/image_updated/action.yml
+++ b/.github/actions/image_updated/action.yml
@@ -49,10 +49,10 @@ runs:
         repo="${ref%:*}"
         tag="${ref##*:}"
         
-        echo "DOCKER_IMAGE=$ref" >> $GITHUB_ENV
-        echo "DOCKER_TAG=$tag" >> $GITHUB_ENV
-        echo "DOCKER_REPO=$repo" >> $GITHUB_ENV
-        echo "DOCKER_ARCH=$architecture" >> $GITHUB_ENV
+        echo "::debug:: $ref"
+        echo "::debug:: $repo"
+        echo "::debug:: $tag"
+        echo "::debug:: $architecture"
         
         echo "image=$ref" >> $GITHUB_OUTPUT
         echo "repo=$repo" >> $GITHUB_OUTPUT
@@ -61,12 +61,11 @@ runs:
         
         url="https://auth.docker.io/token?service=registry.docker.io&scope=repository:${repo}:pull"
         
-        res=$(curl -u "${{ inputs.username }}:${{ inputs.password }}" \
-                   -s $url)
+        res=$(curl -s $url)
         
         token=$(echo $res | jq -r '.token')
         if [[ -z "$token" ]];then
-          echo "::error title=â›” error hint::Unable to get valid token from $url"
+          echo "::error:: title=â›”::Unable to get valid token from $url"
           exit 1
         fi
         echo "token=$token" >> $GITHUB_OUTPUT
@@ -80,6 +79,7 @@ runs:
                         -s $url)
                 
         if [[ $manifest == *MANIFEST_UNKNOWN* ]];then
+          echo "::warning:: MANIFEST_UNKNOWN"
           echo "exists=false" >> "$GITHUB_OUTPUT"
           echo "updated=false" >> "$GITHUB_OUTPUT"
           echo "build_number=1" >> "$GITHUB_OUTPUT"
@@ -88,8 +88,9 @@ runs:
         fi
         if [[ $manifest == *errors\":* ]];then
           code=$(echo $manifest | jq .errors[0].code)
-          message=$(echo $manifest | jq .errors[0].message)          
-          echo "::error title=$code error hint::$message $url"
+          message=$(echo $manifest | jq .errors[0].message)                    
+          echo "::error title=$code::Error fetching manifest"
+          echo "::error title=$code::$message $url"
           exit 1
         fi
         echo "exists=true" >> $GITHUB_OUTPUT
@@ -106,15 +107,15 @@ runs:
         elif [[ -n "$check2" ]]; then
           digest=$(echo $manifest | jq -r '.config.digest')
         else
-          echo "::error title=â›” error hint::Unable to detect digest"
+          echo "::error title=â›”::Unable to detect digest"
           exit 1        
         fi
         if [[ $digest == null ]]; then
-          echo "::error title=â›” error hint::Digest is null"
+          echo "::error title=â›”::Digest is null"
           exit 1
         fi
         if [[ -z "$digest" ]];then
-          echo "::error title=â›” error hint::Digest is empty"
+          echo "::error title=â›”::Digest is empty"
           exit 1
         fi        
         url=https://registry-1.docker.io/v2/${{steps.setup.outputs.repo}}/blobs/$digest
@@ -129,7 +130,7 @@ runs:
         if [[ $blob == *BLOB_UNKNOWN* ]];then
           code=$(echo $blob | jq .errors[0].code)
           message=$(echo $blob | jq .errors[0].message)          
-          echo "::error title=$code error hint::$message $url"
+          echo "::error title=$code::$message $url"
           exit 1        
         fi
         
diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index 8d66304f..ccebd389 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -65,8 +65,8 @@ jobs:
         name: Check if image exists and updated
         uses: ./.github/actions/image_updated
         with:
-          image: unicef/hope-dedupe-engine:test-feature-ci_rc
-          checksum: 00f2f954
+          image: ${{steps.image_name.outputs.name}}
+          checksum: ${{needs.setup.outputs.hash}}
           username: ${{ secrets.username }}
           password: ${{ secrets.password }}
 

From 310967221a039877f79b5bcf019a8192c900470e Mon Sep 17 00:00:00 2001
From: sax 
Date: Wed, 12 Jun 2024 06:11:21 +0200
Subject: [PATCH 24/57]  - updates ci:debug

---
 .github/actions/distro_hash/action.yml   | 2 +-
 .github/actions/docker_build/action.yml  | 2 +-
 .github/actions/image_updated/action.yml | 9 +++++----
 .github/actions/version/action.xyml      | 4 ++--
 4 files changed, 9 insertions(+), 8 deletions(-)

diff --git a/.github/actions/distro_hash/action.yml b/.github/actions/distro_hash/action.yml
index 53fc160d..f6ea140a 100644
--- a/.github/actions/distro_hash/action.yml
+++ b/.github/actions/distro_hash/action.yml
@@ -15,7 +15,7 @@ runs:
   using: 'composite'
   steps:
     - id: calc
-      shell: bash --noprofile --norc -eo pipefail -ux {0}
+      shell: bash
       run: |
         LOCK_SHA=$(sha1sum ${{ inputs.files }} | sha1sum | awk '{print $1}' | cut -c 1-8)
         echo "hash=$LOCK_SHA" >> "$GITHUB_OUTPUT"
diff --git a/.github/actions/docker_build/action.yml b/.github/actions/docker_build/action.yml
index 69fea0a1..47d0947a 100644
--- a/.github/actions/docker_build/action.yml
+++ b/.github/actions/docker_build/action.yml
@@ -45,7 +45,7 @@ runs:
 
     - name: Calculate Release Hash
       id: calc
-      shell: bash --noprofile --norc -eo pipefail {0}
+      shell: bash
       run: |
           docker pull ${{inputs.image}}
           echo "----------"
diff --git a/.github/actions/image_updated/action.yml b/.github/actions/image_updated/action.yml
index 320c159d..8fce679b 100644
--- a/.github/actions/image_updated/action.yml
+++ b/.github/actions/image_updated/action.yml
@@ -49,10 +49,10 @@ runs:
         repo="${ref%:*}"
         tag="${ref##*:}"
         
-        echo "::debug:: $ref"
-        echo "::debug:: $repo"
-        echo "::debug:: $tag"
-        echo "::debug:: $architecture"
+        echo "::debug::REF:${ref}"
+        echo "::debug::REPO:${repo}"
+        echo "::debug::TAG:${tag}"
+        echo "::debug::ARCH:${architecture}"
         
         echo "image=$ref" >> $GITHUB_OUTPUT
         echo "repo=$repo" >> $GITHUB_OUTPUT
@@ -130,6 +130,7 @@ runs:
         if [[ $blob == *BLOB_UNKNOWN* ]];then
           code=$(echo $blob | jq .errors[0].code)
           message=$(echo $blob | jq .errors[0].message)          
+          echo "::debug::$blob"
           echo "::error title=$code::$message $url"
           exit 1        
         fi
diff --git a/.github/actions/version/action.xyml b/.github/actions/version/action.xyml
index 217adc62..63be76db 100644
--- a/.github/actions/version/action.xyml
+++ b/.github/actions/version/action.xyml
@@ -20,13 +20,13 @@ runs:
   using: 'composite'
   steps:
     - name: Configure Git
-      shell: bash --noprofile --norc -eo pipefail -ux {0}
+      shell: bash
       run: |
         git config --global --add safe.directory $(realpath .)
     - name: ch
       uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
     - name: Parse branch
-      shell: bash --noprofile --norc -eo pipefail -ux {0}
+      shell: bash
       id: parser
       run: |
         #        BASE=$(git describe --all --exact-match 2>/dev/null | sed 's=.*/==')

From aead377c83f4c9b6f0434a4c995d31db03067693 Mon Sep 17 00:00:00 2001
From: sax 
Date: Wed, 12 Jun 2024 06:17:04 +0200
Subject: [PATCH 25/57] 2024-06-12 06:17 - updates ci:debug

---
 .github/actions/image_updated/action.yml | 13 ++++++++-----
 1 file changed, 8 insertions(+), 5 deletions(-)

diff --git a/.github/actions/image_updated/action.yml b/.github/actions/image_updated/action.yml
index 8fce679b..487561f4 100644
--- a/.github/actions/image_updated/action.yml
+++ b/.github/actions/image_updated/action.yml
@@ -128,11 +128,14 @@ runs:
               $url )        
         
         if [[ $blob == *BLOB_UNKNOWN* ]];then
-          code=$(echo $blob | jq .errors[0].code)
-          message=$(echo $blob | jq .errors[0].message)          
-          echo "::debug::$blob"
-          echo "::error title=$code::$message $url"
-          exit 1        
+#          code=$(echo $blob | jq .errors[0].code)
+#          message=$(echo $blob | jq .errors[0].message)
+#          echo "::debug::$blob"
+          echo "::warning title=$code::$message $url"
+          echo "updated=false" >> "$GITHUB_OUTPUT"
+          echo "build_number=1" >> "$GITHUB_OUTPUT"
+          echo "build_date=-" >> "$GITHUB_OUTPUT"             
+          exit 0        
         fi
         
         if [[ -z "$blob" ]]; then

From 44dda207bc2c301410016ec516ceb79b52ba4f30 Mon Sep 17 00:00:00 2001
From: sax 
Date: Wed, 12 Jun 2024 06:18:25 +0200
Subject: [PATCH 26/57] 2024-06-12 06:18 - updates ci:debug

---
 .github/actions/image_updated/action.yml | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/.github/actions/image_updated/action.yml b/.github/actions/image_updated/action.yml
index 487561f4..d61eaa02 100644
--- a/.github/actions/image_updated/action.yml
+++ b/.github/actions/image_updated/action.yml
@@ -128,8 +128,8 @@ runs:
               $url )        
         
         if [[ $blob == *BLOB_UNKNOWN* ]];then
-#          code=$(echo $blob | jq .errors[0].code)
-#          message=$(echo $blob | jq .errors[0].message)
+          code=$(echo $blob | jq .errors[0].code)
+          message=$(echo $blob | jq .errors[0].message)
 #          echo "::debug::$blob"
           echo "::warning title=$code::$message $url"
           echo "updated=false" >> "$GITHUB_OUTPUT"

From 043c308affa3ec18e6073d1f3b23523411e29f38 Mon Sep 17 00:00:00 2001
From: sax 
Date: Wed, 12 Jun 2024 06:21:12 +0200
Subject: [PATCH 27/57] 2024-06-12 06:21 - updates ci:debug

---
 .github/actions/image_updated/action.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/actions/image_updated/action.yml b/.github/actions/image_updated/action.yml
index d61eaa02..f4c3c664 100644
--- a/.github/actions/image_updated/action.yml
+++ b/.github/actions/image_updated/action.yml
@@ -131,7 +131,7 @@ runs:
           code=$(echo $blob | jq .errors[0].code)
           message=$(echo $blob | jq .errors[0].message)
 #          echo "::debug::$blob"
-          echo "::warning title=$code::$message $url"
+          echo "::warning title=$code::$message"
           echo "updated=false" >> "$GITHUB_OUTPUT"
           echo "build_number=1" >> "$GITHUB_OUTPUT"
           echo "build_date=-" >> "$GITHUB_OUTPUT"             

From 7383c24047515df70d076346315f877e2f33d191 Mon Sep 17 00:00:00 2001
From: sax 
Date: Wed, 12 Jun 2024 06:22:18 +0200
Subject: [PATCH 28/57] 2024-06-12 06:22 - updates ci:debug

---
 .github/actions/image_updated/action.yml | 1 -
 1 file changed, 1 deletion(-)

diff --git a/.github/actions/image_updated/action.yml b/.github/actions/image_updated/action.yml
index f4c3c664..6154ecf0 100644
--- a/.github/actions/image_updated/action.yml
+++ b/.github/actions/image_updated/action.yml
@@ -130,7 +130,6 @@ runs:
         if [[ $blob == *BLOB_UNKNOWN* ]];then
           code=$(echo $blob | jq .errors[0].code)
           message=$(echo $blob | jq .errors[0].message)
-#          echo "::debug::$blob"
           echo "::warning title=$code::$message"
           echo "updated=false" >> "$GITHUB_OUTPUT"
           echo "build_number=1" >> "$GITHUB_OUTPUT"

From 36ffca48b5c304d6fcf74f0cafffacfa75bc145a Mon Sep 17 00:00:00 2001
From: sax 
Date: Wed, 12 Jun 2024 06:38:58 +0200
Subject: [PATCH 29/57] 2024-06-12 06:38 - updates ci:debug

---
 .github/workflows/_build.yml | 5 +++--
 .github/workflows/test.yml   | 4 +++-
 2 files changed, 6 insertions(+), 3 deletions(-)

diff --git a/.github/workflows/_build.yml b/.github/workflows/_build.yml
index c72a4957..91d47bb7 100644
--- a/.github/workflows/_build.yml
+++ b/.github/workflows/_build.yml
@@ -72,13 +72,11 @@ jobs:
           echo "image=${tag%:*}" >> $GITHUB_OUTPUT
       - name: Setup Environment (PR)
         if: ${{ github.event_name == 'pull_request' }}
-#        shell: bash
         run: |
           SHA=${{ github.event.pull_request.head.sha }}
           echo "LAST_COMMIT_SHA=${SHA::7}" >> ${GITHUB_ENV}
       - name: Setup Environment (Push)
         if: ${{ github.event_name == 'push' }}
-#        shell: bash
         run: |
           echo "LAST_COMMIT_SHA=${GITHUB_SHA::7}" >> ${GITHUB_ENV}
       - run: |
@@ -107,6 +105,9 @@ jobs:
           platforms: linux/amd64
           driver: docker-container
           driver-opts: 'image=moby/buildkit:v0.13.2'
+      - if: ${{ steps.image_updated.outputs.updated != 'true' || inputs.force == 'true' }}
+        run: |
+          echo "::warning:: Build docker ${{ inputs.image }} - Checksum: ${{ steps.release_hash.outputs.hash }}"
       - name: Build and push
         if: ${{ steps.image_updated.outputs.updated != 'true' || inputs.force == 'true' }}
         uses: docker/build-push-action@v5
diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index ccebd389..8b6775a7 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -94,6 +94,8 @@ jobs:
           POSTGRES_DATABASE: dedupe
           POSTGRES_PASSWORD: postgres
           POSTGRES_USERNAME: postgres
+        ports:
+          - 5432:5432
         options: >-
           --health-cmd pg_isready
           --health-interval 10s
@@ -114,7 +116,7 @@ jobs:
             -v $PWD:/code/app \
             -w /code/app \
             -t ${{needs.setup.outputs.image_name}} \
-            pytest tests -v 
+            pytest tests -v --maxfail=10
       - name: Upload coverage to Codecov
         uses: codecov/codecov-action@v4
         with:

From 6c994b78f73634f340bebfe6529fa235f733bc2f Mon Sep 17 00:00:00 2001
From: sax 
Date: Wed, 12 Jun 2024 06:52:18 +0200
Subject: [PATCH 30/57] 2024-06-12 06:52 - updates ci:debug

---
 .github/workflows/test.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index 8b6775a7..26945219 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -109,7 +109,7 @@ jobs:
       - name: Run tests
         run: |
           docker run --rm \
-            -e DATABASE_URL=postgres://postgres:postgres@db:5432/dedupe \
+            -e DATABASE_URL=postgres://postgres:postgres@127.0.0.1:5432/dedupe \
             -e SECRET_KEY=secret_key \
             -e CACHE_URL=redis://redis:6379/0 \
             -e CELERY_BROKER_URL=redis://redis:6379/0 \

From 125bc70fd374a6c7c9d595da3132ccd72a912846 Mon Sep 17 00:00:00 2001
From: sax 
Date: Wed, 12 Jun 2024 07:01:50 +0200
Subject: [PATCH 31/57] 2024-06-12 07:01 - updates ci:debug

---
 .github/workflows/test.yml      |   4 +
 docker/bin/docker-entrypoint.sh |   4 -
 docker/bin/wait-for-it.sh       | 182 --------------------------------
 3 files changed, 4 insertions(+), 186 deletions(-)
 delete mode 100755 docker/bin/wait-for-it.sh

diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index 26945219..0d611268 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -106,6 +106,10 @@ jobs:
     steps:
       - name: Checkout code
         uses: actions/checkout@v2
+      - name: Checkout code
+        run:
+          docker run --rm \
+            waitforit -host=127.0.0.1 -port=5432 -timeout=20 -debug -proto tcp
       - name: Run tests
         run: |
           docker run --rm \
diff --git a/docker/bin/docker-entrypoint.sh b/docker/bin/docker-entrypoint.sh
index ddbc467e..7c67a60f 100755
--- a/docker/bin/docker-entrypoint.sh
+++ b/docker/bin/docker-entrypoint.sh
@@ -6,10 +6,6 @@ export STATIC_ROOT="${STATIC_ROOT:-/var/run/app/static}"
 export UWSGI_PROCESSES="${UWSGI_PROCESSES:-"4"}"
 mkdir -p "${MEDIA_ROOT}" "${STATIC_ROOT}" || echo "Cannot create dirs ${MEDIA_ROOT} ${STATIC_ROOT}"
 
-echo 111, $1
-echo 222, "$@"
-
-
 case "$1" in
     run)
 	    set -- tini -- "$@"
diff --git a/docker/bin/wait-for-it.sh b/docker/bin/wait-for-it.sh
deleted file mode 100755
index 3974640b..00000000
--- a/docker/bin/wait-for-it.sh
+++ /dev/null
@@ -1,182 +0,0 @@
-#!/usr/bin/env bash
-# Use this script to test if a given TCP host/port are available
-
-WAITFORIT_cmdname=${0##*/}
-
-echoerr() { if [[ $WAITFORIT_QUIET -ne 1 ]]; then echo "$@" 1>&2; fi }
-
-usage()
-{
-    cat << USAGE >&2
-Usage:
-    $WAITFORIT_cmdname host:port [-s] [-t timeout] [-- command args]
-    -h HOST | --host=HOST       Host or IP under test
-    -p PORT | --port=PORT       TCP port under test
-                                Alternatively, you specify the host and port as host:port
-    -s | --strict               Only execute subcommand if the test succeeds
-    -q | --quiet                Don't output any status messages
-    -t TIMEOUT | --timeout=TIMEOUT
-                                Timeout in seconds, zero for no timeout
-    -- COMMAND ARGS             Execute command with args after the test finishes
-USAGE
-    exit 1
-}
-
-wait_for()
-{
-    if [[ $WAITFORIT_TIMEOUT -gt 0 ]]; then
-        echoerr "$WAITFORIT_cmdname: waiting $WAITFORIT_TIMEOUT seconds for $WAITFORIT_HOST:$WAITFORIT_PORT"
-    else
-        echoerr "$WAITFORIT_cmdname: waiting for $WAITFORIT_HOST:$WAITFORIT_PORT without a timeout"
-    fi
-    WAITFORIT_start_ts=$(date +%s)
-    while :
-    do
-        if [[ $WAITFORIT_ISBUSY -eq 1 ]]; then
-            nc -z $WAITFORIT_HOST $WAITFORIT_PORT
-            WAITFORIT_result=$?
-        else
-            (echo -n > /dev/tcp/$WAITFORIT_HOST/$WAITFORIT_PORT) >/dev/null 2>&1
-            WAITFORIT_result=$?
-        fi
-        if [[ $WAITFORIT_result -eq 0 ]]; then
-            WAITFORIT_end_ts=$(date +%s)
-            echoerr "$WAITFORIT_cmdname: $WAITFORIT_HOST:$WAITFORIT_PORT is available after $((WAITFORIT_end_ts - WAITFORIT_start_ts)) seconds"
-            break
-        fi
-        sleep 1
-    done
-    return $WAITFORIT_result
-}
-
-wait_for_wrapper()
-{
-    # In order to support SIGINT during timeout: http://unix.stackexchange.com/a/57692
-    if [[ $WAITFORIT_QUIET -eq 1 ]]; then
-        timeout $WAITFORIT_BUSYTIMEFLAG $WAITFORIT_TIMEOUT $0 --quiet --child --host=$WAITFORIT_HOST --port=$WAITFORIT_PORT --timeout=$WAITFORIT_TIMEOUT &
-    else
-        timeout $WAITFORIT_BUSYTIMEFLAG $WAITFORIT_TIMEOUT $0 --child --host=$WAITFORIT_HOST --port=$WAITFORIT_PORT --timeout=$WAITFORIT_TIMEOUT &
-    fi
-    WAITFORIT_PID=$!
-    trap "kill -INT -$WAITFORIT_PID" INT
-    wait $WAITFORIT_PID
-    WAITFORIT_RESULT=$?
-    if [[ $WAITFORIT_RESULT -ne 0 ]]; then
-        echoerr "$WAITFORIT_cmdname: timeout occurred after waiting $WAITFORIT_TIMEOUT seconds for $WAITFORIT_HOST:$WAITFORIT_PORT"
-    fi
-    return $WAITFORIT_RESULT
-}
-
-# process arguments
-while [[ $# -gt 0 ]]
-do
-    case "$1" in
-        *:* )
-        WAITFORIT_hostport=(${1//:/ })
-        WAITFORIT_HOST=${WAITFORIT_hostport[0]}
-        WAITFORIT_PORT=${WAITFORIT_hostport[1]}
-        shift 1
-        ;;
-        --child)
-        WAITFORIT_CHILD=1
-        shift 1
-        ;;
-        -q | --quiet)
-        WAITFORIT_QUIET=1
-        shift 1
-        ;;
-        -s | --strict)
-        WAITFORIT_STRICT=1
-        shift 1
-        ;;
-        -h)
-        WAITFORIT_HOST="$2"
-        if [[ $WAITFORIT_HOST == "" ]]; then break; fi
-        shift 2
-        ;;
-        --host=*)
-        WAITFORIT_HOST="${1#*=}"
-        shift 1
-        ;;
-        -p)
-        WAITFORIT_PORT="$2"
-        if [[ $WAITFORIT_PORT == "" ]]; then break; fi
-        shift 2
-        ;;
-        --port=*)
-        WAITFORIT_PORT="${1#*=}"
-        shift 1
-        ;;
-        -t)
-        WAITFORIT_TIMEOUT="$2"
-        if [[ $WAITFORIT_TIMEOUT == "" ]]; then break; fi
-        shift 2
-        ;;
-        --timeout=*)
-        WAITFORIT_TIMEOUT="${1#*=}"
-        shift 1
-        ;;
-        --)
-        shift
-        WAITFORIT_CLI=("$@")
-        break
-        ;;
-        --help)
-        usage
-        ;;
-        *)
-        echoerr "Unknown argument: $1"
-        usage
-        ;;
-    esac
-done
-
-if [[ "$WAITFORIT_HOST" == "" || "$WAITFORIT_PORT" == "" ]]; then
-    echoerr "Error: you need to provide a host and port to test."
-    usage
-fi
-
-WAITFORIT_TIMEOUT=${WAITFORIT_TIMEOUT:-15}
-WAITFORIT_STRICT=${WAITFORIT_STRICT:-0}
-WAITFORIT_CHILD=${WAITFORIT_CHILD:-0}
-WAITFORIT_QUIET=${WAITFORIT_QUIET:-0}
-
-# Check to see if timeout is from busybox?
-WAITFORIT_TIMEOUT_PATH=$(type -p timeout)
-WAITFORIT_TIMEOUT_PATH=$(realpath $WAITFORIT_TIMEOUT_PATH 2>/dev/null || readlink -f $WAITFORIT_TIMEOUT_PATH)
-
-WAITFORIT_BUSYTIMEFLAG=""
-if [[ $WAITFORIT_TIMEOUT_PATH =~ "busybox" ]]; then
-    WAITFORIT_ISBUSY=1
-    # Check if busybox timeout uses -t flag
-    # (recent Alpine versions don't support -t anymore)
-    if timeout &>/dev/stdout | grep -q -e '-t '; then
-        WAITFORIT_BUSYTIMEFLAG="-t"
-    fi
-else
-    WAITFORIT_ISBUSY=0
-fi
-
-if [[ $WAITFORIT_CHILD -gt 0 ]]; then
-    wait_for
-    WAITFORIT_RESULT=$?
-    exit $WAITFORIT_RESULT
-else
-    if [[ $WAITFORIT_TIMEOUT -gt 0 ]]; then
-        wait_for_wrapper
-        WAITFORIT_RESULT=$?
-    else
-        wait_for
-        WAITFORIT_RESULT=$?
-    fi
-fi
-
-if [[ $WAITFORIT_CLI != "" ]]; then
-    if [[ $WAITFORIT_RESULT -ne 0 && $WAITFORIT_STRICT -eq 1 ]]; then
-        echoerr "$WAITFORIT_cmdname: strict mode, refusing to execute subprocess"
-        exit $WAITFORIT_RESULT
-    fi
-    exec "${WAITFORIT_CLI[@]}"
-else
-    exit $WAITFORIT_RESULT
-fi
\ No newline at end of file

From dbd1bbc7e2715aab119274d5babdd2d991628317 Mon Sep 17 00:00:00 2001
From: sax 
Date: Wed, 12 Jun 2024 07:16:35 +0200
Subject: [PATCH 32/57] 2024-06-12 07:16 - updates ci:debug

---
 .github/workflows/test.yml | 4 ----
 1 file changed, 4 deletions(-)

diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index 0d611268..26945219 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -106,10 +106,6 @@ jobs:
     steps:
       - name: Checkout code
         uses: actions/checkout@v2
-      - name: Checkout code
-        run:
-          docker run --rm \
-            waitforit -host=127.0.0.1 -port=5432 -timeout=20 -debug -proto tcp
       - name: Run tests
         run: |
           docker run --rm \

From 6acc9c51298681775fe45ce641640b90f3f51911 Mon Sep 17 00:00:00 2001
From: sax 
Date: Wed, 12 Jun 2024 07:57:56 +0200
Subject: [PATCH 33/57] 2024-06-12 07:57 - updates ci:debug

---
 .github/workflows/test.yml |   8 +-
 docker/bin/wait-for-it.sh  | 182 +++++++++++++++++++++++++++++++++++++
 2 files changed, 188 insertions(+), 2 deletions(-)
 create mode 100644 docker/bin/wait-for-it.sh

diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index 26945219..3eff2a41 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -106,17 +106,21 @@ jobs:
     steps:
       - name: Checkout code
         uses: actions/checkout@v2
+      - name: Checkout code
+        run:
+          docker run --rm \
+            waitforit -host=127.0.0.1 -port=5432 -timeout=20 -debug -proto tcp
       - name: Run tests
         run: |
           docker run --rm \
-            -e DATABASE_URL=postgres://postgres:postgres@127.0.0.1:5432/dedupe \
+            -e DATABASE_URL=postgres://postgres:postgres@db:5432/dedupe \
             -e SECRET_KEY=secret_key \
             -e CACHE_URL=redis://redis:6379/0 \
             -e CELERY_BROKER_URL=redis://redis:6379/0 \
             -v $PWD:/code/app \
             -w /code/app \
             -t ${{needs.setup.outputs.image_name}} \
-            pytest tests -v --maxfail=10
+            pytest tests --create-db -v --maxfail=10
       - name: Upload coverage to Codecov
         uses: codecov/codecov-action@v4
         with:
diff --git a/docker/bin/wait-for-it.sh b/docker/bin/wait-for-it.sh
new file mode 100644
index 00000000..5d868ecd
--- /dev/null
+++ b/docker/bin/wait-for-it.sh
@@ -0,0 +1,182 @@
+#!/usr/bin/env bash
+# Use this script to test if a given TCP host/port are available
+
+WAITFORIT_cmdname=${0##*/}
+
+echoerr() { if [[ $WAITFORIT_QUIET -ne 1 ]]; then echo "$@" 1>&2; fi }
+
+usage()
+{
+    cat << USAGE >&2
+Usage:
+    $WAITFORIT_cmdname host:port [-s] [-t timeout] [-- command args]
+    -h HOST | --host=HOST       Host or IP under test
+    -p PORT | --port=PORT       TCP port under test
+                                Alternatively, you specify the host and port as host:port
+    -s | --strict               Only execute subcommand if the test succeeds
+    -q | --quiet                Don't output any status messages
+    -t TIMEOUT | --timeout=TIMEOUT
+                                Timeout in seconds, zero for no timeout
+    -- COMMAND ARGS             Execute command with args after the test finishes
+USAGE
+    exit 1
+}
+
+wait_for()
+{
+    if [[ $WAITFORIT_TIMEOUT -gt 0 ]]; then
+        echoerr "$WAITFORIT_cmdname: waiting $WAITFORIT_TIMEOUT seconds for $WAITFORIT_HOST:$WAITFORIT_PORT"
+    else
+        echoerr "$WAITFORIT_cmdname: waiting for $WAITFORIT_HOST:$WAITFORIT_PORT without a timeout"
+    fi
+    WAITFORIT_start_ts=$(date +%s)
+    while :
+    do
+        if [[ $WAITFORIT_ISBUSY -eq 1 ]]; then
+            nc -z $WAITFORIT_HOST $WAITFORIT_PORT
+            WAITFORIT_result=$?
+        else
+            (echo -n > /dev/tcp/$WAITFORIT_HOST/$WAITFORIT_PORT) >/dev/null 2>&1
+            WAITFORIT_result=$?
+        fi
+        if [[ $WAITFORIT_result -eq 0 ]]; then
+            WAITFORIT_end_ts=$(date +%s)
+            echoerr "$WAITFORIT_cmdname: $WAITFORIT_HOST:$WAITFORIT_PORT is available after $((WAITFORIT_end_ts - WAITFORIT_start_ts)) seconds"
+            break
+        fi
+        sleep 1
+    done
+    return $WAITFORIT_result
+}
+
+wait_for_wrapper()
+{
+    # In order to support SIGINT during timeout: http://unix.stackexchange.com/a/57692
+    if [[ $WAITFORIT_QUIET -eq 1 ]]; then
+        $timeout $WAITFORIT_BUSYTIMEFLAG $WAITFORIT_TIMEOUT $0 --quiet --child --host=$WAITFORIT_HOST --port=$WAITFORIT_PORT --timeout=$WAITFORIT_TIMEOUT &
+    else
+        $timeout $WAITFORIT_BUSYTIMEFLAG $WAITFORIT_TIMEOUT $0 --child --host=$WAITFORIT_HOST --port=$WAITFORIT_PORT --timeout=$WAITFORIT_TIMEOUT &
+    fi
+    WAITFORIT_PID=$!
+    trap "kill -INT -$WAITFORIT_PID" INT
+    wait $WAITFORIT_PID
+    WAITFORIT_RESULT=$?
+    if [[ $WAITFORIT_RESULT -ne 0 ]]; then
+        echoerr "$WAITFORIT_cmdname: timeout occurred after waiting $WAITFORIT_TIMEOUT seconds for $WAITFORIT_HOST:$WAITFORIT_PORT"
+    fi
+    return $WAITFORIT_RESULT
+}
+
+# process arguments
+while [[ $# -gt 0 ]]
+do
+    case "$1" in
+        *:* )
+        WAITFORIT_hostport=(${1//:/ })
+        WAITFORIT_HOST=${WAITFORIT_hostport[0]}
+        WAITFORIT_PORT=${WAITFORIT_hostport[1]}
+        shift 1
+        ;;
+        --child)
+        WAITFORIT_CHILD=1
+        shift 1
+        ;;
+        -q | --quiet)
+        WAITFORIT_QUIET=1
+        shift 1
+        ;;
+        -s | --strict)
+        WAITFORIT_STRICT=1
+        shift 1
+        ;;
+        -h)
+        WAITFORIT_HOST="$2"
+        if [[ $WAITFORIT_HOST == "" ]]; then break; fi
+        shift 2
+        ;;
+        --host=*)
+        WAITFORIT_HOST="${1#*=}"
+        shift 1
+        ;;
+        -p)
+        WAITFORIT_PORT="$2"
+        if [[ $WAITFORIT_PORT == "" ]]; then break; fi
+        shift 2
+        ;;
+        --port=*)
+        WAITFORIT_PORT="${1#*=}"
+        shift 1
+        ;;
+        -t)
+        WAITFORIT_TIMEOUT="$2"
+        if [[ $WAITFORIT_TIMEOUT == "" ]]; then break; fi
+        shift 2
+        ;;
+        --timeout=*)
+        WAITFORIT_TIMEOUT="${1#*=}"
+        shift 1
+        ;;
+        --)
+        shift
+        WAITFORIT_CLI=("$@")
+        break
+        ;;
+        --help)
+        usage
+        ;;
+        *)
+        echoerr "Unknown argument: $1"
+        usage
+        ;;
+    esac
+done
+
+if [[ "$WAITFORIT_HOST" == "" || "$WAITFORIT_PORT" == "" ]]; then
+    echoerr "Error: you need to provide a host and port to test."
+    usage
+fi
+
+WAITFORIT_TIMEOUT=${WAITFORIT_TIMEOUT:-15}
+WAITFORIT_STRICT=${WAITFORIT_STRICT:-0}
+WAITFORIT_CHILD=${WAITFORIT_CHILD:-0}
+WAITFORIT_QUIET=${WAITFORIT_QUIET:-0}
+
+# Check to see if timeout is from busybox?
+WAITFORIT_TIMEOUT_PATH=$(type -p timeout)
+WAITFORIT_TIMEOUT_PATH=$(realpath $WAITFORIT_TIMEOUT_PATH 2>/dev/null || readlink -f $WAITFORIT_TIMEOUT_PATH)
+
+WAITFORIT_BUSYTIMEFLAG=""
+if [[ $WAITFORIT_TIMEOUT_PATH =~ "busybox" ]]; then
+    WAITFORIT_ISBUSY=1
+    # Check if busybox timeout uses -t flag
+    # (recent Alpine versions don't support -t anymore)
+    if timeout &>/dev/stdout | grep -q -e '-t '; then
+        WAITFORIT_BUSYTIMEFLAG="-t"
+    fi
+else
+    WAITFORIT_ISBUSY=0
+fi
+
+if [[ $WAITFORIT_CHILD -gt 0 ]]; then
+    wait_for
+    WAITFORIT_RESULT=$?
+    exit $WAITFORIT_RESULT
+else
+    if [[ $WAITFORIT_TIMEOUT -gt 0 ]]; then
+        wait_for_wrapper
+        WAITFORIT_RESULT=$?
+    else
+        wait_for
+        WAITFORIT_RESULT=$?
+    fi
+fi
+
+if [[ $WAITFORIT_CLI != "" ]]; then
+    if [[ $WAITFORIT_RESULT -ne 0 && $WAITFORIT_STRICT -eq 1 ]]; then
+        echoerr "$WAITFORIT_cmdname: strict mode, refusing to execute subprocess"
+        exit $WAITFORIT_RESULT
+    fi
+    exec "${WAITFORIT_CLI[@]}"
+else
+    exit $WAITFORIT_RESULT
+fi
\ No newline at end of file

From db52bf30c77d0df82017dc40d53e719601d5acc3 Mon Sep 17 00:00:00 2001
From: sax 
Date: Wed, 12 Jun 2024 10:14:40 +0200
Subject: [PATCH 34/57] 2024-06-12 10:14 - updates ci:debug

---
 .github/actions/image_updated/action.yml | 13 ++++++---
 .github/workflows/_build.yml             |  9 ++++--
 .github/workflows/test.yml               | 35 +++++++++++++-----------
 docker/Dockerfile                        |  2 +-
 4 files changed, 36 insertions(+), 23 deletions(-)

diff --git a/.github/actions/image_updated/action.yml b/.github/actions/image_updated/action.yml
index 6154ecf0..2d310f77 100644
--- a/.github/actions/image_updated/action.yml
+++ b/.github/actions/image_updated/action.yml
@@ -36,6 +36,9 @@ outputs:
   build_date:
       description: 'Returns tha image build date'
       value: ${{ steps.check.outputs.build_date }}
+  version:
+      description: 'Returns the image version'
+      value: ${{ steps.check.outputs.version }}
 
 runs:
   using: 'composite'
@@ -48,11 +51,13 @@ runs:
         architecture=${{ inputs.architecture }}
         repo="${ref%:*}"
         tag="${ref##*:}"
+        version
         
-        echo "::debug::REF:${ref}"
-        echo "::debug::REPO:${repo}"
-        echo "::debug::TAG:${tag}"
-        echo "::debug::ARCH:${architecture}"
+        echo "::notice::REF:${ref}"
+        echo "::notice::REPO:${repo}"
+        echo "::notice::TAG:${tag}"
+        echo "::notice::ARCH:${architecture}"
+        echo "::notice::VER:${version}"
         
         echo "image=$ref" >> $GITHUB_OUTPUT
         echo "repo=$repo" >> $GITHUB_OUTPUT
diff --git a/.github/workflows/_build.yml b/.github/workflows/_build.yml
index 91d47bb7..d96741d5 100644
--- a/.github/workflows/_build.yml
+++ b/.github/workflows/_build.yml
@@ -17,6 +17,10 @@ on:
         required: false
         type: string
         default: false
+      version:
+        description: "Docker version"
+        required: true
+        type: string
       target:
         description: "Dockerfile stage to stop to"
         required: true
@@ -85,6 +89,7 @@ jobs:
           echo image           : ${{steps.setup.outputs.image}}
           echo tag             : ${{inputs.image}}
           echo target          : ${{inputs.target}}
+          echo version         : ${{steps.image_updated.outputs.version}}
           echo target-checksum : ${{inputs.checksum}}
           echo docker-checksum : ${{steps.image_updated.outputs.checksum }}
           echo image_exists    : ${{steps.image_updated.outputs.exists}}
@@ -118,13 +123,13 @@ jobs:
           file: ./docker/Dockerfile
           platforms: linux/amd64
           outputs: type=registry
-          cache-from: "type=registry,ref=${{inputs.image}}-buildcache,ref=${{steps.setup.outputs.image}}:test-develop,mode=max"
+          cache-from: "type=registry,ref=${{inputs.image}}-buildcache,mode=max"
           cache-to: "type=registry,ref=${{inputs.image}}-buildcache,mode=max"
           labels: BuildNumber=${{ steps.setup.outputs.build_number }},
           build-args: |
             BUILD_DATE=${{ steps.setup.outputs.date }}
             CHECKSUM=${{ steps.release_hash.outputs.hash }}
-            VERSION=${{ steps.version.outputs.version }}
+            VERSION=${{ inputs.version }}
             SOURCE_COMMIT=${{ env.LAST_COMMIT_SHA }}
             GITHUB_SERVER_URL=${{ github.server_url }}
             GITHUB_REPOSITORY=${{ github.repository }}
diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index 3eff2a41..97e294ad 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -30,14 +30,14 @@ jobs:
     runs-on: ubuntu-latest
     timeout-minutes: 3
     outputs:
-      test_files: ${{ steps.changes.outputs.run_tests }}
-      docker: ${{ steps.changes.outputs.docker_base }}
-      python_files: ${{ steps.changes.outputs.python }}
-      branch: ${{ steps.extract_branch.outputs.branch }}
-      hash: ${{ steps.release_hash.outputs.hash }}
-      tags: ${{ steps.meta.outputs.tags }}
-      version: ${{ steps.meta.outputs.version }}
-      image_name: ${{ steps.image_name.outputs.name }}
+#      test_files: ${{ steps.changes.outputs.run_tests }}
+#      docker: ${{ steps.changes.outputs.docker_base }}
+#      python_files: ${{ steps.changes.outputs.python }}
+#      branch: ${{ steps.extract_branch.outputs.branch }}
+      hash: ${{ steps.docker_image.outputs.checksum }}
+#      tags: ${{ steps.meta.outputs.tags }}
+      version: ${{ steps.docker_image.outputs.version }}
+      image_name: ${{ steps.docker_image.outputs.name }}
     steps:
       - run: git config --global --add safe.directory $(realpath .)
       - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
@@ -48,25 +48,28 @@ jobs:
           base: ${{ github.ref }}
           token: ${{ github.token }}
           filters: .github/file-filters.yml
-      - id: extract_branch
-        name: Extract branch name
-#        shell: bash
-        run: echo "branch=${GITHUB_HEAD_REF:-${GITHUB_REF#refs/heads/}}" >> $GITHUB_OUTPUT
       - id: release_hash
         uses: ./.github/actions/distro_hash
       - name: Docker meta
         id: meta
         uses: docker/metadata-action@v5
       - name: Define target image name
-        id: image_name
+        id: docker_image
         run: |
+          branch=${GITHUB_HEAD_REF:-${GITHUB_REF#refs/heads/}}
+          version="${branch##*/}"
           echo "name=${{vars.DOCKER_IMAGE}}:test-${{steps.meta.outputs.version}}" >> $GITHUB_OUTPUT
+          echo "tag=test-${{steps.meta.outputs.version}}" >> $GITHUB_OUTPUT
+          echo "checksum=${{ steps.release_hash.outputs.hash }}" >> $GITHUB_OUTPUT
+          echo "version=$version" >> $GITHUB_OUTPUT
+          echo "branch=$branch" >> $GITHUB_OUTPUT
+
       - id: image_updated
         name: Check if image exists and updated
         uses: ./.github/actions/image_updated
         with:
-          image: ${{steps.image_name.outputs.name}}
-          checksum: ${{needs.setup.outputs.hash}}
+          image: ${{steps.docker_image.outputs.name}}
+          checksum: ${{steps.docker_image.outputs.checksum}}
           username: ${{ secrets.username }}
           password: ${{ secrets.password }}
 
@@ -77,8 +80,8 @@ jobs:
     secrets: inherit
     with:
       image: ${{needs.setup.outputs.image_name}}
-      cache-from: ${{needs.setup.outputs.image_name}}
       checksum: ${{needs.setup.outputs.hash}}
+      version: ${{needs.setup.outputs.version}}
       target: "python_dev_deps"
 
   test:
diff --git a/docker/Dockerfile b/docker/Dockerfile
index 0844267a..9c0a5445 100644
--- a/docker/Dockerfile
+++ b/docker/Dockerfile
@@ -202,7 +202,7 @@ RUN < /RELEASE
  "commit": "$SOURCE_COMMIT",
  "date": "$BUILD_DATE",
  "checksum": "$CHECKSUM",
- "source": "${GITHUB_SERVER_URL}/$${GITHUB_REPOSITORY}/tree/${SOURCE_COMMIT:-master}/"
+ "source": "${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}/tree/${SOURCE_COMMIT:-master}/"
 }
 EOF
 

From 42246da26d46d4138d7518205e81ee0b38d7f7b4 Mon Sep 17 00:00:00 2001
From: sax 
Date: Wed, 12 Jun 2024 10:52:26 +0200
Subject: [PATCH 35/57] 2024-06-12 10:52 - updates ci:debug

---
 .github/workflows/test.yml | 9 ---------
 1 file changed, 9 deletions(-)

diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index 97e294ad..7356699f 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -64,15 +64,6 @@ jobs:
           echo "version=$version" >> $GITHUB_OUTPUT
           echo "branch=$branch" >> $GITHUB_OUTPUT
 
-      - id: image_updated
-        name: Check if image exists and updated
-        uses: ./.github/actions/image_updated
-        with:
-          image: ${{steps.docker_image.outputs.name}}
-          checksum: ${{steps.docker_image.outputs.checksum}}
-          username: ${{ secrets.username }}
-          password: ${{ secrets.password }}
-
   build:
     if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name != github.event.pull_request.base.repo.full_name
     uses: ./.github/workflows/_build.yml

From 78524e912b346ea0d71da18f7f3adbd375c8a565 Mon Sep 17 00:00:00 2001
From: sax 
Date: Wed, 12 Jun 2024 10:54:39 +0200
Subject: [PATCH 36/57] 2024-06-12 10:54 - updates ci:debug

---
 .github/actions/image_updated/action.yml | 13 ++++++-------
 1 file changed, 6 insertions(+), 7 deletions(-)

diff --git a/.github/actions/image_updated/action.yml b/.github/actions/image_updated/action.yml
index 2d310f77..95fd5a4c 100644
--- a/.github/actions/image_updated/action.yml
+++ b/.github/actions/image_updated/action.yml
@@ -51,13 +51,12 @@ runs:
         architecture=${{ inputs.architecture }}
         repo="${ref%:*}"
         tag="${ref##*:}"
-        version
-        
-        echo "::notice::REF:${ref}"
-        echo "::notice::REPO:${repo}"
-        echo "::notice::TAG:${tag}"
-        echo "::notice::ARCH:${architecture}"
-        echo "::notice::VER:${version}"
+#
+#        echo "::notice::REF:${ref}"
+#        echo "::notice::REPO:${repo}"
+#        echo "::notice::TAG:${tag}"
+#        echo "::notice::ARCH:${architecture}"
+#        echo "::notice::VER:${version}"
         
         echo "image=$ref" >> $GITHUB_OUTPUT
         echo "repo=$repo" >> $GITHUB_OUTPUT

From 477c8c4574b3af40f9c4e8f6f3771b81486f3d89 Mon Sep 17 00:00:00 2001
From: sax 
Date: Wed, 12 Jun 2024 14:03:25 +0200
Subject: [PATCH 37/57] 2024-06-12 14:03 - updates ci:debug

---
 .github/actions/image_updated/action.yml |  7 ----
 .github/workflows/_build.yml             | 52 ++++++++++++------------
 2 files changed, 27 insertions(+), 32 deletions(-)

diff --git a/.github/actions/image_updated/action.yml b/.github/actions/image_updated/action.yml
index 95fd5a4c..7886e1e3 100644
--- a/.github/actions/image_updated/action.yml
+++ b/.github/actions/image_updated/action.yml
@@ -51,13 +51,6 @@ runs:
         architecture=${{ inputs.architecture }}
         repo="${ref%:*}"
         tag="${ref##*:}"
-#
-#        echo "::notice::REF:${ref}"
-#        echo "::notice::REPO:${repo}"
-#        echo "::notice::TAG:${tag}"
-#        echo "::notice::ARCH:${architecture}"
-#        echo "::notice::VER:${version}"
-        
         echo "image=$ref" >> $GITHUB_OUTPUT
         echo "repo=$repo" >> $GITHUB_OUTPUT
         echo "tag=$tag" >> $GITHUB_OUTPUT
diff --git a/.github/workflows/_build.yml b/.github/workflows/_build.yml
index d96741d5..eb30e502 100644
--- a/.github/workflows/_build.yml
+++ b/.github/workflows/_build.yml
@@ -114,29 +114,31 @@ jobs:
         run: |
           echo "::warning:: Build docker ${{ inputs.image }} - Checksum: ${{ steps.release_hash.outputs.hash }}"
       - name: Build and push
-        if: ${{ steps.image_updated.outputs.updated != 'true' || inputs.force == 'true' }}
-        uses: docker/build-push-action@v5
-        with:
-          context: .
-          tags: ${{ inputs.image }}
-          target: ${{ inputs.target }}
-          file: ./docker/Dockerfile
-          platforms: linux/amd64
-          outputs: type=registry
-          cache-from: "type=registry,ref=${{inputs.image}}-buildcache,mode=max"
-          cache-to: "type=registry,ref=${{inputs.image}}-buildcache,mode=max"
-          labels: BuildNumber=${{ steps.setup.outputs.build_number }},
-          build-args: |
-            BUILD_DATE=${{ steps.setup.outputs.date }}
-            CHECKSUM=${{ steps.release_hash.outputs.hash }}
-            VERSION=${{ inputs.version }}
-            SOURCE_COMMIT=${{ env.LAST_COMMIT_SHA }}
-            GITHUB_SERVER_URL=${{ github.server_url }}
-            GITHUB_REPOSITORY=${{ github.repository }}
-      - name: Dump Image Info
+        if: steps.image_updated.outputs.updated != 'true' || inputs.force == 'true' }}
         run: |
-          echo "Pulling... ${{ inputs.image }}"
-          docker pull --platform linux/amd64 ${{ inputs.image }}
-          docker inspect --format='{{json .Config.Labels}}' ${{ inputs.image }}
-          docker run --platform linux/amd64 -t ${{ inputs.image }} release-info.sh
-          echo "----------"
+          echo "==========="
+#        uses: docker/build-push-action@v5
+#        with:
+#          context: .
+#          tags: ${{ inputs.image }}
+#          target: ${{ inputs.target }}
+#          file: ./docker/Dockerfile
+#          platforms: linux/amd64
+#          outputs: type=registry
+#          cache-from: "type=registry,ref=${{inputs.image}}-buildcache,mode=max"
+#          cache-to: "type=registry,ref=${{inputs.image}}-buildcache,mode=max"
+#          labels: BuildNumber=${{ steps.setup.outputs.build_number }},
+#          build-args: |
+#            BUILD_DATE=${{ steps.setup.outputs.date }}
+#            CHECKSUM=${{ steps.release_hash.outputs.hash }}
+#            VERSION=${{ inputs.version }}
+#            SOURCE_COMMIT=${{ env.LAST_COMMIT_SHA }}
+#            GITHUB_SERVER_URL=${{ github.server_url }}
+#            GITHUB_REPOSITORY=${{ github.repository }}
+#      - name: Dump Image Info
+#        run: |
+#          echo "Pulling... ${{ inputs.image }}"
+#          docker pull --platform linux/amd64 ${{ inputs.image }}
+#          docker inspect --format='{{json .Config.Labels}}' ${{ inputs.image }}
+#          docker run --platform linux/amd64 -t ${{ inputs.image }} release-info.sh
+#          echo "----------"

From e4199c335f1292a28cd2fe5ec498d7b29f78512a Mon Sep 17 00:00:00 2001
From: sax 
Date: Wed, 12 Jun 2024 14:05:43 +0200
Subject: [PATCH 38/57] 2024-06-12 14:05 - updates ci:debug

---
 .github/workflows/_build.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/workflows/_build.yml b/.github/workflows/_build.yml
index eb30e502..e2ac1e56 100644
--- a/.github/workflows/_build.yml
+++ b/.github/workflows/_build.yml
@@ -114,7 +114,7 @@ jobs:
         run: |
           echo "::warning:: Build docker ${{ inputs.image }} - Checksum: ${{ steps.release_hash.outputs.hash }}"
       - name: Build and push
-        if: steps.image_updated.outputs.updated != 'true' || inputs.force == 'true' }}
+        if: steps.image_updated.outputs.updated != 'true' || inputs.force == 'true'
         run: |
           echo "==========="
 #        uses: docker/build-push-action@v5

From ace3b444304165be2e0f9959c2de23e2cdf6c757 Mon Sep 17 00:00:00 2001
From: sax 
Date: Wed, 12 Jun 2024 14:35:21 +0200
Subject: [PATCH 39/57] 2024-06-12 14:35 - updates ci:debug

---
 .github/workflows/test.yml | 4 ----
 1 file changed, 4 deletions(-)

diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index 7356699f..62b98f7c 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -100,10 +100,6 @@ jobs:
     steps:
       - name: Checkout code
         uses: actions/checkout@v2
-      - name: Checkout code
-        run:
-          docker run --rm \
-            waitforit -host=127.0.0.1 -port=5432 -timeout=20 -debug -proto tcp
       - name: Run tests
         run: |
           docker run --rm \

From 638ea53d0cf903ef2dda1566c1296518dd253e58 Mon Sep 17 00:00:00 2001
From: sax 
Date: Wed, 12 Jun 2024 15:01:20 +0200
Subject: [PATCH 40/57] 2024-06-12 15:01 - updates ci:debug

---
 .github/workflows/test.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index 62b98f7c..45e4b172 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -103,7 +103,7 @@ jobs:
       - name: Run tests
         run: |
           docker run --rm \
-            -e DATABASE_URL=postgres://postgres:postgres@db:5432/dedupe \
+            -e DATABASE_URL=postgres://postgres:postgres@localhost:5432/dedupe \
             -e SECRET_KEY=secret_key \
             -e CACHE_URL=redis://redis:6379/0 \
             -e CELERY_BROKER_URL=redis://redis:6379/0 \

From 1abfdc1fc7fd147c6b84c8d1d70fbdabf2f3258a Mon Sep 17 00:00:00 2001
From: sax 
Date: Wed, 12 Jun 2024 17:23:30 +0200
Subject: [PATCH 41/57] 2024-06-12 17:23 - updates ci:debug

---
 .github/workflows/test.yml | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index 45e4b172..f3968c77 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -79,12 +79,14 @@ jobs:
     if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name != github.event.pull_request.base.repo.full_name
     needs: [build, setup]
     runs-on: ubuntu-latest
+    container: "ubuntu:latest"
     services:
       redis:
         image: redis
       db:
         image: postgres:14
         env:
+          POSTGRES_HOST: db
           POSTGRES_DATABASE: dedupe
           POSTGRES_PASSWORD: postgres
           POSTGRES_USERNAME: postgres
@@ -103,7 +105,7 @@ jobs:
       - name: Run tests
         run: |
           docker run --rm \
-            -e DATABASE_URL=postgres://postgres:postgres@localhost:5432/dedupe \
+            -e DATABASE_URL=postgres://postgres:postgres@db:5432/dedupe \
             -e SECRET_KEY=secret_key \
             -e CACHE_URL=redis://redis:6379/0 \
             -e CELERY_BROKER_URL=redis://redis:6379/0 \

From 07e5c2417a42c67c623b3c35fc75f95033039911 Mon Sep 17 00:00:00 2001
From: sax 
Date: Wed, 12 Jun 2024 17:25:12 +0200
Subject: [PATCH 42/57] 2024-06-12 17:25

---
 .github/workflows/test.yml | 1 -
 1 file changed, 1 deletion(-)

diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index f3968c77..4800d73e 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -79,7 +79,6 @@ jobs:
     if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name != github.event.pull_request.base.repo.full_name
     needs: [build, setup]
     runs-on: ubuntu-latest
-    container: "ubuntu:latest"
     services:
       redis:
         image: redis

From 4bf1fb885cb082693dc8fc5a79c415bf2dc821d7 Mon Sep 17 00:00:00 2001
From: sax 
Date: Wed, 12 Jun 2024 17:28:55 +0200
Subject: [PATCH 43/57] 2024-06-12 17:28

---
 .github/workflows/test.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index 4800d73e..5c118d47 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -104,7 +104,7 @@ jobs:
       - name: Run tests
         run: |
           docker run --rm \
-            -e DATABASE_URL=postgres://postgres:postgres@db:5432/dedupe \
+            -e DATABASE_URL=postgres://postgres:postgres@localhost:5432/dedupe \
             -e SECRET_KEY=secret_key \
             -e CACHE_URL=redis://redis:6379/0 \
             -e CELERY_BROKER_URL=redis://redis:6379/0 \

From ea8f5b45117e372b412b6e3b2c38ecabc4e23ace Mon Sep 17 00:00:00 2001
From: sax 
Date: Wed, 12 Jun 2024 18:00:18 +0200
Subject: [PATCH 44/57] 2024-06-12 18:00

---
 .github/workflows/test.yml | 1 +
 1 file changed, 1 insertion(+)

diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index 5c118d47..13d08753 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -108,6 +108,7 @@ jobs:
             -e SECRET_KEY=secret_key \
             -e CACHE_URL=redis://redis:6379/0 \
             -e CELERY_BROKER_URL=redis://redis:6379/0 \
+            --network host \
             -v $PWD:/code/app \
             -w /code/app \
             -t ${{needs.setup.outputs.image_name}} \

From 2a7b2f058ef29e5b016351b529d4363fa9c1a9fa Mon Sep 17 00:00:00 2001
From: sax 
Date: Wed, 12 Jun 2024 18:06:34 +0200
Subject: [PATCH 45/57] 2024-06-12 18:06

---
 pytest.ini | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pytest.ini b/pytest.ini
index 4e18ed08..602de3cd 100644
--- a/pytest.ini
+++ b/pytest.ini
@@ -15,7 +15,7 @@ addopts =
         -rs
         --tb=short
         --capture=sys
-        --cov trash
+        --cov hope_dedup_engine
         --cov-config=tests/.coveragerc
         --cov-report html
         --cov-report xml:coverage.xml

From 2ac94c6480639546bae0d98a166f0dd298a42a5b Mon Sep 17 00:00:00 2001
From: sax 
Date: Wed, 12 Jun 2024 18:45:21 +0200
Subject: [PATCH 46/57] 2024-06-12 18:45

---
 CHANGELOG.md                      | 4 ++++
 README.md                         | 7 ++++++-
 history/+random.bugfix.rst        | 1 -
 history/3456.doc.rst              | 1 -
 history/8765.removal.txt          | 1 -
 src/hope_dedup_engine/__init__.py | 4 ++--
 6 files changed, 12 insertions(+), 6 deletions(-)
 create mode 100644 CHANGELOG.md
 delete mode 100644 history/+random.bugfix.rst
 delete mode 100644 history/3456.doc.rst
 delete mode 100644 history/8765.removal.txt

diff --git a/CHANGELOG.md b/CHANGELOG.md
new file mode 100644
index 00000000..4eb7cf49
--- /dev/null
+++ b/CHANGELOG.md
@@ -0,0 +1,4 @@
+v.0.1.0 2024-06-12
+==================
+
+No significant changes.
diff --git a/README.md b/README.md
index a5ac95a0..569860a1 100644
--- a/README.md
+++ b/README.md
@@ -1,12 +1,17 @@
 ABOUT HOPE Deduplication Engine
 ===============================
 
+[![Test](https://github.com/unicef/hope-dedup-engine/actions/workflows/test.yml/badge.svg)](https://github.com/unicef/hope-dedup-engine/actions/workflows/test.yml)
+[![codecov](https://codecov.io/gh/unicef/hope-dedup-engine/graph/badge.svg?token=kAuZEX5k5o)](https://codecov.io/gh/unicef/hope-dedup-engine)
+![Version](https://img.shields.io/badge/dynamic/toml?url=https%3A%2F%2Fraw.githubusercontent.com%2Fsaxix%2Ftrash%2Fdevelop%2Fpyproject.toml&query=%24.project.version&label=version)
+![License](https://img.shields.io/badge/dynamic/toml?url=https%3A%2F%2Fraw.githubusercontent.com%2Fsaxix%2Ftrash%2Fdevelop%2Fpyproject.toml&query=%24.project.license.text&label=license)
+
 
 ## Contributing
 
 ### System Requirements
 
-- python 3.11
+- python 3.12
 - [direnv](https://direnv.net/) - not mandatory but strongly recommended
 - [pdm](https://pdm.fming.dev/2.9/)
 
diff --git a/history/+random.bugfix.rst b/history/+random.bugfix.rst
deleted file mode 100644
index 0ff80b3e..00000000
--- a/history/+random.bugfix.rst
+++ /dev/null
@@ -1 +0,0 @@
-Orphan fragments have no ticket ID.
\ No newline at end of file
diff --git a/history/3456.doc.rst b/history/3456.doc.rst
deleted file mode 100644
index c45a83dc..00000000
--- a/history/3456.doc.rst
+++ /dev/null
@@ -1 +0,0 @@
-Can also be ``rst`` as well!
\ No newline at end of file
diff --git a/history/8765.removal.txt b/history/8765.removal.txt
deleted file mode 100644
index 3f5425f8..00000000
--- a/history/8765.removal.txt
+++ /dev/null
@@ -1 +0,0 @@
-The final part is ignored, so set it to whatever you want.
diff --git a/src/hope_dedup_engine/__init__.py b/src/hope_dedup_engine/__init__.py
index 42920d47..01518441 100644
--- a/src/hope_dedup_engine/__init__.py
+++ b/src/hope_dedup_engine/__init__.py
@@ -1,5 +1,5 @@
-from hope_dedup_engine.config.celery import app as celery_app
+# from hope_dedup_engine.config.celery import app as celery_app
 
 VERSION = __version__ = "0.1.0"
 
-__all__ = ("celery_app",)
+# __all__ = ("celery_app",)

From e0ee910c70c8d8c15720827168748f1a85469d4f Mon Sep 17 00:00:00 2001
From: sax 
Date: Wed, 12 Jun 2024 19:53:16 +0200
Subject: [PATCH 47/57] 2024-06-12 19:53

---
 .bumpversion.cfg                        |  2 +-
 .github/file-filters.yml                |  5 +++++
 .github/workflows/assign_to_project.yml | 13 ++++++-------
 .github/workflows/towncrier.yml         |  2 +-
 .pre-commit-config.yaml                 |  6 +++++-
 CHANGELOG.md                            | 20 +++++++++++++++++++-
 CHANGELOG.rst                           |  4 ----
 {history => changes}/.gitignore         |  0
 changes/11111.feature                   |  0
 changes/1234.bugfix                     |  2 ++
 changes/3456.doc.rst                    |  0
 changes/444.removal                     |  0
 changes/aaaa.misc                       |  3 +++
 pyproject.toml                          |  3 ++-
 14 files changed, 44 insertions(+), 16 deletions(-)
 delete mode 100644 CHANGELOG.rst
 rename {history => changes}/.gitignore (100%)
 create mode 100644 changes/11111.feature
 create mode 100644 changes/1234.bugfix
 create mode 100644 changes/3456.doc.rst
 create mode 100644 changes/444.removal
 create mode 100644 changes/aaaa.misc

diff --git a/.bumpversion.cfg b/.bumpversion.cfg
index 75e558fe..f9ff63b4 100644
--- a/.bumpversion.cfg
+++ b/.bumpversion.cfg
@@ -3,7 +3,7 @@ commit = False
 tag = False
 allow_dirty = True
 tag_name = {new_version}
-current_version = 1.3.1-rc3
+current_version = 0.1.0
 parse = ^
 	(?P\d+)\.(?P\d+)\.(?P\d+)
 	(-?(?P(rc|final))
diff --git a/.github/file-filters.yml b/.github/file-filters.yml
index 2a632551..6612dc64 100644
--- a/.github/file-filters.yml
+++ b/.github/file-filters.yml
@@ -11,6 +11,11 @@ python: &python
   - added|modified: 'tests/**'
   - 'manage.py'
 
+changelog:
+  - added|modified: 'changes/**'
+  - 'CHANGELOG.md'
+
+
 docker_base:
   - *docker
   - *dependencies
diff --git a/.github/workflows/assign_to_project.yml b/.github/workflows/assign_to_project.yml
index 803ad7bb..e09bf448 100644
--- a/.github/workflows/assign_to_project.yml
+++ b/.github/workflows/assign_to_project.yml
@@ -12,13 +12,12 @@ env:
 
 jobs:
   assign_one_project:
-    if: vars.DEFAULT_PROJECT
+    if: ${{ vars.DEFAULT_PROJECT != '' }}
     runs-on: ubuntu-latest
     name: Assign to Project
     steps:
-    - name: Assign NEW issues and NEW pull requests to '${{ vars.DEFAULT_PROJECT }}'
-      uses: srggrs/assign-one-project-github-action@1.2.1
-      if: ${{ vars.DEFAULT_PROJECT }}
-      with:
-        project: ${{ vars.DEFAULT_PROJECT }}
-        column_name: 'New'
+      - name: Assign NEW issues and NEW pull requests to ${{ vars.DEFAULT_PROJECT }}
+        uses: srggrs/assign-one-project-github-action@1.2.1
+        with:
+          project: ${{ vars.DEFAULT_PROJECT }}
+          column_name: 'New'
diff --git a/.github/workflows/towncrier.yml b/.github/workflows/towncrier.yml
index 40b6f350..43e3defb 100644
--- a/.github/workflows/towncrier.yml
+++ b/.github/workflows/towncrier.yml
@@ -13,7 +13,7 @@ jobs:
           fetch-depth: 0
       - uses: actions/setup-python@v2
         with:
-          python-version: '3.9'
+          python-version: '3.12'
       - name: Install python deps
         run: pip install towncrier
 
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index e2f56c95..85feb2d4 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -22,4 +22,8 @@ repos:
     rev: '1.7.8' # Update me!
     hooks:
       - id: bandit
-        args: ["-c", "bandit.yaml"]
\ No newline at end of file
+        args: ["-c", "bandit.yaml"]
+  - repo: https://github.com/twisted/towncrier
+    rev: 22.13.0 
+    hooks:
+      - id: towncrier-check
\ No newline at end of file
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 4eb7cf49..00a5b6b9 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,4 +1,22 @@
 v.0.1.0 2024-06-12
 ==================
 
-No significant changes.
+### Features
+
+-  (#11111)
+
+### Bugfixes
+
+-  (#1234)
+
+### Improved Documentation
+
+-  (#3456)
+
+### Deprecations and Removals
+
+-  (#444)
+
+### Misc
+
+- aaaa
diff --git a/CHANGELOG.rst b/CHANGELOG.rst
deleted file mode 100644
index d1daad89..00000000
--- a/CHANGELOG.rst
+++ /dev/null
@@ -1,4 +0,0 @@
-v.0.1.0 2024-05-26
-==================
-
-No significant changes.
diff --git a/history/.gitignore b/changes/.gitignore
similarity index 100%
rename from history/.gitignore
rename to changes/.gitignore
diff --git a/changes/11111.feature b/changes/11111.feature
new file mode 100644
index 00000000..e69de29b
diff --git a/changes/1234.bugfix b/changes/1234.bugfix
new file mode 100644
index 00000000..a1261547
--- /dev/null
+++ b/changes/1234.bugfix
@@ -0,0 +1,2 @@
+lllll
+kjjjj
\ No newline at end of file
diff --git a/changes/3456.doc.rst b/changes/3456.doc.rst
new file mode 100644
index 00000000..e69de29b
diff --git a/changes/444.removal b/changes/444.removal
new file mode 100644
index 00000000..e69de29b
diff --git a/changes/aaaa.misc b/changes/aaaa.misc
new file mode 100644
index 00000000..2f0b7ba3
--- /dev/null
+++ b/changes/aaaa.misc
@@ -0,0 +1,3 @@
+# aaaaaa
+
+kkkkkkkkk
diff --git a/pyproject.toml b/pyproject.toml
index 8b2748a3..f3c4fbd9 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -116,9 +116,10 @@ coverage = "pytest tests/ --cov -n auto --create-db -c pytest.ini"
 act = "act"
 
 [tool.towncrier]
+name = "Deduplication Engine"
 directory = "changes"
 package = "hope_dedup_engine"
 filename = "CHANGELOG.md"
 title_format = "v.{version} {project_date}"
-issue_format = "`#{issue} `_"
+#issue_format = "`#{issue} `_"
 wrap = true

From 3ec7fbb8a2c109afaf046e4927b816723765d86e Mon Sep 17 00:00:00 2001
From: sax 
Date: Wed, 12 Jun 2024 20:00:23 +0200
Subject: [PATCH 48/57] 2024-06-12 20:00

---
 .pre-commit-config.yaml |  2 +-
 CHANGELOG.md            | 18 +-----------------
 changes/11111.feature   |  0
 changes/1234.bugfix     |  2 --
 changes/3456.doc.rst    |  0
 changes/444.removal     |  0
 changes/aaaa.misc       |  3 ---
 7 files changed, 2 insertions(+), 23 deletions(-)
 delete mode 100644 changes/11111.feature
 delete mode 100644 changes/1234.bugfix
 delete mode 100644 changes/3456.doc.rst
 delete mode 100644 changes/444.removal
 delete mode 100644 changes/aaaa.misc

diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 85feb2d4..5f2aef2b 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -24,6 +24,6 @@ repos:
       - id: bandit
         args: ["-c", "bandit.yaml"]
   - repo: https://github.com/twisted/towncrier
-    rev: 22.13.0 
+    rev: 22.13.0
     hooks:
       - id: towncrier-check
\ No newline at end of file
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 00a5b6b9..2adc2513 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -3,20 +3,4 @@ v.0.1.0 2024-06-12
 
 ### Features
 
--  (#11111)
-
-### Bugfixes
-
--  (#1234)
-
-### Improved Documentation
-
--  (#3456)
-
-### Deprecations and Removals
-
--  (#444)
-
-### Misc
-
-- aaaa
+- Initial Release (start)
diff --git a/changes/11111.feature b/changes/11111.feature
deleted file mode 100644
index e69de29b..00000000
diff --git a/changes/1234.bugfix b/changes/1234.bugfix
deleted file mode 100644
index a1261547..00000000
--- a/changes/1234.bugfix
+++ /dev/null
@@ -1,2 +0,0 @@
-lllll
-kjjjj
\ No newline at end of file
diff --git a/changes/3456.doc.rst b/changes/3456.doc.rst
deleted file mode 100644
index e69de29b..00000000
diff --git a/changes/444.removal b/changes/444.removal
deleted file mode 100644
index e69de29b..00000000
diff --git a/changes/aaaa.misc b/changes/aaaa.misc
deleted file mode 100644
index 2f0b7ba3..00000000
--- a/changes/aaaa.misc
+++ /dev/null
@@ -1,3 +0,0 @@
-# aaaaaa
-
-kkkkkkkkk

From 55c65a357149b1126d899e9ebdd721a67c44ee4c Mon Sep 17 00:00:00 2001
From: sax 
Date: Wed, 12 Jun 2024 20:10:08 +0200
Subject: [PATCH 49/57] 2024-06-12 20:10

---
 .github/workflows/lint.yml | 4 ++--
 .github/workflows/test.yml | 4 ++--
 2 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml
index 7fe4c1e8..d3893c59 100644
--- a/.github/workflows/lint.yml
+++ b/.github/workflows/lint.yml
@@ -16,7 +16,7 @@ defaults:
 
 
 concurrency:
-  group: ${{ github.workflow }}-${{ github.ref }}
+  group: "${{ github.workflow }}-${{ github.ref }}"
   cancel-in-progress: true
 
 
@@ -25,7 +25,7 @@ permissions:
 
 jobs:
   changes:
-    if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name != github.event.pull_request.base.repo.full_name
+    if: (github.event_name != 'pull_request' && ! github.event.pull_request.head.repo.fork) || (github.event_name == 'pull_request' && (github.event.pull_request.head.repo.fork || startsWith(github.head_ref, 'dependabot/')))
     name: check files
     runs-on: ubuntu-latest
     timeout-minutes: 3
diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index 13d08753..7ce299b3 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -25,7 +25,7 @@ defaults:
 
 jobs:
   setup:
-    if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name != github.event.pull_request.base.repo.full_name
+    if: (github.event_name != 'pull_request' && ! github.event.pull_request.head.repo.fork) || (github.event_name == 'pull_request' && (github.event.pull_request.head.repo.fork || startsWith(github.head_ref, 'dependabot/')))
     name: check files
     runs-on: ubuntu-latest
     timeout-minutes: 3
@@ -76,7 +76,7 @@ jobs:
       target: "python_dev_deps"
 
   test:
-    if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name != github.event.pull_request.base.repo.full_name
+    if: (github.event_name != 'pull_request' && ! github.event.pull_request.head.repo.fork) || (github.event_name == 'pull_request' && (github.event.pull_request.head.repo.fork || startsWith(github.head_ref, 'dependabot/')))
     needs: [build, setup]
     runs-on: ubuntu-latest
     services:

From 11e3f975692aaffc5838ec35c3fc41574c987dcd Mon Sep 17 00:00:00 2001
From: sax 
Date: Wed, 12 Jun 2024 20:14:01 +0200
Subject: [PATCH 50/57] 2024-06-12 20:14

---
 .github/workflows/towncrier.yml | 2 --
 1 file changed, 2 deletions(-)

diff --git a/.github/workflows/towncrier.yml b/.github/workflows/towncrier.yml
index 43e3defb..a0e33930 100644
--- a/.github/workflows/towncrier.yml
+++ b/.github/workflows/towncrier.yml
@@ -9,8 +9,6 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@v2
-        with:
-          fetch-depth: 0
       - uses: actions/setup-python@v2
         with:
           python-version: '3.12'

From b5c9dc48d4fec63cb39493fd97b2649f156af162 Mon Sep 17 00:00:00 2001
From: sax 
Date: Wed, 12 Jun 2024 20:15:40 +0200
Subject: [PATCH 51/57] 2024-06-12 20:15

---
 .github/workflows/lint.yml | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml
index d3893c59..c6ef0e8d 100644
--- a/.github/workflows/lint.yml
+++ b/.github/workflows/lint.yml
@@ -46,7 +46,7 @@ jobs:
   flake8:
     needs: changes
     runs-on: ubuntu-latest
-    if: github.event.pull_request.draft == false && steps.changes.outputs.lint
+    if: github.event.pull_request.draft == false && needs.changes.outputs.lint
     steps:
       - uses: actions/checkout@v2
       - uses: actions/setup-python@v2
@@ -63,7 +63,7 @@ jobs:
   isort:
     needs: changes
     runs-on: ubuntu-latest
-    if: github.event.pull_request.draft == false && steps.changes.outputs.lint
+    if: github.event.pull_request.draft == false && needs.changes.outputs.lint
     steps:
       - uses: actions/checkout@v2
       - uses: actions/setup-python@v2
@@ -76,7 +76,7 @@ jobs:
   black:
     needs: changes
     runs-on: ubuntu-latest
-    if: github.event.pull_request.draft == false && steps.changes.outputs.lint
+    if: github.event.pull_request.draft == false && needs.changes.outputs.lint
     steps:
       - uses: actions/checkout@v2
       - uses: actions/setup-python@v2
@@ -89,7 +89,7 @@ jobs:
   bandit:
     needs: changes
     runs-on: ubuntu-latest
-    if: github.event.pull_request.draft == false && steps.changes.outputs.lint
+    if: github.event.pull_request.draft == false && needs.changes.outputs.lint
     steps:
       - uses: actions/checkout@v2
       - uses: actions/setup-python@v2

From 678cd6c1dd812bfd10f851247245a17c1c61fdff Mon Sep 17 00:00:00 2001
From: sax 
Date: Wed, 12 Jun 2024 20:18:33 +0200
Subject: [PATCH 52/57] 2024-06-12 20:18

---
 .github/workflows/lint.yml      | 8 ++++----
 .github/workflows/towncrier.yml | 7 +++----
 2 files changed, 7 insertions(+), 8 deletions(-)

diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml
index c6ef0e8d..83975993 100644
--- a/.github/workflows/lint.yml
+++ b/.github/workflows/lint.yml
@@ -51,7 +51,7 @@ jobs:
       - uses: actions/checkout@v2
       - uses: actions/setup-python@v2
         with:
-          python-version: '3.9'
+          python-version: '3.12'
       - name: Install requirements
         run: pip install flake8 pycodestyle
       - name: Check syntax
@@ -68,7 +68,7 @@ jobs:
       - uses: actions/checkout@v2
       - uses: actions/setup-python@v2
         with:
-          python-version: '3.11'
+          python-version: '3.12'
       - name: Install requirements
         run: pip install isort
       - name: iSort
@@ -81,7 +81,7 @@ jobs:
       - uses: actions/checkout@v2
       - uses: actions/setup-python@v2
         with:
-          python-version: '3.11'
+          python-version: '3.12'
       - name: Install requirements
         run: pip install black
       - name: Black
@@ -94,7 +94,7 @@ jobs:
       - uses: actions/checkout@v2
       - uses: actions/setup-python@v2
         with:
-          python-version: '3.11'
+          python-version: '3.12'
       - name: Install requirements
         run: pip install bandit
       - name: bandit
diff --git a/.github/workflows/towncrier.yml b/.github/workflows/towncrier.yml
index a0e33930..386760cc 100644
--- a/.github/workflows/towncrier.yml
+++ b/.github/workflows/towncrier.yml
@@ -13,7 +13,6 @@ jobs:
         with:
           python-version: '3.12'
       - name: Install python deps
-        run: pip install towncrier
-
-      - name: Check that changelog is updated
-        run: towncrier check >> $GITHUB_STEP_SUMMARY
+        run: |
+          pip install towncrier
+          towncrier check >> $GITHUB_STEP_SUMMARY

From 3af3955cc0f228455f17feff7b4c1c530dc6fee1 Mon Sep 17 00:00:00 2001
From: sax 
Date: Thu, 13 Jun 2024 05:42:12 +0200
Subject: [PATCH 53/57] 2024-06-13 05:42

---
 CHANGELOG.md   | 4 ++--
 pyproject.toml | 3 ++-
 2 files changed, 4 insertions(+), 3 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 2adc2513..26a0e4c3 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,5 @@
-v.0.1.0 2024-06-12
-==================
+v.0.1.0
+=======
 
 ### Features
 
diff --git a/pyproject.toml b/pyproject.toml
index f3c4fbd9..ede66847 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -120,6 +120,7 @@ name = "Deduplication Engine"
 directory = "changes"
 package = "hope_dedup_engine"
 filename = "CHANGELOG.md"
-title_format = "v.{version} {project_date}"
+title_format = "{version}"
+#template =
 #issue_format = "`#{issue} `_"
 wrap = true

From 13ee5cc4dd18a1945fcdc69b83d402cdd56b79cc Mon Sep 17 00:00:00 2001
From: sax 
Date: Thu, 13 Jun 2024 05:43:15 +0200
Subject: [PATCH 54/57] 2024-06-13 05:43

---
 .github/workflows/towncrier.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/workflows/towncrier.yml b/.github/workflows/towncrier.yml
index 386760cc..09c6f46a 100644
--- a/.github/workflows/towncrier.yml
+++ b/.github/workflows/towncrier.yml
@@ -15,4 +15,4 @@ jobs:
       - name: Install python deps
         run: |
           pip install towncrier
-          towncrier check >> $GITHUB_STEP_SUMMARY
+          towncrier check
\ No newline at end of file

From ddf54d0aa7ddd61976859c919ecb4412328d8e59 Mon Sep 17 00:00:00 2001
From: sax 
Date: Thu, 13 Jun 2024 05:46:51 +0200
Subject: [PATCH 55/57] 2024-06-13 05:46 - updates

---
 .github/file-filters.yml   | 3 +++
 .github/workflows/mypy.yml | 4 ++--
 .mypy.ini                  | 5 -----
 3 files changed, 5 insertions(+), 7 deletions(-)

diff --git a/.github/file-filters.yml b/.github/file-filters.yml
index 6612dc64..53da9348 100644
--- a/.github/file-filters.yml
+++ b/.github/file-filters.yml
@@ -15,6 +15,9 @@ changelog:
   - added|modified: 'changes/**'
   - 'CHANGELOG.md'
 
+mypy:
+  - *python
+  - 'mypy.ini'
 
 docker_base:
   - *docker
diff --git a/.github/workflows/mypy.yml b/.github/workflows/mypy.yml
index aba253bc..c7c411ca 100644
--- a/.github/workflows/mypy.yml
+++ b/.github/workflows/mypy.yml
@@ -11,7 +11,7 @@ on:
       - releases/*
 
 concurrency:
-  group: ${{ github.workflow }}-${{ github.ref }}
+  group: "${{ github.workflow }}-${{ github.ref }}"
   cancel-in-progress: true
 
 defaults:
@@ -25,7 +25,7 @@ jobs:
     runs-on: ubuntu-latest
     timeout-minutes: 3
     outputs:
-      python_files: ${{ steps.changes.outputs.python }}
+      mypy: ${{ steps.changes.outputs.mypy }}
     steps:
 #      - run: git config --global --add safe.directory $(realpath .)
       - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
diff --git a/.mypy.ini b/.mypy.ini
index ada05c9d..cc5ba859 100644
--- a/.mypy.ini
+++ b/.mypy.ini
@@ -1,11 +1,6 @@
 [mypy]
 python_version = 3.12
 files = src/
-;exclude = ~OLD
-;        ~management
-;        ~billing
-;exclude = (?x)(^\~management$)
-;exclude = (?x)(.*/power_query/.*)
 exclude = (management/|tests/|manage.py)
 
 install_types = true

From 3b4e45fea618a1cf3fa4ef803832a05f53c77f8a Mon Sep 17 00:00:00 2001
From: sax 
Date: Thu, 13 Jun 2024 05:51:54 +0200
Subject: [PATCH 56/57] 2024-06-13 05:51 - updates

---
 .github/workflows/towncrier.yml | 18 ------------------
 1 file changed, 18 deletions(-)
 delete mode 100644 .github/workflows/towncrier.yml

diff --git a/.github/workflows/towncrier.yml b/.github/workflows/towncrier.yml
deleted file mode 100644
index 09c6f46a..00000000
--- a/.github/workflows/towncrier.yml
+++ /dev/null
@@ -1,18 +0,0 @@
-name: Changelog entries
-on: [pull_request]
-
-permissions:
-  contents: read
-
-jobs:
-  towncrier:
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@v2
-      - uses: actions/setup-python@v2
-        with:
-          python-version: '3.12'
-      - name: Install python deps
-        run: |
-          pip install towncrier
-          towncrier check
\ No newline at end of file

From c6893613b177dcebe3d9add1f513ab76710da59f Mon Sep 17 00:00:00 2001
From: sax 
Date: Thu, 13 Jun 2024 12:32:36 +0200
Subject: [PATCH 57/57] updates CI ci:debug

---
 src/hope_dedup_engine/__init__.py             |  5 +-
 src/hope_dedup_engine/apps/api/admin.py       |  7 +-
 src/hope_dedup_engine/apps/api/auth.py        |  4 +-
 .../apps/api/models/__init__.py               |  6 +-
 src/hope_dedup_engine/apps/api/models/auth.py |  4 +-
 .../apps/api/models/deduplication.py          | 33 ++-----
 src/hope_dedup_engine/apps/api/serializers.py | 15 +---
 src/hope_dedup_engine/apps/api/urls.py        | 25 ++----
 src/hope_dedup_engine/apps/api/views.py       | 80 ++++-------------
 .../core/management/commands/createsystem.py  |  4 +-
 .../apps/core/management/commands/env.py      | 24 ++---
 .../apps/core/management/commands/upgrade.py  | 13 +--
 .../apps/faces/celery_tasks.py                |  9 +-
 .../apps/faces/utils/celery_utils.py          |  8 +-
 .../apps/faces/utils/duplication_detector.py  | 90 ++++---------------
 .../apps/faces/validators.py                  |  4 +-
 src/hope_dedup_engine/apps/security/models.py |  4 +-
 src/hope_dedup_engine/apps/social/pipeline.py |  4 +-
 src/hope_dedup_engine/config/__init__.py      | 76 +++-------------
 .../config/fragments/constance.py             |  6 +-
 src/hope_dedup_engine/config/fragments/csp.py | 26 +-----
 src/hope_dedup_engine/state.py                | 11 +--
 src/hope_dedup_engine/utils/http.py           |  4 +-
 src/hope_dedup_engine/utils/security.py       |  5 +-
 24 files changed, 91 insertions(+), 376 deletions(-)

diff --git a/src/hope_dedup_engine/__init__.py b/src/hope_dedup_engine/__init__.py
index 01518441..0e2df472 100644
--- a/src/hope_dedup_engine/__init__.py
+++ b/src/hope_dedup_engine/__init__.py
@@ -1,5 +1,6 @@
-# from hope_dedup_engine.config.celery import app as celery_app
+from hope_dedup_engine.config.celery import app as celery_app
+
 
 VERSION = __version__ = "0.1.0"
 
-# __all__ = ("celery_app",)
+__all__ = ("celery_app",)
diff --git a/src/hope_dedup_engine/apps/api/admin.py b/src/hope_dedup_engine/apps/api/admin.py
index 7753cfe2..9f384e0a 100644
--- a/src/hope_dedup_engine/apps/api/admin.py
+++ b/src/hope_dedup_engine/apps/api/admin.py
@@ -1,11 +1,6 @@
 from django.contrib import admin
 
-from hope_dedup_engine.apps.api.models import (
-    DeduplicationSet,
-    Duplicate,
-    HDEToken,
-    Image,
-)
+from hope_dedup_engine.apps.api.models import DeduplicationSet, Duplicate, HDEToken, Image
 
 admin.site.register(DeduplicationSet)
 admin.site.register(Duplicate)
diff --git a/src/hope_dedup_engine/apps/api/auth.py b/src/hope_dedup_engine/apps/api/auth.py
index a63dd6c2..4a78ffcc 100644
--- a/src/hope_dedup_engine/apps/api/auth.py
+++ b/src/hope_dedup_engine/apps/api/auth.py
@@ -14,9 +14,7 @@ def has_permission(self, request: Request, view: View) -> bool:
 
 class UserAndDeduplicationSetAreOfTheSameSystem(BasePermission):
     def has_permission(self, request: Request, view: View) -> bool:
-        if deduplication_set_pk := view.kwargs.get(
-            "deduplication_set_pk"
-        ) or view.kwargs.get("pk"):
+        if deduplication_set_pk := view.kwargs.get("deduplication_set_pk") or view.kwargs.get("pk"):
             return DeduplicationSet.objects.filter(
                 external_system=request.user.external_system, pk=deduplication_set_pk
             ).exists()
diff --git a/src/hope_dedup_engine/apps/api/models/__init__.py b/src/hope_dedup_engine/apps/api/models/__init__.py
index 40bdb2fa..571a4bfd 100644
--- a/src/hope_dedup_engine/apps/api/models/__init__.py
+++ b/src/hope_dedup_engine/apps/api/models/__init__.py
@@ -1,6 +1,2 @@
 from hope_dedup_engine.apps.api.models.auth import HDEToken  # noqa: F401
-from hope_dedup_engine.apps.api.models.deduplication import (  # noqa: F401
-    DeduplicationSet,
-    Duplicate,
-    Image,
-)
+from hope_dedup_engine.apps.api.models.deduplication import DeduplicationSet, Duplicate, Image  # noqa: F401
diff --git a/src/hope_dedup_engine/apps/api/models/auth.py b/src/hope_dedup_engine/apps/api/models/auth.py
index 050a852b..025370bd 100644
--- a/src/hope_dedup_engine/apps/api/models/auth.py
+++ b/src/hope_dedup_engine/apps/api/models/auth.py
@@ -5,6 +5,4 @@
 
 
 class HDEToken(Token):
-    user = models.ForeignKey(
-        settings.AUTH_USER_MODEL, related_name="auth_tokens", on_delete=models.CASCADE
-    )
+    user = models.ForeignKey(settings.AUTH_USER_MODEL, related_name="auth_tokens", on_delete=models.CASCADE)
diff --git a/src/hope_dedup_engine/apps/api/models/deduplication.py b/src/hope_dedup_engine/apps/api/models/deduplication.py
index bbeb8cbd..4bce2d0e 100644
--- a/src/hope_dedup_engine/apps/api/models/deduplication.py
+++ b/src/hope_dedup_engine/apps/api/models/deduplication.py
@@ -12,10 +12,7 @@
 class DeduplicationSet(models.Model):
     class State(models.IntegerChoices):
         CLEAN = 0, "Clean"  # Deduplication set is created or already processed
-        DIRTY = (
-            1,
-            "Dirty",
-        )  # Images are added to deduplication set, but not yet processed
+        DIRTY = 1, "Dirty"  # Images are added to deduplication set, but not yet processed
         PROCESSING = 2, "Processing"  # Images are being processed
         ERROR = 3, "Error"  # Error occurred
 
@@ -30,19 +27,11 @@ class State(models.IntegerChoices):
     external_system = models.ForeignKey(ExternalSystem, on_delete=models.CASCADE)
     error = models.CharField(max_length=255, null=True, blank=True)
     created_by = models.ForeignKey(
-        settings.AUTH_USER_MODEL,
-        on_delete=models.CASCADE,
-        null=True,
-        blank=True,
-        related_name="+",
+        settings.AUTH_USER_MODEL, on_delete=models.CASCADE, null=True, blank=True, related_name="+"
     )
     created_at = models.DateTimeField(auto_now_add=True)
     updated_by = models.ForeignKey(
-        settings.AUTH_USER_MODEL,
-        on_delete=models.CASCADE,
-        null=True,
-        blank=True,
-        related_name="+",
+        settings.AUTH_USER_MODEL, on_delete=models.CASCADE, null=True, blank=True, related_name="+"
     )
     updated_at = models.DateTimeField(auto_now=True)
     notification_url = models.CharField(max_length=255, null=True, blank=True)
@@ -54,11 +43,7 @@ class Image(models.Model):
     reference_pk = models.CharField(max_length=REFERENCE_PK_LENGTH)
     filename = models.CharField(max_length=255)
     created_by = models.ForeignKey(
-        settings.AUTH_USER_MODEL,
-        on_delete=models.CASCADE,
-        null=True,
-        blank=True,
-        related_name="+",
+        settings.AUTH_USER_MODEL, on_delete=models.CASCADE, null=True, blank=True, related_name="+"
     )
     created_at = models.DateTimeField(auto_now_add=True)
 
@@ -78,15 +63,9 @@ class IgnoredKeyPair(models.Model):
     second_reference_pk = models.CharField(max_length=REFERENCE_PK_LENGTH)
 
     class Meta:
-        unique_together = (
-            "deduplication_set",
-            "first_reference_pk",
-            "second_reference_pk",
-        )
+        unique_together = "deduplication_set", "first_reference_pk", "second_reference_pk"
 
     @override
     def save(self, **kwargs: Any) -> None:
-        self.first_reference_pk, self.second_reference_pk = sorted(
-            (self.first_reference_pk, self.second_reference_pk)
-        )
+        self.first_reference_pk, self.second_reference_pk = sorted((self.first_reference_pk, self.second_reference_pk))
         super().save(**kwargs)
diff --git a/src/hope_dedup_engine/apps/api/serializers.py b/src/hope_dedup_engine/apps/api/serializers.py
index 4ef1087a..2227e72a 100644
--- a/src/hope_dedup_engine/apps/api/serializers.py
+++ b/src/hope_dedup_engine/apps/api/serializers.py
@@ -1,11 +1,7 @@
 from rest_framework import serializers
 
 from hope_dedup_engine.apps.api.models import DeduplicationSet
-from hope_dedup_engine.apps.api.models.deduplication import (
-    Duplicate,
-    IgnoredKeyPair,
-    Image,
-)
+from hope_dedup_engine.apps.api.models.deduplication import Duplicate, IgnoredKeyPair, Image
 
 
 class DeduplicationSetSerializer(serializers.ModelSerializer):
@@ -14,14 +10,7 @@ class DeduplicationSetSerializer(serializers.ModelSerializer):
     class Meta:
         model = DeduplicationSet
         exclude = ("deleted",)
-        read_only_fields = (
-            "external_system",
-            "created_at",
-            "created_by",
-            "deleted",
-            "updated_at",
-            "updated_by",
-        )
+        read_only_fields = "external_system", "created_at", "created_by", "deleted", "updated_at", "updated_by"
 
 
 class ImageSerializer(serializers.ModelSerializer):
diff --git a/src/hope_dedup_engine/apps/api/urls.py b/src/hope_dedup_engine/apps/api/urls.py
index 8bda942a..fb83633a 100644
--- a/src/hope_dedup_engine/apps/api/urls.py
+++ b/src/hope_dedup_engine/apps/api/urls.py
@@ -20,25 +20,12 @@
 )
 
 router = routers.SimpleRouter()
-router.register(
-    DEDUPLICATION_SET_LIST, DeduplicationSetViewSet, basename=DEDUPLICATION_SET_LIST
-)
+router.register(DEDUPLICATION_SET_LIST, DeduplicationSetViewSet, basename=DEDUPLICATION_SET_LIST)
 
-deduplication_sets_router = nested_routers.NestedSimpleRouter(
-    router, DEDUPLICATION_SET_LIST, lookup=DEDUPLICATION_SET
-)
+deduplication_sets_router = nested_routers.NestedSimpleRouter(router, DEDUPLICATION_SET_LIST, lookup=DEDUPLICATION_SET)
 deduplication_sets_router.register(IMAGE_LIST, ImageViewSet, basename=IMAGE_LIST)
-deduplication_sets_router.register(
-    BULK_IMAGE_LIST, BulkImageViewSet, basename=BULK_IMAGE_LIST
-)
-deduplication_sets_router.register(
-    DUPLICATE_LIST, DuplicateViewSet, basename=DUPLICATE_LIST
-)
-deduplication_sets_router.register(
-    IGNORED_KEYS_LIST, IgnoredKeyPairViewSet, basename=IGNORED_KEYS_LIST
-)
+deduplication_sets_router.register(BULK_IMAGE_LIST, BulkImageViewSet, basename=BULK_IMAGE_LIST)
+deduplication_sets_router.register(DUPLICATE_LIST, DuplicateViewSet, basename=DUPLICATE_LIST)
+deduplication_sets_router.register(IGNORED_KEYS_LIST, IgnoredKeyPairViewSet, basename=IGNORED_KEYS_LIST)
 
-urlpatterns = [
-    path("", include(router.urls)),
-    path("", include(deduplication_sets_router.urls)),
-]
+urlpatterns = [path("", include(router.urls)), path("", include(deduplication_sets_router.urls))]
diff --git a/src/hope_dedup_engine/apps/api/views.py b/src/hope_dedup_engine/apps/api/views.py
index fab5fd17..eb07413e 100644
--- a/src/hope_dedup_engine/apps/api/views.py
+++ b/src/hope_dedup_engine/apps/api/views.py
@@ -18,16 +18,9 @@
     HDETokenAuthentication,
     UserAndDeduplicationSetAreOfTheSameSystem,
 )
-from hope_dedup_engine.apps.api.const import (
-    DEDUPLICATION_SET_FILTER,
-    DEDUPLICATION_SET_PARAM,
-)
+from hope_dedup_engine.apps.api.const import DEDUPLICATION_SET_FILTER, DEDUPLICATION_SET_PARAM
 from hope_dedup_engine.apps.api.models import DeduplicationSet
-from hope_dedup_engine.apps.api.models.deduplication import (
-    Duplicate,
-    IgnoredKeyPair,
-    Image,
-)
+from hope_dedup_engine.apps.api.models.deduplication import Duplicate, IgnoredKeyPair, Image
 from hope_dedup_engine.apps.api.serializers import (
     DeduplicationSetSerializer,
     DuplicateSerializer,
@@ -43,29 +36,17 @@
 
 
 class DeduplicationSetViewSet(
-    mixins.ListModelMixin,
-    mixins.CreateModelMixin,
-    mixins.DestroyModelMixin,
-    viewsets.GenericViewSet,
+    mixins.ListModelMixin, mixins.CreateModelMixin, mixins.DestroyModelMixin, viewsets.GenericViewSet
 ):
     authentication_classes = (HDETokenAuthentication,)
-    permission_classes = (
-        IsAuthenticated,
-        AssignedToExternalSystem,
-        UserAndDeduplicationSetAreOfTheSameSystem,
-    )
+    permission_classes = IsAuthenticated, AssignedToExternalSystem, UserAndDeduplicationSetAreOfTheSameSystem
     serializer_class = DeduplicationSetSerializer
 
     def get_queryset(self) -> QuerySet:
-        return DeduplicationSet.objects.filter(
-            external_system=self.request.user.external_system, deleted=False
-        )
+        return DeduplicationSet.objects.filter(external_system=self.request.user.external_system, deleted=False)
 
     def perform_create(self, serializer: Serializer) -> None:
-        serializer.save(
-            created_by=self.request.user,
-            external_system=self.request.user.external_system,
-        )
+        serializer.save(created_by=self.request.user, external_system=self.request.user.external_system)
 
     def perform_destroy(self, instance: DeduplicationSet) -> None:
         instance.updated_by = self.request.user
@@ -89,9 +70,7 @@ def process(self, request: Request, pk: UUID | None = None) -> Response:
                 self._start_processing(deduplication_set)
                 return Response({MESSAGE: STARTED})
             case DeduplicationSet.State.PROCESSING:
-                return Response(
-                    {MESSAGE: ALREADY_PROCESSING}, status=status.HTTP_400_BAD_REQUEST
-                )
+                return Response({MESSAGE: ALREADY_PROCESSING}, status=status.HTTP_400_BAD_REQUEST)
 
 
 class ImageViewSet(
@@ -102,11 +81,7 @@ class ImageViewSet(
     viewsets.GenericViewSet,
 ):
     authentication_classes = (HDETokenAuthentication,)
-    permission_classes = (
-        IsAuthenticated,
-        AssignedToExternalSystem,
-        UserAndDeduplicationSetAreOfTheSameSystem,
-    )
+    permission_classes = IsAuthenticated, AssignedToExternalSystem, UserAndDeduplicationSetAreOfTheSameSystem
     serializer_class = ImageSerializer
     queryset = Image.objects.all()
     parent_lookup_kwargs = {
@@ -138,18 +113,14 @@ def __setitem__(self, key: str, value: Any) -> None:
 
 
 class WrapRequestDataMixin:
-    def initialize_request(
-        self, request: Request, *args: Any, **kwargs: Any
-    ) -> Request:
+    def initialize_request(self, request: Request, *args: Any, **kwargs: Any) -> Request:
         request = super().initialize_request(request, *args, **kwargs)
         request._full_data = ListDataWrapper(request.data)
         return request
 
 
 class UnwrapRequestDataMixin:
-    def initialize_request(
-        self, request: Request, *args: Any, **kwargs: Any
-    ) -> Request:
+    def initialize_request(self, request: Request, *args: Any, **kwargs: Any) -> Request:
         request = super().initialize_request(request, *args, **kwargs)
         request._full_data = request._full_data.data
         return request
@@ -165,11 +136,7 @@ class BulkImageViewSet(
     viewsets.GenericViewSet,
 ):
     authentication_classes = (HDETokenAuthentication,)
-    permission_classes = (
-        IsAuthenticated,
-        AssignedToExternalSystem,
-        UserAndDeduplicationSetAreOfTheSameSystem,
-    )
+    permission_classes = IsAuthenticated, AssignedToExternalSystem, UserAndDeduplicationSetAreOfTheSameSystem
     serializer_class = ImageSerializer
     queryset = Image.objects.all()
     parent_lookup_kwargs = {
@@ -181,9 +148,7 @@ def get_serializer(self, *args: Any, **kwargs: Any) -> Serializer:
 
     def perform_create(self, serializer: Serializer) -> None:
         super().perform_create(serializer)
-        if deduplication_set := (
-            serializer.instance[0].deduplication_set if serializer.instance else None
-        ):
+        if deduplication_set := serializer.instance[0].deduplication_set if serializer.instance else None:
             deduplication_set.updated_by = self.request.user
             deduplication_set.save()
 
@@ -196,15 +161,9 @@ def clear(self, request: Request, deduplication_set_pk: str) -> Response:
         return Response(status=status.HTTP_204_NO_CONTENT)
 
 
-class DuplicateViewSet(
-    nested_viewsets.NestedViewSetMixin, mixins.ListModelMixin, viewsets.GenericViewSet
-):
+class DuplicateViewSet(nested_viewsets.NestedViewSetMixin, mixins.ListModelMixin, viewsets.GenericViewSet):
     authentication_classes = (HDETokenAuthentication,)
-    permission_classes = (
-        IsAuthenticated,
-        AssignedToExternalSystem,
-        UserAndDeduplicationSetAreOfTheSameSystem,
-    )
+    permission_classes = IsAuthenticated, AssignedToExternalSystem, UserAndDeduplicationSetAreOfTheSameSystem
     serializer_class = DuplicateSerializer
     queryset = Duplicate.objects.all()
     parent_lookup_kwargs = {
@@ -213,17 +172,10 @@ class DuplicateViewSet(
 
 
 class IgnoredKeyPairViewSet(
-    nested_viewsets.NestedViewSetMixin,
-    mixins.ListModelMixin,
-    mixins.CreateModelMixin,
-    viewsets.GenericViewSet,
+    nested_viewsets.NestedViewSetMixin, mixins.ListModelMixin, mixins.CreateModelMixin, viewsets.GenericViewSet
 ):
     authentication_classes = (HDETokenAuthentication,)
-    permission_classes = (
-        IsAuthenticated,
-        AssignedToExternalSystem,
-        UserAndDeduplicationSetAreOfTheSameSystem,
-    )
+    permission_classes = IsAuthenticated, AssignedToExternalSystem, UserAndDeduplicationSetAreOfTheSameSystem
     serializer_class = IgnoredKeyPairSerializer
     queryset = IgnoredKeyPair.objects.all()
     parent_lookup_kwargs = {
diff --git a/src/hope_dedup_engine/apps/core/management/commands/createsystem.py b/src/hope_dedup_engine/apps/core/management/commands/createsystem.py
index f9dafbac..fbe2707d 100644
--- a/src/hope_dedup_engine/apps/core/management/commands/createsystem.py
+++ b/src/hope_dedup_engine/apps/core/management/commands/createsystem.py
@@ -10,9 +10,7 @@ def add_arguments(self, parser):
         parser.add_argument("name")
 
     def handle(self, *args, **options):
-        system, created = ExternalSystem.objects.get_or_create(
-            name=(name := options["name"])
-        )
+        system, created = ExternalSystem.objects.get_or_create(name=(name := options["name"]))
         if created:
             self.stdout.write(self.style.SUCCESS(f'"{name}" system created.'))
         else:
diff --git a/src/hope_dedup_engine/apps/core/management/commands/env.py b/src/hope_dedup_engine/apps/core/management/commands/env.py
index a1bfff67..782c94f4 100644
--- a/src/hope_dedup_engine/apps/core/management/commands/env.py
+++ b/src/hope_dedup_engine/apps/core/management/commands/env.py
@@ -33,26 +33,14 @@ def add_arguments(self, parser: "CommandParser") -> None:
             default="export {key}={value}",
             help="Check env for variable availability (default: 'export {key}=\"{value}\"')",
         )
-        parser.add_argument(
-            "--develop", action="store_true", help="Display development values"
-        )
-        parser.add_argument(
-            "--config", action="store_true", help="Only list changed values"
-        )
+        parser.add_argument("--develop", action="store_true", help="Display development values")
+        parser.add_argument("--config", action="store_true", help="Only list changed values")
         parser.add_argument("--diff", action="store_true", help="Mark changed values")
         parser.add_argument(
-            "--check",
-            action="store_true",
-            dest="check",
-            default=False,
-            help="Check env for variable availability",
+            "--check", action="store_true", dest="check", default=False, help="Check env for variable availability"
         )
         parser.add_argument(
-            "--ignore-errors",
-            action="store_true",
-            dest="ignore_errors",
-            default=False,
-            help="Do not fail",
+            "--ignore-errors", action="store_true", dest="ignore_errors", default=False, help="Do not fail"
         )
 
     def handle(self, *args: "Any", **options: "Any") -> None:
@@ -74,9 +62,7 @@ def handle(self, *args: "Any", **options: "Any") -> None:
                 else:
                     value: Any = env.get_value(k)
 
-                line: str = pattern.format(
-                    key=k, value=clean(value), help=help, default=default
-                )
+                line: str = pattern.format(key=k, value=clean(value), help=help, default=default)
                 if options["diff"]:
                     if value != default:
                         line = self.style.SUCCESS(line)
diff --git a/src/hope_dedup_engine/apps/core/management/commands/upgrade.py b/src/hope_dedup_engine/apps/core/management/commands/upgrade.py
index 513e2f01..a6e09ff0 100644
--- a/src/hope_dedup_engine/apps/core/management/commands/upgrade.py
+++ b/src/hope_dedup_engine/apps/core/management/commands/upgrade.py
@@ -89,9 +89,7 @@ def get_options(self, options: dict[str, Any]) -> None:
         self.debug = options["debug"]
 
         self.admin_email = str(options["admin_email"] or env("ADMIN_EMAIL", ""))
-        self.admin_password = str(
-            options["admin_password"] or env("ADMIN_PASSWORD", "")
-        )
+        self.admin_password = str(options["admin_password"] or env("ADMIN_PASSWORD", ""))
 
     def halt(self, e: Exception) -> None:
         self.stdout.write(str(e), style_func=self.style.ERROR)
@@ -125,9 +123,7 @@ def handle(self, *args: Any, **options: Any) -> None:  # noqa: C901
                 call_command("check", deploy=True, verbosity=self.verbosity - 1)
             if self.static:
                 static_root = Path(env("STATIC_ROOT"))
-                echo(
-                    f"Run collectstatic to: '{static_root}' - '{static_root.absolute()}"
-                )
+                echo(f"Run collectstatic to: '{static_root}' - '{static_root.absolute()}'")
                 if not static_root.exists():
                     static_root.mkdir(parents=True)
                 call_command("collectstatic", **extra)
@@ -148,10 +144,7 @@ def handle(self, *args: Any, **options: Any) -> None:  # noqa: C901
                         style_func=self.style.WARNING,
                     )
                 else:
-                    echo(
-                        f"Creating superuser: {self.admin_email}",
-                        style_func=self.style.WARNING,
-                    )
+                    echo(f"Creating superuser: {self.admin_email}", style_func=self.style.WARNING)
                     validate_email(self.admin_email)
                     os.environ["DJANGO_SUPERUSER_USERNAME"] = self.admin_email
                     os.environ["DJANGO_SUPERUSER_EMAIL"] = self.admin_email
diff --git a/src/hope_dedup_engine/apps/faces/celery_tasks.py b/src/hope_dedup_engine/apps/faces/celery_tasks.py
index 2fec0d72..2c156cfb 100644
--- a/src/hope_dedup_engine/apps/faces/celery_tasks.py
+++ b/src/hope_dedup_engine/apps/faces/celery_tasks.py
@@ -9,9 +9,7 @@
 @shared_task(bind=True, soft_time_limit=0.5 * 60 * 60, time_limit=1 * 60 * 60)
 @task_lifecycle(name="Deduplicate", ttl=1 * 60 * 60)
 # TODO: Use DeduplicationSet objects as input to deduplication pipeline
-def deduplicate(
-    self, filenames: tuple[str], ignore_pairs: tuple[tuple[str, str]] = tuple()
-) -> tuple[tuple[str]]:
+def deduplicate(self, filenames: tuple[str], ignore_pairs: tuple[tuple[str, str]] = tuple()) -> tuple[tuple[str]]:
     """
     Deduplicate a set of filenames, ignoring any specified pairs of filenames.
 
@@ -27,8 +25,5 @@ def deduplicate(
         dd = DuplicationDetector(filenames, ignore_pairs)
         return dd.find_duplicates()
     except Exception as e:
-        self.update_state(
-            state=states.FAILURE,
-            meta={"exc_message": str(e), "traceback": traceback.format_exc()},
-        )
+        self.update_state(state=states.FAILURE, meta={"exc_message": str(e), "traceback": traceback.format_exc()})
         raise e
diff --git a/src/hope_dedup_engine/apps/faces/utils/celery_utils.py b/src/hope_dedup_engine/apps/faces/utils/celery_utils.py
index 6aba6ced..eec34e9a 100644
--- a/src/hope_dedup_engine/apps/faces/utils/celery_utils.py
+++ b/src/hope_dedup_engine/apps/faces/utils/celery_utils.py
@@ -21,9 +21,7 @@ def wrapper(self, *args, **kwargs) -> any:
             ignore_pairs = args[1] if args else kwargs.get("ignore_pairs")
             lock_name: str = f"{name}_{_get_hash(filenames, ignore_pairs)}"
             if not _acquire_lock(lock_name, ttl):
-                logger.info(
-                    f"Task {name} with brocker lock {lock_name} is already running."
-                )
+                logger.info(f"Task {name} with broker lock {lock_name} is already running.")
                 return None
 
             try:
@@ -51,8 +49,6 @@ def _release_lock(lock_name: str) -> None:
 
 def _get_hash(filenames: tuple[str], ignore_pairs: tuple[tuple[str, str]]) -> str:
     fn_str: str = ",".join(sorted(filenames))
-    ip_sorted = sorted(
-        (min(item1, item2), max(item1, item2)) for item1, item2 in ignore_pairs
-    )
+    ip_sorted = sorted((min(item1, item2), max(item1, item2)) for item1, item2 in ignore_pairs)
     ip_str = ",".join(f"{item1},{item2}" for item1, item2 in ip_sorted)
     return hashlib.sha256(f"{fn_str}{ip_str}".encode()).hexdigest()
diff --git a/src/hope_dedup_engine/apps/faces/utils/duplication_detector.py b/src/hope_dedup_engine/apps/faces/utils/duplication_detector.py
index 5b9257e7..c0683943 100644
--- a/src/hope_dedup_engine/apps/faces/utils/duplication_detector.py
+++ b/src/hope_dedup_engine/apps/faces/utils/duplication_detector.py
@@ -11,11 +11,7 @@
 import numpy as np
 from constance import config
 
-from hope_dedup_engine.apps.core.storage import (
-    CV2DNNStorage,
-    HDEAzureStorage,
-    HOPEAzureStorage,
-)
+from hope_dedup_engine.apps.core.storage import CV2DNNStorage, HDEAzureStorage, HOPEAzureStorage
 
 
 class DuplicationDetector:
@@ -36,9 +32,7 @@ class FaceEncodingsConfig:
 
     logger: logging.Logger = logging.getLogger(__name__)
 
-    def __init__(
-        self, filenames: tuple[str], ignore_pairs: tuple[str, str] = tuple()
-    ) -> None:
+    def __init__(self, filenames: tuple[str], ignore_pairs: tuple[str, str] = tuple()) -> None:
         """
         Initialize the DuplicationDetector with the given filenames.
 
@@ -89,13 +83,7 @@ def _set_net(self, storage: CV2DNNStorage) -> cv2.dnn_Net:
         return net
 
     def _get_shape(self) -> dict[str, int]:
-        pattern = (
-            r"input_shape\s*\{\s*"
-            r"dim:\s*(\d+)\s*"
-            r"dim:\s*(\d+)\s*"
-            r"dim:\s*(\d+)\s*"
-            r"dim:\s*(\d+)\s*\}"
-        )
+        pattern = r"input_shape\s*\{\s*" r"dim:\s*(\d+)\s*" r"dim:\s*(\d+)\s*" r"dim:\s*(\d+)\s*" r"dim:\s*(\d+)\s*\}"
         with open(settings.PROTOTXT_FILE, "r") as file:
             if match := re.search(pattern, file.read()):
                 return {
@@ -107,21 +95,15 @@ def _get_shape(self) -> dict[str, int]:
             else:
                 raise ValueError("Could not find input_shape in prototxt file.")
 
-    def _get_pairs_to_ignore(
-        self, ignore: tuple[tuple[str, str]]
-    ) -> set[tuple[str, str]]:
+    def _get_pairs_to_ignore(self, ignore: tuple[tuple[str, str]]) -> set[tuple[str, str]]:
         ignore = tuple(tuple(pair) for pair in ignore)
         if not ignore:
             return set()
         if all(
-            isinstance(pair, tuple)
-            and len(pair) == 2
-            and all(isinstance(item, str) and item for item in pair)
+            isinstance(pair, tuple) and len(pair) == 2 and all(isinstance(item, str) and item for item in pair)
             for pair in ignore
         ):
-            return {(item1, item2) for item1, item2 in ignore} | {
-                (item2, item1) for item1, item2 in ignore
-            }
+            return {(item1, item2) for item1, item2 in ignore} | {(item2, item1) for item1, item2 in ignore}
         elif len(ignore) == 2 and all(isinstance(item, str) for item in ignore):
             return {(ignore[0], ignore[1]), (ignore[1], ignore[0])}
         else:
@@ -135,9 +117,7 @@ def _encodings_filename(self, filename: str) -> str:
     def _has_encodings(self, filename: str) -> bool:
         return self.storages["encoded"].exists(self._encodings_filename(filename))
 
-    def _get_face_detections_dnn(
-        self, filename: str
-    ) -> list[tuple[int, int, int, int]]:
+    def _get_face_detections_dnn(self, filename: str) -> list[tuple[int, int, int, int]]:
         face_regions: list[tuple[int, int, int, int]] = []
         try:
             with self.storages["images"].open(filename, "rb") as img_file:
@@ -148,16 +128,9 @@ def _get_face_detections_dnn(
             # Create a blob (4D tensor) from the image
             blob = cv2.dnn.blobFromImage(
                 image=cv2.resize(
-                    image,
-                    dsize=(
-                        self.blob_from_image_cfg.shape["height"],
-                        self.blob_from_image_cfg.shape["width"],
-                    ),
-                ),
-                size=(
-                    self.blob_from_image_cfg.shape["height"],
-                    self.blob_from_image_cfg.shape["width"],
+                    image, dsize=(self.blob_from_image_cfg.shape["height"], self.blob_from_image_cfg.shape["width"])
                 ),
+                size=(self.blob_from_image_cfg.shape["height"], self.blob_from_image_cfg.shape["width"]),
                 scalefactor=self.blob_from_image_cfg.scale_factor,
                 mean=self.blob_from_image_cfg.mean_values,
             )
@@ -172,26 +145,17 @@ def _get_face_detections_dnn(
                 confidence = detections[0, 0, i, 2]
                 # Filter out weak detections by ensuring the confidence is greater than the minimum confidence
                 if confidence > self.face_detection_confidence:
-                    box = (detections[0, 0, i, 3:7] * np.array([w, h, w, h])).astype(
-                        "int"
-                    )
+                    box = (detections[0, 0, i, 3:7] * np.array([w, h, w, h])).astype("int")
                     boxes.append(box)
                     confidences.append(confidence)
             if boxes:
                 # Apply non-maxima suppression to suppress weak, overlapping bounding boxes
-                indices = cv2.dnn.NMSBoxes(
-                    boxes,
-                    confidences,
-                    self.face_detection_confidence,
-                    self.nms_threshold,
-                )
+                indices = cv2.dnn.NMSBoxes(boxes, confidences, self.face_detection_confidence, self.nms_threshold)
                 if indices is not None:
                     for i in indices:
                         face_regions.append(tuple(boxes[i]))
         except Exception as e:
-            self.logger.exception(
-                "Error processing face detection for image %s", filename
-            )
+            self.logger.exception("Error processing face detection for image %s", filename)
             raise e
         return face_regions
 
@@ -229,19 +193,13 @@ def _encode_face(self, filename: str) -> None:
                         encodings.extend(face_encodings)
                     else:
                         self.logger.error("Invalid face region %s", region)
-                with self.storages["encoded"].open(
-                    self._encodings_filename(filename), "wb"
-                ) as f:
+                with self.storages["encoded"].open(self._encodings_filename(filename), "wb") as f:
                     np.save(f, encodings)
         except Exception as e:
-            self.logger.exception(
-                "Error processing face encodings for image %s", filename
-            )
+            self.logger.exception("Error processing face encodings for image %s", filename)
             raise e
 
-    def _get_duplicated_groups(
-        self, checked: set[tuple[str, str, float]]
-    ) -> tuple[tuple[str]]:
+    def _get_duplicated_groups(self, checked: set[tuple[str, str, float]]) -> tuple[tuple[str]]:
         # Dictionary to store connections between paths where distances are less than the threshold
         groups = []
         connections = defaultdict(set)
@@ -258,14 +216,10 @@ def _get_duplicated_groups(
                 # Try to expand the group ensuring each new path is duplicated to all in the group
                 while queue:
                     neighbor = queue.pop(0)
-                    if neighbor not in new_group and all(
-                        neighbor in connections[member] for member in new_group
-                    ):
+                    if neighbor not in new_group and all(neighbor in connections[member] for member in new_group):
                         new_group.add(neighbor)
                         # Add neighbors of the current neighbor, excluding those already in the group
-                        queue.extend(
-                            [n for n in connections[neighbor] if n not in new_group]
-                        )
+                        queue.extend([n for n in connections[neighbor] if n not in new_group])
                 # Add the newly formed group to the list of groups
                 groups.append(new_group)
         return tuple(map(tuple, groups))
@@ -290,18 +244,12 @@ def find_duplicates(self) -> tuple[tuple[str]]:
                         min_distance = float("inf")
                         for encoding1 in encodings1:
                             if (
-                                current_min := min(
-                                    face_recognition.face_distance(
-                                        encodings2, encoding1
-                                    )
-                                )
+                                current_min := min(face_recognition.face_distance(encodings2, encoding1))
                             ) < min_distance:
                                 min_distance = current_min
                         checked.add((path1, path2, min_distance))
 
             return self._get_duplicated_groups(checked)
         except Exception as e:
-            self.logger.exception(
-                "Error finding duplicates for images %s", self.filenames
-            )
+            self.logger.exception("Error finding duplicates for images %s", self.filenames)
             raise e
diff --git a/src/hope_dedup_engine/apps/faces/validators.py b/src/hope_dedup_engine/apps/faces/validators.py
index d3f3f5bd..1b8288f4 100644
--- a/src/hope_dedup_engine/apps/faces/validators.py
+++ b/src/hope_dedup_engine/apps/faces/validators.py
@@ -8,9 +8,7 @@ def to_python(self, value):
             if len(values) != 3:
                 raise ValueError("The tuple must have exactly three elements.")
             if not all(-255 <= v <= 255 for v in values):
-                raise ValueError(
-                    "Each value in the tuple must be between -255 and 255."
-                )
+                raise ValueError("Each value in the tuple must be between -255 and 255.")
             return values
         except Exception as e:
             raise ValidationError(
diff --git a/src/hope_dedup_engine/apps/security/models.py b/src/hope_dedup_engine/apps/security/models.py
index 044c1daa..8ed7506a 100644
--- a/src/hope_dedup_engine/apps/security/models.py
+++ b/src/hope_dedup_engine/apps/security/models.py
@@ -13,9 +13,7 @@ class ExternalSystem(models.Model):
 
 
 class User(SecurityMixin, AbstractUser):
-    external_system = models.ForeignKey(
-        ExternalSystem, on_delete=models.SET_NULL, null=True, blank=True
-    )
+    external_system = models.ForeignKey(ExternalSystem, on_delete=models.SET_NULL, null=True, blank=True)
 
     class Meta:
         abstract = False
diff --git a/src/hope_dedup_engine/apps/social/pipeline.py b/src/hope_dedup_engine/apps/social/pipeline.py
index 51610b3d..aea7c84c 100644
--- a/src/hope_dedup_engine/apps/social/pipeline.py
+++ b/src/hope_dedup_engine/apps/social/pipeline.py
@@ -6,9 +6,7 @@
 from social_core.backends.base import BaseAuth
 
 
-def save_to_group(
-    backend: BaseAuth, user: Optional[User] = None, **kwargs: Any
-) -> dict[str, Any]:
+def save_to_group(backend: BaseAuth, user: Optional[User] = None, **kwargs: Any) -> dict[str, Any]:
     if user:
         grp = Group.objects.get(name=config.NEW_USER_DEFAULT_GROUP)
         user.groups.add(grp)
diff --git a/src/hope_dedup_engine/config/__init__.py b/src/hope_dedup_engine/config/__init__.py
index ccd74314..4cd90b32 100644
--- a/src/hope_dedup_engine/config/__init__.py
+++ b/src/hope_dedup_engine/config/__init__.py
@@ -5,9 +5,7 @@
 from environ import Env
 
 if TYPE_CHECKING:
-    ConfigItem: TypeAlias = Union[
-        Tuple[type, Any, str, Any], Tuple[type, Any, str], Tuple[type, Any]
-    ]
+    ConfigItem: TypeAlias = Union[Tuple[type, Any, str, Any], Tuple[type, Any, str], Tuple[type, Any]]
 
 
 DJANGO_HELP_BASE = "https://docs.djangoproject.com/en/5.0/ref/settings"
@@ -22,14 +20,7 @@ class Group(Enum):
 
 
 NOT_SET = "<- not set ->"
-EXPLICIT_SET = [
-    "DATABASE_URL",
-    "SECRET_KEY",
-    "CACHE_URL",
-    "CELERY_BROKER_URL",
-    "MEDIA_ROOT",
-    "STATIC_ROOT",
-]
+EXPLICIT_SET = ["DATABASE_URL", "SECRET_KEY", "CACHE_URL", "CELERY_BROKER_URL", "MEDIA_ROOT", "STATIC_ROOT"]
 
 CONFIG: "Dict[str, ConfigItem]" = {
     "ADMIN_EMAIL": (str, "", "Initial user created at first deploy"),
@@ -38,11 +29,7 @@ class Group(Enum):
     "AUTHENTICATION_BACKENDS": (list, [], setting("authentication-backends")),
     "CACHE_URL": (str, "redis://localhost:6379/0"),
     "CATCH_ALL_EMAIL": (str, "If set all the emails will be sent to this address"),
-    "CELERY_BROKER_URL": (
-        str,
-        NOT_SET,
-        "https://docs.celeryq.dev/en/stable/django/first-steps-with-django.html",
-    ),
+    "CELERY_BROKER_URL": (str, NOT_SET, "https://docs.celeryq.dev/en/stable/django/first-steps-with-django.html"),
     "CELERY_TASK_ALWAYS_EAGER": (
         bool,
         False,
@@ -67,47 +54,21 @@ class Group(Enum):
         "postgres://127.0.0.1:5432/dedupe",
     ),
     "DEBUG": (bool, False, setting("debug"), True),
-    "EMAIL_BACKEND": (
-        str,
-        "django.core.mail.backends.smtp.EmailBackend",
-        setting("email-backend"),
-        True,
-    ),
+    "EMAIL_BACKEND": (str, "django.core.mail.backends.smtp.EmailBackend", setting("email-backend"), True),
     "EMAIL_HOST": (str, "localhost", setting("email-host"), True),
     "EMAIL_HOST_USER": (str, "", setting("email-host-user"), True),
     "EMAIL_HOST_PASSWORD": (str, "", setting("email-host-password"), True),
     "EMAIL_PORT": (int, "25", setting("email-port"), True),
-    "EMAIL_SUBJECT_PREFIX": (
-        str,
-        "[Hope-dedupe]",
-        setting("email-subject-prefix"),
-        True,
-    ),
+    "EMAIL_SUBJECT_PREFIX": (str, "[Hope-dedupe]", setting("email-subject-prefix"), True),
     "EMAIL_USE_LOCALTIME": (bool, False, setting("email-use-localtime"), True),
     "EMAIL_USE_TLS": (bool, False, setting("email-use-tls"), True),
     "EMAIL_USE_SSL": (bool, False, setting("email-use-ssl"), True),
     "EMAIL_TIMEOUT": (str, None, setting("email-timeout"), True),
     "LOGGING_LEVEL": (str, "CRITICAL", setting("logging-level")),
-    "FILE_STORAGE_DEFAULT": (
-        str,
-        "django.core.files.storage.FileSystemStorage",
-        setting("storages"),
-    ),
-    "FILE_STORAGE_MEDIA": (
-        str,
-        "django.core.files.storage.FileSystemStorage",
-        setting("storages"),
-    ),
-    "FILE_STORAGE_STATIC": (
-        str,
-        "django.contrib.staticfiles.storage.StaticFilesStorage",
-        setting("storages"),
-    ),
-    "FILE_STORAGE_HOPE": (
-        str,
-        "django.core.files.storage.FileSystemStorage",
-        setting("storages"),
-    ),
+    "FILE_STORAGE_DEFAULT": (str, "django.core.files.storage.FileSystemStorage", setting("storages")),
+    "FILE_STORAGE_MEDIA": (str, "django.core.files.storage.FileSystemStorage", setting("storages")),
+    "FILE_STORAGE_STATIC": (str, "django.contrib.staticfiles.storage.StaticFilesStorage", setting("storages")),
+    "FILE_STORAGE_HOPE": (str, "django.core.files.storage.FileSystemStorage", setting("storages")),
     "MEDIA_ROOT": (str, None, setting("media-root")),
     "MEDIA_URL": (str, "/media/", setting("media-url")),
     "ROOT_TOKEN": (str, "", ""),
@@ -118,29 +79,16 @@ class Group(Enum):
     "SENTRY_DSN": (str, "", "Sentry DSN"),
     "SENTRY_ENVIRONMENT": (str, "production", "Sentry Environment"),
     "SENTRY_URL": (str, "", "Sentry server url"),
-    "SESSION_COOKIE_DOMAIN": (
-        str,
-        "",
-        setting("std-setting-SESSION_COOKIE_DOMAIN"),
-        "localhost",
-    ),
+    "SESSION_COOKIE_DOMAIN": (str, "", setting("std-setting-SESSION_COOKIE_DOMAIN"), "localhost"),
     "SESSION_COOKIE_HTTPONLY": (bool, True, setting("session-cookie-httponly"), False),
     "SESSION_COOKIE_NAME": (str, "dedupe_session", setting("session-cookie-name")),
     "SESSION_COOKIE_PATH": (str, "/", setting("session-cookie-path")),
     "SESSION_COOKIE_SECURE": (bool, True, setting("session-cookie-secure"), False),
-    "SIGNING_BACKEND": (
-        str,
-        "django.core.signing.TimestampSigner",
-        setting("signing-backend"),
-    ),
+    "SIGNING_BACKEND": (str, "django.core.signing.TimestampSigner", setting("signing-backend")),
     "SOCIAL_AUTH_LOGIN_URL": (str, "/login/", "", ""),
     "SOCIAL_AUTH_RAISE_EXCEPTIONS": (bool, False, "", True),
     "SOCIAL_AUTH_REDIRECT_IS_HTTPS": (bool, True, "", False),
-    "STATIC_FILE_STORAGE": (
-        str,
-        "django.core.files.storage.FileSystemStorage",
-        setting("storages"),
-    ),
+    "STATIC_FILE_STORAGE": (str, "django.core.files.storage.FileSystemStorage", setting("storages")),
     "STATIC_ROOT": (str, None, setting("static-root")),
     "STATIC_URL": (str, "/static/", setting("static-url")),
     "TIME_ZONE": (str, "UTC", setting("std-setting-TIME_ZONE")),
diff --git a/src/hope_dedup_engine/config/fragments/constance.py b/src/hope_dedup_engine/config/fragments/constance.py
index e6b7146b..555dbc49 100644
--- a/src/hope_dedup_engine/config/fragments/constance.py
+++ b/src/hope_dedup_engine/config/fragments/constance.py
@@ -6,11 +6,7 @@
 
 CONSTANCE_CONFIG = {
     "NEW_USER_IS_STAFF": (False, "Set any new user as staff", bool),
-    "NEW_USER_DEFAULT_GROUP": (
-        DEFAULT_GROUP_NAME,
-        "Group to assign to any new user",
-        str,
-    ),
+    "NEW_USER_DEFAULT_GROUP": (DEFAULT_GROUP_NAME, "Group to assign to any new user", str),
     "DNN_BACKEND": (
         cv2.dnn.DNN_BACKEND_OPENCV,
         "Specifies the computation backend to be used by OpenCV for deep learning inference.",
diff --git a/src/hope_dedup_engine/config/fragments/csp.py b/src/hope_dedup_engine/config/fragments/csp.py
index a0e02fbd..3070bdaa 100644
--- a/src/hope_dedup_engine/config/fragments/csp.py
+++ b/src/hope_dedup_engine/config/fragments/csp.py
@@ -1,27 +1,7 @@
 # CSP_DEFAULT_SRC = ["'self'", "'unsafe-inline'", "'same-origin'", "fonts.googleapis.com", 'fonts.gstatic.com', 'data:',
 #                    'blob:', "cdn.redoc.ly"]
 CSP_DEFAULT_SRC = ["'self'", "'unsafe-inline'"]
-CSP_STYLE_SRC = [
-    "'self'",
-    "'unsafe-inline'",
-    "same-origin",
-    "fonts.googleapis.com",
-    "fonts.gstatic.com",
-]
+CSP_STYLE_SRC = ["'self'", "'unsafe-inline'", "same-origin", "fonts.googleapis.com", "fonts.gstatic.com"]
 CSP_SCRIPT_SRC = ["'self'", "'unsafe-inline'", "same-origin", "blob:"]
-CSP_IMG_SRC = [
-    "'self'",
-    "'unsafe-inline'",
-    "same-origin",
-    "blob:",
-    "data:",
-    "cdn.redoc.ly",
-]
-CSP_FONT_SRC = [
-    "'self'",
-    "fonts.googleapis.com",
-    "same-origin",
-    "fonts.googleapis.com",
-    "fonts.gstatic.com",
-    "blob:",
-]
+CSP_IMG_SRC = ["'self'", "'unsafe-inline'", "same-origin", "blob:", "data:", "cdn.redoc.ly"]
+CSP_FONT_SRC = ["'self'", "fonts.googleapis.com", "same-origin", "fonts.gstatic.com", "blob:"]
diff --git a/src/hope_dedup_engine/state.py b/src/hope_dedup_engine/state.py
index 28253cbf..0973df8f 100644
--- a/src/hope_dedup_engine/state.py
+++ b/src/hope_dedup_engine/state.py
@@ -38,16 +38,7 @@ def add_cookie(
         samesite: str | None = None,
     ) -> None:
         value = json.dumps(value)
-        self.cookies[key] = [
-            value,
-            max_age,
-            expires,
-            path,
-            domain,
-            secure,
-            httponly,
-            samesite,
-        ]
+        self.cookies[key] = [value, max_age, expires, path, domain, secure, httponly, samesite]
 
     def get_cookie(self, name: str) -> Optional[str]:
         return self.request.COOKIES.get(name)
diff --git a/src/hope_dedup_engine/utils/http.py b/src/hope_dedup_engine/utils/http.py
index 236b1a9b..f4300e8d 100644
--- a/src/hope_dedup_engine/utils/http.py
+++ b/src/hope_dedup_engine/utils/http.py
@@ -39,7 +39,5 @@ def absolute_uri(url: str | None = None) -> str:
     return uri
 
 
-def absolute_reverse(
-    name: str, args: Tuple[Any] | None = None, kwargs: Dict[str, Any] | None = None
-) -> str:
+def absolute_reverse(name: str, args: Tuple[Any] | None = None, kwargs: Dict[str, Any] | None = None) -> str:
     return absolute_uri(reverse(name, args=args, kwargs=kwargs))
diff --git a/src/hope_dedup_engine/utils/security.py b/src/hope_dedup_engine/utils/security.py
index 9ee29f33..4ed19bd0 100644
--- a/src/hope_dedup_engine/utils/security.py
+++ b/src/hope_dedup_engine/utils/security.py
@@ -4,7 +4,4 @@
 
 
 def is_root(request: Any, *args: Any, **kwargs: Any) -> bool:
-    return (
-        request.user.is_superuser
-        and request.headers.get(settings.ROOT_TOKEN_HEADER) == settings.ROOT_TOKEN != ""
-    )
+    return request.user.is_superuser and request.headers.get(settings.ROOT_TOKEN_HEADER) == settings.ROOT_TOKEN != ""