diff --git a/.github/workflows/earthly_project_check.yml b/.github/workflows/earthly_project_check.yml new file mode 100644 index 00000000..422a429c --- /dev/null +++ b/.github/workflows/earthly_project_check.yml @@ -0,0 +1,61 @@ +name: run_earthly_checks + + +on: [push, pull_request] + +jobs: + run_earthly_checks: + strategy: + fail-fast: false + matrix: + os: [ubuntu-latest] + python-version: ['3.10', '3.11'] + runs-on: ${{ matrix.os }} + + steps: + - uses: actions/checkout@v3 + +# In conflict with pyre-check +# - name: Set up Python ${{ matrix.python-version }} +# uses: actions/setup-python@v4 +# with: +# python-version: ${{ matrix.python-version }} +# +# - name: Cache burny_common .venv +# uses: actions/cache@v3 +# with: +# path: burny_common/.venv +# key: burny_common-${{ matrix.os }}-${{ steps.setup-python.outputs.python-version }}-poetry-${{ hashFiles('poetry.lock') }} +# +# - name: Cache discord_bot .venv +# uses: actions/cache@v3 +# with: +# path: discord_bot/.venv +# key: discord_bot-${{ matrix.os }}-${{ steps.setup-python.outputs.python-version }}-poetry-${{ hashFiles('poetry.lock') }} +# +# - name: Cache fastapi_server .venv +# uses: actions/cache@v3 +# with: +# path: fastapi_server/.venv +# key: fastapi_server-${{ matrix.os }}-${{ steps.setup-python.outputs.python-version }}-poetry-${{ hashFiles('poetry.lock') }} +# +# - name: Cache python_examples .venv +# uses: actions/cache@v3 +# with: +# path: python_examples/.venv +# key: python_examples-${{ matrix.os }}-${{ steps.setup-python.outputs.python-version }}-poetry-${{ hashFiles('poetry.lock') }} + + # https://earthly.dev/get-earthly + - name: Install Earthly + run: sudo /bin/sh -c 'wget https://github.com/earthly/earthly/releases/latest/download/earthly-linux-amd64 -O /usr/local/bin/earthly && chmod +x /usr/local/bin/earthly && /usr/local/bin/earthly bootstrap --with-autocomplete' + + - name: Install + run: | + touch discord_bot/SECRETS.toml + earthly +install-all --verbose true --PYTHONVERSION=${{ matrix.python-version }} + + - name: Run code checks and tests + run: earthly +all --verbose true --PYTHONVERSION=${{ matrix.python-version }} + +# - name: Save cache +# run: earthly +export-cache-backend --verbose true --PYTHONVERSION=${{ matrix.python-version }} diff --git a/.github/workflows/publish_burny_common.yml b/.github/workflows/publish_burny_common.yml new file mode 100644 index 00000000..9a4e5707 --- /dev/null +++ b/.github/workflows/publish_burny_common.yml @@ -0,0 +1,110 @@ +name: publish_burny_common + + +on: + push: + paths: + - burny_common/** + - .github/workflows/publish_burny_common.yml + pull_request: + branches: + - master + - develop + +env: + SUBDIRECTORY: burny_common + +jobs: + run_code_check: + name: run_code_check_py${{ matrix.python-version }} + strategy: + fail-fast: false + matrix: + os: [ubuntu-latest] + python-version: ['3.8', '3.9', '3.10', '3.11'] + runs-on: ${{ matrix.os }} + + steps: + - uses: actions/checkout@v3 + +# In conflict with pyre-check +# - name: Set up Python ${{ matrix.python-version }} +# uses: actions/setup-python@v4 +# with: +# python-version: ${{ matrix.python-version }} +# +# - name: Cache .venv +# uses: actions/cache@v3 +# with: +# path: .venv +# key: ${{ env.SUBDIRECTORY }}-${{ matrix.os }}-${{ steps.setup-python.outputs.python-version }}-poetry-${{ hashFiles('poetry.lock') }} + + # https://earthly.dev/get-earthly + - name: Install Earthly + run: sudo /bin/sh -c 'wget https://github.com/earthly/earthly/releases/latest/download/earthly-linux-amd64 -O /usr/local/bin/earthly && 
chmod +x /usr/local/bin/earthly && /usr/local/bin/earthly bootstrap --with-autocomplete' + + - name: Install + working-directory: ${{ env.SUBDIRECTORY }} + run: earthly +install-dev --verbose true --PYTHONVERSION=${{ matrix.python-version }} + + - name: Run code checks and tests + working-directory: ${{ env.SUBDIRECTORY }} + run: earthly +all --verbose true --PYTHONVERSION=${{ matrix.python-version }} + +# - name: Save cache +# working-directory: ${{ env.SUBDIRECTORY }} +# run: earthly +export-cache --verbose true --PYTHONVERSION=${{ matrix.python-version }} + + build_and_publish: + strategy: + fail-fast: false + matrix: + os: [ubuntu-latest] + python-version: ['3.10'] + runs-on: ${{ matrix.os }} + + steps: + - uses: actions/checkout@v3 + + - name: Set up Python ${{ matrix.python-version }} + id: setup-python + uses: actions/setup-python@v4 + with: + python-version: ${{ matrix.python-version }} + + - name: Install Poetry + run: pip install poetry + + - name: Make poetry use local .venv folder + run: poetry config virtualenvs.in-project true + + - name: Set up cache + uses: actions/cache@v3 + with: + path: ${{ env.SUBDIRECTORY }}/.venv + key: ${{ matrix.os }}-${{ steps.setup-python.outputs.python-version }}-poetry-${{ env.SUBDIRECTORY }}-${{ hashFiles('poetry.lock') }} + + - name: Install dependencies + working-directory: ${{ env.SUBDIRECTORY }} + run: poetry install --no-dev + + - name: Build + working-directory: ${{ env.SUBDIRECTORY }} + run: poetry build + + - id: wait-for-jobs + uses: yogeshlonkar/wait-for-jobs@v0 + with: + gh-token: ${{ secrets.GITHUB_TOKEN }} + jobs: | + run_code_check_py3.8 + run_code_check_py3.9 + run_code_check_py3.10 + run_code_check_py3.11 + + # Publish package on pypi + - name: Publish + if: github.ref == 'refs/heads/develop' && github.event_name == 'push' + working-directory: ${{ env.SUBDIRECTORY }} + continue-on-error: true + run: poetry publish --username ${{ secrets.pypi_username }} --password ${{ secrets.pypi_password }} diff --git a/.github/workflows/publish_example_package.yml b/.github/workflows/publish_example_package.yml deleted file mode 100644 index 0d9fd1a6..00000000 --- a/.github/workflows/publish_example_package.yml +++ /dev/null @@ -1,54 +0,0 @@ -name: publish_example_package - - -on: - push: - paths: - - example_package/** - pull_request: - branches: - - master - - develop - -jobs: - build_and_publish: - env: - SUBDIRECTORY: example_package - strategy: - fail-fast: false - matrix: - os: [ubuntu-latest] - python-version: ['3.10'] - runs-on: ${{ matrix.os }} - - steps: - - uses: actions/checkout@v2 - - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v1 - with: - python-version: ${{ matrix.python-version }} - - - name: Cache poetry - uses: actions/cache@v2 - with: - path: ~/.cache/pypoetry/virtualenvs - key: ${{ runner.os }}-${{ matrix.python-version }}-poetry-${{ env.SUBDIRECTORY }}-${{ hashFiles('poetry.lock') }} - - - name: Install dependencies - run: | - python -m pip install poetry - cd ${{ env.SUBDIRECTORY }} - poetry install --no-dev - - - name: Build - run: | - cd ${{ env.SUBDIRECTORY }} - poetry build - - # Publish package on pypi - - name: Publish - continue-on-error: true - run: | - cd ${{ env.SUBDIRECTORY }} - poetry publish --username ${{ secrets.pypi_username }} --password ${{ secrets.pypi_password }} diff --git a/.github/workflows/python_examples.yml b/.github/workflows/python_examples.yml index 6b0a7529..82553d32 100644 --- a/.github/workflows/python_examples.yml +++ 
b/.github/workflows/python_examples.yml @@ -6,69 +6,132 @@ on: paths: - python_examples/** - poetry.lock + - .github/workflows/python_examples.yml pull_request: branches: - master - develop +env: + SUBDIRECTORY: python_examples + jobs: + run_code_check: + strategy: + fail-fast: false + matrix: + os: [ubuntu-latest] + python-version: ['3.8', '3.9', '3.10', '3.11'] + runs-on: ${{ matrix.os }} + + steps: + - uses: actions/checkout@v3 + +# In conflict with pyre-check +# - name: Set up Python ${{ matrix.python-version }} +# uses: actions/setup-python@v4 +# with: +# python-version: ${{ matrix.python-version }} +# +# - name: Cache .venv +# uses: actions/cache@v3 +# with: +# path: .venv +# key: ${{ env.SUBDIRECTORY }}-${{ matrix.os }}-${{ steps.setup-python.outputs.python-version }}-poetry-${{ hashFiles('poetry.lock') }} + + - name: Start MongoDB + uses: ankane/setup-mongodb@v1 + + - name: Start Postgres + uses: ankane/setup-postgres@v1 + with: + postgres-version: 14 + + # https://earthly.dev/get-earthly + - name: Install Earthly + run: sudo /bin/sh -c 'wget https://github.com/earthly/earthly/releases/latest/download/earthly-linux-amd64 -O /usr/local/bin/earthly && chmod +x /usr/local/bin/earthly && /usr/local/bin/earthly bootstrap --with-autocomplete' + + - name: Install + working-directory: ${{ env.SUBDIRECTORY }} + run: earthly +install-dev --verbose true --PYTHONVERSION=${{ matrix.python-version }} + + - name: Run code checks and tests + working-directory: ${{ env.SUBDIRECTORY }} + run: earthly +all --verbose true --PYTHONVERSION=${{ matrix.python-version }} + +# - name: Save cache +# working-directory: ${{ env.SUBDIRECTORY }} +# run: earthly +export-cache --verbose true --PYTHONVERSION=${{ matrix.python-version }} + run_python_examples: - env: - SUBDIRECTORY: python_examples + needs: [run_code_check] strategy: fail-fast: false matrix: os: [ubuntu-latest] - python-version: ['3.8', '3.9', '3.10'] - mongodb-version: [5.0.0] + python-version: ['3.9', '3.10', '3.11'] + include: + - os: macos-latest + python-version: '3.10' + - os: windows-latest + python-version: '3.10' + runs-on: ${{ matrix.os }} steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v2 + uses: actions/setup-python@v4 with: python-version: ${{ matrix.python-version }} - - name: Cache poetry - uses: actions/cache@v2 + - name: Install Poetry + run: pip install poetry + + - name: Make poetry use local .venv folder + run: poetry config virtualenvs.in-project true + + - name: Set up cache + if: matrix.os != 'windows-latest' + uses: actions/cache@v3 with: - path: ~/.cache/pypoetry/virtualenvs - key: ${{ runner.os }}-${{ matrix.python-version }}-poetry-${{ hashFiles('poetry.lock') }} + path: ${{ env.SUBDIRECTORY }}.venv + key: ${{ matrix.os }}-$(python --version)-poetry-${{ env.SUBDIRECTORY }}-${{ hashFiles('poetry.lock') }} - - name: Start MongoDB in docker - run: | - docker run --rm -d -p 27017-27019:27017-27019 --name mongodb mongo:${{ matrix.mongodb-version }} + - name: Start MongoDB + uses: ankane/setup-mongodb@v1 +# with: +# mongodb-version: 5.0 - - name: Start Postgres in docker - run: | - docker run --rm -d --name postgresql-container -p 5432:5432 \ - -e POSTGRES_USER=postgres -e POSTGRES_PASSWORD=changeme postgres:9.6.23-alpine3.14 +# - name: Start MongoDB in docker +# if: ${{ matrix.os != 'macos-latest'}} +# run: | +# docker run --rm -d -p 27017-27019:27017-27019 --name mongodb mongo:5.0 - - uses: actions/cache@v2 + - name: Start 
Postgres + uses: ankane/setup-postgres@v1 with: - path: ~/.cache/pypoetry/virtualenvs - key: ${{ runner.os }}-${{ matrix.python-version }}-poetry-${{ hashFiles('poetry.lock') }} + postgres-version: 14 + +# - name: Start Postgres in docker +# if: ${{ matrix.os == 'ubuntu-latest'}} +# run: | +# docker run --rm -d --name postgresql-container -p 5432:5432 \ +# -e POSTGRES_USER=postgres -e POSTGRES_PASSWORD=changeme postgres:9.6.23-alpine3.14 - name: Print environment variables (linux) run: | printenv - - name: Install dependencies - run: | - python -m pip install poetry - poetry install + - name: Install Python dependencies + working-directory: ${{ env.SUBDIRECTORY }} + run: poetry install - - name: Test with pytest - run: | - poetry run pytest ${{ env.SUBDIRECTORY }} + - name: Run Python tests + working-directory: ${{ env.SUBDIRECTORY }} + run: poetry run python -m pytest - name: Run main.py - run: | - poetry run python ${{ env.SUBDIRECTORY }}/main.py - - - name: Run radon (cyclomatic complexity report) - # './' denotes the current directory - run: | - poetry run radon cc ./ -a -nb + working-directory: ${{ env.SUBDIRECTORY }} + run: poetry run python main.py diff --git a/.github/workflows/sonarqube.yml b/.github/workflows/sonarqube.yml new file mode 100644 index 00000000..b6e4eba7 --- /dev/null +++ b/.github/workflows/sonarqube.yml @@ -0,0 +1,26 @@ +name: Sonarqube + +on: + push: + branches: + - develop + + +jobs: + build: + name: Sonarqube + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + with: + fetch-depth: 0 # Shallow clones should be disabled for a better relevancy of analysis + - uses: sonarsource/sonarqube-scan-action@master + env: + SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }} + SONAR_HOST_URL: ${{ secrets.SONAR_HOST_URL }} + # If you wish to fail your job when the Quality Gate is red, uncomment the + # following lines. This would typically be used to fail a deployment. 
+ # - uses: sonarsource/sonarqube-quality-gate-action@master + # timeout-minutes: 5 + # env: + # SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }} \ No newline at end of file diff --git a/.github/workflows/test_discord_bot.yml b/.github/workflows/test_discord_bot.yml new file mode 100644 index 00000000..c547d541 --- /dev/null +++ b/.github/workflows/test_discord_bot.yml @@ -0,0 +1,126 @@ +name: test_discord_bot + +on: + push: + paths: + - discord_bot/** + - .github/workflows/test_discord_bot.yml + pull_request: + branches: + - master + - develop + +env: + SUBDIRECTORY: discord_bot + IMAGENAME: discord_bot + VERSION_NUMBER: 1.0.0 + +jobs: + test_backend: + name: test_backend_py${{ matrix.python-version }} + strategy: + fail-fast: false + matrix: + os: [ubuntu-latest] + python-version: ['3.8', '3.9', '3.10', '3.11'] + runs-on: ${{ matrix.os }} + + steps: + - uses: actions/checkout@v3 + + # In conflict with pyre-check + # - name: Set up Python ${{ matrix.python-version }} + # uses: actions/setup-python@v4 + # with: + # python-version: ${{ matrix.python-version }} + # + # - name: Cache .venv + # uses: actions/cache@v3 + # with: + # path: .venv + # key: ${{ env.SUBDIRECTORY }}-${{ matrix.os }}-${{ steps.setup-python.outputs.python-version }}-poetry-${{ hashFiles('poetry.lock') }} + + # https://earthly.dev/get-earthly + - name: Install Earthly + run: sudo /bin/sh -c 'wget https://github.com/earthly/earthly/releases/latest/download/earthly-linux-amd64 -O /usr/local/bin/earthly && chmod +x /usr/local/bin/earthly && /usr/local/bin/earthly bootstrap --with-autocomplete' + + - name: Install + working-directory: ${{ env.SUBDIRECTORY }} + run: | + touch SECRETS.toml + earthly +install-dev --verbose true --PYTHONVERSION=${{ matrix.python-version }} + + - name: Run code checks and tests + working-directory: ${{ env.SUBDIRECTORY }} + run: earthly +all --verbose true --PYTHONVERSION=${{ matrix.python-version }} + + # - name: Save cache + # working-directory: ${{ env.SUBDIRECTORY }} + # run: earthly +export-cache --verbose true --PYTHONVERSION=${{ matrix.python-version }} + + build_and_deploy_docker_image: + strategy: + fail-fast: false + matrix: + os: [ubuntu-latest] + runs-on: ${{ matrix.os }} + + steps: + - uses: actions/checkout@v3 + + - name: Build docker image + working-directory: ${{ env.SUBDIRECTORY }} + run: | + docker build -t burnysc2/${{ env.IMAGENAME }}:latest . + docker build -t burnysc2/${{ env.IMAGENAME }}:${{ env.VERSION_NUMBER}} . 
+ + - name: Login to DockerHub + uses: docker/login-action@v2 + with: + username: ${{ secrets.DOCKERHUB_USERNAME }} + password: ${{ secrets.DOCKERHUB_TOKEN }} + + - id: wait-for-jobs + uses: yogeshlonkar/wait-for-jobs@v0 + with: + gh-token: ${{ secrets.GITHUB_TOKEN }} + jobs: | + test_backend_py3.8 + test_backend_py3.9 + test_backend_py3.10 + test_backend_py3.11 + + - name: Upload docker image + if: github.ref == 'refs/heads/develop' && github.event_name == 'push' + run: | + docker push burnysc2/${{ env.IMAGENAME }}:latest + docker push burnysc2/${{ env.IMAGENAME }}:${{ env.VERSION_NUMBER}} + + deploy_backend_prod: + if: github.ref == 'refs/heads/develop' + needs: [build_and_deploy_docker_image] + strategy: + matrix: + os: [ubuntu-latest] + runs-on: ${{ matrix.os }} + env: + USERNAME: discordbot + + steps: + # Requires the following 3 lines added to "visudo" on the server + # discordbot ALL=(ALL) NOPASSWD: /bin/systemctl disable --now discordbot + # discordbot ALL=(ALL) NOPASSWD: /bin/systemctl daemon-reload + # discordbot ALL=(ALL) NOPASSWD: /bin/systemctl enable --now discordbot + - name: Update docker image on server and restart server + uses: appleboy/ssh-action@master + with: + host: ${{ secrets.HOST }} + port: 22 + username: ${{ env.USERNAME }} + key: ${{ secrets.DISCORDBOTKEY }} + passphrase: '' + script: | + sudo /bin/systemctl disable --now ${{ env.USERNAME }} + docker image prune -af + docker pull burnysc2/${{ env.IMAGENAME }}:latest + sudo /bin/systemctl enable --now ${{ env.USERNAME }} diff --git a/.github/workflows/test_fastapi_server.yml b/.github/workflows/test_fastapi_server.yml index 0b135210..fbd474c8 100644 --- a/.github/workflows/test_fastapi_server.yml +++ b/.github/workflows/test_fastapi_server.yml @@ -6,60 +6,153 @@ on: paths: - burny_common/** - fastapi_server/** - - poetry.lock - .github/workflows/test_fastapi_server.yml pull_request: branches: - master - develop +env: + SUBDIRECTORY: fastapi_server + IMAGENAME: fastapi_server + VERSION_NUMBER: 1.0.0 + jobs: test_backend: - env: - SUBDIRECTORY: fastapi_server - DATABASE_PATH: sqlite:///data/sqlmodel.db + name: test_backend_py${{ matrix.python-version }} + strategy: + fail-fast: false + matrix: + os: [ubuntu-latest] + python-version: ['3.10', '3.11'] + runs-on: ${{ matrix.os }} + + steps: + - uses: actions/checkout@v3 + +# In conflict with pyre-check +# - name: Set up Python ${{ matrix.python-version }} +# uses: actions/setup-python@v4 +# with: +# python-version: ${{ matrix.python-version }} +# +# - name: Cache .venv +# uses: actions/cache@v3 +# with: +# path: .venv +# key: ${{ env.SUBDIRECTORY }}-${{ matrix.os }}-${{ steps.setup-python.outputs.python-version }}-poetry-${{ hashFiles('poetry.lock') }} + + # https://earthly.dev/get-earthly + - name: Install Earthly + run: sudo /bin/sh -c 'wget https://github.com/earthly/earthly/releases/latest/download/earthly-linux-amd64 -O /usr/local/bin/earthly && chmod +x /usr/local/bin/earthly && /usr/local/bin/earthly bootstrap --with-autocomplete' + + - name: Install + working-directory: ${{ env.SUBDIRECTORY }} + run: earthly +install-dev --verbose true --PYTHONVERSION=${{ matrix.python-version }} + + - name: Run code checks and tests + working-directory: ${{ env.SUBDIRECTORY }} + run: earthly +all --verbose true --PYTHONVERSION=${{ matrix.python-version }} + +# - name: Save cache +# working-directory: ${{ env.SUBDIRECTORY }} +# run: earthly +export-cache --verbose true --PYTHONVERSION=${{ matrix.python-version }} + + build_and_deploy_docker_image: strategy: fail-fast: false 
matrix: os: [ubuntu-latest] - python-version: ['3.8', '3.9', '3.10'] runs-on: ${{ matrix.os }} steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v1 + - name: Build docker image + working-directory: ${{ env.SUBDIRECTORY }} + run: | + docker build -t burnysc2/${{ env.IMAGENAME }}:latest . + docker build -t burnysc2/${{ env.IMAGENAME }}:latest_dev . + docker build -t burnysc2/${{ env.IMAGENAME }}:${{ env.VERSION_NUMBER}} . + + - name: Login to DockerHub + uses: docker/login-action@v2 with: - python-version: ${{ matrix.python-version }} + username: ${{ secrets.DOCKERHUB_USERNAME }} + password: ${{ secrets.DOCKERHUB_TOKEN }} - - name: Cache poetry - uses: actions/cache@v2 + - id: wait-for-jobs + uses: yogeshlonkar/wait-for-jobs@v0 with: - path: ~/.cache/pypoetry/virtualenvs - key: ${{ runner.os }}-${{ matrix.python-version }}-poetry-${{ hashFiles('poetry.lock') }} + gh-token: ${{ secrets.GITHUB_TOKEN }} + jobs: | + test_backend_py3.10 + test_backend_py3.11 - - name: Install Python dependencies + - name: Upload docker image (develop) + if: github.ref == 'refs/heads/develop' && github.event_name == 'push' run: | - python -m pip install poetry - poetry install + docker push burnysc2/${{ env.IMAGENAME }}:latest_dev + docker push burnysc2/${{ env.IMAGENAME }}:${{ env.VERSION_NUMBER}} - - name: Run Python tests - run: | - poetry run pytest ${{ env.SUBDIRECTORY }} + - name: Upload docker image (master) + if: github.ref == 'refs/heads/master' && github.event_name == 'push' + run: docker push burnysc2/${{ env.IMAGENAME }}:latest - deploy_backend: + deploy_backend_dev: + if: github.ref == 'refs/heads/develop' + needs: [build_and_deploy_docker_image] strategy: matrix: os: [ubuntu-latest] runs-on: ${{ matrix.os }} - needs: [test_backend] + env: + USERNAME: fastapidev steps: - - uses: actions/checkout@v2 + # Requires the following 3 lines added to "visudo" on the server + # fastapidev ALL=(ALL) NOPASSWD: /bin/systemctl disable --now fastapidev + # fastapidev ALL=(ALL) NOPASSWD: /bin/systemctl daemon-reload + # fastapidev ALL=(ALL) NOPASSWD: /bin/systemctl enable --now fastapidev + - name: Update docker image on server and restart server + uses: appleboy/ssh-action@master + with: + host: ${{ secrets.HOST }} + port: 22 + username: ${{ env.USERNAME }} + key: ${{ secrets.FASTAPISERVERDEVKEY }} + passphrase: '' + script: | + sudo /bin/systemctl disable --now ${{ env.USERNAME }} + docker image prune -af + docker pull burnysc2/${{ env.IMAGENAME }}:latest_dev + sudo /bin/systemctl enable --now ${{ env.USERNAME }} - - name: Deploy backend - if: github.ref == 'refs/heads/master' - run: | - rm -rf data - # TODO Stop/disable service, upload to server, update poetry packages, start/enable service + deploy_backend_prod: + if: github.ref == 'refs/heads/master' + needs: [build_and_deploy_docker_image] + strategy: + matrix: + os: [ubuntu-latest] + runs-on: ${{ matrix.os }} + env: + USERNAME: fastapi + + steps: + # Requires the following 3 lines added to "visudo" on the server + # fastapi ALL=(ALL) NOPASSWD: /bin/systemctl disable --now fastapi + # fastapi ALL=(ALL) NOPASSWD: /bin/systemctl daemon-reload + # fastapi ALL=(ALL) NOPASSWD: /bin/systemctl enable --now fastapi + - name: Update docker image on server and restart server + uses: appleboy/ssh-action@master + with: + host: ${{ secrets.HOST }} + port: 22 + username: ${{ env.USERNAME }} + key: ${{ secrets.FASTAPISERVERKEY }} + passphrase: '' + script: | + sudo 
/bin/systemctl disable --now ${{ env.USERNAME }} + docker image prune -af + docker pull burnysc2/${{ env.IMAGENAME }}:latest + sudo /bin/systemctl enable --now ${{ env.USERNAME }} diff --git a/.github/workflows/test_svelte_frontend.yml b/.github/workflows/test_svelte_frontend.yml deleted file mode 100644 index 7458ee64..00000000 --- a/.github/workflows/test_svelte_frontend.yml +++ /dev/null @@ -1,150 +0,0 @@ -name: test_svelte_frontend - - -on: - push: - paths: - - burny_common/** - - fastapi_server/** - - poetry.lock - - svelte_frontend/** - - .github/workflows/test_svelte_frontend.yml - pull_request: - branches: - - master - - develop - -jobs: - test_frontend: - env: - SUBDIRECTORY: svelte_frontend - strategy: - fail-fast: false - matrix: - os: [ubuntu-latest] - node: ['12', '14', '16'] - runs-on: ${{ matrix.os }} - - steps: - - uses: actions/checkout@v2 - - - name: Set up Node ${{ matrix.node }} - uses: actions/setup-node@v1 - with: - node-version: ${{ matrix.node }} - - - name: Cache npm - uses: actions/cache@v2 - with: - path: ~/.npm - key: ${{ runner.os }}-${{ matrix.node }}-${{ env.SUBDIRECTORY }}-node-${{ hashFiles('**/package-lock.json') }} - - - name: Install npm dependencies - run: | - cd ${{ env.SUBDIRECTORY }} - npm install - - # - name: Run tests - # run: | - # npm run test - - test_e2e: - env: - SUBDIRECTORY: svelte_frontend - strategy: - fail-fast: false - matrix: - os: [ubuntu-latest] - python-version: [3.9] - node: ['16'] - runs-on: ${{ matrix.os }} - - steps: - - uses: actions/checkout@v2 - - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v1 - with: - python-version: ${{ matrix.python-version }} - - - name: Cache poetry - uses: actions/cache@v2 - with: - path: ~/.cache/pypoetry/virtualenvs - key: ${{ runner.os }}-${{ matrix.python-version }}-poetry-${{ hashFiles('poetry.lock') }} - - - name: Install Python dependencies - run: | - python -m pip install poetry - poetry install - - - name: Set up Node ${{ matrix.node }} - uses: actions/setup-node@v1 - with: - node-version: ${{ matrix.node }} - - - name: Cache npm - uses: actions/cache@v2 - with: - path: ~/.npm - key: ${{ runner.os }}-${{ matrix.node }}-${{ env.SUBDIRECTORY }}-node-${{ hashFiles('**/package-lock.json') }} - - - name: Install npm dependencies - run: | - cd ${{ env.SUBDIRECTORY }} - npm install - - - name: Run e2e tests - run: | - poetry run pytest ${{ env.SUBDIRECTORY }}/test_frontend/test_e2e.py - - - test_integration: - env: - SUBDIRECTORY: svelte_frontend - strategy: - fail-fast: false - matrix: - os: [ubuntu-latest] - python-version: ['3.9'] - node: ['16'] - runs-on: ${{ matrix.os }} - - steps: - - uses: actions/checkout@v2 - - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v1 - with: - python-version: ${{ matrix.python-version }} - - - name: Cache poetry - uses: actions/cache@v2 - with: - path: ~/.cache/pypoetry/virtualenvs - key: ${{ runner.os }}-${{ matrix.python-version }}-poetry-${{ hashFiles('poetry.lock') }} - - - name: Install Python dependencies - run: | - python -m pip install poetry - poetry install - - - name: Set up Node ${{ matrix.node }} - uses: actions/setup-node@v1 - with: - node-version: ${{ matrix.node }} - - - name: Cache npm - uses: actions/cache@v2 - with: - path: ~/.npm - key: ${{ runner.os }}-${{ matrix.node }}-${{ env.SUBDIRECTORY }}-node-${{ hashFiles('**/package-lock.json') }} - - - name: Install npm dependencies - run: | - cd ${{ env.SUBDIRECTORY }} - npm install - - - name: Run integration tests - run: | - 
poetry run pytest ${{ env.SUBDIRECTORY }}/test_frontend/test_integration.py diff --git a/.github/workflows/test_transcriber_backend.yml b/.github/workflows/test_transcriber_backend.yml new file mode 100644 index 00000000..72a7538b --- /dev/null +++ b/.github/workflows/test_transcriber_backend.yml @@ -0,0 +1,124 @@ +name: test_transcriber_backend + + +on: + push: + paths: + - transcribe_website/transcriber_backend/** + - .github/workflows/test_transcriber_backend.yml + pull_request: + branches: + - develop + +env: + SUBDIRECTORY: transcribe_website/transcriber_backend + IMAGENAME: transcribe_worker + VERSION_NUMBER: 1.0.0 + +jobs: + test_backend: + name: test_backend_py${{ matrix.python-version }} + strategy: + fail-fast: false + matrix: + os: [ubuntu-latest] + python-version: ['3.8', '3.9', '3.10'] + runs-on: ${{ matrix.os }} + + steps: + - uses: actions/checkout@v3 + +# In conflict with pyre-check +# - name: Set up Python ${{ matrix.python-version }} +# uses: actions/setup-python@v4 +# with: +# python-version: ${{ matrix.python-version }} +# +# - name: Cache .venv +# uses: actions/cache@v3 +# with: +# path: .venv +# key: ${{ env.SUBDIRECTORY }}-${{ matrix.os }}-${{ steps.setup-python.outputs.python-version }}-poetry-${{ hashFiles('poetry.lock') }} + + # https://earthly.dev/get-earthly + - name: Install Earthly + run: sudo /bin/sh -c 'wget https://github.com/earthly/earthly/releases/latest/download/earthly-linux-amd64 -O /usr/local/bin/earthly && chmod +x /usr/local/bin/earthly && /usr/local/bin/earthly bootstrap --with-autocomplete' + + - name: Install + working-directory: ${{ env.SUBDIRECTORY }} + run: earthly +install-dev --verbose true --PYTHONVERSION=${{ matrix.python-version }} + + - name: Run code checks and tests + working-directory: ${{ env.SUBDIRECTORY }} + run: earthly +all --verbose true --PYTHONVERSION=${{ matrix.python-version }} + +# - name: Save cache +# working-directory: ${{ env.SUBDIRECTORY }} +# run: earthly +export-cache --verbose true --PYTHONVERSION=${{ matrix.python-version }} + + build_and_deploy_docker_image: + strategy: + fail-fast: false + matrix: + os: [ubuntu-latest] + runs-on: ${{ matrix.os }} + + steps: + - uses: actions/checkout@v3 + + - name: Build docker image + working-directory: ${{ env.SUBDIRECTORY }} + run: | + docker build -t burnysc2/${{ env.IMAGENAME }}:latest . + docker build -t burnysc2/${{ env.IMAGENAME }}:${{ env.VERSION_NUMBER}} . 
+ + - name: Login to DockerHub + uses: docker/login-action@v2 + with: + username: ${{ secrets.DOCKERHUB_USERNAME }} + password: ${{ secrets.DOCKERHUB_TOKEN }} + + - id: wait-for-jobs + uses: yogeshlonkar/wait-for-jobs@v0 + with: + gh-token: ${{ secrets.GITHUB_TOKEN }} + jobs: | + test_backend_py3.8 + test_backend_py3.9 + test_backend_py3.10 + + - name: Upload docker image + if: github.ref == 'refs/heads/develop' && github.event_name == 'push' + run: | + docker push burnysc2/${{ env.IMAGENAME }}:latest + docker push burnysc2/${{ env.IMAGENAME }}:${{ env.VERSION_NUMBER}} + + deploy_transcribe_worker: + if: github.ref == 'refs/heads/develop' + needs: [build_and_deploy_docker_image] + strategy: + matrix: + os: [ubuntu-latest] + runs-on: ${{ matrix.os }} + env: + USERNAME: transcribe + + steps: + # Requires the following 3 lines added to "visudo" on the server + # transcribe ALL=(ALL) NOPASSWD: /bin/systemctl disable --now transcribe + # transcribe ALL=(ALL) NOPASSWD: /bin/systemctl daemon-reload + # transcribe ALL=(ALL) NOPASSWD: /bin/systemctl enable --now transcribe + - name: Update docker image on server and restart server + uses: appleboy/ssh-action@master + with: + host: ${{ secrets.HOST }} + port: 22 + username: ${{ env.USERNAME }} + key: ${{ secrets.TRANSCRIBEWORKERKEY }} + passphrase: '' + script: | + sudo /bin/systemctl disable --now ${{ env.USERNAME }} + docker image prune -af + docker pull burnysc2/${{ env.IMAGENAME }}:latest + sudo /bin/systemctl enable --now ${{ env.USERNAME }} + diff --git a/.gitignore b/.gitignore index 4e8f9dcd..1e28ac94 100644 --- a/.gitignore +++ b/.gitignore @@ -14,7 +14,6 @@ dist/ downloads/ eggs/ .eggs/ -lib/ lib64/ parts/ sdist/ @@ -106,19 +105,19 @@ venv.bak/ # Pycharm .idea/ -# Vscode -.vscode/ - # Header files *.h -# Frontend -build -.svelte-kit -node_modules -svelte_frontend/test_frontend/latest_logs -svelte_frontend/latest_logs -latest_logs +# Pants workspace files +.pants.* +.pids + +# Pyre +.pyre + +# nim +testresults +nimcache # Test files from main.py hello_world.txt @@ -128,6 +127,3 @@ main.log db.json sqlite_asyncio_example.db todos.db - -# Fastapi files -fastapi_server/data diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 4bf90426..5aa1d236 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,94 +1,25 @@ -repos: -# Check yaml files like this one and github actions if they are valid -- repo: https://github.com/pre-commit/pre-commit-hooks - rev: v4.0.1 - hooks: - - id: check-yaml - -# Autoformat yaml files -- repo: https://github.com/macisamuele/language-formatters-pre-commit-hooks - rev: v2.1.0 - hooks: - - id: pretty-format-yaml - args: [--autofix, --indent, '2'] - -# Check github action workflow files -- repo: https://github.com/sirosen/check-jsonschema - rev: 0.3.2 - hooks: - - id: check-github-workflows - -# Check toml files like pyproject.toml if it is valid -- repo: https://github.com/pre-commit/pre-commit-hooks - rev: v4.0.1 - hooks: - - id: check-toml - -# This removes comments +# To install hooks: +# poetry run pre-commit install +# To update hooks: +# poetry run pre-commit autoupdate +# To run hooks: +# poetry run pre-commit run --all-files --hook-stage push + +# This removes comments which is unwanted #- repo: https://github.com/macisamuele/language-formatters-pre-commit-hooks # rev: v2.1.0 # hooks: # - id: pretty-format-toml # args: [--autofix] -# As the name says -- repo: https://github.com/pre-commit/pre-commit-hooks - rev: v4.0.1 - hooks: - - id: double-quote-string-fixer - -# Convert 
simple things like set([]) to set() -- repo: https://github.com/asottile/pyupgrade - rev: v2.26.0 - hooks: - - id: pyupgrade - -# Remove unused imports -- repo: https://github.com/hadialqattan/pycln - rev: v1.0.3 - hooks: - - id: pycln - args: [--config=pyproject.toml] - -# Convert relative to absolute imports -- repo: https://github.com/MarcoGorelli/absolufy-imports - rev: v0.3.0 - hooks: - - id: absolufy-imports - -# Sort imports -- repo: https://github.com/pycqa/isort - rev: 5.5.4 - hooks: - - id: isort - files: \.(py)$ - args: [--settings-path=pyproject.toml] - -# If project is written in Python 3.9+, it will autofix List[str] to list[str] etc -#- repo: https://github.com/sondrelg/pep585-upgrade -# rev: '' # Use the sha / tag you want to point at -# hooks: -# - id: upgrade-type-hints - -# Autoformat code -- repo: local - hooks: - - id: yapf - name: yapf - stages: [commit] - language: system - entry: poetry run yapf -i - types: [python] - exclude: setup.py - +repos: - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v4.0.1 + rev: v4.2.0 hooks: - # Check if python files are vlaid - - id: check-ast - - id: check-builtin-literals - - id: check-docstring-first - - id: debug-statements + # Check yaml files like this one and github actions if they are valid + - id: check-yaml + # Check toml files like pyproject.toml if it is valid + - id: check-toml - repo: https://github.com/pre-commit/pygrep-hooks rev: v1.9.0 @@ -99,6 +30,19 @@ repos: # Enforce type annotation instead of comment annotation - id: python-use-type-annotations +# Autoformat yaml files +# - repo: https://github.com/macisamuele/language-formatters-pre-commit-hooks +# rev: v2.11.0 +# hooks: +# - id: pretty-format-yaml +# args: [--autofix, --indent, '2'] + +# Check github action workflow files +- repo: https://github.com/sirosen/check-jsonschema + rev: 0.18.4 + hooks: + - id: check-github-workflows + # Detect functions and variables that are never used #- repo: https://github.com/asottile/dead # rev: v1.4.0 @@ -107,51 +51,20 @@ repos: - repo: local hooks: - - id: format_svelte - name: format_svelte - stages: [commit] - language: system - entry: bash -c "cd svelte_frontend && npm run format" - pass_filenames: false - - - id: pylint - name: pylint - stages: [commit] - language: system - entry: poetry run pylint - types: [python] - - # Run mypy type checks - - id: mypy - name: mypy - stages: [commit] - language: system - entry: poetry run mypy . 
- types: [python] - pass_filenames: false - - # Run python examples tests - - id: pytest_python_examples - name: pytest_python_examples - stages: [commit] - language: system - entry: poetry run pytest python_examples - pass_filenames: false - - # Run fastapi server tests - - id: pytest_fastapi_server - name: pytest_fastapi_server + # Install earthly requirements + - id: earthly-install + name: Install earthly requirements stages: [commit] language: system - entry: poetry run pytest fastapi_server + entry: earthly +install-all --verbose true pass_filenames: false - # Run svelte e2e tests - - id: svelte_e2e_tests - name: svelte_e2e_tests + # Check formatting, lint, dont run tests because pre-commit should be fast + - id: earthly-pre-commit + name: Run earthly pre commmit stages: [commit] language: system - entry: poetry run pytest svelte_frontend/test_frontend/test_e2e.py --benchmark-skip --headless + entry: earthly +pre-commit --verbose true pass_filenames: false # - id: pytest-cov diff --git a/.vscode/extensions.json b/.vscode/extensions.json new file mode 100644 index 00000000..890ef2b7 --- /dev/null +++ b/.vscode/extensions.json @@ -0,0 +1,7 @@ +{ + "recommendations": [ + "github.vscode-github-actions", + "eamodio.gitlens", + "wayou.vscode-todo-highlight" + ] +} \ No newline at end of file diff --git a/.vscode/install_requirements.sh b/.vscode/install_requirements.sh new file mode 100644 index 00000000..521946bf --- /dev/null +++ b/.vscode/install_requirements.sh @@ -0,0 +1,9 @@ +# Install python / backend +cd burny_common +poetry install +cd ../discord_bot +poetry install +cd ../fastapi_server +poetry install +cd ../python_examples +poetry install diff --git a/.vscode/tasks.json b/.vscode/tasks.json new file mode 100644 index 00000000..997f5e52 --- /dev/null +++ b/.vscode/tasks.json @@ -0,0 +1,12 @@ +{ + // See https://go.microsoft.com/fwlink/?LinkId=733558 + // for the documentation about the tasks.json format + "version": "2.0.0", + "tasks": [ + { + "label": "Install requirements", + "type": "shell", + "command": "sh .vscode/install_requirements.sh", + } + ] +} \ No newline at end of file diff --git a/Earthfile b/Earthfile new file mode 100644 index 00000000..9e9c81ff --- /dev/null +++ b/Earthfile @@ -0,0 +1,38 @@ +VERSION 0.6 +ARG PYTHONVERSION=3.11 # 3.10 to 3.11 +ARG NIMVERSION=2.0.0 +FROM alpine:3.15 # Is only used for formatting, so image can be as small as possible + +# Run autoformatter on all projects +format: + BUILD ./burny_common+format + BUILD ./discord_bot+format + BUILD ./fastapi_server+format + BUILD ./python_examples+format + BUILD ./transcribe_website/transcriber_backend+format + +install-all: + BUILD ./burny_common+install-dev --PYTHONVERSION=${PYTHONVERSION} + BUILD ./discord_bot+install-dev --PYTHONVERSION=${PYTHONVERSION} + BUILD ./fastapi_server+install-dev --PYTHONVERSION=${PYTHONVERSION} + BUILD ./python_examples+install-dev --PYTHONVERSION=${PYTHONVERSION} + BUILD ./transcribe_website/transcriber_backend+install-dev --PYTHONVERSION=3.10 + +pre-commit: + BUILD ./burny_common+pre-commit --PYTHONVERSION=${PYTHONVERSION} + BUILD ./discord_bot+pre-commit --PYTHONVERSION=${PYTHONVERSION} + BUILD ./fastapi_server+pre-commit --PYTHONVERSION=${PYTHONVERSION} + BUILD ./python_examples+pre-commit --PYTHONVERSION=${PYTHONVERSION} + BUILD ./transcribe_website/transcriber_backend+pre-commit --PYTHONVERSION=3.10 + +check-all: + BUILD ./burny_common+all --PYTHONVERSION=${PYTHONVERSION} + BUILD ./discord_bot+all --PYTHONVERSION=${PYTHONVERSION} + BUILD ./fastapi_server+all 
--PYTHONVERSION=${PYTHONVERSION} + BUILD ./python_examples+all --PYTHONVERSION=${PYTHONVERSION} + BUILD ./transcribe_website/transcriber_backend+all --PYTHONVERSION=3.10 + BUILD ./nim_examples+all --NIMVERSION=${NIMVERSION} + +# Run format-checks, linter and tests +all: + BUILD +check-all diff --git a/README.md b/README.md index ebfdde78..aed37ed2 100644 --- a/README.md +++ b/README.md @@ -1,80 +1,44 @@ -[![publish_example_package](https://github.com/BurnySc2/monorepo/actions/workflows/publish_example_package.yml/badge.svg?branch=master)](https://github.com/BurnySc2/monorepo/actions/workflows/publish_example_package.yml) [![python_examples](https://github.com/BurnySc2/monorepo/actions/workflows/python_examples.yml/badge.svg)](https://github.com/BurnySc2/monorepo/actions/workflows/python_examples.yml) +[![test_discord_bot](https://github.com/BurnySc2/monorepo/actions/workflows/test_discord_bot.yml/badge.svg)](https://github.com/BurnySc2/monorepo/actions/workflows/test_discord_bot.yml) [![test_fastapi_server](https://github.com/BurnySc2/monorepo/actions/workflows/test_fastapi_server.yml/badge.svg)](https://github.com/BurnySc2/monorepo/actions/workflows/test_fastapi_server.yml) -[![test_react_frontend](https://github.com/BurnySc2/monorepo/actions/workflows/test_react_frontend.yml/badge.svg)](https://github.com/BurnySc2/monorepo/actions/workflows/test_react_frontend.yml) -[![test_svelte_frontend](https://github.com/BurnySc2/monorepo/actions/workflows/test_svelte_frontend.yml/badge.svg)](https://github.com/BurnySc2/monorepo/actions/workflows/test_svelte_frontend.yml) +[![run_earthly_checks](https://github.com/BurnySc2/monorepo/actions/workflows/earthly_project_check.yml/badge.svg)](https://github.com/BurnySc2/monorepo/actions/workflows/earthly_project_check.yml) # Monorepo My monorepo for various tools and showcases -# Useful Poetry commands -https://python-poetry.org/docs/cli/ -### Create new project -`poetry init` -### Install dependencies -`poetry install` +# Development +### Pre-requisites +- [Python](https://www.python.org/downloads) + - [Poetry](https://python-poetry.org/docs/) +- [Earthly](https://earthly.dev) -`poetry install --no-dev` -### Add dependencies -`poetry add ` +## VScode +Run VScode task called `Install requirements` or alternatively run `sh .vscode/install_requirements.sh` or alternatively run `poetry install` in the python projects or `npm install` in the frontend projects. -Add dev dependency: +Open the Command Palette and `Workspaces: Add Folder to Workspace...` and select the folders you want to edit. -`poetry add --dev` -### Remove dependencies -`poetry remove ` -### Update dependencies -`poetry update` -### List current and latest available version -`poetry show -l` -### Same as above, but only show outdated -`poetry show -o` -### List of packages -`poetry show` -### Run a file in virtual environment -`poetry run python python_examples/main.py` +Now set up the correct interpreter path (may have to navigate the absolute path, on linux that is `~/.cache/pypoetry/virtualenvs/...`). The running the command `poetry env info --path` in each project shows where the environment was installed to. 
-`poetry run pytest` +## VS code +TODO -### Write requirements.txt from Poetry lock file -`poetry export -f requirements.txt > requirements.txt` - - -# Run python files -- install `poetry` using command `pip install poetry` -- run the python file `main.py` using `poetry run python main.py` -- or `poetry shell` and then run `python main.py` - - -# Run Tests -Single file: -`poetry run pytest test/test_functions.py` -Single function: -`poetry run pytest test/test_functions.py::test_fuction_name` -Single function in class: -`poetry run pytest test/test_functions.py::class_name::test_fuction_name` - -Test all files in project: -`poetry run pytest` - -# Run and display code coverage -In pycharm: right click folder and `More Run/Debug` -> `Run pytest in '...' with coverage'` - -``` -poetry run pytest --cov=. --cov-report xml --cov-report html && poetry run coverage html +# Check dependencies +To avoid packages with large packages, we can use `pipdeptree` +```sh +poetry run pipdeptree > deps.txt ``` -then use `coverage gutters` extension in VScode - -or open the generated html file in folder `htmlcov` - -# Install and run all pre-commit hook scripts -```py +# Install and run pre-commit hook on all staged files +```sh poetry run pre-commit install -poetry run pre-commit run --all-files +poetry run pre-commit run --all-files --verbose --hook-stage push ``` This runs pylint, mypy, pytest tests, apply autoformatter yapf -# Autoformat all python files -`poetry run yapf ./**/*.py -i` +# Autoformat all files +`earthly +format` + +# Recommended websites and tools: +[Convert JSON API response to types](https://app.quicktype.io/#l=Python) +[Convert curl to python requests](https://curlconverter.com) diff --git a/alembic.ini b/alembic.ini deleted file mode 100644 index c3c7e2fc..00000000 --- a/alembic.ini +++ /dev/null @@ -1,100 +0,0 @@ -# A generic, single database configuration. - -[alembic] -# path to migration scripts -script_location = fastapi_server/migrations - -# template used to generate migration files -# file_template = %%(rev)s_%%(slug)s - -# sys.path path, will be prepended to sys.path if present. -# defaults to the current working directory. -prepend_sys_path = . - -# timezone to use when rendering the date within the migration file -# as well as the filename. -# If specified, requires the python-dateutil library that can be -# installed by adding `alembic[tz]` to the pip requirements -# string value is passed to dateutil.tz.gettz() -# leave blank for localtime -# timezone = - -# max length of characters to apply to the -# "slug" field -# truncate_slug_length = 40 - -# set to 'true' to run the environment during -# the 'revision' command, regardless of autogenerate -# revision_environment = false - -# set to 'true' to allow .pyc and .pyo files without -# a source .py file to be detected as revisions in the -# versions/ directory -# sourceless = false - -# version location specification; This defaults -# to migrations/versions. When using multiple version -# directories, initial revisions must be specified with --version-path. -# The path separator used here should be the separator specified by "version_path_separator" -# version_locations = %(here)s/bar:%(here)s/bat:migrations/versions - -# version path separator; As mentioned above, this is the character used to split -# version_locations. 
Valid values are: -# -# version_path_separator = : -# version_path_separator = ; -# version_path_separator = space -version_path_separator = os # default: use os.pathsep - -# the output encoding used when revision files -# are written from script.py.mako -# output_encoding = utf-8 - -sqlalchemy.url = sqlite:///fastapi_server/data/sqlmodel.db - - -[post_write_hooks] -# post_write_hooks defines scripts or Python functions that are run -# on newly generated revision scripts. See the documentation for further -# detail and examples - -# format using "black" - use the console_scripts runner, against the "black" entrypoint -# hooks = black -# black.type = console_scripts -# black.entrypoint = black -# black.options = -l 79 REVISION_SCRIPT_FILENAME - -# Logging configuration -[loggers] -keys = root,sqlalchemy,alembic - -[handlers] -keys = console - -[formatters] -keys = generic - -[logger_root] -level = WARN -handlers = console -qualname = - -[logger_sqlalchemy] -level = WARN -handlers = -qualname = sqlalchemy.engine - -[logger_alembic] -level = INFO -handlers = -qualname = alembic - -[handler_console] -class = StreamHandler -args = (sys.stderr,) -level = NOTSET -formatter = generic - -[formatter_generic] -format = %(levelname)-5.5s [%(name)s] %(message)s -datefmt = %H:%M:%S diff --git a/analysis_line_profiler.sh b/analysis_line_profiler.sh deleted file mode 100644 index cf868a5d..00000000 --- a/analysis_line_profiler.sh +++ /dev/null @@ -1,3 +0,0 @@ -poetry run kernprof -l main.py -poetry run python -m line_profiler main.py.lprof > line_profiler_result.txt -rm main.py.lprof diff --git a/analysis_scalene.sh b/analysis_scalene.sh deleted file mode 100644 index 0bf0e513..00000000 --- a/analysis_scalene.sh +++ /dev/null @@ -1,2 +0,0 @@ -poetry run scalene --html --outfile prof.html --reduced-profile python_examples/main.py -poetry run scalene --html --outfile prof_full.html python_examples/main.py diff --git a/ansible/100_simple_ping/ping.yml b/ansible/100_simple_ping/ping.yml new file mode 100644 index 00000000..d2f16979 --- /dev/null +++ b/ansible/100_simple_ping/ping.yml @@ -0,0 +1,9 @@ +# Execute with +# ansible-playbook ping.yml -i ../hosts +- name: Ping all + hosts: my_servers + gather_facts: false + + tasks: + - name: ping + ping: diff --git a/ansible/hosts b/ansible/hosts new file mode 100644 index 00000000..1ca2e091 --- /dev/null +++ b/ansible/hosts @@ -0,0 +1,7 @@ +my_servers: + hosts: + contabo: # Same as '~/.ssh/config' entry +my_pcs: + hosts: + burnylaptopa: + burnylaptopd: diff --git a/ansible/service_audiobookshelf/audiobookshelf.service b/ansible/service_audiobookshelf/audiobookshelf.service new file mode 100644 index 00000000..00bd35b2 --- /dev/null +++ b/ansible/service_audiobookshelf/audiobookshelf.service @@ -0,0 +1,28 @@ +# /etc/systemd/system/{{ secrets.AUDIOBOOKSHELF.USERNAME }}.service +[Service] +ExecStart=/usr/bin/docker run \ + --rm \ + --name=audiobookshelf \ + -e PUID={{ SYNCTHING_USER_ID.stdout }} \ + -e PGID={{ SYNCTHING_GROUP_ID.stdout }} \ + -v ./config:/config \ + -v ./metadata:/metadata \ + -v {{ secrets.AUDIOBOOKSHELF.MEDIA_FOLDER_AUDIOBOOKS }}:/audiobooks \ + -v {{ secrets.AUDIOBOOKSHELF.MEDIA_FOLDER_PODCASTS }}:/podcasts \ + -l traefik.enable=true \ + -l traefik.http.routers.{{ secrets.AUDIOBOOKSHELF.USERNAME }}.rule=Host(`{{ secrets.AUDIOBOOKSHELF.USERNAME }}.{{ secrets.MY_DOMAIN }}`) \ + -l traefik.http.services.{{ secrets.AUDIOBOOKSHELF.USERNAME }}.loadbalancer.server.port=80 \ + -l traefik.http.routers.{{ secrets.AUDIOBOOKSHELF.USERNAME }}.tls=true \ + -l 
traefik.http.routers.{{ secrets.AUDIOBOOKSHELF.USERNAME }}.tls.certresolver=production \ + -l traefik.http.routers.{{ secrets.AUDIOBOOKSHELF.USERNAME }}.middlewares=authelia@docker \ + --network {{ secrets.TRAFIK_NETWORK }} \ + ghcr.io/advplyr/audiobookshelf +Restart=always +RestartSec=20 +SyslogIdentifier={{ secrets.AUDIOBOOKSHELF.USERNAME }} +User={{ secrets.AUDIOBOOKSHELF.USERNAME }} +Group={{ secrets.AUDIOBOOKSHELF.USERNAME }} +WorkingDirectory=/home/{{ secrets.AUDIOBOOKSHELF.USERNAME }} + +[Install] +WantedBy=multi-user.target diff --git a/ansible/service_audiobookshelf/audiobookshelf_setup.yml b/ansible/service_audiobookshelf/audiobookshelf_setup.yml new file mode 100644 index 00000000..de33bdb9 --- /dev/null +++ b/ansible/service_audiobookshelf/audiobookshelf_setup.yml @@ -0,0 +1,81 @@ +# https://www.audiobookshelf.org/ +# https://www.audiobookshelf.org/docs#docker-install +# https://play.google.com/store/apps/details?id=com.audiobookshelf.app +# Find podcasts: +# https://www.listennotes.com/ +# https://castos.com/tools/find-podcast-rss-feed/ +# Execute with +# ansible-playbook audiobookshelf_setup.yml -i ../hosts -i /home/burny/syncthing/secrets/ansible_secrets/.ansible_secrets +- name: Create and start audiobookshelf service + hosts: my_servers + vars: + USERNAME: "{{ secrets.AUDIOBOOKSHELF.USERNAME }}" + tasks: + - name: Create group + ansible.builtin.group: + name: "{{ USERNAME }}" + state: present + + - name: Create user + ansible.builtin.user: + name: "{{ USERNAME }}" + groups: + - "{{ USERNAME }}" + - docker + shell: "{{ secrets.DEFAULT_SHELL }}" + + - name: Get user id + ansible.builtin.shell: + cmd: id {{ USERNAME }} -u + register: USER_ID + + - name: Get group id + ansible.builtin.shell: + cmd: id {{ USERNAME }} -g + register: GROUP_ID + + - name: Get syncthing user id + ansible.builtin.shell: + cmd: id syncthing -u + register: SYNCTHING_USER_ID + + - name: Get syncthing group id + ansible.builtin.shell: + cmd: id syncthing -g + register: SYNCTHING_GROUP_ID + + - name: Print user id + ansible.builtin.debug: + var: USER_ID.stdout + verbosity: 0 + + - name: Print group id + ansible.builtin.debug: + var: GROUP_ID.stdout + verbosity: 0 + + - name: Create systemd file + template: + src: audiobookshelf.service + dest: /etc/systemd/system/{{ USERNAME }}.service + owner: "{{ USER_ID.stdout }}" + group: "{{ GROUP_ID.stdout }}" + + - name: Stop audiobookshelf service + ansible.builtin.systemd_service: + name: "{{ USERNAME }}" + state: stopped + enabled: false + + - name: Remove audiobookshelf container + community.docker.docker_container: + name: audiobookshelf + state: absent + image: ghcr.io/advplyr/audiobookshelf + + - name: Start audiobookshelf service again + ansible.builtin.systemd_service: + name: "{{ USERNAME }}" + state: started + daemon_reload: true + enabled: true diff --git a/ansible/service_audiobookshelf/audiobookshelf_update.yml b/ansible/service_audiobookshelf/audiobookshelf_update.yml new file mode 100644 index 00000000..99d5ccd1 --- /dev/null +++ b/ansible/service_audiobookshelf/audiobookshelf_update.yml @@ -0,0 +1,37 @@ +# Execute with +# ansible-playbook audiobookshelf_update.yml -i ../hosts -i /home/burny/syncthing/secrets/ansible_secrets/.ansible_secrets +- name: Stop, remove image, and start audiobookshelf service + hosts: my_servers + vars: + USERNAME: '{{ secrets.AUDIOBOOKSHELF.USERNAME }}' + tasks: + - name: Stop audiobookshelf service + ansible.builtin.systemd_service: + name: '{{ USERNAME }}' + state: stopped + enabled: false + + - name: Stop 
audiobookshelf container + community.docker.docker_container: + name: audiobookshelf + state: stopped + image: ghcr.io/advplyr/audiobookshelf + + - name: Remove audiobookshelf container + community.docker.docker_container: + name: audiobookshelf + state: absent + image: ghcr.io/advplyr/audiobookshelf + + - name: Remove audiobookshelf image + community.docker.docker_image: + name: ghcr.io/advplyr/audiobookshelf + tag: latest + state: absent + + - name: Start audiobookshelf service again + ansible.builtin.systemd_service: + name: '{{ USERNAME }}' + state: started + daemon_reload: true + enabled: true diff --git a/ansible/service_authelia/authelia.service b/ansible/service_authelia/authelia.service new file mode 100644 index 00000000..c9c88a9d --- /dev/null +++ b/ansible/service_authelia/authelia.service @@ -0,0 +1,12 @@ +# /etc/systemd/system/{{ secrets.AUTHELIA.USERNAME }}.service +[Service] +ExecStart=/usr/bin/docker compose up +Restart=always +RestartSec=20 +SyslogIdentifier={{ secrets.AUTHELIA.USERNAME }} +User={{ secrets.AUTHELIA.USERNAME }} +Group={{ secrets.AUTHELIA.USERNAME }} +WorkingDirectory=/home/{{ secrets.AUTHELIA.USERNAME }} + +[Install] +WantedBy=multi-user.target diff --git a/ansible/service_authelia/authelia_setup.yml b/ansible/service_authelia/authelia_setup.yml new file mode 100644 index 00000000..e3edfff0 --- /dev/null +++ b/ansible/service_authelia/authelia_setup.yml @@ -0,0 +1,101 @@ +# https://www.authelia.com/integration/proxies/traefik/ +# Execute with +# ansible-playbook authelia_setup.yml -i ../hosts -i /home/burny/syncthing/secrets/ansible_secrets/.ansible_secrets +- name: Create and start authelia service + hosts: my_servers + vars: + USERNAME: '{{ secrets.AUTHELIA.USERNAME }}' + tasks: + - name: Create group + ansible.builtin.group: + name: '{{ USERNAME }}' + state: present + + - name: Create user + ansible.builtin.user: + name: '{{ USERNAME }}' + groups: + - '{{ USERNAME }}' + - docker + shell: '{{ secrets.DEFAULT_SHELL }}' + + - name: Get user id + ansible.builtin.shell: + cmd: id {{ USERNAME }} -u + register: USER_ID + + - name: Get group id + ansible.builtin.shell: + cmd: id {{ USERNAME }} -g + register: GROUP_ID + + - name: Print user id + ansible.builtin.debug: + var: USER_ID.stdout + verbosity: 0 + + - name: Print group id + ansible.builtin.debug: + var: GROUP_ID.stdout + verbosity: 0 + + - name: Create systemd file + template: + src: authelia.service + dest: /etc/systemd/system/{{ USERNAME }}.service + owner: '{{ USER_ID.stdout }}' + group: '{{ GROUP_ID.stdout }}' + + - name: Copy docker-compose.yml file + template: + src: docker-compose.yml + dest: /home/{{ USERNAME }}/docker-compose.yml + owner: '{{ USER_ID.stdout }}' + group: '{{ GROUP_ID.stdout }}' + + - name: Create directories + file: + path: /home/{{ USERNAME }}/authelia + recurse: true + state: directory + owner: '{{ USER_ID.stdout }}' + group: '{{ GROUP_ID.stdout }}' + + - name: Copy authelia config file + template: + src: /home/burny/syncthing/secrets/ansible_secrets/authelia/configuration.yml + dest: /home/{{ USERNAME }}/authelia/configuration.yml + owner: '{{ USER_ID.stdout }}' + group: '{{ GROUP_ID.stdout }}' + + - name: Copy users database file + template: + src: /home/burny/syncthing/secrets/ansible_secrets/authelia/users_database.yml + dest: /home/{{ USERNAME }}/authelia/users_database.yml + owner: '{{ USER_ID.stdout }}' + group: '{{ GROUP_ID.stdout }}' + + - name: Stop authelia service + ansible.builtin.systemd_service: + name: '{{ USERNAME }}' + state: stopped + enabled: false + + - 
name: Remove authelia container + community.docker.docker_container: + name: authelia + state: absent + image: authelia/authelia:4.37.5 + + - name: Remove authelia_redis container + community.docker.docker_container: + name: authelia_redis + state: absent + image: redis:alpine + + - name: Start authelia service again + ansible.builtin.systemd_service: + name: '{{ USERNAME }}' + state: started + daemon_reload: true + enabled: true diff --git a/ansible/service_authelia/docker-compose.yml b/ansible/service_authelia/docker-compose.yml new file mode 100644 index 00000000..3ae1f591 --- /dev/null +++ b/ansible/service_authelia/docker-compose.yml @@ -0,0 +1,44 @@ +version: "3.3" + +networks: + "{{secrets.TRAFIK_NETWORK}}": + external: true + +services: + authelia: + image: authelia/authelia:4.37.5 + container_name: authelia + volumes: + - ./authelia:/config + networks: + - "{{ secrets.TRAFIK_NETWORK }}" + labels: + - traefik.enable=true + - traefik.http.routers.authelia.rule=Host(`{{ secrets.AUTHELIA.USERNAME }}.{{ secrets.MY_DOMAIN }}`) + - traefik.http.routers.authelia.entrypoints=web,websecure + - traefik.http.routers.authelia.tls=true + - traefik.http.routers.authelia.tls.certresolver=production + - traefik.http.middlewares.authelia.forwardauth.address=http://authelia:9091/api/verify?rd=https%3A%2F%2F{{ secrets.AUTHELIA.USERNAME }}.{{ secrets.MY_DOMAIN }} + - traefik.http.middlewares.authelia.forwardauth.trustForwardHeader=true + - traefik.http.middlewares.authelia.forwardauth.authResponseHeaders=Remote-User,Remote-Groups,Remote-Name,Remote-Email + - traefik.http.middlewares.authelia-basic.forwardAuth.address=http://authelia:9091/api/verify?auth=basic + - traefik.http.middlewares.authelia-basic.forwardAuth.trustForwardHeader=true + - traefik.http.middlewares.authelia-basic.forwardAuth.authResponseHeaders=Remote-User,Remote-Groups,Remote-Name,Remote-Email + restart: unless-stopped + healthcheck: + ## In production the healthcheck section should be commented. 
+ disable: true + environment: + - TZ=Etc/UTC + + redis: + image: redis:alpine + container_name: authelia_redis + hostname: authelia_redis + networks: + - "{{ secrets.TRAFIK_NETWORK }}" + volumes: + - ./redis:/data + restart: unless-stopped + environment: + - TZ=Etc/UTC diff --git a/ansible/service_bookstack/bookstack.service b/ansible/service_bookstack/bookstack.service new file mode 100644 index 00000000..c7587fa8 --- /dev/null +++ b/ansible/service_bookstack/bookstack.service @@ -0,0 +1,12 @@ +# /etc/systemd/system/{{ secrets.BOOKSTACK.USERNAME }}.service +[Service] +ExecStart=/usr/bin/docker compose up +Restart=always +RestartSec=20 +SyslogIdentifier={{ secrets.BOOKSTACK.USERNAME }} +User={{ secrets.BOOKSTACK.USERNAME }} +Group={{ secrets.BOOKSTACK.USERNAME }} +WorkingDirectory=/home/{{ secrets.BOOKSTACK.USERNAME }} + +[Install] +WantedBy=multi-user.target diff --git a/ansible/service_bookstack/bookstack_setup.yml b/ansible/service_bookstack/bookstack_setup.yml new file mode 100644 index 00000000..082f3c50 --- /dev/null +++ b/ansible/service_bookstack/bookstack_setup.yml @@ -0,0 +1,82 @@ +# Execute with +# ansible-playbook bookstack_setup.yml -i ../hosts -i /home/burny/syncthing/secrets/ansible_secrets/.ansible_secrets +- name: Create and start bookstack service + hosts: my_servers + vars: + USERNAME: '{{ secrets.BOOKSTACK.USERNAME }}' + tasks: + - name: Create group + ansible.builtin.group: + name: '{{ USERNAME }}' + state: present + + - name: Create user + ansible.builtin.user: + name: '{{ USERNAME }}' + groups: + - '{{ USERNAME }}' + - docker + shell: '{{ secrets.DEFAULT_SHELL }}' + + - name: Get user id + ansible.builtin.shell: + cmd: id {{ USERNAME }} -u + register: USER_ID + + - name: Get group id + ansible.builtin.shell: + cmd: id {{ USERNAME }} -g + register: GROUP_ID + + - name: Print user id + ansible.builtin.debug: + var: USER_ID.stdout + verbosity: 0 + + - name: Print group id + ansible.builtin.debug: + var: GROUP_ID.stdout + verbosity: 0 + + - name: Create systemd file + template: + src: bookstack.service + dest: /etc/systemd/system/{{ USERNAME }}.service + owner: '{{ USER_ID.stdout }}' + group: '{{ GROUP_ID.stdout }}' + + - name: Copy docker-compose.yml file + template: + src: docker-compose.yml + dest: /home/{{ USERNAME }}/docker-compose.yml + owner: '{{ USER_ID.stdout }}' + group: '{{ GROUP_ID.stdout }}' + + - name: Stop bookstack service + ansible.builtin.systemd_service: + name: '{{ USERNAME }}' + state: stopped + enabled: false + + - name: Remove bookstack container + community.docker.docker_container: + name: bookstack + state: absent + image: lscr.io/linuxserver/bookstack:latest + + - name: Remove bookstack container + community.docker.docker_container: + name: bookstack_db + state: absent + image: lscr.io/linuxserver/mariadb:latest + + - name: Start bookstack service again + ansible.builtin.systemd_service: + name: '{{ USERNAME }}' + state: started + daemon_reload: true + enabled: true + + - name: Print reminder to make admin user + ansible.builtin.debug: + msg: Initial login are 'admin@admin.com' and password 'password' diff --git a/ansible/service_bookstack/docker-compose.yml b/ansible/service_bookstack/docker-compose.yml new file mode 100644 index 00000000..e92ea79f --- /dev/null +++ b/ansible/service_bookstack/docker-compose.yml @@ -0,0 +1,50 @@ +version: "2" + +networks: + "{{secrets.TRAFIK_NETWORK}}": + external: true + +services: + bookstack: + image: lscr.io/linuxserver/bookstack:latest + container_name: bookstack + networks: + - "{{ secrets.TRAFIK_NETWORK 
}}" + environment: + - PUID=1000 + - PGID=1000 + - APP_URL=https://{{ secrets.BOOKSTACK.USERNAME }}.{{ secrets.MY_DOMAIN }} + - DB_HOST=bookstack_db + - DB_PORT=3306 + - DB_USER={{ secrets.BOOKSTACK.DB_USER }} + - DB_PASS={{ secrets.BOOKSTACK.DB_PASS }} + - DB_DATABASE={{ secrets.BOOKSTACK.DB_DATABASE }} + volumes: + - ./bookstack_app_data:/config + restart: unless-stopped + depends_on: + - bookstack_db + labels: + - traefik.enable=true + - traefik.http.routers.{{ secrets.BOOKSTACK.USERNAME }}.rule=Host(`{{ secrets.BOOKSTACK.USERNAME }}.{{ secrets.MY_DOMAIN }}`) + - traefik.http.services.{{ secrets.BOOKSTACK.USERNAME }}.loadbalancer.server.port=80 + - traefik.http.routers.{{ secrets.BOOKSTACK.USERNAME }}.tls=true + - traefik.http.routers.{{ secrets.BOOKSTACK.USERNAME }}.tls.certresolver=production + - traefik.http.routers.{{ secrets.BOOKSTACK.USERNAME }}.middlewares=authelia@docker + + bookstack_db: + image: lscr.io/linuxserver/mariadb:latest + container_name: bookstack_db + networks: + - "{{ secrets.TRAFIK_NETWORK }}" + environment: + - PUID=1000 + - PGID=1000 + - MYSQL_ROOT_PASSWORD={{ secrets.BOOKSTACK.DB_PASS }} + - TZ=Europe/London + - MYSQL_DATABASE={{ secrets.BOOKSTACK.DB_DATABASE }} + - MYSQL_USER={{ secrets.BOOKSTACK.DB_USER }} + - MYSQL_PASSWORD={{ secrets.BOOKSTACK.DB_PASS }} + volumes: + - ./bookstack_db_data:/config + restart: unless-stopped diff --git a/ansible/service_cocalc/cocalc.service b/ansible/service_cocalc/cocalc.service new file mode 100644 index 00000000..d7f45190 --- /dev/null +++ b/ansible/service_cocalc/cocalc.service @@ -0,0 +1,24 @@ +# /etc/systemd/system/{{ secrets.COCALC.USERNAME }}.service +[Service] +ExecStart=/usr/bin/docker run \ + --rm \ + --name=cocalc \ + --env NOSSL=true \ + -v ./cocalc:/projects \ + -l traefik.enable=true \ + -l traefik.http.routers.{{ secrets.COCALC.USERNAME }}.rule=Host(`{{ secrets.COCALC.USERNAME }}.{{ secrets.MY_DOMAIN }}`) \ + -l traefik.http.services.{{ secrets.COCALC.USERNAME }}.loadbalancer.server.port=80 \ + -l traefik.http.routers.{{ secrets.COCALC.USERNAME }}.tls=true \ + -l traefik.http.routers.{{ secrets.COCALC.USERNAME }}.tls.certresolver=production \ + -l traefik.http.routers.{{ secrets.COCALC.USERNAME }}.middlewares=authelia@docker \ + --network {{ secrets.TRAFIK_NETWORK }} \ + sagemathinc/cocalc-v2:latest +Restart=always +RestartSec=20 +SyslogIdentifier={{ secrets.COCALC.USERNAME }} +User={{ secrets.COCALC.USERNAME }} +Group={{ secrets.COCALC.USERNAME }} +WorkingDirectory=/home/{{ secrets.COCALC.USERNAME }} + +[Install] +WantedBy=multi-user.target diff --git a/ansible/service_cocalc/cocalc_setup.yml b/ansible/service_cocalc/cocalc_setup.yml new file mode 100644 index 00000000..7bfc6819 --- /dev/null +++ b/ansible/service_cocalc/cocalc_setup.yml @@ -0,0 +1,76 @@ +# https://doc.cocalc.com/index.html +# https://github.com/sagemathinc/cocalc-docker +# Execute with +# ansible-playbook cocalc_setup.yml -i ../hosts -i /home/burny/syncthing/secrets/ansible_secrets/.ansible_secrets +- name: Stop, remove image, and start cocalc service + hosts: my_servers + vars: + USERNAME: '{{ secrets.COCALC.USERNAME }}' + tasks: + - name: Create group + ansible.builtin.group: + name: '{{ USERNAME }}' + state: present + + - name: Create user + ansible.builtin.user: + name: '{{ USERNAME }}' + groups: + - '{{ USERNAME }}' + - docker + shell: '{{ secrets.DEFAULT_SHELL }}' + + - name: Get user id + ansible.builtin.shell: + cmd: id {{ USERNAME }} -u + register: USER_ID + + - name: Get group id + ansible.builtin.shell: + cmd: id {{ USERNAME }} -g 
+ register: GROUP_ID + + - name: Print user id + ansible.builtin.debug: + var: USER_ID.stdout + verbosity: 0 + + - name: Print group id + ansible.builtin.debug: + var: GROUP_ID.stdout + verbosity: 0 + + - name: Create systemd file + template: + src: cocalc.service + dest: /etc/systemd/system/{{ USERNAME }}.service + owner: '{{ USER_ID.stdout }}' + group: '{{ GROUP_ID.stdout }}' + + - name: Stop cocalc service + ansible.builtin.systemd_service: + name: '{{ USERNAME }}' + state: stopped + enabled: false + + - name: Remove cocalc container + community.docker.docker_container: + name: cocalc + state: absent + image: sagemathinc/cocalc-v2:latest + + - name: Start cocalc service again + ansible.builtin.systemd_service: + name: '{{ USERNAME }}' + state: started + daemon_reload: true + enabled: true + + - name: Print reminder to disallow signup + ansible.builtin.debug: + msg: Remember go to the admin page and create registration token to disallow signup for everyone + + - name: Print reminder to make admin user + ansible.builtin.debug: + msg: Create the first user by signing up, then run 'docker exec -it cocalc bash' and '/cocalc/src/scripts/make-user-admin ' + diff --git a/ansible/service_cocalc/cocalc_update.yml b/ansible/service_cocalc/cocalc_update.yml new file mode 100644 index 00000000..abf825c4 --- /dev/null +++ b/ansible/service_cocalc/cocalc_update.yml @@ -0,0 +1,44 @@ +# Execute with +# ansible-playbook cocalc_update.yml -i ../hosts -i /home/burny/syncthing/secrets/ansible_secrets/.ansible_secrets +- name: Create and start cocalc service + hosts: my_servers + vars: + USERNAME: '{{ secrets.COCALC.USERNAME }}' + tasks: + - name: Stop cocalc service + ansible.builtin.systemd_service: + name: '{{ USERNAME }}' + state: stopped + enabled: false + + - name: Stop cocalc container + community.docker.docker_container: + name: cocalc + state: stopped + image: sagemathinc/cocalc-v2:latest + + - name: Remove cocalc container + community.docker.docker_container: + name: cocalc + state: absent + image: sagemathinc/cocalc-v2:latest + + - name: Create systemd file + template: + src: cocalc.service + dest: /etc/systemd/system/{{ USERNAME }}.service + owner: '{{ USER_ID.stdout }}' + group: '{{ GROUP_ID.stdout }}' + + - name: Remove cocalc image + community.docker.docker_image: + name: sagemathinc/cocalc-v2 + tag: latest + state: absent + + - name: Start cocalc service again + ansible.builtin.systemd_service: + name: '{{ USERNAME }}' + state: started + daemon_reload: true + enabled: true diff --git a/ansible/service_factorio/factorio_setup.yml b/ansible/service_factorio/factorio_setup.yml new file mode 100644 index 00000000..d92d6dac --- /dev/null +++ b/ansible/service_factorio/factorio_setup.yml @@ -0,0 +1,102 @@ +# https://hub.docker.com/r/factoriotools/factorio/ +# Execute with +# ansible-playbook factorio_setup.yml -i ../hosts -i /home/burny/syncthing/secrets/ansible_secrets/.ansible_secrets +- name: Create and start factorio service + hosts: my_servers + vars: + USERNAME: factorio + tasks: + - name: Create group 'factorio' + ansible.builtin.group: + name: '{{ USERNAME }}' + state: present + + - name: Create 'factorio' user + ansible.builtin.user: + name: '{{ USERNAME }}' + groups: + - '{{ USERNAME }}' + - docker + shell: '{{ secrets.DEFAULT_SHELL }}' + + - name: Get user id + ansible.builtin.shell: + cmd: id {{ USERNAME }} -u + register: USER_ID + + - name: Get group id + ansible.builtin.shell: + cmd: id {{ USERNAME }} -g + register: GROUP_ID + + - name: Print user id + ansible.builtin.debug: + var: 
USER_ID.stdout + verbosity: 0 + + - name: Print group id + ansible.builtin.debug: + var: GROUP_ID.stdout + verbosity: 0 + + - name: Create systemd file + template: + src: files/factorio.service + dest: /etc/systemd/system/{{ USERNAME }}.service + owner: '{{ USER_ID.stdout }}' + group: '{{ GROUP_ID.stdout }}' + + - name: Create directories + file: + path: '{{ item }}' + recurse: true + state: directory + owner: '{{ USER_ID.stdout }}' + group: '{{ GROUP_ID.stdout }}' + loop: + - /home/{{ USERNAME }}/data/config + - /home/{{ USERNAME }}/data/mods + + - name: Copy admin list file + template: + src: files/server-adminlist.json + dest: /home/{{ USERNAME }}/data/config/server-adminlist.json + owner: '{{ USER_ID.stdout }}' + group: '{{ GROUP_ID.stdout }}' + + - name: Copy server settings file + template: + src: files/server-settings.json + dest: /home/{{ USERNAME }}/data/config/server-settings.json + owner: '{{ USER_ID.stdout }}' + group: '{{ GROUP_ID.stdout }}' + + - name: Copy mod list settings + template: + src: files/mod-list.json + dest: /home/{{ USERNAME }}/data/mods/mod-list.json + owner: '{{ USER_ID.stdout }}' + group: '{{ GROUP_ID.stdout }}' + + - name: Stop factorio service + ansible.builtin.systemd_service: + name: factorio + state: stopped + enabled: false + + - name: Remove factorio container + community.docker.docker_container: + name: factorio + state: absent + image: factoriotools/factorio + + - name: Start factorio service again + ansible.builtin.systemd_service: + name: factorio + state: started + daemon_reload: true + enabled: true + + - name: Print reminder + ansible.builtin.debug: + msg: Remember to upload an existing savegame manually, aswell as putting mods as .zip files (or mod-list.json) in the '/home/{{ USERNAME }}/data/mods' folder diff --git a/ansible/service_factorio/factorio_stop.yml b/ansible/service_factorio/factorio_stop.yml new file mode 100644 index 00000000..37408e1f --- /dev/null +++ b/ansible/service_factorio/factorio_stop.yml @@ -0,0 +1,37 @@ +# Stop factorio service, remove container, remove image +# Execute with +# ansible-playbook factorio_stop.yml -i ../hosts +- name: Stop factorio + hosts: my_servers + vars: + USERNAME: factorio + tasks: + - name: Stop factorio server + ansible.builtin.systemd_service: + name: '{{ USERNAME }}' + state: stopped + enabled: false + + - name: Stop factorio container + community.docker.docker_container: + name: '{{ USERNAME }}' + state: stopped + image: factoriotools/factorio + + - name: Remove factorio container + community.docker.docker_container: + name: '{{ USERNAME }}' + state: absent + image: factoriotools/factorio + + - name: Remove factorio image + community.docker.docker_image: + name: factoriotools/factorio + tag: latest + state: absent + + # - name: Remove the user 'factorio' + # ansible.builtin.user: + # name: {{ USERNAME }} + # state: absent + # remove: true diff --git a/ansible/service_factorio/factorio_update.yml b/ansible/service_factorio/factorio_update.yml new file mode 100644 index 00000000..eda672cd --- /dev/null +++ b/ansible/service_factorio/factorio_update.yml @@ -0,0 +1,38 @@ +# Stop factorio service, remove container, remove image, start service +# Execute with +# ansible-playbook factorio_update.yml -i ../hosts +- name: Update factorio + hosts: my_servers + vars: + USERNAME: factorio + tasks: + - name: Stop factorio service + ansible.builtin.systemd_service: + name: '{{ USERNAME }}' + state: stopped + enabled: false + + - name: Stop factorio container + community.docker.docker_container: + name: '{{ 
USERNAME }}' + state: stopped + image: factoriotools/factorio + + - name: Remove factorio container + community.docker.docker_container: + name: '{{ USERNAME }}' + state: absent + image: factoriotools/factorio + + - name: Remove factorio image + community.docker.docker_image: + name: factoriotools/factorio + tag: latest + state: absent + + - name: Start factorio service again + ansible.builtin.systemd_service: + name: '{{ USERNAME }}' + state: started + enabled: true + diff --git a/ansible/service_factorio/files/factorio.service b/ansible/service_factorio/files/factorio.service new file mode 100644 index 00000000..96108e56 --- /dev/null +++ b/ansible/service_factorio/files/factorio.service @@ -0,0 +1,26 @@ +# /etc/systemd/system/{{ USERNAME }}.service +[Service] +ExecStart=/usr/bin/docker run \ + --rm \ + --name factorio \ + -e PUID={{ USER_ID.stdout }} \ + -e PGID={{ GROUP_ID.stdout }} \ + -p 34197:34197/udp \ + -p 27015:27015/tcp \ + -v ./data:/factorio \ + -e GENERATE_NEW_SAVE=true \ + -e SAVE_NAME={{ secrets.FACTORIO.SAVE_NAME }} \ + -e UPDATE_MODS_ON_START=true \ + -l traefik.enable=true \ + -l traefik.http.routers.{{ USERNAME }}.rule=Host(`{{ USERNAME }}.{{ secrets.MY_DOMAIN }}`) \ + -l traefik.http.services.{{ USERNAME }}.loadbalancer.server.port=27015 \ + -l traefik.http.routers.{{ USERNAME }}.tls=true \ + -l traefik.http.routers.{{ USERNAME }}.tls.certresolver=production \ + --network {{ secrets.TRAFIK_NETWORK }} \ + factoriotools/factorio +Restart=always +RestartSec=20 +SyslogIdentifier={{ USERNAME }} +User={{ USERNAME }} +Group={{ USERNAME }} +WorkingDirectory=/home/{{ USERNAME }} diff --git a/ansible/service_factorio/files/mod-list.json b/ansible/service_factorio/files/mod-list.json new file mode 100644 index 00000000..dc90f6c4 --- /dev/null +++ b/ansible/service_factorio/files/mod-list.json @@ -0,0 +1,36 @@ +{ + "mods": + [ + + { + "name": "base", + "enabled": true + }, + + { + "name": "auto-research", + "enabled": true + }, + + { + "name": "AutoDeconstruct", + "enabled": true + }, + + { + "name": "Bottleneck", + "enabled": true + }, + + { + "name": "far-reach", + "enabled": true + }, + + { + "name": "Squeak Through", + "enabled": true + } + ] + } + \ No newline at end of file diff --git a/ansible/service_factorio/files/server-adminlist.json b/ansible/service_factorio/files/server-adminlist.json new file mode 100644 index 00000000..9fa07f87 --- /dev/null +++ b/ansible/service_factorio/files/server-adminlist.json @@ -0,0 +1,3 @@ +[ +"{{ secrets.FACTORIO.NICKNAME }}" +] diff --git a/ansible/service_factorio/files/server-settings.json b/ansible/service_factorio/files/server-settings.json new file mode 100644 index 00000000..167c7774 --- /dev/null +++ b/ansible/service_factorio/files/server-settings.json @@ -0,0 +1,72 @@ +{ + "name": "Name of the game as it will appear in the game listing", + "description": "Description of the game that will appear in the listing", + "tags": ["game", "tags"], + + "_comment_max_players": "Maximum number of players allowed, admins can join even a full server. 0 means unlimited.", + "max_players": 0, + + "_comment_visibility": ["public: Game will be published on the official Factorio matching server", + "lan: Game will be broadcast on LAN"], + "visibility": + { + "public": true, + "lan": true + }, + + "_comment_credentials": "Your factorio.com login credentials. Required for games with visibility public", + "username": "{{ secrets.FACTORIO.NICKNAME }}", + "password": "", + + "_comment_token": "Authentication token. 
May be used instead of 'password' above.", + "token": "{{ secrets.FACTORIO.TOKEN }}", + + "game_password": "{{ secrets.FACTORIO.GAME_PASSWORD }}", + + "_comment_require_user_verification": "When set to true, the server will only allow clients that have a valid Factorio.com account", + "require_user_verification": true, + + "_comment_max_upload_in_kilobytes_per_second" : "optional, default value is 0. 0 means unlimited.", + "max_upload_in_kilobytes_per_second": 0, + + "_comment_max_upload_slots" : "optional, default value is 5. 0 means unlimited.", + "max_upload_slots": 5, + + "_comment_minimum_latency_in_ticks": "optional one tick is 16ms in default speed, default value is 0. 0 means no minimum.", + "minimum_latency_in_ticks": 0, + + "_comment_max_heartbeats_per_second": "Network tick rate. Maximum rate game updates packets are sent at before bundling them together. Minimum value is 6, maximum value is 240.", + "max_heartbeats_per_second": 60, + + "_comment_ignore_player_limit_for_returning_players": "Players that played on this map already can join even when the max player limit was reached.", + "ignore_player_limit_for_returning_players": false, + + "_comment_allow_commands": "possible values are, true, false and admins-only", + "allow_commands": "admins-only", + + "_comment_autosave_interval": "Autosave interval in minutes", + "autosave_interval": 10, + + "_comment_autosave_slots": "server autosave slots, it is cycled through when the server autosaves.", + "autosave_slots": 20, + + "_comment_afk_autokick_interval": "How many minutes until someone is kicked when doing nothing, 0 for never.", + "afk_autokick_interval": 0, + + "_comment_auto_pause": "Whether should the server be paused when no players are present.", + "auto_pause": true, + + "only_admins_can_pause_the_game": true, + + "_comment_autosave_only_on_server": "Whether autosaves should be saved only on server or also on all connected clients. Default is true.", + "autosave_only_on_server": true, + + "_comment_non_blocking_saving": "Highly experimental feature, enable only at your own risk of losing your saves. On UNIX systems, server will fork itself to create an autosave. Autosaving on connected Windows clients will be disabled regardless of autosave_only_on_server option.", + "non_blocking_saving": false, + + "_comment_segment_sizes": "Long network messages are split into segments that are sent over multiple ticks. Their size depends on the number of peers currently connected. Increasing the segment size will increase upload bandwidth requirement for the server and download bandwidth requirement for clients. This setting only affects server outbound messages. 
Changing these settings can have a negative impact on connection stability for some clients.", + "minimum_segment_size": 25, + "minimum_segment_size_peer_count": 20, + "maximum_segment_size": 100, + "maximum_segment_size_peer_count": 10 +} diff --git a/ansible/service_jellyfin/jellyfin.service b/ansible/service_jellyfin/jellyfin.service new file mode 100644 index 00000000..ad2af891 --- /dev/null +++ b/ansible/service_jellyfin/jellyfin.service @@ -0,0 +1,26 @@ +# /etc/systemd/system/{{ secrets.JELLYFIN.USERNAME }}.service +#-e PUID={{ USER_ID.stdout }} \ +#-e PGID={{ GROUP_ID.stdout }} \ +[Service] +ExecStart=/usr/bin/docker run \ + --rm \ + --name jellyfin \ + --volume ./config:/config \ + --volume {{ secrets.JELLYFIN.MEDIA_FOLDER }}:/media \ + -l traefik.enable=true \ + -l traefik.http.routers.{{ secrets.JELLYFIN.USERNAME }}.rule=Host(`{{ secrets.JELLYFIN.USERNAME }}.{{ secrets.MY_DOMAIN }}`) \ + -l traefik.http.services.{{ secrets.JELLYFIN.USERNAME }}.loadbalancer.server.port=8096 \ + -l traefik.http.routers.{{ secrets.JELLYFIN.USERNAME }}.tls=true \ + -l traefik.http.routers.{{ secrets.JELLYFIN.USERNAME }}.tls.certresolver=production \ + -l traefik.http.routers.{{ secrets.JELLYFIN.USERNAME }}.middlewares=authelia@docker \ + --network {{ secrets.TRAFIK_NETWORK }} \ + lscr.io/linuxserver/jellyfin:latest +Restart=always +RestartSec=20 +SyslogIdentifier={{ secrets.JELLYFIN.USERNAME }} +User={{ secrets.JELLYFIN.USERNAME }} +Group={{ secrets.JELLYFIN.USERNAME }} +WorkingDirectory=/home/{{ secrets.JELLYFIN.USERNAME }} + +[Install] +WantedBy=multi-user.target diff --git a/ansible/service_jellyfin/jellyfin_setup.yml b/ansible/service_jellyfin/jellyfin_setup.yml new file mode 100644 index 00000000..90a33082 --- /dev/null +++ b/ansible/service_jellyfin/jellyfin_setup.yml @@ -0,0 +1,72 @@ +# https://jellyfin.org/ +# https://jellyfin.org/docs/general/installation/container/ +# https://play.google.com/store/apps/details?id=org.jellyfin.mobile&hl=de +# Execute with +# ansible-playbook jellyfin_setup.yml -i ../hosts -i /home/burny/syncthing/secrets/ansible_secrets/.ansible_secrets +- name: Create and start jellyfin service + hosts: my_servers + vars: + USERNAME: '{{ secrets.JELLYFIN.USERNAME }}' + tasks: + - name: Create group + ansible.builtin.group: + name: '{{ USERNAME }}' + state: present + + - name: Create user + ansible.builtin.user: + name: '{{ USERNAME }}' + groups: + - '{{ USERNAME }}' + - docker + shell: '{{ secrets.DEFAULT_SHELL }}' + + - name: Get user id + ansible.builtin.shell: + cmd: id {{ USERNAME }} -u + register: USER_ID + + - name: Get group id + ansible.builtin.shell: + cmd: id {{ USERNAME }} -g + register: GROUP_ID + + - name: Print user id + ansible.builtin.debug: + var: USER_ID.stdout + verbosity: 0 + + - name: Print group id + ansible.builtin.debug: + var: GROUP_ID.stdout + verbosity: 0 + + - name: Create systemd file + template: + src: jellyfin.service + dest: /etc/systemd/system/{{ USERNAME }}.service + owner: '{{ USER_ID.stdout }}' + group: '{{ GROUP_ID.stdout }}' + + - name: Stop jellyfin service + ansible.builtin.systemd_service: + name: '{{ USERNAME }}' + state: stopped + enabled: false + + - name: Remove jellyfin container + community.docker.docker_container: + name: jellyfin + state: absent + image: lscr.io/linuxserver/jellyfin:latest + + - name: Start jellyfin service again + ansible.builtin.systemd_service: + name: '{{ USERNAME }}' + state: started + daemon_reload: true + enabled: true + + - name: Print reminder + ansible.builtin.debug: + msg: Remember to create 
libraries and users on first time setup diff --git a/ansible/service_jellyfin/jellyfin_update.yml b/ansible/service_jellyfin/jellyfin_update.yml new file mode 100644 index 00000000..58c116ed --- /dev/null +++ b/ansible/service_jellyfin/jellyfin_update.yml @@ -0,0 +1,36 @@ +# Execute with +# ansible-playbook jellyfin_update.yml -i ../hosts -i /home/burny/syncthing/secrets/ansible_secrets/.ansible_secrets +- name: Stop, remove image, and start jellyfin service + hosts: my_servers + vars: + USERNAME: '{{ secrets.JELLYFIN.USERNAME }}' + tasks: + - name: Stop jellyfin service + ansible.builtin.systemd_service: + name: '{{ USERNAME }}' + state: stopped + enabled: false + + - name: Stop jellyfin container + community.docker.docker_container: + name: jellyfin + state: stopped + image: lscr.io/linuxserver/jellyfin:latest + + - name: Remove jellyfin container + community.docker.docker_container: + name: jellyfin + state: absent + image: lscr.io/linuxserver/jellyfin:latest + + - name: Remove jellyfin image + community.docker.docker_image: + name: lscr.io/linuxserver/jellyfin + tag: latest + state: absent + + - name: Start jellyfin service again + ansible.builtin.systemd_service: + name: '{{ USERNAME }}' + state: started + enabled: true diff --git a/ansible/service_navidrome/navidrome.service b/ansible/service_navidrome/navidrome.service new file mode 100644 index 00000000..ddb0c9fe --- /dev/null +++ b/ansible/service_navidrome/navidrome.service @@ -0,0 +1,26 @@ +# /etc/systemd/system/{{ secrets.NAVIDROME.USERNAME }}.service +[Service] +ExecStart=/usr/bin/docker run \ + --rm \ + --name=navidrome \ + -e PUID={{ USER_ID.stdout }} \ + -e PGID={{ GROUP_ID.stdout }} \ + -v {{ secrets.NAVIDROME.MEDIA_FOLDER }}:/music:ro \ + -v ./data:/data \ + -l traefik.enable=true \ + -l traefik.http.routers.{{ secrets.NAVIDROME.USERNAME }}.rule=Host(`{{ secrets.NAVIDROME.USERNAME }}.{{ secrets.MY_DOMAIN }}`) \ + -l traefik.http.services.{{ secrets.NAVIDROME.USERNAME }}.loadbalancer.server.port=4533 \ + -l traefik.http.routers.{{ secrets.NAVIDROME.USERNAME }}.tls=true \ + -l traefik.http.routers.{{ secrets.NAVIDROME.USERNAME }}.tls.certresolver=production \ + -l traefik.http.routers.{{ secrets.NAVIDROME.USERNAME }}.middlewares=authelia@docker \ + --network {{ secrets.TRAFIK_NETWORK }} \ + deluan/navidrome:latest +Restart=always +RestartSec=20 +SyslogIdentifier={{ secrets.NAVIDROME.USERNAME }} +User={{ secrets.NAVIDROME.USERNAME }} +Group={{ secrets.NAVIDROME.USERNAME }} +WorkingDirectory=/home/{{ secrets.NAVIDROME.USERNAME }} + +[Install] +WantedBy=multi-user.target diff --git a/ansible/service_navidrome/navidrome_setup.yml b/ansible/service_navidrome/navidrome_setup.yml new file mode 100644 index 00000000..d70af566 --- /dev/null +++ b/ansible/service_navidrome/navidrome_setup.yml @@ -0,0 +1,68 @@ +# https://github.com/navidrome/navidrome +# https://www.navidrome.org/docs/installation/docker/ +# https://www.navidrome.org/docs/overview/#apps +# Execute with +# ansible-playbook navidrome_setup.yml -i ../hosts -i /home/burny/syncthing/secrets/ansible_secrets/.ansible_secrets +- name: Create and start navidrome service + hosts: my_servers + vars: + USERNAME: '{{ secrets.NAVIDROME.USERNAME }}' + tasks: + - name: Create group + ansible.builtin.group: + name: '{{ USERNAME }}' + state: present + + - name: Create user + ansible.builtin.user: + name: '{{ USERNAME }}' + groups: + - '{{ USERNAME }}' + - docker + shell: '{{ secrets.DEFAULT_SHELL }}' + + - name: Get user id + ansible.builtin.shell: + cmd: id {{ USERNAME }} -u + 
register: USER_ID + + - name: Get group id + ansible.builtin.shell: + cmd: id {{ USERNAME }} -g + register: GROUP_ID + + - name: Print user id + ansible.builtin.debug: + var: USER_ID.stdout + verbosity: 0 + + - name: Print group id + ansible.builtin.debug: + var: GROUP_ID.stdout + verbosity: 0 + + - name: Create systemd file + template: + src: navidrome.service + dest: /etc/systemd/system/{{ USERNAME }}.service + owner: '{{ USER_ID.stdout }}' + group: '{{ GROUP_ID.stdout }}' + + - name: Stop navidrome service + ansible.builtin.systemd_service: + name: '{{ USERNAME }}' + state: stopped + enabled: false + + - name: Remove navidrome container + community.docker.docker_container: + name: navidrome + state: absent + image: deluan/navidrome:latest + + - name: Start navidrome service again + ansible.builtin.systemd_service: + name: '{{ USERNAME }}' + state: started + daemon_reload: true + enabled: true diff --git a/ansible/service_navidrome/navidrome_update.yml b/ansible/service_navidrome/navidrome_update.yml new file mode 100644 index 00000000..9a9a9ce7 --- /dev/null +++ b/ansible/service_navidrome/navidrome_update.yml @@ -0,0 +1,38 @@ +# Execute with +# ansible-playbook navidrome_update.yml -i ../hosts -i /home/burny/syncthing/secrets/ansible_secrets/.ansible_secrets +- name: Stop, remove image, and start navidrome service + hosts: my_servers + vars: + USERNAME: '{{ secrets.NAVIDROME.USERNAME }}' + tasks: + - name: Stop navidrome service + ansible.builtin.systemd_service: + name: '{{ USERNAME }}' + state: stopped + enabled: false + + - name: Stop navidrome container + community.docker.docker_container: + name: navidrome + state: stopped + image: deluan/navidrome:latest + + - name: Remove navidrome container + community.docker.docker_container: + name: navidrome + state: absent + image: deluan/navidrome:latest + + - name: Remove navidrome image + community.docker.docker_image: + name: deluan/navidrome + tag: latest + state: absent + + - name: Start navidrome service again + ansible.builtin.systemd_service: + name: '{{ USERNAME }}' + state: started + daemon_reload: true + enabled: true + diff --git a/ansible/service_owncloud/docker-compose.yml b/ansible/service_owncloud/docker-compose.yml new file mode 100644 index 00000000..60b8b1cb --- /dev/null +++ b/ansible/service_owncloud/docker-compose.yml @@ -0,0 +1,66 @@ +version: '3' + +networks: + '{{secrets.TRAFIK_NETWORK}}': + external: true + +services: + owncloud: + image: owncloud/server:latest + container_name: owncloud_server + hostname: owncloud_server + networks: + - '{{ secrets.TRAFIK_NETWORK }}' + restart: always + labels: + - traefik.enable=true + - traefik.http.routers.{{ secrets.OWNCLOUD.USERNAME }}.rule=Host(`{{ secrets.OWNCLOUD.USERNAME }}.{{ secrets.MY_DOMAIN }}`) + - traefik.http.services.{{ secrets.OWNCLOUD.USERNAME }}.loadbalancer.server.port=8080 + - traefik.http.routers.{{ secrets.OWNCLOUD.USERNAME }}.tls=true + - traefik.http.routers.{{ secrets.OWNCLOUD.USERNAME }}.tls.certresolver=production + - traefik.http.routers.{{ secrets.OWNCLOUD.USERNAME }}.middlewares=authelia@docker + depends_on: + - mariadb + environment: + - PUID="{{ USER_ID.stdout }}" + - PGID="{{ GROUP_ID.stdout }}" + - OWNCLOUD_DOMAIN={{ secrets.OWNCLOUD.USERNAME }}.{{ secrets.MY_DOMAIN }} + - OWNCLOUD_TRUSTED_DOMAINS="localhost,{{ secrets.OWNCLOUD.USERNAME }}.{{ secrets.MY_DOMAIN }} + - OWNCLOUD_DB_TYPE=mysql + - OWNCLOUD_DB_NAME={{ secrets.OWNCLOUD.MYSQL_DATABASE }} + - OWNCLOUD_DB_USERNAME={{ secrets.OWNCLOUD.MYSQL_USER }} + - OWNCLOUD_DB_PASSWORD={{ 
secrets.OWNCLOUD.MYSQL_PASSWORD }} + - OWNCLOUD_DB_HOST=owncloud_mariadb + - OWNCLOUD_ADMIN_USERNAME={{ secrets.OWNCLOUD.ADMIN_USERNAME }} + - OWNCLOUD_ADMIN_PASSWORD={{ secrets.OWNCLOUD.ADMIN_PASSWORD }} + - OWNCLOUD_MYSQL_UTF8MB4=true + - OWNCLOUD_REDIS_ENABLED=false + - OWNCLOUD_REDIS_HOST=owncloud_redis + healthcheck: + test: [CMD, /usr/bin/healthcheck] + interval: 30s + timeout: 10s + retries: 5 + volumes: + - ./data:/mnt/data + + mariadb: + image: mariadb:10.11 # minimum required ownCloud version is 10.9 + container_name: owncloud_mariadb + hostname: owncloud_mariadb + restart: always + networks: + - '{{ secrets.TRAFIK_NETWORK }}' + environment: + - MYSQL_ROOT_PASSWORD={{ secrets.OWNCLOUD.MYSQL_ROOT_PASSWORD }} + - MYSQL_USER={{ secrets.OWNCLOUD.MYSQL_USER }} + - MYSQL_PASSWORD={{ secrets.OWNCLOUD.MYSQL_PASSWORD }} + - MYSQL_DATABASE={{ secrets.OWNCLOUD.MYSQL_DATABASE }} + command: [--max-allowed-packet=128M, --innodb-log-file-size=64M] + healthcheck: + test: [CMD, mysqladmin, ping, -u, root, '--password={{ secrets.OWNCLOUD.MYSQL_PASSWORD }}'] + interval: 10s + timeout: 5s + retries: 5 + volumes: + - ./mysql:/var/lib/mysql diff --git a/ansible/service_owncloud/owncloud.service b/ansible/service_owncloud/owncloud.service new file mode 100644 index 00000000..5535c2cd --- /dev/null +++ b/ansible/service_owncloud/owncloud.service @@ -0,0 +1,12 @@ +# /etc/systemd/system/{{ secrets.OWNCLOUD.USERNAME }}.service +[Service] +ExecStart=/usr/bin/docker compose up +Restart=always +RestartSec=20 +SyslogIdentifier={{ secrets.OWNCLOUD.USERNAME }} +User={{ secrets.OWNCLOUD.USERNAME }} +Group={{ secrets.OWNCLOUD.USERNAME }} +WorkingDirectory=/home/{{ secrets.OWNCLOUD.USERNAME }} + +[Install] +WantedBy=multi-user.target \ No newline at end of file diff --git a/ansible/service_owncloud/owncloud_setup.yml b/ansible/service_owncloud/owncloud_setup.yml new file mode 100644 index 00000000..dcfd2734 --- /dev/null +++ b/ansible/service_owncloud/owncloud_setup.yml @@ -0,0 +1,100 @@ +# https://hub.docker.com/r/owncloud/server +# https://owncloud.com/ +# Execute with +# ansible-playbook owncloud_setup.yml -i ../hosts -i /home/burny/syncthing/secrets/ansible_secrets/.ansible_secrets +- name: Create and start owncloud service + hosts: my_servers + vars: + USERNAME: '{{ secrets.OWNCLOUD.USERNAME }}' + tasks: + - name: Create group + ansible.builtin.group: + name: '{{ USERNAME }}' + state: present + + - name: Create user + ansible.builtin.user: + name: '{{ USERNAME }}' + groups: + - '{{ USERNAME }}' + - docker + shell: '{{ secrets.DEFAULT_SHELL }}' + + - name: Get user id + ansible.builtin.shell: + cmd: id {{ USERNAME }} -u + register: USER_ID + + - name: Get group id + ansible.builtin.shell: + cmd: id {{ USERNAME }} -g + register: GROUP_ID + + - name: Print user id + ansible.builtin.debug: + var: USER_ID.stdout + verbosity: 0 + + - name: Print group id + ansible.builtin.debug: + var: GROUP_ID.stdout + verbosity: 0 + + - name: Create systemd file + template: + src: owncloud.service + dest: /etc/systemd/system/{{ USERNAME }}.service + owner: '{{ USER_ID.stdout }}' + group: '{{ GROUP_ID.stdout }}' + + - name: Create docker-compose.yml file + template: + src: docker-compose.yml + dest: /home/{{ USERNAME }}/docker-compose.yml + owner: '{{ USER_ID.stdout }}' + group: '{{ GROUP_ID.stdout }}' + + - name: Stop owncloud service + ansible.builtin.systemd_service: + name: '{{ USERNAME }}' + state: stopped + enabled: false + + - name: Remove cloud-server container + community.docker.docker_container: + name: owncloud_server + 
state: absent + image: owncloud/server:latest + + - name: Remove cloud-db container + community.docker.docker_container: + name: owncloud_mariadb + state: absent + image: mariadb:10.11 + + - name: Remove cloud-redis container + community.docker.docker_container: + name: owncloud_redis + state: absent + image: redis:6 + + - name: Start owncloud service again + ansible.builtin.systemd_service: + name: '{{ USERNAME }}' + state: started + daemon_reload: true + enabled: true + + - name: Clean up trashbin every 30 minutes + ansible.builtin.cron: + name: Clean up trash + job: docker exec owncloud_server occ trashbin:cleanup + special_time: daily + user: '{{ USERNAME }}' + + - name: Scan once per day + ansible.builtin.cron: + name: Execute scan + job: docker exec owncloud_server occ files:scan --all + special_time: daily + user: '{{ USERNAME }}' diff --git a/ansible/service_owncloud/owncloud_update.yml b/ansible/service_owncloud/owncloud_update.yml new file mode 100644 index 00000000..e517f232 --- /dev/null +++ b/ansible/service_owncloud/owncloud_update.yml @@ -0,0 +1,43 @@ +# Execute with +# ansible-playbook owncloud_update.yml -i ../hosts -i /home/burny/syncthing/secrets/ansible_secrets/.ansible_secrets +- name: Stop, remove image, and start owncloud service + hosts: my_servers + vars: + USERNAME: '{{ secrets.OWNCLOUD.USERNAME }}' + tasks: + - name: Stop owncloud service + ansible.builtin.systemd_service: + name: '{{ USERNAME }}' + state: stopped + enabled: false + + - name: Remove cloud-server container + community.docker.docker_container: + name: owncloud_server + state: absent + image: owncloud/server:latest + + - name: Remove cloud-db container + community.docker.docker_container: + name: owncloud_mariadb + state: absent + image: mariadb:10.6 + + - name: Remove cloud-redis container + community.docker.docker_container: + name: owncloud_redis + state: absent + image: redis:6 + + - name: Remove owncloud image + community.docker.docker_image: + name: owncloud/server + tag: latest + state: absent + + - name: Start owncloud service again + ansible.builtin.systemd_service: + name: '{{ USERNAME }}' + state: started + daemon_reload: true + enabled: true diff --git a/ansible/service_paperless/docker-compose.env b/ansible/service_paperless/docker-compose.env new file mode 100644 index 00000000..1051eb63 --- /dev/null +++ b/ansible/service_paperless/docker-compose.env @@ -0,0 +1,42 @@ +# The UID and GID of the user used to run paperless in the container. Set this +# to your UID and GID on the host so that you have write access to the +# consumption directory. +#USERMAP_UID=1000 +#USERMAP_GID=1000 + +# Additional languages to install for text recognition, separated by a +# whitespace. Note that this is +# different from PAPERLESS_OCR_LANGUAGE (default=eng), which defines the +# language used for OCR. +# The container installs English, German, Italian, Spanish and French by +# default. +# See https://packages.debian.org/search?keywords=tesseract-ocr-&searchon=names&suite=buster +# for available languages. +#PAPERLESS_OCR_LANGUAGES=tur ces + +############################################################################### +# Paperless-specific settings # +############################################################################### + +# All settings defined in the paperless.conf.example can be used here. The +# Docker setup does not use the configuration file. +# A few commonly adjusted settings are provided below. 
+ +# This is required if you will be exposing Paperless-ngx on a public domain +# (if doing so please consider security measures such as reverse proxy) +#PAPERLESS_URL=https://paperless.example.com + +# Adjust this key if you plan to make paperless available publicly. It should +# be a very long sequence of random characters. You don't need to remember it. +#PAPERLESS_SECRET_KEY=change-me + +# Use this variable to set a timezone for the Paperless Docker containers. If not specified, defaults to UTC. +#PAPERLESS_TIME_ZONE=America/Los_Angeles + +# The default language to use for OCR. Set this to the language most of your +# documents are written in. +#PAPERLESS_OCR_LANGUAGE=eng + +# Set if accessing paperless via a domain subpath e.g. https://domain.com/PATHPREFIX and using a reverse-proxy like traefik or nginx +#PAPERLESS_FORCE_SCRIPT_NAME=/PATHPREFIX +#PAPERLESS_STATIC_URL=/PATHPREFIX/static/ # trailing slash required \ No newline at end of file diff --git a/ansible/service_paperless/docker-compose.yml b/ansible/service_paperless/docker-compose.yml new file mode 100644 index 00000000..c1207882 --- /dev/null +++ b/ansible/service_paperless/docker-compose.yml @@ -0,0 +1,51 @@ +version: '3.4' + +networks: + '{{secrets.TRAFIK_NETWORK}}': + external: true + +services: + broker: + container_name: paperless-broker + hostname: paperless-broker + image: docker.io/library/redis:7 + restart: unless-stopped + networks: + - '{{ secrets.TRAFIK_NETWORK }}' + volumes: + - redisdata:/data + + webserver: + container_name: paperless-webserver + image: ghcr.io/paperless-ngx/paperless-ngx:latest + restart: unless-stopped + depends_on: + - broker + networks: + - '{{ secrets.TRAFIK_NETWORK }}' + labels: + - traefik.enable=true + - traefik.http.routers.{{ secrets.PAPERLESS.USERNAME }}.rule=Host(`{{ secrets.PAPERLESS.USERNAME }}.{{ secrets.MY_DOMAIN }}`) + - traefik.http.services.{{ secrets.PAPERLESS.USERNAME }}.loadbalancer.server.port=8000 + - traefik.http.routers.{{ secrets.PAPERLESS.USERNAME }}.tls=true + - traefik.http.routers.{{ secrets.PAPERLESS.USERNAME }}.tls.certresolver=production + - traefik.http.routers.{{ secrets.PAPERLESS.USERNAME }}.middlewares=authelia@docker + healthcheck: + test: [CMD, curl, -fs, -S, --max-time, '2', 'http://localhost:8000'] + interval: 30s + timeout: 10s + retries: 5 + volumes: + - ./data:/usr/src/paperless/data + - ./media:/usr/src/paperless/media + - ./export:/usr/src/paperless/export + - ./consume:/usr/src/paperless/consume + env_file: docker-compose.env + environment: + PAPERLESS_REDIS: redis://paperless-broker:6379 + PAPERLESS_URL: https://{{ secrets.PAPERLESS.USERNAME }}.{{ secrets.MY_DOMAIN }} + PAPERLESS_CSRF_TRUSTED_ORIGINS: https://{{ secrets.PAPERLESS.USERNAME }}.{{ secrets.MY_DOMAIN }} + PAPERLESS_CORS_ALLOWED_HOSTS: https://{{ secrets.PAPERLESS.USERNAME }}.{{ secrets.MY_DOMAIN }} + +volumes: + redisdata: diff --git a/ansible/service_paperless/paperless.service b/ansible/service_paperless/paperless.service new file mode 100644 index 00000000..ba575845 --- /dev/null +++ b/ansible/service_paperless/paperless.service @@ -0,0 +1,12 @@ +# /etc/systemd/system/{{ secrets.PAPERLESS.USERNAME }}.service +[Service] +ExecStart=/usr/bin/docker compose up +Restart=always +RestartSec=20 +SyslogIdentifier={{ secrets.PAPERLESS.USERNAME }} +User={{ secrets.PAPERLESS.USERNAME }} +Group={{ secrets.PAPERLESS.USERNAME }} +WorkingDirectory=/home/{{ secrets.PAPERLESS.USERNAME }} + +[Install] +WantedBy=multi-user.target diff --git a/ansible/service_paperless/paperless_setup.yml 
b/ansible/service_paperless/paperless_setup.yml new file mode 100644 index 00000000..5dbf55e9 --- /dev/null +++ b/ansible/service_paperless/paperless_setup.yml @@ -0,0 +1,86 @@ +# https://github.com/paperless-ngx/paperless-ngx +# Execute with +# ansible-playbook paperless_setup.yml -i ../hosts -i /home/burny/syncthing/secrets/ansible_secrets/.ansible_secrets +- name: Create and start paperless service + hosts: my_servers + vars: + USERNAME: '{{ secrets.PAPERLESS.USERNAME }}' + tasks: + - name: Create group + ansible.builtin.group: + name: '{{ USERNAME }}' + state: present + + - name: Create user + ansible.builtin.user: + name: '{{ USERNAME }}' + groups: + - '{{ USERNAME }}' + - docker + shell: '{{ secrets.DEFAULT_SHELL }}' + + - name: Get user id + ansible.builtin.shell: + cmd: id {{ USERNAME }} -u + register: USER_ID + + - name: Get group id + ansible.builtin.shell: + cmd: id {{ USERNAME }} -g + register: GROUP_ID + + - name: Print user id + ansible.builtin.debug: + var: USER_ID.stdout + verbosity: 0 + + - name: Print group id + ansible.builtin.debug: + var: GROUP_ID.stdout + verbosity: 0 + + - name: Create systemd file + template: + src: paperless.service + dest: /etc/systemd/system/{{ USERNAME }}.service + owner: '{{ USER_ID.stdout }}' + group: '{{ GROUP_ID.stdout }}' + + - name: Copy docker-compose.env file + template: + src: docker-compose.env + dest: /home/{{ USERNAME }}/docker-compose.env + owner: '{{ USER_ID.stdout }}' + group: '{{ GROUP_ID.stdout }}' + + - name: Stop paperless service + ansible.builtin.systemd_service: + name: '{{ USERNAME }}' + state: stopped + enabled: false + + - name: Remove paperless container + community.docker.docker_container: + name: paperless-webserver + state: absent + image: ghcr.io/paperless-ngx/paperless-ngx:latest + + - name: Remove paperless-broker container + community.docker.docker_container: + name: paperless-broker + state: absent + image: docker.io/library/redis:7 + + - name: Copy docker-compose.yml file + template: + src: docker-compose.yml + dest: /home/{{ USERNAME }}/docker-compose.yml + owner: '{{ USER_ID.stdout }}' + group: '{{ GROUP_ID.stdout }}' + + - name: Start paperless service again + ansible.builtin.systemd_service: + name: '{{ USERNAME }}' + state: started + daemon_reload: true + enabled: true diff --git a/ansible/service_paperless/paperless_update.yml b/ansible/service_paperless/paperless_update.yml new file mode 100644 index 00000000..b22b2c45 --- /dev/null +++ b/ansible/service_paperless/paperless_update.yml @@ -0,0 +1,49 @@ +# Execute with +# ansible-playbook paperless_update.yml -i ../hosts -i /home/burny/syncthing/secrets/ansible_secrets/.ansible_secrets +- name: Stop, remove image, and start paperless service + hosts: my_servers + vars: + USERNAME: '{{ secrets.PAPERLESS.USERNAME }}' + tasks: + - name: Stop paperless service + ansible.builtin.systemd_service: + name: '{{ USERNAME }}' + state: stopped + enabled: false + + - name: Stop paperless container + community.docker.docker_container: + name: paperless-webserver + state: stopped + image: ghcr.io/paperless-ngx/paperless-ngx:latest + + - name: Remove paperless container + community.docker.docker_container: + name: paperless-webserver + state: absent + image: ghcr.io/paperless-ngx/paperless-ngx:latest + + - name: Stop paperless-broker container + community.docker.docker_container: + name: paperless-broker + state: stopped + image: docker.io/library/redis:7 + + - name: Remove paperless-broker container + community.docker.docker_container: + name: paperless-broker + state: 
absent + image: docker.io/library/redis:7 + + - name: Remove paperless image + community.docker.docker_image: + name: ghcr.io/paperless-ngx/paperless-ngx + tag: latest + state: absent + + - name: Start paperless service again + ansible.builtin.systemd_service: + name: '{{ USERNAME }}' + state: started + daemon_reload: true + enabled: true diff --git a/ansible/service_postgres/backup_postgres.sh b/ansible/service_postgres/backup_postgres.sh new file mode 100644 index 00000000..834d5bcb --- /dev/null +++ b/ansible/service_postgres/backup_postgres.sh @@ -0,0 +1,33 @@ +# Exit the script if any command fails +set -e + +BACKUP_FOLDER="{{ secrets.POSTGRES.BACKUP_MOUNT_PATH }}" +BACKUP_PATH="${BACKUP_FOLDER}/$(date +'%Y-%m-%d_%H:%M:%S').tar" + +# Create folder if it doesn't exist, and make syncthing owner +mkdir -p $BACKUP_FOLDER +chown syncthing:syncthing $BACKUP_FOLDER +# Only owner can enter the folder +chmod 700 $BACKUP_FOLDER + +# https://medium.com/@burakkocakeu/get-pg-dump-from-a-docker-container-and-pg-restore-into-another-in-5-steps-74ca5bf0589c +CONTAINER_NAME="postgres_postgres" +docker exec $CONTAINER_NAME pg_dump -U postgres -F t postgres > $BACKUP_PATH +# Make owner syncthing so that it can sync properly +chown syncthing:syncthing $BACKUP_PATH +# Only owner can read/write, group can read +chmod 640 $BACKUP_PATH + +# Remove backups with file size < 1kb (= failed backups) +cd $BACKUP_FOLDER +find . -maxdepth 1 -type f -name '*.tar' -size -1024c -delete + +# Keep up to 3 backups newest, remove other +MAX_BACKUPS_TO_KEEP=3 +FILES_TO_REMOVE=$(ls -t *.tar | tail -n +$(expr $MAX_BACKUPS_TO_KEEP + 1)) +if [ -n "$FILES_TO_REMOVE" ]; then + for file in $FILES_TO_REMOVE; do + echo "Removing older .tar file: $file" + rm $file + done +fi diff --git a/ansible/service_postgres/docker-compose.yml b/ansible/service_postgres/docker-compose.yml new file mode 100644 index 00000000..b8a82866 --- /dev/null +++ b/ansible/service_postgres/docker-compose.yml @@ -0,0 +1,66 @@ +version: '3.8' + +networks: + '{{secrets.TRAFIK_NETWORK}}': + external: true + +services: + postgres: + container_name: postgres_postgres + # In pgadmin and nocodb we can now use connection string where hostname is 'postgres_postgres' + # postgres://user:password@host/dbname + # nocodb: + # postgresql://{ POSTGRES_USER }:{ POSTGRES_PASSWORD }@postgres_postgres:5432/{ POSTGRES_DATABASE } + hostname: postgres_postgres + image: postgres:15-alpine + networks: + - '{{ secrets.TRAFIK_NETWORK }}' + restart: unless-stopped + ports: + - 5432:5432 + environment: + POSTGRES_USER: '{{ secrets.POSTGRES.POSTGRES_USER }}' + POSTGRES_PASSWORD: '{{ secrets.POSTGRES.POSTGRES_PASSWORD }}' + volumes: + # Do I need to mount a backup path? 
+ - ./postgres_data:/var/lib/postgresql/data + + pgadmin: + container_name: postgres_pgadmin + image: dpage/pgadmin4:7 + networks: + - '{{ secrets.TRAFIK_NETWORK }}' + labels: + - traefik.enable=true + - traefik.http.routers.{{ secrets.POSTGRES.PGADMIN_USERNAME }}.rule=Host(`{{ secrets.POSTGRES.PGADMIN_USERNAME }}.{{ secrets.MY_DOMAIN }}`) + - traefik.http.services.{{ secrets.POSTGRES.PGADMIN_USERNAME }}.loadbalancer.server.port=80 + - traefik.http.routers.{{ secrets.POSTGRES.PGADMIN_USERNAME }}.tls=true + - traefik.http.routers.{{ secrets.POSTGRES.PGADMIN_USERNAME }}.tls.certresolver=production + - traefik.http.routers.{{ secrets.POSTGRES.PGADMIN_USERNAME }}.middlewares=authelia@docker + restart: unless-stopped + environment: + PGADMIN_DEFAULT_EMAIL: '{{ secrets.POSTGRES.PGADMIN_USER }}' + PGADMIN_DEFAULT_PASSWORD: '{{ secrets.POSTGRES.PGADMIN_PASSWORD }}' + volumes: + # pgadmindata folder needs to be owned by user id '5050' + - ./pgadmindata:/var/lib/pgadmin + + nocodb: + # TODO Add default connection + container_name: postgres_nocodb + image: nocodb/nocodb:latest + networks: + - '{{ secrets.TRAFIK_NETWORK }}' + labels: + - traefik.enable=true + - traefik.http.routers.{{ secrets.POSTGRES.NOCODB_USERNAME }}.rule=Host(`{{ secrets.POSTGRES.NOCODB_USERNAME }}.{{ secrets.MY_DOMAIN }}`) + - traefik.http.services.{{ secrets.POSTGRES.NOCODB_USERNAME }}.loadbalancer.server.port=8080 + - traefik.http.routers.{{ secrets.POSTGRES.NOCODB_USERNAME }}.tls=true + - traefik.http.routers.{{ secrets.POSTGRES.NOCODB_USERNAME }}.tls.certresolver=production + - traefik.http.routers.{{ secrets.POSTGRES.NOCODB_USERNAME }}.middlewares=authelia@docker + restart: unless-stopped + environment: + PUID: '{{ USER_ID.stdout }}' + PGID: '{{ GROUP_ID.stdout }}' + volumes: + - ./nocodb:/usr/app/data/ diff --git a/ansible/service_postgres/postgres.service b/ansible/service_postgres/postgres.service new file mode 100644 index 00000000..314a2948 --- /dev/null +++ b/ansible/service_postgres/postgres.service @@ -0,0 +1,12 @@ +# /etc/systemd/system/{{ secrets.POSTGRES.USERNAME }}.service +[Service] +ExecStart=/usr/bin/docker compose up +Restart=always +RestartSec=20 +SyslogIdentifier={{ secrets.POSTGRES.USERNAME }} +User={{ secrets.POSTGRES.USERNAME }} +Group={{ secrets.POSTGRES.USERNAME }} +WorkingDirectory=/home/{{ secrets.POSTGRES.USERNAME }} + +[Install] +WantedBy=multi-user.target \ No newline at end of file diff --git a/ansible/service_postgres/postgres_setup.yml b/ansible/service_postgres/postgres_setup.yml new file mode 100644 index 00000000..8b21557f --- /dev/null +++ b/ansible/service_postgres/postgres_setup.yml @@ -0,0 +1,128 @@ +# https://github.com/nocodb/nocodb +# https://www.nocodb.com/ +# Execute with +# ansible-playbook postgres_setup.yml -i ../hosts -i /home/burny/syncthing/secrets/ansible_secrets/.ansible_secrets +- name: Create and start postgres service + hosts: my_servers + vars: + USERNAME: '{{ secrets.POSTGRES.USERNAME }}' + tasks: + - name: Create group + ansible.builtin.group: + name: '{{ USERNAME }}' + state: present + + - name: Create user + ansible.builtin.user: + name: '{{ USERNAME }}' + groups: + - '{{ USERNAME }}' + - docker + shell: '{{ secrets.DEFAULT_SHELL }}' + + - name: Get user id + ansible.builtin.shell: + cmd: id {{ USERNAME }} -u + register: USER_ID + + - name: Get group id + ansible.builtin.shell: + cmd: id {{ USERNAME }} -g + register: GROUP_ID + + - name: Get syncthing user id + ansible.builtin.shell: + cmd: id syncthing -u + register: SYNCTHING_USER_ID + + - name: Get syncthing group 
id + ansible.builtin.shell: + cmd: id syncthing -g + register: SYNCTHING_GROUP_ID + + - name: Print user id + ansible.builtin.debug: + var: USER_ID.stdout + verbosity: 0 + + - name: Print group id + ansible.builtin.debug: + var: GROUP_ID.stdout + verbosity: 0 + + - name: Create systemd file + template: + src: postgres.service + dest: /etc/systemd/system/{{ USERNAME }}.service + owner: '{{ USER_ID.stdout }}' + group: '{{ GROUP_ID.stdout }}' + + - name: Create directories + file: + path: /home/{{ USERNAME }}/pgadmindata + recurse: true + state: directory + # pgadmin user + owner: '5050' + group: '5050' + + - name: Copy docker-compose.yml file + template: + src: docker-compose.yml + dest: /home/{{ USERNAME }}/docker-compose.yml + owner: '{{ USER_ID.stdout }}' + group: '{{ GROUP_ID.stdout }}' + + - name: Stop postgres service + ansible.builtin.systemd_service: + name: '{{ USERNAME }}' + state: stopped + enabled: false + + - name: Remove postgres_postgres container + community.docker.docker_container: + name: postgres_postgres + state: absent + image: postgres:15-alpine + + - name: Remove postgres_pgadmin container + community.docker.docker_container: + name: postgres_pgadmin + state: absent + image: dpage/pgadmin4:7 + + - name: Remove postgres_nocodb container + community.docker.docker_container: + name: postgres_nocodb + state: absent + image: nocodb/nocodb:latest + + - name: Start postgres service again + ansible.builtin.systemd_service: + name: '{{ USERNAME }}' + state: started + daemon_reload: true + enabled: true + + # BACKUP + - name: Create directories + file: + path: '{{ secrets.POSTGRES.BACKUP_MOUNT_PATH }}' + recurse: true + state: directory + owner: '{{ SYNCTHING_USER_ID.stdout }}' + group: '{{ SYNCTHING_GROUP_ID.stdout }}' + + - name: Copy backup script file + template: + src: backup_postgres.sh + dest: '{{ secrets.POSTGRES.BACKUP_MOUNT_PATH }}/backup_postgres.sh' + owner: '{{ USER_ID.stdout }}' + group: '{{ GROUP_ID.stdout }}' + + - name: Backup once per day as user 'root' + ansible.builtin.cron: + name: Run postgres backup + job: /bin/sh '{{ secrets.POSTGRES.BACKUP_MOUNT_PATH }}/backup_postgres.sh' >> '{{ secrets.POSTGRES.BACKUP_MOUNT_PATH }}/backup_postgres.log' + special_time: daily + user: root diff --git a/ansible/service_rdiff_backup/backup_script.sh b/ansible/service_rdiff_backup/backup_script.sh new file mode 100644 index 00000000..bf918a4f --- /dev/null +++ b/ansible/service_rdiff_backup/backup_script.sh @@ -0,0 +1,37 @@ +# Exit the script if any command fails +set -e + +backup_function() { + # Check if the correct number of parameters is provided + if [ "$#" -ne 3 ]; then + echo "Usage: backup_function " + exit 1 + fi + + SOURCE_DIRECTORY="$1" + TARGET_DIRECTORY="$2" + MAX_BACKUPS="$3" + + mkdir -p "$TARGET_DIRECTORY" + + # Perform rdiff-backup + rdiff-backup backup \ + --exclude "**/data/transcodes/*" \ + --exclude "**/data/metadata/*" \ + --exclude "**/cache/*" \ + --exclude "**/log/*" \ + "$SOURCE_DIRECTORY" \ + "$TARGET_DIRECTORY" + + # Change ownership to syncthing:syncthing + chown -R syncthing:syncthing "$TARGET_DIRECTORY" + # Only allow user syncthing to access directory + chmod 700 "$TARGET_DIRECTORY" + + # Keep at most N backups (replace N with the provided amount) + rdiff-backup --force remove increments --older-than "${MAX_BACKUPS}"B "$TARGET_DIRECTORY" +} + +# Call the function +backup_function $1 $2 $3 +# Usage: sh backup_script.sh "/path/to/source" "/path/to/target" "amount of backups" diff --git a/ansible/service_rdiff_backup/rdiff_backup_setup.yml 
b/ansible/service_rdiff_backup/rdiff_backup_setup.yml new file mode 100644 index 00000000..85e231e1 --- /dev/null +++ b/ansible/service_rdiff_backup/rdiff_backup_setup.yml @@ -0,0 +1,91 @@ +# https://github.com/rdiff-backup/rdiff-backup +# Execute with +# ansible-playbook rdiff_backup_setup.yml -i ../hosts -i /home/burny/syncthing/secrets/ansible_secrets/.ansible_secrets +- name: Create and start duplicati service + hosts: my_servers + vars: + USERNAME: root + BACKUP_SCRIPT_PATH: "{{ secrets.RDIFF_BACKUP.BACKUP_TARGET_PATH }}/backup_script.sh" + AMOUNT_OF_BACKUPS: 3 + backup_jobs: + - name: factorio + LOG_NAME: backup_factorio.log + SOURCE_DIRECTORY: "{{ secrets.RDIFF_BACKUP.BACKUP_SOURCE_FACTORIO }}" + TARGET_DIRECTORY: "{{ secrets.RDIFF_BACKUP.BACKUP_TARGET_PATH }}/factorio" + - name: uptime_kuma + LOG_NAME: backup_uptime_kuma.log + SOURCE_DIRECTORY: "{{ secrets.RDIFF_BACKUP.BACKUP_SOURCE_UPTIME_KUMA }}" + TARGET_DIRECTORY: "{{ secrets.RDIFF_BACKUP.BACKUP_TARGET_PATH }}/uptime_kuma" + # - name: syncthing + # LOG_NAME: backup_syncthing.log + # SOURCE_DIRECTORY: "{{ secrets.RDIFF_BACKUP.BACKUP_SOURCE_SYNCTHING }}" + # TARGET_DIRECTORY: "{{ secrets.RDIFF_BACKUP.BACKUP_TARGET_PATH }}/syncthing" + - name: jellyfin + LOG_NAME: backup_jellyfin.log + SOURCE_DIRECTORY: "{{ secrets.RDIFF_BACKUP.BACKUP_SOURCE_JELLYFIN }}" + TARGET_DIRECTORY: "{{ secrets.RDIFF_BACKUP.BACKUP_TARGET_PATH }}/jellyfin" + - name: trillium + LOG_NAME: backup_trillium.log + SOURCE_DIRECTORY: "{{ secrets.RDIFF_BACKUP.BACKUP_SOURCE_TRILLIUM }}" + TARGET_DIRECTORY: "{{ secrets.RDIFF_BACKUP.BACKUP_TARGET_PATH }}/trillium" + - name: audiobookshelf + LOG_NAME: backup_audiobookshelf.log + SOURCE_DIRECTORY: "{{ secrets.RDIFF_BACKUP.BACKUP_SOURCE_AUDIOBOOKSHELF }}" + TARGET_DIRECTORY: "{{ secrets.RDIFF_BACKUP.BACKUP_TARGET_PATH }}/audiobookshelf" + - name: paperless + LOG_NAME: backup_paperless.log + SOURCE_DIRECTORY: "{{ secrets.RDIFF_BACKUP.BACKUP_SOURCE_PAPERLESS }}" + TARGET_DIRECTORY: "{{ secrets.RDIFF_BACKUP.BACKUP_TARGET_PATH }}/paperless" + - name: bookshelf + LOG_NAME: backup_bookshelf.log + SOURCE_DIRECTORY: "{{ secrets.RDIFF_BACKUP.BACKUP_SOURCE_BOOKSHELF }}" + TARGET_DIRECTORY: "{{ secrets.RDIFF_BACKUP.BACKUP_TARGET_PATH }}/bookshelf" + - name: navidrome + LOG_NAME: backup_navidrome.log + SOURCE_DIRECTORY: "{{ secrets.RDIFF_BACKUP.BACKUP_SOURCE_NAVIDROME }}" + TARGET_DIRECTORY: "{{ secrets.RDIFF_BACKUP.BACKUP_TARGET_PATH }}/navidrome" + - name: reactive_resume + LOG_NAME: backup_reactive_resume.log + SOURCE_DIRECTORY: "{{ secrets.RDIFF_BACKUP.BACKUP_SOURCE_REACTIVE_RESUME }}" + TARGET_DIRECTORY: "{{ secrets.RDIFF_BACKUP.BACKUP_TARGET_PATH }}/reactive_resume" + tasks: + - name: Get syncthing user id + ansible.builtin.shell: + cmd: id syncthing -u + register: SYNCTHING_USER_ID + + - name: Get syncthing group id + ansible.builtin.shell: + cmd: id syncthing -g + register: SYNCTHING_GROUP_ID + + # Install latest version of https://github.com/rdiff-backup/rdiff-backup + - name: Install rdiff-backup + ansible.builtin.apt: + name: rdiff-backup + state: latest + + - name: Create directories + file: + path: "{{ item }}" + recurse: true + state: directory + owner: "{{ SYNCTHING_USER_ID.stdout }}" + group: "{{ SYNCTHING_GROUP_ID.stdout }}" + loop: + - "{{ secrets.RDIFF_BACKUP.BACKUP_TARGET_PATH }}" + + - name: Copy backup script + template: + src: backup_script.sh + dest: "{{ BACKUP_SCRIPT_PATH }}" + owner: "{{ SYNCTHING_USER_ID.stdout }}" + group: "{{ SYNCTHING_GROUP_ID.stdout }}" + + - name: Add backups + ansible.builtin.cron: + 
name: "Run {{ item.name }} backup" + job: /bin/sh '{{ BACKUP_SCRIPT_PATH }}' '{{ item.SOURCE_DIRECTORY }}' '{{ item.TARGET_DIRECTORY }}' '{{ AMOUNT_OF_BACKUPS }}' >> '{{ secrets.RDIFF_BACKUP.BACKUP_TARGET_PATH }}/{{ item.LOG_NAME }}' + special_time: daily + user: "{{ USERNAME }}" + with_items: "{{ backup_jobs }}" diff --git a/ansible/service_reactive_resume/docker-compose.yml b/ansible/service_reactive_resume/docker-compose.yml new file mode 100644 index 00000000..6a478cb5 --- /dev/null +++ b/ansible/service_reactive_resume/docker-compose.yml @@ -0,0 +1,144 @@ +version: "3.8" + +# In this Docker Compose example, it assumes that you maintain a reverse proxy externally (or chose not to). +# The only two exposed ports here are from minio (:9000) and the app itself (:3000). +# If these ports are changed, ensure that the env vars passed to the app are also changed accordingly. + +networks: + '{{secrets.TRAFIK_NETWORK}}': + external: true + +services: + # Database (Postgres) + postgres: + image: postgres:15-alpine + container_name: reactive_resume_postgres + hostname: reactive_resume_postgres + networks: + - '{{ secrets.TRAFIK_NETWORK }}' + restart: unless-stopped + volumes: + - ./postgres_data:/var/lib/postgresql/data + environment: + POSTGRES_DB: postgres + POSTGRES_USER: postgres + POSTGRES_PASSWORD: '{{ secrets.REACTIVE_RESUME.POSTGRES_PASSWORD }}' + healthcheck: + test: ["CMD-SHELL", "pg_isready -U postgres -d postgres"] + interval: 10s + timeout: 5s + retries: 5 + + # Storage (for image uploads) + minio: + image: minio/minio:latest + container_name: reactive_resume_minio + hostname: reactive_resume_minio + networks: + - '{{ secrets.TRAFIK_NETWORK }}' + restart: unless-stopped + command: server /data + volumes: + - ./minio_data:/data + environment: + MINIO_ROOT_USER: '{{ secrets.REACTIVE_RESUME.STORAGE_ACCESS_KEY }}' + MINIO_ROOT_PASSWORD: '{{ secrets.REACTIVE_RESUME.STORAGE_SECRET_KEY }}' + + # Chrome Browser (for printing and previews) + chrome: + image: browserless/chrome:1.61.0-puppeteer-21.4.1 + container_name: reactive_resume_chrome + hostname: reactive_resume_chrome + networks: + - '{{ secrets.TRAFIK_NETWORK }}' + restart: unless-stopped + environment: + TOKEN: chrome_token + EXIT_ON_HEALTH_FAILURE: true + PRE_REQUEST_HEALTH_CHECK: true + + # Redis (for cache & server session management) + redis: + image: redis:alpine + container_name: reactive_resume_redis + hostname: reactive_resume_redis + networks: + - '{{ secrets.TRAFIK_NETWORK }}' + restart: unless-stopped + command: redis-server --requirepass password + + app: + image: amruthpillai/reactive-resume:latest + container_name: reactive_resume_app + hostname: reactive_resume_app + networks: + - '{{ secrets.TRAFIK_NETWORK }}' + restart: unless-stopped + depends_on: + - postgres + - minio + - redis + - chrome + environment: + # -- Environment Variables -- + PORT: 3000 + NODE_ENV: production + + # -- URLs -- + PUBLIC_URL: http://{{ secrets.REACTIVE_RESUME.USERNAME }}.{{ secrets.MY_DOMAIN }} + STORAGE_URL: http://reactive_resume_minio:9000/default + + # -- Printer (Chrome) -- + CHROME_TOKEN: chrome_token + CHROME_URL: ws://reactive_resume_chrome:3000 + + # -- Database (Postgres) -- + DATABASE_URL: postgresql://postgres:{{ secrets.REACTIVE_RESUME.POSTGRES_PASSWORD }}@reactive_resume_postgres:5432/postgres + + # -- Auth -- + ACCESS_TOKEN_SECRET: '{{ secrets.REACTIVE_RESUME.ACCESS_TOKEN_SECRET }}' + REFRESH_TOKEN_SECRET: '{{ secrets.REACTIVE_RESUME.REFRESH_TOKEN_SECRET }}' + + # -- Emails -- + MAIL_FROM: noreply@localhost + # SMTP_URL: 
smtp://user:pass@smtp:587 # Optional + + # -- Storage (Minio) -- + STORAGE_ENDPOINT: minio + STORAGE_PORT: 9000 + STORAGE_REGION: us-east-1 # Optional + STORAGE_BUCKET: default + STORAGE_ACCESS_KEY: '{{ secrets.REACTIVE_RESUME.STORAGE_ACCESS_KEY }}' + STORAGE_SECRET_KEY: '{{ secrets.REACTIVE_RESUME.STORAGE_SECRET_KEY }}' + STORAGE_USE_SSL: false + + # -- Cache (Redis) -- + REDIS_URL: redis://default:password@reactive_resume_redis:6379 + + # -- Sentry -- + # VITE_SENTRY_DSN: https://id.sentry.io # Optional + + # -- Crowdin (Optional) -- + # CROWDIN_PROJECT_ID: + # CROWDIN_PERSONAL_TOKEN: + + # -- Email (Optional) -- + # DISABLE_EMAIL_AUTH: true + # VITE_DISABLE_SIGNUPS: true + + # -- GitHub (Optional) -- + # GITHUB_CLIENT_ID: github_client_id + # GITHUB_CLIENT_SECRET: github_client_secret + # GITHUB_CALLBACK_URL: http://localhost:3000/api/auth/github/callback + + # -- Google (Optional) -- + # GOOGLE_CLIENT_ID: google_client_id + # GOOGLE_CLIENT_SECRET: google_client_secret + # GOOGLE_CALLBACK_URL: http://localhost:3000/api/auth/google/callback + labels: + - traefik.enable=true + - traefik.http.routers.{{ secrets.REACTIVE_RESUME.USERNAME }}.rule=Host(`{{ secrets.REACTIVE_RESUME.USERNAME }}.{{ secrets.MY_DOMAIN }}`) + - traefik.http.services.{{ secrets.REACTIVE_RESUME.USERNAME }}.loadbalancer.server.port=3000 + - traefik.http.routers.{{ secrets.REACTIVE_RESUME.USERNAME }}.tls=true + - traefik.http.routers.{{ secrets.REACTIVE_RESUME.USERNAME }}.tls.certresolver=production + - traefik.http.routers.{{ secrets.REACTIVE_RESUME.USERNAME }}.middlewares=authelia@docker diff --git a/ansible/service_reactive_resume/reactive_resume.service b/ansible/service_reactive_resume/reactive_resume.service new file mode 100644 index 00000000..c0ff95bd --- /dev/null +++ b/ansible/service_reactive_resume/reactive_resume.service @@ -0,0 +1,12 @@ +# /etc/systemd/system/{{ secrets.REACTIVE_RESUME.USERNAME }}.service +[Service] +ExecStart=/usr/bin/docker compose up +Restart=always +RestartSec=20 +SyslogIdentifier={{ secrets.REACTIVE_RESUME.USERNAME }} +User={{ secrets.REACTIVE_RESUME.USERNAME }} +Group={{ secrets.REACTIVE_RESUME.USERNAME }} +WorkingDirectory=/home/{{ secrets.REACTIVE_RESUME.USERNAME }} + +[Install] +WantedBy=multi-user.target \ No newline at end of file diff --git a/ansible/service_reactive_resume/reactive_resume_setup.yml b/ansible/service_reactive_resume/reactive_resume_setup.yml new file mode 100644 index 00000000..e67f5d9a --- /dev/null +++ b/ansible/service_reactive_resume/reactive_resume_setup.yml @@ -0,0 +1,96 @@ +# Execute with +# ansible-playbook reactive_resume_setup.yml -i ../hosts -i /home/burny/syncthing/secrets/ansible_secrets/.ansible_secrets +- name: Create and start reactive_resume service + hosts: my_servers + vars: + USERNAME: "{{ secrets.REACTIVE_RESUME.USERNAME }}" + tasks: + - name: Create group + ansible.builtin.group: + name: "{{ USERNAME }}" + state: present + + - name: Create user + ansible.builtin.user: + name: "{{ USERNAME }}" + groups: + - "{{ USERNAME }}" + - docker + shell: "{{ secrets.DEFAULT_SHELL }}" + + - name: Get user id + ansible.builtin.shell: + cmd: id {{ USERNAME }} -u + register: USER_ID + + - name: Get group id + ansible.builtin.shell: + cmd: id {{ USERNAME }} -g + register: GROUP_ID + + - name: Print user id + ansible.builtin.debug: + var: USER_ID.stdout + verbosity: 0 + + - name: Print group id + ansible.builtin.debug: + var: GROUP_ID.stdout + verbosity: 0 + + - name: Create systemd file + template: + src: reactive_resume.service + dest: 
/etc/systemd/system/{{ USERNAME }}.service + owner: "{{ USER_ID.stdout }}" + group: "{{ GROUP_ID.stdout }}" + + - name: Create docker-compose.yml file + template: + src: docker-compose.yml + dest: /home/{{ USERNAME }}/docker-compose.yml + owner: "{{ USER_ID.stdout }}" + group: "{{ GROUP_ID.stdout }}" + + - name: Stop reactive_resume service + ansible.builtin.systemd_service: + name: "{{ USERNAME }}" + state: stopped + enabled: false + + - name: Remove reactive_resume_app container + community.docker.docker_container: + name: reactive_resume_app + state: absent + image: amruthpillai/reactive-resume:latest + + - name: Remove reactive_resume_postgres container + community.docker.docker_container: + name: reactive_resume_postgres + state: absent + image: postgres:15-alpine + + - name: Remove reactive_resume_minio container + community.docker.docker_container: + name: reactive_resume_postgres + state: absent + image: minio/minio:latest + + - name: Remove reactive_resume_chrome container + community.docker.docker_container: + name: reactive_resume_chrome + state: absent + image: browserless/chrome:1.61.0-puppeteer-21.4.1 + + - name: Remove reactive_resume_redis container + community.docker.docker_container: + name: reactive_resume_redis + state: absent + image: redis:alpine + + - name: Start reactive_resume service again + ansible.builtin.systemd_service: + name: "{{ USERNAME }}" + state: started + daemon_reload: true + enabled: true diff --git a/ansible/service_reactive_resume/reactive_resume_update.yml b/ansible/service_reactive_resume/reactive_resume_update.yml new file mode 100644 index 00000000..ceaaca7f --- /dev/null +++ b/ansible/service_reactive_resume/reactive_resume_update.yml @@ -0,0 +1,55 @@ +# Execute with +# ansible-playbook reactive_resume_update.yml -i ../hosts -i /home/burny/syncthing/secrets/ansible_secrets/.ansible_secrets +- name: Create and start reactive_resume service + hosts: my_servers + vars: + USERNAME: "{{ secrets.REACTIVE_RESUME.USERNAME }}" + tasks: + - name: Stop reactive_resume service + ansible.builtin.systemd_service: + name: "{{ USERNAME }}" + state: stopped + enabled: false + + - name: Remove reactive_resume_app container + community.docker.docker_container: + name: reactive_resume_app + state: absent + image: amruthpillai/reactive-resume:latest + + - name: Remove reactive_resume_postgres container + community.docker.docker_container: + name: reactive_resume_postgres + state: absent + image: postgres:15-alpine + + - name: Remove reactive_resume_minio container + community.docker.docker_container: + name: reactive_resume_postgres + state: absent + image: minio/minio:latest + + - name: Remove reactive_resume_chrome container + community.docker.docker_container: + name: reactive_resume_chrome + state: absent + image: browserless/chrome:1.61.0-puppeteer-21.4.1 + + - name: Remove reactive_resume_redis container + community.docker.docker_container: + name: reactive_resume_redis + state: absent + image: redis:alpine + + - name: Remove reactive_resume image + community.docker.docker_image: + name: amruthpillai/reactive-resume + tag: latest + state: absent + + - name: Start reactive_resume service again + ansible.builtin.systemd_service: + name: "{{ USERNAME }}" + state: started + daemon_reload: true + enabled: true diff --git a/ansible/service_sonarcube/docker-compose.yml b/ansible/service_sonarcube/docker-compose.yml new file mode 100644 index 00000000..b453e855 --- /dev/null +++ b/ansible/service_sonarcube/docker-compose.yml @@ -0,0 +1,50 @@ +version: "3" + 
+networks: + "{{secrets.TRAFIK_NETWORK}}": + external: true + +services: + sonarqube: + image: sonarqube:lts-community + container_name: sonarqube-community + hostname: sonarqube-community + restart: unless-stopped + # user: 1000:1000 + depends_on: + - db + networks: + - "{{ secrets.TRAFIK_NETWORK }}" + environment: + SONAR_JDBC_URL: jdbc:postgresql://sonarqube-db:5432/sonar + SONAR_JDBC_USERNAME: '{{ secrets.SONARQUBE.POSTGRES_USER }}' + SONAR_JDBC_PASSWORD: '{{ secrets.SONARQUBE.POSTGRES_PASSWORD }}' + volumes: + - sonarqube_data:/opt/sonarqube/data + - sonarqube_extensions:/opt/sonarqube/extensions + - sonarqube_logs:/opt/sonarqube/logs + labels: + - traefik.enable=true + - traefik.http.routers.{{ secrets.SONARQUBE.USERNAME }}.rule=Host(`{{ secrets.SONARQUBE.USERNAME }}.{{ secrets.MY_DOMAIN }}`) + - traefik.http.services.{{ secrets.SONARQUBE.USERNAME }}.loadbalancer.server.port=9000 + - traefik.http.routers.{{ secrets.SONARQUBE.USERNAME }}.tls=true + - traefik.http.routers.{{ secrets.SONARQUBE.USERNAME }}.tls.certresolver=production + - traefik.http.routers.{{ secrets.SONARQUBE.USERNAME }}.middlewares=authelia@docker + + db: + image: postgres:12 + container_name: sonarqube-db + hostname: sonarqube-db + restart: unless-stopped + networks: + - "{{ secrets.TRAFIK_NETWORK }}" + environment: + POSTGRES_USER: '{{ secrets.SONARQUBE.POSTGRES_USER }}' + POSTGRES_PASSWORD: '{{ secrets.SONARQUBE.POSTGRES_PASSWORD }}' + volumes: + - ./postgresql_data:/var/lib/postgresql/data + +volumes: + sonarqube_data: + sonarqube_extensions: + sonarqube_logs: diff --git a/ansible/service_sonarcube/sonarqube.service b/ansible/service_sonarcube/sonarqube.service new file mode 100644 index 00000000..2a00d7b6 --- /dev/null +++ b/ansible/service_sonarcube/sonarqube.service @@ -0,0 +1,12 @@ +# /etc/systemd/system/{{ secrets.SONARQUBE.USERNAME }}.service +[Service] +ExecStart=/usr/bin/docker compose up +Restart=always +RestartSec=20 +SyslogIdentifier={{ secrets.SONARQUBE.USERNAME }} +User={{ secrets.SONARQUBE.USERNAME }} +Group={{ secrets.SONARQUBE.USERNAME }} +WorkingDirectory=/home/{{ secrets.SONARQUBE.USERNAME }} + +[Install] +WantedBy=multi-user.target \ No newline at end of file diff --git a/ansible/service_sonarcube/sonarqube_setup.yml b/ansible/service_sonarcube/sonarqube_setup.yml new file mode 100644 index 00000000..c5f5d312 --- /dev/null +++ b/ansible/service_sonarcube/sonarqube_setup.yml @@ -0,0 +1,97 @@ +# https://hub.docker.com/_/sonarqube +# https://docs.sonarsource.com/sonarqube/latest/setup-and-upgrade/install-the-server/installing-sonarqube-from-docker/ +# https://docs.sonarsource.com/sonarqube/9.9/devops-platform-integration/github-integration/ +# Execute with +# ansible-playbook sonarqube_setup.yml -i ../hosts -i /home/burny/syncthing/secrets/ansible_secrets/.ansible_secrets +- name: Create and start sonarqube service + hosts: my_servers + vars: + USERNAME: "{{ secrets.SONARQUBE.USERNAME }}" + tasks: + - name: Create group + ansible.builtin.group: + name: "{{ USERNAME }}" + state: present + + - name: Create user + ansible.builtin.user: + name: "{{ USERNAME }}" + groups: + - "{{ USERNAME }}" + - docker + shell: "{{ secrets.DEFAULT_SHELL }}" + + - name: Get user id + ansible.builtin.shell: + cmd: id {{ USERNAME }} -u + register: USER_ID + + - name: Get group id + ansible.builtin.shell: + cmd: id {{ USERNAME }} -g + register: GROUP_ID + + - name: Print user id + ansible.builtin.debug: + var: USER_ID.stdout + verbosity: 0 + + - name: Print group id + ansible.builtin.debug: + var: GROUP_ID.stdout + 
verbosity: 0 + + - name: Set vm.max_map_count + ansible.posix.sysctl: + name: vm.max_map_count + value: "524288" + state: present + + - name: Set fs.file-max + ansible.posix.sysctl: + name: fs.file-max + value: "131072" + state: present + + - name: Create systemd file + template: + src: sonarqube.service + dest: /etc/systemd/system/{{ USERNAME }}.service + owner: "{{ USER_ID.stdout }}" + group: "{{ GROUP_ID.stdout }}" + + - name: Create docker-compose.yml file + template: + src: docker-compose.yml + dest: /home/{{ USERNAME }}/docker-compose.yml + owner: "{{ USER_ID.stdout }}" + group: "{{ GROUP_ID.stdout }}" + + - name: Stop sonarqube service + ansible.builtin.systemd_service: + name: "{{ USERNAME }}" + state: stopped + enabled: false + + - name: Remove sonarqube-server container + community.docker.docker_container: + name: sonarqube-community + state: absent + image: sonarqube:lts-community + + - name: Remove sonarqube-db container + community.docker.docker_container: + name: sonarqube-db + state: absent + image: postgres:12 + + - name: Start sonarqube service again + ansible.builtin.systemd_service: + name: "{{ USERNAME }}" + state: started + daemon_reload: true + enabled: true + + - name: Print reminder to change admin password + ansible.builtin.debug: + msg: Initial login is 'admin' with password 'admin'. Change it immediately! diff --git a/ansible/service_sonarcube/sonarqube_update.yml b/ansible/service_sonarcube/sonarqube_update.yml new file mode 100644 index 00000000..8b4fd86a --- /dev/null +++ b/ansible/service_sonarcube/sonarqube_update.yml @@ -0,0 +1,37 @@ +# Execute with +# ansible-playbook sonarqube_update.yml -i ../hosts -i /home/burny/syncthing/secrets/ansible_secrets/.ansible_secrets +- name: Stop, remove image, and start sonarqube service + hosts: my_servers + vars: + USERNAME: "{{ secrets.SONARQUBE.USERNAME }}" + tasks: + - name: Stop sonarqube service + ansible.builtin.systemd_service: + name: "{{ USERNAME }}" + state: stopped + enabled: false + + - name: Remove sonarqube-server container + community.docker.docker_container: + name: sonarqube-community + state: absent + image: sonarqube:lts-community + + - name: Remove sonarqube-db container + community.docker.docker_container: + name: sonarqube-db + state: absent + image: postgres:12 + + - name: Remove sonarqube image + community.docker.docker_image: + name: sonarqube + tag: lts-community + state: absent + + - name: Start sonarqube service again + ansible.builtin.systemd_service: + name: "{{ USERNAME }}" + state: started + daemon_reload: true + enabled: true diff --git a/ansible/service_stirling/stirling.service b/ansible/service_stirling/stirling.service new file mode 100644 index 00000000..478c6f50 --- /dev/null +++ b/ansible/service_stirling/stirling.service @@ -0,0 +1,26 @@ +# /etc/systemd/system/{{ secrets.STIRLING.USERNAME }}.service +[Service] +ExecStart=/usr/bin/docker run \ + --rm \ + --name=stirling \ + -v ./training_data:/usr/share/tesseract-ocr/4.00/tessdata \ + -v ./configs:/configs \ + -v ./customFiles:/customFiles \ + -e DOCKER_ENABLE_SECURITY=false \ + -l traefik.enable=true \ + -l traefik.http.routers.{{ secrets.STIRLING.USERNAME }}.rule=Host(`{{ secrets.STIRLING.USERNAME }}.{{ secrets.MY_DOMAIN }}`) \ + -l traefik.http.services.{{ secrets.STIRLING.USERNAME }}.loadbalancer.server.port=8080 \ + -l traefik.http.routers.{{ secrets.STIRLING.USERNAME }}.tls=true \ + -l traefik.http.routers.{{ secrets.STIRLING.USERNAME }}.tls.certresolver=production \ + -l traefik.http.routers.{{ secrets.STIRLING.USERNAME 
}}.middlewares=authelia@docker \ + --network {{ secrets.TRAFIK_NETWORK }} \ + frooodle/s-pdf:latest +Restart=always +RestartSec=20 +SyslogIdentifier={{ secrets.STIRLING.USERNAME }} +User={{ secrets.STIRLING.USERNAME }} +Group={{ secrets.STIRLING.USERNAME }} +WorkingDirectory=/home/{{ secrets.STIRLING.USERNAME }} + +[Install] +WantedBy=multi-user.target diff --git a/ansible/service_stirling/stirling_setup.yml b/ansible/service_stirling/stirling_setup.yml new file mode 100644 index 00000000..d9d40e23 --- /dev/null +++ b/ansible/service_stirling/stirling_setup.yml @@ -0,0 +1,66 @@ +# https://github.com/Frooodle/Stirling-PDF +# Execute with +# ansible-playbook stirling_setup.yml -i ../hosts -i /home/burny/syncthing/secrets/ansible_secrets/.ansible_secrets +- name: Create and start stirling service + hosts: my_servers + vars: + USERNAME: '{{ secrets.STIRLING.USERNAME }}' + tasks: + - name: Create group + ansible.builtin.group: + name: '{{ USERNAME }}' + state: present + + - name: Create user + ansible.builtin.user: + name: '{{ USERNAME }}' + groups: + - '{{ USERNAME }}' + - docker + shell: '{{ secrets.DEFAULT_SHELL }}' + + - name: Get user id + ansible.builtin.shell: + cmd: id {{ USERNAME }} -u + register: USER_ID + + - name: Get group id + ansible.builtin.shell: + cmd: id {{ USERNAME }} -g + register: GROUP_ID + + - name: Print user id + ansible.builtin.debug: + var: USER_ID.stdout + verbosity: 0 + + - name: Print group id + ansible.builtin.debug: + var: GROUP_ID.stdout + verbosity: 0 + + - name: Create systemd file + template: + src: stirling.service + dest: /etc/systemd/system/{{ USERNAME }}.service + owner: '{{ USER_ID.stdout }}' + group: '{{ GROUP_ID.stdout }}' + + - name: Stop stirling service + ansible.builtin.systemd_service: + name: '{{ USERNAME }}' + state: stopped + enabled: false + + - name: Remove stirling container + community.docker.docker_container: + name: stirling + state: absent + image: frooodle/s-pdf:latest + + - name: Start stirling service again + ansible.builtin.systemd_service: + name: '{{ USERNAME }}' + state: started + daemon_reload: true + enabled: true diff --git a/ansible/service_stirling/stirling_update.yml b/ansible/service_stirling/stirling_update.yml new file mode 100644 index 00000000..a0173d24 --- /dev/null +++ b/ansible/service_stirling/stirling_update.yml @@ -0,0 +1,37 @@ +# Execute with +# ansible-playbook stirling_update.yml -i ../hosts -i /home/burny/syncthing/secrets/ansible_secrets/.ansible_secrets +- name: Create and start stirling service + hosts: my_servers + vars: + USERNAME: '{{ secrets.STIRLING.USERNAME }}' + tasks: + - name: Stop stirling service + ansible.builtin.systemd_service: + name: '{{ USERNAME }}' + state: stopped + enabled: false + + - name: Stop stirling container + community.docker.docker_container: + name: stirling + state: stopped + image: frooodle/s-pdf:latest + + - name: Remove stirling container + community.docker.docker_container: + name: stirling + state: absent + image: frooodle/s-pdf:latest + + - name: Remove stirling image + community.docker.docker_image: + name: frooodle/s-pdf + tag: latest + state: absent + + - name: Start stirling service again + ansible.builtin.systemd_service: + name: '{{ USERNAME }}' + state: started + daemon_reload: true + enabled: true diff --git a/ansible/service_syncthing/docker-compose.yml b/ansible/service_syncthing/docker-compose.yml new file mode 100644 index 00000000..9632d02c --- /dev/null +++ b/ansible/service_syncthing/docker-compose.yml @@ -0,0 +1,27 @@ +version: '3.5' + +networks: + 
'{{secrets.TRAFIK_NETWORK}}': + external: true + +services: + syncthing: + container_name: syncthing + hostname: syncthing + restart: unless-stopped + image: lscr.io/linuxserver/syncthing:latest + volumes: + - ./config:/config + - ./data1:/data1 + environment: + - PUID={{ USER_ID.stdout }} + - PGID={{ GROUP_ID.stdout }} + networks: + - '{{ secrets.TRAFIK_NETWORK }}' + labels: + - traefik.enable=true + - traefik.http.routers.{{ secrets.SYNCTHING.USERNAME }}.rule=Host(`{{ secrets.SYNCTHING.SUBDOMAIN }}.{{ secrets.MY_DOMAIN }}`) + - traefik.http.services.{{ secrets.SYNCTHING.USERNAME }}.loadbalancer.server.port=8384 + - traefik.http.routers.{{ secrets.SYNCTHING.USERNAME }}.tls=true + - traefik.http.routers.{{ secrets.SYNCTHING.USERNAME }}.tls.certresolver=production + - traefik.http.routers.{{ secrets.SYNCTHING.USERNAME }}.middlewares=authelia@docker diff --git a/ansible/service_syncthing/syncthing.service b/ansible/service_syncthing/syncthing.service new file mode 100644 index 00000000..f7218ba0 --- /dev/null +++ b/ansible/service_syncthing/syncthing.service @@ -0,0 +1,12 @@ +# /etc/systemd/system/{{ secrets.SYNCTHING.USERNAME }}.service +[Service] +ExecStart=/usr/bin/docker compose up +Restart=always +RestartSec=20 +SyslogIdentifier={{ secrets.SYNCTHING.USERNAME }} +User={{ secrets.SYNCTHING.USERNAME }} +Group={{ secrets.SYNCTHING.USERNAME }} +WorkingDirectory=/home/{{ secrets.SYNCTHING.USERNAME }} + +[Install] +WantedBy=multi-user.target diff --git a/ansible/service_syncthing/syncthing_setup.yml b/ansible/service_syncthing/syncthing_setup.yml new file mode 100644 index 00000000..ed367442 --- /dev/null +++ b/ansible/service_syncthing/syncthing_setup.yml @@ -0,0 +1,72 @@ +# Execute with +# ansible-playbook syncthing_setup.yml -i ../hosts -i /home/burny/syncthing/secrets/ansible_secrets/.ansible_secrets +- name: Create and start syncthing service + hosts: my_servers + vars: + USERNAME: '{{ secrets.SYNCTHING.USERNAME }}' + tasks: + - name: Create group + ansible.builtin.group: + name: '{{ USERNAME }}' + state: present + + - name: Create user + ansible.builtin.user: + name: '{{ USERNAME }}' + groups: + - '{{ USERNAME }}' + - docker + shell: '{{ secrets.DEFAULT_SHELL }}' + + - name: Get user id + ansible.builtin.shell: + cmd: id {{ USERNAME }} -u + register: USER_ID + + - name: Get group id + ansible.builtin.shell: + cmd: id {{ USERNAME }} -g + register: GROUP_ID + + - name: Print user id + ansible.builtin.debug: + var: USER_ID.stdout + verbosity: 0 + + - name: Print group id + ansible.builtin.debug: + var: GROUP_ID.stdout + verbosity: 0 + + - name: Create systemd file + template: + src: syncthing.service + dest: /etc/systemd/system/{{ USERNAME }}.service + owner: '{{ USER_ID.stdout }}' + group: '{{ GROUP_ID.stdout }}' + + - name: Copy docker-compose.yml file + template: + src: docker-compose.yml + dest: /home/{{ USERNAME }}/docker-compose.yml + owner: '{{ USER_ID.stdout }}' + group: '{{ GROUP_ID.stdout }}' + + - name: Stop syncthing service + ansible.builtin.systemd_service: + name: '{{ USERNAME }}' + state: stopped + enabled: false + + - name: Remove syncthing container + community.docker.docker_container: + name: syncthing + state: absent + image: lscr.io/linuxserver/syncthing:latest + + - name: Start syncthing service again + ansible.builtin.systemd_service: + name: '{{ USERNAME }}' + state: started + daemon_reload: true + enabled: true diff --git a/ansible/service_syncthing/syncthing_update.yml b/ansible/service_syncthing/syncthing_update.yml new file mode 100644 index 00000000..66527299 
--- /dev/null +++ b/ansible/service_syncthing/syncthing_update.yml @@ -0,0 +1,37 @@ +# Execute with +# ansible-playbook syncthing_update.yml -i ../hosts -i /home/burny/syncthing/secrets/ansible_secrets/.ansible_secrets +- name: Create and start syncthing service + hosts: my_servers + vars: + USERNAME: '{{ secrets.SYNCTHING.USERNAME }}' + tasks: + - name: Stop syncthing service + ansible.builtin.systemd_service: + name: '{{ USERNAME }}' + state: stopped + enabled: false + + - name: Stop syncthing container + community.docker.docker_container: + name: syncthing + state: stopped + image: lscr.io/linuxserver/syncthing:latest + + - name: Remove syncthing container + community.docker.docker_container: + name: syncthing + state: absent + image: lscr.io/linuxserver/syncthing:latest + + - name: Remove syncthing image + community.docker.docker_image: + name: lscr.io/linuxserver/syncthing + tag: latest + state: absent + + - name: Start syncthing service again + ansible.builtin.systemd_service: + name: '{{ USERNAME }}' + state: started + daemon_reload: true + enabled: true diff --git a/ansible/service_traefik/docker-compose.yml b/ansible/service_traefik/docker-compose.yml new file mode 100644 index 00000000..395da1c7 --- /dev/null +++ b/ansible/service_traefik/docker-compose.yml @@ -0,0 +1,60 @@ +networks: + '{{secrets.TRAFIK_NETWORK}}': + external: true + +services: + reverse-proxy: + # The official v2 Traefik docker image + image: traefik:v2.10 + container_name: traefik + networks: + - '{{ secrets.TRAFIK_NETWORK }}' + # Enables the web UI and tells Traefik to listen to docker + command: + - --api=true + - --api.dashboard=true + - --api.insecure=false + - --pilot.dashboard=false + - --global.sendAnonymousUsage=false + - --global.checkNewVersion=false + - --log=true + - --log.level=DEBUG + - --log.filepath=/config/traefik.log + - --providers.docker=true + - --providers.docker.exposedByDefault=false + - --entryPoints.http=true + - --entryPoints.http.address=:8080/tcp + - --entryPoints.http.http.redirections.entryPoint.to=https + - --entryPoints.http.http.redirections.entryPoint.scheme=https + ## Please see the Forwarded Header Trust section of the Authelia Traefik Integration documentation. + # - '--entryPoints.http.forwardedHeaders.trustedIPs=10.0.0.0/8,172.16.0.0/16,192.168.0.0/16,fc00::/7' + # - '--entryPoints.http.proxyProtocol.trustedIPs=10.0.0.0/8,172.16.0.0/16,192.168.0.0/16,fc00::/7' + - --entryPoints.http.forwardedHeaders.insecure=false + - --entryPoints.http.proxyProtocol.insecure=false + - --entryPoints.https=true + - --entryPoints.https.address=:8443/tcp + ## Please see the Forwarded Header Trust section of the Authelia Traefik Integration documentation. + # - '--entryPoints.https.forwardedHeaders.trustedIPs=10.0.0.0/8,172.16.0.0/16,192.168.0.0/16,fc00::/7' + # - '--entryPoints.https.proxyProtocol.trustedIPs=10.0.0.0/8,172.16.0.0/16,192.168.0.0/16,fc00::/7' + - --entryPoints.https.forwardedHeaders.insecure=false + - --entryPoints.https.proxyProtocol.insecure=false + ports: + # The HTTP port + - 80:80 + # The HTTPS port + - 443:443 + # The Web UI (enabled by --api.insecure=true), dont use in production! 
+ - 8080:8080 + volumes: + - ./traefik:/etc/traefik + # So that Traefik can listen to the Docker events + - /var/run/docker.sock:/var/run/docker.sock:ro + labels: + - traefik.enable=true + - traefik.http.routers.api.rule=Host(`{{ secrets.TRAEFIK.USERNAME }}.{{ secrets.MY_DOMAIN }}`) + - traefik.http.routers.api.entryPoints=web,websecure + - traefik.http.routers.api.tls=true + - traefik.http.routers.api.tls.certresolver=production + - traefik.http.routers.api.service=api@internal + #- 'traefik.http.routers.api.middlewares=authelia@docker' + - traefik.http.services.api.loadbalancer.server.port=8080 diff --git a/ansible/service_traefik/traefik.service b/ansible/service_traefik/traefik.service new file mode 100644 index 00000000..c85bbc3b --- /dev/null +++ b/ansible/service_traefik/traefik.service @@ -0,0 +1,12 @@ +# /etc/systemd/system/{{ secrets.TRAEFIK.USERNAME }}.service +[Service] +ExecStart=/usr/bin/docker compose up +Restart=always +RestartSec=20 +SyslogIdentifier={{ secrets.TRAEFIK.USERNAME }} +User={{ secrets.TRAEFIK.USERNAME }} +Group={{ secrets.TRAEFIK.USERNAME }} +WorkingDirectory=/home/{{ secrets.TRAEFIK.USERNAME }} + +[Install] +WantedBy=multi-user.target diff --git a/ansible/service_traefik/traefik.yml b/ansible/service_traefik/traefik.yml new file mode 100644 index 00000000..1c5496b8 --- /dev/null +++ b/ansible/service_traefik/traefik.yml @@ -0,0 +1,77 @@ +global: + checkNewVersion: false + sendAnonymousUsage: false # true by default + +# (Optional) Log information +# --- +# log: +# level: ERROR # DEBUG, INFO, WARNING, ERROR, CRITICAL +# format: common # common, json, logfmt +# filePath: /var/log/traefik/traefik.log + +# (Optional) Accesslog +# --- +# accesslog: + # format: common # common, json, logfmt + # filePath: /var/log/traefik/access.log + +# (Optional) Enable API and Dashboard +# --- +api: + dashboard: true # true by default + insecure: false # Don't do this in production! + +# Entry Points configuration +# --- +entryPoints: + web: + address: :80 + # (Optional) Redirect to HTTPS + # --- + http: + redirections: + entryPoint: + to: websecure + scheme: https + + websecure: + address: :443 + +# Configure your CertificateResolver here... 
+# --- +certificatesResolvers: + staging: + acme: + email: '{{ secrets.TRAEFIK.MY_EMAIL }}' + storage: /etc/traefik/acme.json + caServer: https://acme-staging-v02.api.letsencrypt.org/directory + httpChallenge: + entryPoint: web + + production: + acme: + email: '{{ secrets.TRAEFIK.MY_EMAIL }}' + storage: /etc/traefik/acme.json + caServer: https://acme-v02.api.letsencrypt.org/directory + httpChallenge: + entryPoint: web + +# (Optional) Overwrite Default Certificates +# tls: +# stores: +# default: +# defaultCertificate: +# certFile: /etc/traefik/certs/cert.pem +# keyFile: /etc/traefik/certs/cert-key.pem +# (Optional) Disable TLS version 1.0 and 1.1 +# options: +# default: +# minVersion: VersionTLS12 + +providers: + docker: + exposedByDefault: false # Default is true + file: + # watch for dynamic configuration changes + directory: /etc/traefik + watch: true diff --git a/ansible/service_traefik/traefik_setup.yml b/ansible/service_traefik/traefik_setup.yml new file mode 100644 index 00000000..693378de --- /dev/null +++ b/ansible/service_traefik/traefik_setup.yml @@ -0,0 +1,95 @@ +# Execute with +# ansible-playbook traefik_setup.yml -i ../hosts -i /home/burny/syncthing/secrets/ansible_secrets/.ansible_secrets +- name: Create and start traefik service + hosts: my_servers + vars: + USERNAME: '{{ secrets.TRAEFIK.USERNAME }}' + tasks: + - name: Create group + ansible.builtin.group: + name: '{{ USERNAME }}' + state: present + + - name: Create user + ansible.builtin.user: + name: '{{ USERNAME }}' + groups: + - '{{ USERNAME }}' + - docker + shell: '{{ secrets.DEFAULT_SHELL }}' + + - name: Get user id + ansible.builtin.shell: + cmd: id {{ USERNAME }} -u + register: USER_ID + + - name: Get group id + ansible.builtin.shell: + cmd: id {{ USERNAME }} -g + register: GROUP_ID + + - name: Print user id + ansible.builtin.debug: + var: USER_ID.stdout + verbosity: 0 + + - name: Print group id + ansible.builtin.debug: + var: GROUP_ID.stdout + verbosity: 0 + + - name: Create systemd file + template: + src: traefik.service + dest: /etc/systemd/system/{{ USERNAME }}.service + owner: '{{ USER_ID.stdout }}' + group: '{{ GROUP_ID.stdout }}' + + # https://hollo.me/devops/routing-to-multiple-docker-compose-development-setups-with-traefik.html + - name: Create docker external network + docker_network: + name: '{{ secrets.TRAFIK_NETWORK }}' + internal: false + attachable: true + driver: bridge + + - name: Copy docker-compose.yml file + template: + src: docker-compose.yml + dest: /home/{{ USERNAME }}/docker-compose.yml + owner: '{{ USER_ID.stdout }}' + group: '{{ GROUP_ID.stdout }}' + + - name: Create directories + file: + path: /home/{{ USERNAME }}/traefik + recurse: true + state: directory + owner: '{{ USER_ID.stdout }}' + group: '{{ GROUP_ID.stdout }}' + + - name: Copy traefik.yml file + template: + src: traefik.yml + dest: /home/{{ USERNAME }}/traefik/traefik.yml + owner: '{{ USER_ID.stdout }}' + group: '{{ GROUP_ID.stdout }}' + + - name: Stop traefik service + ansible.builtin.systemd_service: + name: '{{ USERNAME }}' + state: stopped + enabled: false + + - name: Remove traefik container + community.docker.docker_container: + name: traefik + state: absent + image: traefik:v2.10 + + - name: Start traefik service again + ansible.builtin.systemd_service: + name: '{{ USERNAME }}' + state: started + daemon_reload: true + enabled: true diff --git a/ansible/service_trillium/trillium.service b/ansible/service_trillium/trillium.service new file mode 100644 index 00000000..09c66d51 --- /dev/null +++ 
b/ansible/service_trillium/trillium.service @@ -0,0 +1,23 @@ +# /etc/systemd/system/{{ secrets.TRILLIUM.USERNAME }}.service +[Service] +ExecStart=/usr/bin/docker run \ + --rm \ + --name=trillium \ + -v ./data:/home/node/trilium-data \ + -l traefik.enable=true \ + -l traefik.http.routers.{{ secrets.TRILLIUM.USERNAME }}.rule=Host(`{{ secrets.TRILLIUM.USERNAME }}.{{ secrets.MY_DOMAIN }}`) \ + -l traefik.http.services.{{ secrets.TRILLIUM.USERNAME }}.loadbalancer.server.port=8080 \ + -l traefik.http.routers.{{ secrets.TRILLIUM.USERNAME }}.tls=true \ + -l traefik.http.routers.{{ secrets.TRILLIUM.USERNAME }}.tls.certresolver=production \ + -l traefik.http.routers.{{ secrets.TRILLIUM.USERNAME }}.middlewares=authelia@docker \ + --network {{ secrets.TRAFIK_NETWORK }} \ + zadam/trilium +Restart=always +RestartSec=20 +SyslogIdentifier={{ secrets.TRILLIUM.USERNAME }} +User={{ secrets.TRILLIUM.USERNAME }} +Group={{ secrets.TRILLIUM.USERNAME }} +WorkingDirectory=/home/{{ secrets.TRILLIUM.USERNAME }} + +[Install] +WantedBy=multi-user.target diff --git a/ansible/service_trillium/trillium_setup.yml b/ansible/service_trillium/trillium_setup.yml new file mode 100644 index 00000000..22102c9b --- /dev/null +++ b/ansible/service_trillium/trillium_setup.yml @@ -0,0 +1,66 @@ +# https://github.com/zadam/trilium +# Execute with +# ansible-playbook trillium_setup.yml -i ../hosts -i /home/burny/syncthing/secrets/ansible_secrets/.ansible_secrets +- name: Create and start trillium service + hosts: my_servers + vars: + USERNAME: '{{ secrets.TRILLIUM.USERNAME }}' + tasks: + - name: Create group + ansible.builtin.group: + name: '{{ USERNAME }}' + state: present + + - name: Create user + ansible.builtin.user: + name: '{{ USERNAME }}' + groups: + - '{{ USERNAME }}' + - docker + shell: '{{ secrets.DEFAULT_SHELL }}' + + - name: Get user id + ansible.builtin.shell: + cmd: id {{ USERNAME }} -u + register: USER_ID + + - name: Get group id + ansible.builtin.shell: + cmd: id {{ USERNAME }} -g + register: GROUP_ID + + - name: Print user id + ansible.builtin.debug: + var: USER_ID.stdout + verbosity: 0 + + - name: Print group id + ansible.builtin.debug: + var: GROUP_ID.stdout + verbosity: 0 + + - name: Create systemd file + template: + src: trillium.service + dest: /etc/systemd/system/{{ USERNAME }}.service + owner: '{{ USER_ID.stdout }}' + group: '{{ GROUP_ID.stdout }}' + + - name: Stop trillium service + ansible.builtin.systemd_service: + name: '{{ USERNAME }}' + state: stopped + enabled: false + + - name: Remove trillium container + community.docker.docker_container: + name: trillium + state: absent + image: zadam/trilium + + - name: Start trillium service again + ansible.builtin.systemd_service: + name: '{{ USERNAME }}' + state: started + daemon_reload: true + enabled: true diff --git a/ansible/service_trillium/trillium_update.yml b/ansible/service_trillium/trillium_update.yml new file mode 100644 index 00000000..f5228450 --- /dev/null +++ b/ansible/service_trillium/trillium_update.yml @@ -0,0 +1,44 @@ +# Execute with +# ansible-playbook trillium_update.yml -i ../hosts -i /home/burny/syncthing/secrets/ansible_secrets/.ansible_secrets +- name: Stop, remove image, and start trillium service + hosts: my_servers + vars: + USERNAME: '{{ secrets.TRILLIUM.USERNAME }}' + tasks: + - name: Stop trillium service + ansible.builtin.systemd_service: + name: '{{ USERNAME }}' + state: stopped + enabled: false + + - name: Stop trillium container + community.docker.docker_container: + name: trillium + state: stopped + image: zadam/trilium:latest + 
+ - name: Remove trillium container + community.docker.docker_container: + name: trillium + state: absent + image: zadam/trilium:latest + + - name: Create systemd file + template: + src: trillium.service + dest: /etc/systemd/system/{{ USERNAME }}.service + owner: '{{ USER_ID.stdout }}' + group: '{{ GROUP_ID.stdout }}' + + - name: Remove trillium image + community.docker.docker_image: + name: zadam/trilium + tag: latest + state: absent + + - name: Start trillium service again + ansible.builtin.systemd_service: + name: '{{ USERNAME }}' + state: started + daemon_reload: true + enabled: true diff --git a/ansible/service_uptime_kuma/uptime_kuma.service b/ansible/service_uptime_kuma/uptime_kuma.service new file mode 100644 index 00000000..e74841db --- /dev/null +++ b/ansible/service_uptime_kuma/uptime_kuma.service @@ -0,0 +1,24 @@ +# /etc/systemd/system/{{ secrets.UPTIME_KUMA.USERNAME }}.service +[Service] +ExecStart=/usr/bin/docker run \ + --rm \ + --name=uptime-kuma \ + -v ./uptime-kuma:/app/data \ + -v /var/run/docker.sock:/var/run/docker.sock \ + -l traefik.enable=true \ + -l traefik.http.routers.{{ secrets.UPTIME_KUMA.USERNAME }}.rule=Host(`{{ secrets.UPTIME_KUMA.USERNAME }}.{{ secrets.MY_DOMAIN }}`) \ + -l traefik.http.services.{{ secrets.UPTIME_KUMA.USERNAME }}.loadbalancer.server.port=3001 \ + -l traefik.http.routers.{{ secrets.UPTIME_KUMA.USERNAME }}.tls=true \ + -l traefik.http.routers.{{ secrets.UPTIME_KUMA.USERNAME }}.tls.certresolver=production \ + -l traefik.http.routers.{{ secrets.UPTIME_KUMA.USERNAME }}.middlewares=authelia@docker \ + --network {{ secrets.TRAFIK_NETWORK }} \ + louislam/uptime-kuma:1 +Restart=always +RestartSec=20 +SyslogIdentifier={{ secrets.UPTIME_KUMA.USERNAME }} +User={{ secrets.UPTIME_KUMA.USERNAME }} +Group={{ secrets.UPTIME_KUMA.USERNAME }} +WorkingDirectory=/home/{{ secrets.UPTIME_KUMA.USERNAME }} + +[Install] +WantedBy=multi-user.target diff --git a/ansible/service_uptime_kuma/uptime_kuma_setup.yml b/ansible/service_uptime_kuma/uptime_kuma_setup.yml new file mode 100644 index 00000000..f03830ab --- /dev/null +++ b/ansible/service_uptime_kuma/uptime_kuma_setup.yml @@ -0,0 +1,70 @@ +# https://github.com/louislam/uptime-kuma +# Execute with +# ansible-playbook uptime_kuma_setup.yml -i ../hosts -i /home/burny/syncthing/secrets/ansible_secrets/.ansible_secrets +- name: Stop, remove image, and start uptime_kuma service + hosts: my_servers + vars: + USERNAME: "{{ secrets.UPTIME_KUMA.USERNAME }}" + tasks: + - name: Create group + ansible.builtin.group: + name: "{{ USERNAME }}" + state: present + + - name: Create user + ansible.builtin.user: + name: "{{ USERNAME }}" + groups: + - "{{ USERNAME }}" + - docker + shell: "{{ secrets.DEFAULT_SHELL }}" + + - name: Get user id + ansible.builtin.shell: + cmd: id {{ USERNAME }} -u + register: USER_ID + + - name: Get group id + ansible.builtin.shell: + cmd: id {{ USERNAME }} -g + register: GROUP_ID + + - name: Print user id + ansible.builtin.debug: + var: USER_ID.stdout + verbosity: 0 + + - name: Print group id + ansible.builtin.debug: + var: GROUP_ID.stdout + verbosity: 0 + + - name: Create systemd file + template: + src: uptime_kuma.service + dest: /etc/systemd/system/{{ USERNAME }}.service + owner: "{{ USER_ID.stdout }}" + group: "{{ GROUP_ID.stdout }}" + + - name: Stop uptime_kuma service + ansible.builtin.systemd_service: + name: "{{ USERNAME }}" + state: stopped + enabled: false + + - name: Remove uptime_kuma container + community.docker.docker_container: + name: uptime-kuma + state: absent + image: 
louislam/uptime-kuma:1 + + - name: Start uptime_kuma service again + ansible.builtin.systemd_service: + name: "{{ USERNAME }}" + state: started + daemon_reload: true + enabled: true + + - name: Print reminder + ansible.builtin.debug: + msg: Remember to make an admin account asap (on first time use) diff --git a/ansible/service_uptime_kuma/uptime_kuma_update.yml b/ansible/service_uptime_kuma/uptime_kuma_update.yml new file mode 100644 index 00000000..a3684348 --- /dev/null +++ b/ansible/service_uptime_kuma/uptime_kuma_update.yml @@ -0,0 +1,39 @@ +# Execute with +# ansible-playbook uptime_kuma_update.yml -i ../hosts -i /home/burny/syncthing/secrets/ansible_secrets/.ansible_secrets +- name: Create and start uptime_kuma service + hosts: my_servers + vars: + USERNAME: '{{ secrets.UPTIME_KUMA.USERNAME }}' + tasks: + + - name: Create systemd file + template: + src: uptime_kuma.service + dest: /etc/systemd/system/{{ USERNAME }}.service + owner: '{{ USER_ID.stdout }}' + group: '{{ GROUP_ID.stdout }}' + + - name: Stop uptime_kuma service + ansible.builtin.systemd_service: + name: '{{ USERNAME }}' + state: stopped + enabled: false + + - name: Remove uptime_kuma container + community.docker.docker_container: + name: uptime-kuma + state: absent + image: louislam/uptime-kuma:1 + + - name: Remove uptime_kuma image + community.docker.docker_image: + name: louislam/uptime-kuma + tag: 1 + state: absent + + - name: Start uptime_kuma service again + ansible.builtin.systemd_service: + name: '{{ USERNAME }}' + state: started + daemon_reload: true + enabled: true diff --git a/ansible/service_windmill/docker-compose.yml b/ansible/service_windmill/docker-compose.yml new file mode 100644 index 00000000..5b9026bc --- /dev/null +++ b/ansible/service_windmill/docker-compose.yml @@ -0,0 +1,114 @@ +version: '3.7' + +networks: + '{{secrets.TRAFIK_NETWORK}}': + external: true + +services: + db: + container_name: windmill_db + hostname: windmill_db + deploy: + # To use an external database, set replicas to 0 and set DATABASE_URL to the external database url in the .env file + replicas: 1 + image: postgres:14 + restart: unless-stopped + networks: + - '{{ secrets.TRAFIK_NETWORK }}' + volumes: + - ./db_data:/var/lib/postgresql/data + environment: + POSTGRES_DB: '{{ secrets.WINDMILL.POSTGRES_DB }}' + POSTGRES_PASSWORD: '{{ secrets.WINDMILL.POSTGRES_PASSWORD }}' + healthcheck: + test: [CMD-SHELL, pg_isready -U postgres] + interval: 10s + timeout: 5s + retries: 5 + + windmill_server: + container_name: windmill_server + image: ghcr.io/windmill-labs/windmill:main + pull_policy: always + deploy: + replicas: 1 + restart: unless-stopped + networks: + - '{{ secrets.TRAFIK_NETWORK }}' + labels: + - traefik.enable=true + - traefik.http.routers.{{ secrets.WINDMILL.USERNAME }}.rule=Host(`{{ secrets.WINDMILL.USERNAME }}.{{ secrets.MY_DOMAIN }}`) + - traefik.http.services.{{ secrets.WINDMILL.USERNAME }}.loadbalancer.server.port=8000 + - traefik.http.routers.{{ secrets.WINDMILL.USERNAME }}.tls=true + - traefik.http.routers.{{ secrets.WINDMILL.USERNAME }}.tls.certresolver=production + - traefik.http.routers.{{ secrets.WINDMILL.USERNAME }}.middlewares=authelia@docker + environment: + - DATABASE_URL={{ secrets.WINDMILL.DATABASE_URL }} + - MODE=server + depends_on: + db: + condition: service_healthy + + windmill_worker: + image: ghcr.io/windmill-labs/windmill:main + pull_policy: always + deploy: + replicas: 3 + resources: + limits: + cpus: '1' + memory: 2048M + restart: unless-stopped + networks: + - '{{ secrets.TRAFIK_NETWORK }}' + environment: 
+ - DATABASE_URL={{ secrets.WINDMILL.DATABASE_URL }} + - MODE=worker + - WORKER_GROUP=default + depends_on: + db: + condition: service_healthy + # to mount the worker folder to debug, KEEP_JOB_DIR=true and mount /tmp/windmill + volumes: + # mount the docker socket to allow to run docker containers from within the workers + - /var/run/docker.sock:/var/run/docker.sock + - worker_dependency_cache:/tmp/windmill/cache + + ## This worker is specialized for "native" jobs. Native jobs run in-process and thus are much more lightweight than other jobs + windmill_worker_native: + image: ghcr.io/windmill-labs/windmill:main + pull_policy: always + deploy: + replicas: 2 + resources: + limits: + cpus: '0.1' + memory: 128M + restart: unless-stopped + networks: + - '{{ secrets.TRAFIK_NETWORK }}' + environment: + - DATABASE_URL={{ secrets.WINDMILL.DATABASE_URL }} + - MODE=worker + - WORKER_GROUP=native + depends_on: + db: + condition: service_healthy + + lsp: + image: ghcr.io/windmill-labs/windmill-lsp:latest + pull_policy: always + restart: unless-stopped + volumes: + - lsp_cache:/root/.cache + + multiplayer: + image: ghcr.io/windmill-labs/windmill-multiplayer:latest + deploy: + replicas: 0 # Set to 1 to enable multiplayer, only available on Enterprise Edition + restart: unless-stopped + +volumes: + db_data: + worker_dependency_cache: + lsp_cache: diff --git a/ansible/service_windmill/windmill.service b/ansible/service_windmill/windmill.service new file mode 100644 index 00000000..5ddcdfec --- /dev/null +++ b/ansible/service_windmill/windmill.service @@ -0,0 +1,12 @@ +# /etc/systemd/system/{{ secrets.WINDMILL.USERNAME }}.service +[Service] +ExecStart=/usr/bin/docker compose up +Restart=always +RestartSec=20 +SyslogIdentifier={{ secrets.WINDMILL.USERNAME }} +User={{ secrets.WINDMILL.USERNAME }} +Group={{ secrets.WINDMILL.USERNAME }} +WorkingDirectory=/home/{{ secrets.WINDMILL.USERNAME }} + +[Install] +WantedBy=multi-user.target diff --git a/ansible/service_windmill/windmill_setup.yml b/ansible/service_windmill/windmill_setup.yml new file mode 100644 index 00000000..3349ef00 --- /dev/null +++ b/ansible/service_windmill/windmill_setup.yml @@ -0,0 +1,78 @@ +# https://www.windmill.dev/docs/intro +# https://www.windmill.dev/docs/advanced/self_host#docker +# Log in with (first time use) +# admin@windmill.dev /// changeme +# Execute with +# ansible-playbook windmill_setup.yml -i ../hosts -i /home/burny/syncthing/secrets/ansible_secrets/.ansible_secrets +- name: Create and start windmill service + hosts: my_servers + vars: + USERNAME: "{{ secrets.WINDMILL.USERNAME }}" + tasks: + - name: Create group + ansible.builtin.group: + name: "{{ USERNAME }}" + state: present + + - name: Create user + ansible.builtin.user: + name: "{{ USERNAME }}" + groups: + - "{{ USERNAME }}" + - docker + shell: "{{ secrets.DEFAULT_SHELL }}" + + - name: Get user id + ansible.builtin.shell: + cmd: id {{ USERNAME }} -u + register: USER_ID + + - name: Get group id + ansible.builtin.shell: + cmd: id {{ USERNAME }} -g + register: GROUP_ID + + - name: Print user id + ansible.builtin.debug: + var: USER_ID.stdout + verbosity: 0 + + - name: Print group id + ansible.builtin.debug: + var: GROUP_ID.stdout + verbosity: 0 + + - name: Create systemd file + template: + src: windmill.service + dest: /etc/systemd/system/{{ USERNAME }}.service + owner: "{{ USER_ID.stdout }}" + group: "{{ GROUP_ID.stdout }}" + + - name: Copy docker-compose.yml file + template: + src: docker-compose.yml + dest: /home/{{ USERNAME }}/docker-compose.yml + owner: "{{ USER_ID.stdout 
}}" + group: "{{ GROUP_ID.stdout }}" + + - name: Stop windmill service + ansible.builtin.systemd_service: + name: "{{ USERNAME }}" + state: stopped + enabled: false + + - name: Remove windmill container + community.docker.docker_container: + name: windmill_server + state: absent + image: ghcr.io/windmill-labs/windmill:main + + # TODO backup once per day via cron. if hash of new backup matches the latest, dont do anything. else copy. only keep up to 3 backups + + - name: Start windmill service again + ansible.builtin.systemd_service: + name: "{{ USERNAME }}" + state: started + daemon_reload: true + enabled: true diff --git a/ansible/service_windmill/windmill_update.yml b/ansible/service_windmill/windmill_update.yml new file mode 100644 index 00000000..2f7ac147 --- /dev/null +++ b/ansible/service_windmill/windmill_update.yml @@ -0,0 +1,37 @@ +# Execute with +# ansible-playbook windmill_update.yml -i ../hosts -i /home/burny/syncthing/secrets/ansible_secrets/.ansible_secrets +- name: Create and start windmill service + hosts: my_servers + vars: + USERNAME: '{{ secrets.WINDMILL.USERNAME }}' + tasks: + - name: Stop windmill service + ansible.builtin.systemd_service: + name: '{{ USERNAME }}' + state: stopped + enabled: false + + - name: Stop windmill_server container + community.docker.docker_container: + name: windmill_server + state: stopped + image: ghcr.io/windmill-labs/windmill:main + + - name: Remove windmill_server container + community.docker.docker_container: + name: windmill_server + state: absent + image: ghcr.io/windmill-labs/windmill:main + + - name: Remove windmill image + community.docker.docker_image: + name: ghcr.io/windmill-labs/windmill + tag: latest + state: absent + + - name: Start windmill service again + ansible.builtin.systemd_service: + name: '{{ USERNAME }}' + state: started + daemon_reload: true + enabled: true diff --git a/ansible/setup_pc/setup_manjaro.yml b/ansible/setup_pc/setup_manjaro.yml new file mode 100644 index 00000000..69fc01bb --- /dev/null +++ b/ansible/setup_pc/setup_manjaro.yml @@ -0,0 +1,159 @@ +# Execute with +# ansible-playbook setup_manjaro.yml -i ../hosts -i /home/burny/syncthing/secrets/ansible_secrets/.ansible_secrets +- name: After a fresh manjaro installation, install or upgrade all software + hosts: my_pcs + vars: + USERNAME: burny + tasks: + - name: Print initial instructions + ansible.builtin.debug: + msg: + - On a fresh manjaro installation you will want to connect to wifi / internep asap. 
+ - After install run in terminal 'sudo systemctl enable --now sshd' + - To find out the local network ip, run 'nmcli device show | grep IP4.ADDRESS' on the remote device + - Create a key pair with 'ssh-keygen -t rsa' + - ssh onto the remote with 'ssh root@' and put the public key in '/root/.ssh/authorized_keys' + - Modify '~/.ssh/config' on the controller device to have an entry + - ====================== + - Host + - HostName + - IdentityFile ~/.ssh/ + - User root + - ServerAliveInterval 60 + - ====================== + + - name: Run the equivalent of "pacman -Sy" as a separate step + community.general.pacman: + update_cache: true + + - name: Run the equivalent of "pacman -Su" as a separate step + community.general.pacman: + upgrade: true + + - name: Install pacman packages + community.general.pacman: + name: + # Package manager + - yay + # Internet browser + - brave-browser + # Programming and server tools + - docker + - git + - cloc + - openssh + - filezilla + - ansible + - ansible-lint + - syncthing + # Terminal + - fish + # Explorer + - thunar + # Communication + - thunderbird + - discord + # Drawing and media viewer + - xournalpp + - gimp + - vlc + # System & utility tools + - gnome-system-monitor + - zip + - pavucontrol + - flameshot + - ffmpeg + - simple-scan + - inxi # system information + - samba # share folders in local network + # Games + - lutris + # Programming languages + - make # For other pamac packages + - gcc + - rustup + - python-pip + - python-poetry + state: latest + + - name: Create group + ansible.builtin.group: + name: docker + state: present + + - name: Create user and set default shell + ansible.builtin.user: + name: "{{ USERNAME }}" + groups: + - "{{ USERNAME }}" + - docker + - wheel # sudo rights + shell: "{{ secrets.DEFAULT_SHELL }}" + + - name: Start syncthing service for user + become_user: '{{ USERNAME }}' + ansible.builtin.systemd_service: + name: syncthing.service + scope: user + state: started + daemon_reload: true + enabled: true + + - name: Update AUR store + community.general.pacman: + executable: yay + update_cache: true + extra_args: --answerclean N --answerdiff N --answerupgrade N --noprogressbar + + - name: Upgrade AUR packages + community.general.pacman: + executable: yay + upgrade: true + extra_args: --answerclean N --answerdiff N --answerupgrade N --noprogressbar + timeout: 600 # Max 600 seconds + + # TODO this lists as "changed" even when all packages are already installed + - name: Install AUR packages + # From https://github.com/mnussbaum/ansible-yay/blob/4fd5ec87f3c3ec5366376b30317714fee7e20b06/yay#L143 + ansible.builtin.command: + cmd: "yay --answerclean N --answerdiff N --answerupgrade N --noconfirm --noprogressbar --needed --sync {{ item }}" + timeout: 600 # Max 600 seconds per installation + with_items: + - aur/visual-studio-code-bin + - aur/earthly # development helper + - aur/qdirstat # disk space usage + - aur/xnviewmp # image viewer + - aur/zoom + - aur/anydesk-bin + + - name: Create directories + file: + path: "{{ item }}" + recurse: true + state: directory + owner: "{{ USERNAME }}" + group: "{{ USERNAME }}" + loop: + - "~/syncthing" + + - name: Enable sync clock + ansible.builtin.systemd_service: + name: ntpd + scope: system + state: started + daemon_reload: true + enabled: true + + - name: Print reminder + ansible.builtin.debug: + msg: + - If using brave browser, set up sync + - If using syncthing, set it up. 
Set GUI to dark mode and default folder to '~/syncthing' + - Power manager, turn off display power management when plugged in + - Set up terminal to have solid background + - Set keyboard repeat rate to 40 and delay to 250 + - Set up 'keyboard' settings shortcut for terminal to remove '--dropbown' and 'ctrl+g' to open 'thunar' + - Set up thunar to show hidden files, use 'list view' and proper datetime display for modified files + - Set system clock, and sync clock 'time only' and format '%A %d %b %y, %T' + - Never group windows in task bar + - Set task bar to 2 rows and row size 20 diff --git a/burny_common/.earthlyignore b/burny_common/.earthlyignore new file mode 100644 index 00000000..46cd015b --- /dev/null +++ b/burny_common/.earthlyignore @@ -0,0 +1,8 @@ +__pycache__ +.benchmarks +.hypothesis +.idea +.pyre +.pytest_cache +.ruff_cache +data \ No newline at end of file diff --git a/burny_common/.pyre_configuration b/burny_common/.pyre_configuration new file mode 100644 index 00000000..7c512daa --- /dev/null +++ b/burny_common/.pyre_configuration @@ -0,0 +1,6 @@ +{ + "site_package_search_strategy": "pep561", + "source_directories": [ + "." + ] +} diff --git a/burny_common/.vscode/extensions.json b/burny_common/.vscode/extensions.json new file mode 100644 index 00000000..20c088c4 --- /dev/null +++ b/burny_common/.vscode/extensions.json @@ -0,0 +1,11 @@ +{ + "recommendations": [ + "charliermarsh.ruff", + "ms-python.vscode-pylance", + "ms-python.python", + "tamasfe.even-better-toml", + "earthly.earthfile-syntax-highlighting", + "quicktype.quicktype", + "eeyore.yapf" + ] +} \ No newline at end of file diff --git a/burny_common/.vscode/launch.json b/burny_common/.vscode/launch.json new file mode 100644 index 00000000..dce24ee1 --- /dev/null +++ b/burny_common/.vscode/launch.json @@ -0,0 +1,28 @@ +{ + // Use IntelliSense to learn about possible attributes. + // Hover to view descriptions of existing attributes. + // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387 + "version": "0.2.0", + "configurations": [ + { + "name": "Python Examples: Run Current File", + "type": "python", + "request": "launch", + "program": "${file}", + "console": "integratedTerminal", + "justMyCode": true, + "env": { + "PYTHONPATH": "${workspaceFolder}" + } + }, + { + "name": "Python Examples: Run main.py", + "type": "python", + "request": "launch", + "cwd": "${workspaceFolder}", + "program": "main.py", + "console": "integratedTerminal", + "justMyCode": true + } + ] +} \ No newline at end of file diff --git a/burny_common/.vscode/settings.json b/burny_common/.vscode/settings.json new file mode 100644 index 00000000..d9569d4e --- /dev/null +++ b/burny_common/.vscode/settings.json @@ -0,0 +1,4 @@ +{ + "python.formatting.provider": "yapf", + "editor.formatOnSave": true, +} \ No newline at end of file diff --git a/burny_common/Earthfile b/burny_common/Earthfile new file mode 100644 index 00000000..21248f17 --- /dev/null +++ b/burny_common/Earthfile @@ -0,0 +1,59 @@ +VERSION 0.6 +# earthly +all --PYTHONVERSION=3.11 +ARG PYTHONVERSION=3.11 +FROM python:${PYTHONVERSION}-slim +WORKDIR /root/burny_common + +install-dev: + RUN pip install poetry --no-cache-dir + RUN mkdir -p burny_common + RUN touch burny_common/__init__.py + COPY poetry.lock pyproject.toml ./ + RUN poetry install + SAVE ARTIFACT ./.venv + COPY . 
/root/burny_common + +format: + # Run on host system instead of inside a container + LOCALLY + # Requirements: + # pip install poetry + # poetry install + + # Convert single to double quotes + RUN poetry run ruff check . --select Q --fix + # Remove unused imports + RUN poetry run ruff check . --select F --fix + # Sort imports + RUN poetry run ruff check . --select I --fix + # Format code + RUN poetry run yapf -ir . + +# Check if files are correctly formatted +format-check: + FROM +install-dev + RUN poetry run yapf -dr . + +# Ignore errors via "# noqa: F841" +lint: + FROM +install-dev + RUN poetry run ruff check . + +# Ignore errors via "# pyre-fixme[11]" +pyre: + FROM +install-dev + RUN poetry run pyre + +# TODO Add tests +pytest: + FROM +install-dev + RUN poetry run pytest + +pre-commit: + BUILD +format-check + BUILD +lint + BUILD +pyre + +all: + BUILD +pre-commit + #BUILD +pytest diff --git a/burny_common/__init__.py b/burny_common/burny_common/__init__.py similarity index 100% rename from burny_common/__init__.py rename to burny_common/burny_common/__init__.py diff --git a/example_package/burny_test_template/behavior_tree.py b/burny_common/burny_common/behavior_tree.py similarity index 90% rename from example_package/burny_test_template/behavior_tree.py rename to burny_common/burny_common/behavior_tree.py index 3ffcb866..1b80923c 100644 --- a/example_package/burny_test_template/behavior_tree.py +++ b/burny_common/burny_common/behavior_tree.py @@ -1,6 +1,7 @@ +from __future__ import annotations + from dataclasses import dataclass, field from enum import Enum -from typing import List from loguru import logger @@ -14,7 +15,7 @@ class NodeOutcome(Enum): @dataclass class ActionNode: - child_actions: List['ActionNode'] = field(default_factory=list) + child_actions: list[ActionNode] = field(default_factory=list) continue_on_success: bool = True continue_on_fail: bool = False @@ -50,14 +51,14 @@ def display_tree(self): """ TODO: output tree to display/monitor via matplotlib or graphviz or similar """ -if __name__ == '__main__': +if __name__ == "__main__": class MyAction(ActionNode): my_status: int = 0 def run_node(self) -> NodeOutcome: self.my_status += 1 - logger.info(f'Counting up: {self.my_status}') + logger.info(f"Counting up: {self.my_status}") if self.my_status != 10: return NodeOutcome.RUNNING return NodeOutcome.SUCCESS diff --git a/burny_common/copy_file_to_server.py b/burny_common/burny_common/copy_file_to_server.py similarity index 60% rename from burny_common/copy_file_to_server.py rename to burny_common/burny_common/copy_file_to_server.py index 609f0d9d..466e866f 100644 --- a/burny_common/copy_file_to_server.py +++ b/burny_common/burny_common/copy_file_to_server.py @@ -3,22 +3,22 @@ from pathlib import Path import click -import paramiko # type: ignore +import paramiko from click.testing import CliRunner from paramiko import SSHClient -from paramiko.sftp_client import SFTPClient # type: ignore +from paramiko.sftp_client import SFTPClient def generate_path(client: SSHClient, target_path: str) -> Path: - if target_path.startswith('/'): + if target_path.startswith("/"): return Path(target_path) # If target path doesn't start with "/" it means it's a relative path - _stdin, stdout, _stderr = client.exec_command('pwd') + _stdin, stdout, _stderr = client.exec_command("pwd") return Path(stdout.readline().strip()) / Path(target_path) def create_target_dir(client: SSHClient, target_folder_path: Path): - _stdin, _stdout, _stderr = client.exec_command(f'mkdir -p {target_folder_path}') + _stdin, 
_stdout, _stderr = client.exec_command(f"mkdir -p {target_folder_path}") def copy_file_to_server_helper( @@ -30,18 +30,18 @@ def copy_file_to_server_helper( ): if create_target_folder: create_target_dir(client, target_path.parent) - return sftp.put(source_path.absolute().__str__(), target_path.absolute().__str__()) + return sftp.put(str(source_path.absolute()), str(target_path.absolute())) @click.command() -@click.option('--host', default='', help='host address') -@click.option('--port', default=22, help='port') -@click.option('--username', default='', help='user name') -@click.option('--password', default='', help='user password') -@click.option('--pkey', default='', help='private key') -@click.option('--sourcepath', default='', help='source file to copy') -@click.option('--targetpath', default='', help='target path to copy the file to') -@click.option('--createtargetdir', default=True, help='create directory if file doesnt exist') +@click.option("--host", default="", help="host address") +@click.option("--port", default=22, help="port") +@click.option("--username", default="", help="user name") +@click.option("--password", default="", help="user password") +@click.option("--pkey", default="", help="private key") +@click.option("--sourcepath", default="", help="source file to copy") +@click.option("--targetpath", default="", help="target path to copy the file to") +@click.option("--createtargetdir", default=True, help="create directory if file doesnt exist") def copy_file_to_server( host: str, port: int, @@ -68,7 +68,7 @@ def copy_file_to_server( with client.open_sftp() as sftp: assert path_source.is_file() - print(f'Copying {path_source.absolute().__str__()} to {path_target.absolute().__str__()}') + print(f"Copying {path_source.absolute()} to {path_target.absolute()}") copy_file_to_server_helper( client, sftp, @@ -81,17 +81,17 @@ def copy_file_to_server( def main(): runner = CliRunner() - ip = 'some.url' - username = 'some_name' - key = 'my ssh key' + ip = "some.url" + username = "some_name" + key = "my ssh key" result = runner.invoke( copy_file_to_server, [ - f'--host={ip}', - f'--username={username}', - f'--pkey={key}', - '--sourcepath=copy_file_to_server.py', - '--targetpath=test2/copy_file_to_server3.py', + f"--host={ip}", + f"--username={username}", + f"--pkey={key}", + "--sourcepath=copy_file_to_server.py", + "--targetpath=test2/copy_file_to_server3.py", ], ) for line in result.output.splitlines(): @@ -101,5 +101,5 @@ def main(): print(result.exit_code) -if __name__ == '__main__': +if __name__ == "__main__": main() diff --git a/burny_common/copy_folder_to_server.py b/burny_common/burny_common/copy_folder_to_server.py similarity index 76% rename from burny_common/copy_folder_to_server.py rename to burny_common/burny_common/copy_folder_to_server.py index 8f939f6c..a1b72809 100644 --- a/burny_common/copy_folder_to_server.py +++ b/burny_common/burny_common/copy_folder_to_server.py @@ -5,10 +5,10 @@ from typing import Optional, Set import click -import paramiko # type: ignore +import paramiko from click.testing import CliRunner from paramiko import SSHClient -from paramiko.sftp_client import SFTPClient # type: ignore +from paramiko.sftp_client import SFTPClient from burny_common.copy_file_to_server import copy_file_to_server_helper, generate_path @@ -43,14 +43,14 @@ def copy_folder_to_server_helper( @click.command() -@click.option('--host', default='', help='host address') -@click.option('--port', default=22, help='port') -@click.option('--username', default='', help='user name') 
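For orientation, the click entry points in copy_file_to_server.py and copy_folder_to_server.py are thin wrappers around plain paramiko, the same flow the quote-style refactor leaves intact: connect with a private key, ensure the remote parent directory exists, then push the file over SFTP. A condensed sketch of that flow; the host, user, and key values are placeholders, not taken from this repo:

```python
from io import StringIO
from pathlib import Path

import paramiko
from paramiko import SSHClient

# Placeholder connection details -- substitute real values before running.
HOST, PORT, USER = "example.org", 22, "deploy"
PRIVATE_KEY = "-----BEGIN RSA PRIVATE KEY-----\n..."  # illustrative only


def copy_one_file(source: Path, target: Path) -> None:
    with SSHClient() as client:
        client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
        key = paramiko.RSAKey.from_private_key(StringIO(PRIVATE_KEY))
        client.connect(hostname=HOST, port=PORT, username=USER, pkey=key)
        # Ensure the remote parent directory exists before the SFTP put
        client.exec_command(f"mkdir -p {target.parent}")
        with client.open_sftp() as sftp:
            sftp.put(str(source.absolute()), str(target))
```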
-@click.option('--password', default='', help='user password') -@click.option('--pkey', default='', help='private key') -@click.option('--sourcepath', default='', help='source folder to copy') -@click.option('--targetpath', default='', help='which folder the output should be') -@click.option('--respectgitignore', default=True, help='ignore files that are ignored by .gitignore') +@click.option("--host", default="", help="host address") +@click.option("--port", default=22, help="port") +@click.option("--username", default="", help="user name") +@click.option("--password", default="", help="user password") +@click.option("--pkey", default="", help="private key") +@click.option("--sourcepath", default="", help="source folder to copy") +@click.option("--targetpath", default="", help="which folder the output should be") +@click.option("--respectgitignore", default=True, help="ignore files that are ignored by .gitignore") def copy_folder_to_server( host: str, port: int, @@ -71,10 +71,9 @@ def copy_folder_to_server( path_target_root_folder = generate_path(client, targetpath) allowed_files: Optional[Set] = None - # pylint: disable=R1732 if respectgitignore: proc = subprocess.Popen( - ['git', 'ls-files'], + ["git", "ls-files"], cwd=path_source_root_folder.absolute(), stdout=subprocess.PIPE, ) @@ -99,16 +98,16 @@ def copy_folder_to_server( def main(): runner = CliRunner() - ip = 'some.url' - username = 'some_name' - key = 'my ssh key' + ip = "some.url" + username = "some_name" + key = "my ssh key" result = runner.invoke( copy_folder_to_server, [ - f'--host={ip}', - f'--username={username}', - f'--pkey={key}', - '--sourcepath=/home/burny/github/python-template', - '--targetpath=test5', + f"--host={ip}", + f"--username={username}", + f"--pkey={key}", + "--sourcepath=/home/burny/github/python-template", + "--targetpath=test5", ] ) for line in result.output.splitlines(): @@ -118,5 +117,5 @@ def main(): print(result.exit_code) -if __name__ == '__main__': +if __name__ == "__main__": main() diff --git a/burny_common/measure_time.py b/burny_common/burny_common/measure_time.py similarity index 68% rename from burny_common/measure_time.py rename to burny_common/burny_common/measure_time.py index 7b7308f3..1fe843e9 100644 --- a/burny_common/measure_time.py +++ b/burny_common/burny_common/measure_time.py @@ -9,11 +9,11 @@ def time_this(label: str): start = time.perf_counter_ns() yield end = time.perf_counter_ns() - logger.info(f'TIME {label}: {(end - start) / 1e9} sec') + logger.info(f"TIME {label}: {(end - start) / 1e9} sec") # Use like this -if __name__ == '__main__': - with time_this('square rooting'): +if __name__ == "__main__": + with time_this("square rooting"): for i in range(10**4): _x = i**0.5 diff --git a/example_package/burny_test_template/min_heap.py b/burny_common/burny_common/min_heap.py similarity index 80% rename from example_package/burny_test_template/min_heap.py rename to burny_common/burny_common/min_heap.py index b0332748..8b633726 100644 --- a/example_package/burny_test_template/min_heap.py +++ b/burny_common/burny_common/min_heap.py @@ -1,22 +1,23 @@ +from __future__ import annotations + import math -from typing import List, Optional class Minheap: def __init__(self): """ Implementation of binary heap as min-heap """ - self.heap: List[int] = [-1] + self.heap: list[int] = [-1] def __repr__(self): return_list = [] multiples_of_two = {2**n for n in range(1, 1 + int(math.log(len(self.heap), 2)))} for i, value in enumerate(self.heap[1:], start=1): if i in multiples_of_two: - 
return_list.append('\n') + return_list.append("\n") return_list.append(value) - return_list.append(' ') - return ''.join(str(x) for x in return_list) + return_list.append(" ") + return "".join(str(x) for x in return_list) def get_parent(self, index: int) -> int: return self.heap[index // 2] @@ -33,13 +34,13 @@ def get_left_child_index(cls, index: int) -> int: def get_right_child_index(cls, index: int) -> int: return index * 2 + 1 - def get_left_child(self, index: int) -> Optional[int]: + def get_left_child(self, index: int) -> int | None: try: return self.heap[index * 2] except IndexError: return None - def get_right_child(self, index: int) -> Optional[int]: + def get_right_child(self, index: int) -> int | None: try: return self.heap[index * 2 + 1] except IndexError: @@ -88,12 +89,12 @@ def insert(self, number: int): def get_min(self) -> int: if len(self.heap) > 0: return self.heap[1] - raise IndexError('get_min from empty heap') + raise IndexError("get_min from empty heap") def delete_min(self): # Swap minimum with last item in list before removing if len(self.heap) < 2: - raise IndexError('delete_min from empty heap') + raise IndexError("delete_min from empty heap") if len(self.heap) < 3: self.heap.pop(1) return @@ -109,22 +110,23 @@ def build(self, my_list: list): self.insert(i) -if __name__ == '__main__': +if __name__ == "__main__": p = Minheap() build_list = [1, 2, 3, 4, 5, 6, 7] p.build(build_list) - assert len(p.heap) == 8, 'build() function or insert() function not working as expected' + assert len(p.heap) == 8, "build() function or insert() function not working as expected" """ p: 1 2 3 4 5 6 7 """ for i in build_list: - assert not p.is_empty(), 'Min heap should be not empty, but is returned to be empty' + assert not p.is_empty(), "Min heap should be not empty, but is returned to be empty" value = p.get_min() - assert ( - value == i - ), f"get_min or delete_min function not working as expected, received value '{value}' but should have been '{i}', heap:\n{p}" + assert (value == i), ( + f"get_min or delete_min function not working as expected, received value '{value}' " + f"but should have been '{i}', heap:\n{p}" + ) p.delete_min() assert p.is_empty(), "Min heap should be empty, but isn't" diff --git a/burny_common/path_manipulation.py b/burny_common/burny_common/path_manipulation.py similarity index 84% rename from burny_common/path_manipulation.py rename to burny_common/burny_common/path_manipulation.py index 7dc442a3..672cf580 100644 --- a/burny_common/path_manipulation.py +++ b/burny_common/burny_common/path_manipulation.py @@ -1,5 +1,5 @@ from pathlib import Path -from typing import Iterator, Iterable +from typing import Iterable def convert_string_to_path(multi_line_string: str) -> Iterable[Path]: @@ -27,5 +27,4 @@ def recurse_path(path: Path, depth: int = 0) -> Iterable[Path]: yield path elif path.is_dir() and depth > 0: for subfile_path in sorted(path.iterdir()): - for new_file in recurse_path(subfile_path, depth=depth - 1): - yield new_file + yield from recurse_path(subfile_path, depth=depth - 1) diff --git a/burny_common/run_command_on_server.py b/burny_common/burny_common/run_command_on_server.py similarity index 57% rename from burny_common/run_command_on_server.py rename to burny_common/burny_common/run_command_on_server.py index e34daaa9..9abae06b 100644 --- a/burny_common/run_command_on_server.py +++ b/burny_common/burny_common/run_command_on_server.py @@ -2,25 +2,25 @@ from io import StringIO import click -import paramiko # type: ignore +import paramiko from 
click.testing import CliRunner from paramiko import SSHClient @click.command() -@click.option('--host', default='', help='host address') -@click.option('--port', default=22, help='port') -@click.option('--username', default='', help='user name') -@click.option('--password', default='', help='user password') -@click.option('--pkey', default='', help='private key') -@click.option('--command', default='ls -la', help='command to execute') +@click.option("--host", default="", help="host address") +@click.option("--port", default=22, help="port") +@click.option("--username", default="", help="user name") +@click.option("--password", default="", help="user password") +@click.option("--pkey", default="", help="private key") +@click.option("--command", default="ls -la", help="command to execute") def run_command_on_server(host: str, port: int, username: str, password: str, pkey: str, command: str): client: SSHClient with SSHClient() as client: client.set_missing_host_key_policy(paramiko.AutoAddPolicy()) pkey_loaded = paramiko.RSAKey.from_private_key(StringIO(pkey)) client.connect(hostname=host, port=port, username=username, password=password, pkey=pkey_loaded) - _stdin, stdout, _stderr = client.exec_command(f'{command}') + _stdin, stdout, _stderr = client.exec_command(f"{command}") lines = stdout.readlines() for line in lines: print(line) @@ -29,15 +29,15 @@ def run_command_on_server(host: str, port: int, username: str, password: str, pk def main(): runner = CliRunner() - ip = 'some.url' - username = 'some_name' - key = 'my ssh key' + ip = "some.url" + username = "some_name" + key = "my ssh key" result = runner.invoke( run_command_on_server, [ - f'--host={ip}', - f'--username={username}', - f'--pkey={key}', - '--command=ls -lah', + f"--host={ip}", + f"--username={username}", + f"--pkey={key}", + "--command=ls -lah", ] ) for line in result.output.splitlines(): @@ -47,5 +47,5 @@ def main(): print(result.exit_code) -if __name__ == '__main__': +if __name__ == "__main__": main() diff --git a/burny_common/integration_test_helper.py b/burny_common/integration_test_helper.py deleted file mode 100644 index 22b3d22b..00000000 --- a/burny_common/integration_test_helper.py +++ /dev/null @@ -1,284 +0,0 @@ -import os -import signal -import socket -import subprocess -import time -from pathlib import Path -from typing import Optional, Set - -import psutil -import pymongo -import requests # type: ignore -from loguru import logger -from pymongo import MongoClient - -WEBSITE_IP = 'http://localhost' - - -class Timeout: - """ - Run something for a maximum limited time - try: - with Timeout(seconds=2): - ... - except TimeoutError: - """ - - def __init__(self, seconds=1, error_message='Timeout'): - self.seconds = seconds - self.error_message = error_message - - def handle_timeout(self, signum, frame): - raise TimeoutError(self.error_message) - - def __enter__(self): - signal.signal(signal.SIGALRM, self.handle_timeout) - signal.alarm(self.seconds) - - def __exit__(self, type_, value, traceback): - signal.alarm(0) - - -def get_pid(name: str) -> Set[int]: - """ Return a list of PIDs of all processes with the exact given name. 
""" - process_pids = set() - for proc in psutil.process_iter(): - if name == proc.name(): - pid = proc.pid - process_pids.add(pid) - return process_pids - - -def remove_leftover_files(files: Set[Path]): - for file in files: - if file.is_file(): - os.remove(file) - - -def is_port_free(port: int) -> bool: - sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) - try: - sock.bind(('', port)) - sock.close() - return True - except OSError: - return False - - -def find_next_free_port(port: int = 10_000, max_port: int = 65_535, exclude_ports: Optional[Set[int]] = None) -> int: - if exclude_ports is None: - exclude_ports = set() - - while port <= max_port: - if port not in exclude_ports and is_port_free(port): - return port - port += 1 - raise IOError('No free ports') - - -# pylint: disable=R1732 -def check_if_docker_is_running() -> bool: - p = subprocess.Popen(['docker', 'ps'], stdout=subprocess.PIPE) - _return_code = p.wait() - if not p.stdout: - return False - output = p.stdout.read().decode() - docker_running = output.startswith('CONTAINER ID') - if docker_running: - logger.info('Docker running detected') - return docker_running - - -def get_website_address(port: int) -> str: - return f'{WEBSITE_IP}:{port}' - - -def start_svelte_dev_server( - port: int, - NEWLY_CREATED_PROCESSES: Set[int], - backend_proxy: str = 'localhost:8000', -): - env = os.environ.copy() - currently_running_node_processes = get_pid('node') - - frontend_folder = Path(__file__).parents[1] / 'svelte_frontend' - assert is_port_free(port), f'Unable to start svelte dev server because port {port} is blocked' - logger.info(f'Starting frontend on port {port}') - _ = subprocess.Popen( - ['npx', 'cross-env', f'BACKEND_SERVER={backend_proxy}', 'svelte-kit', 'dev', '--port', f'{port}'], - cwd=frontend_folder, - env=env - ) - # Give it some time to create dev server and all (3?) 
node proccesses - with Timeout(10, 'Took more than 10 seconds'): - while is_port_free(port): - time.sleep(0.1) - while 1: - try: - result = requests.get(get_website_address(port)) - if result.status_code == 200: - break - except requests.exceptions.ConnectionError: - pass - time.sleep(0.1) - - new_processes = get_pid('node') - currently_running_node_processes - logger.info(f'New node processes: {new_processes}') - NEWLY_CREATED_PROCESSES |= new_processes - - -def start_fastapi_dev_server( - port: int, - NEWLY_CREATED_PROCESSES: Set[int], - CREATED_FILES: Set[Path], -): - root_folder = Path(__file__).parents[1] - backend_folder = root_folder / 'fastapi_server' - currently_running_uvicorn_processes = get_pid('uvicorn') - env = os.environ.copy() - env['DATABASE_USE_MEMORY'] = 'TRUE' - - sqlite_test_file_name = 'todos_TEST.db' - sqlite_test_file_path = backend_folder / 'data' / sqlite_test_file_name - CREATED_FILES.add(sqlite_test_file_path) - remove_leftover_files({sqlite_test_file_path}) - - # Why does this return errors even when fastapi server is not running - # assert is_port_free(port), f"Unable to start fastapi server because port {port} is blocked" - logger.info(f'Starting backend on port {port}') - _ = subprocess.Popen( - ['poetry', 'run', 'uvicorn', 'fastapi_server.main:app', '--host', 'localhost', '--port', f'{port}'], - cwd=root_folder, - env=env, - ) - # Give it some time to create backend dev server - with Timeout(10, 'Took more than 10 seconds'): - while is_port_free(port): - time.sleep(0.1) - while 1: - try: - result = requests.get(get_website_address(port)) - if result.status_code == 200: - break - except requests.exceptions.ConnectionError: - pass - time.sleep(0.1) - - new_processes = get_pid('uvicorn') - currently_running_uvicorn_processes - logger.info(f'New uvicorn processes: {new_processes}') - NEWLY_CREATED_PROCESSES |= new_processes - - -def check_if_mongodb_is_running(mongo_db_port: int = 27017) -> bool: - mongo_db_address = f'mongodb://localhost:{mongo_db_port}' - try: - with Timeout(seconds=2): - _my_client: MongoClient - with pymongo.MongoClient(mongo_db_address) as _my_client: - pass - except TimeoutError: - return False - return True - - -# pylint: disable=R1732 -def start_mongodb(mongo_db_port: int = 27017) -> int: - # Start mongodb via docker - if check_if_mongodb_is_running(mongo_db_port): - logger.info(f'MongoDB is already running on port {mongo_db_port}') - return mongo_db_port - command = [ - # TODO add volume to save db - 'docker', - 'run', - '--rm', - '-d', - '--name', - 'mongodb_test', - '-p', - # TODO use mongo_db_port - '27017-27019:27017-27019', - 'mongo:5.0.0', - ] - logger.info(f"Starting mongoDB with command: {' '.join(command)}") - process = subprocess.Popen(command) - process.wait() - return mongo_db_port - - -def check_if_postgres_is_running(port: int = 5432) -> bool: - # If we can connect to port 5432, postgres is already running - # TODO find better way to figure out if postgress is running - return not is_port_free(port) - - -# pylint: disable=R1732 -def start_postgres(postgres_port: int = 5432) -> int: - # Start postgres via docker - if check_if_postgres_is_running(postgres_port): - logger.info(f'Postgres is already running on port {postgres_port}') - return postgres_port - postgres_container_name = 'postgres_test' - postgres_volume_name = 'postgres_test' - postgres_username = 'postgres' - postgres_password = 'changeme' - postgres_image = 'postgres:9.6.23-alpine3.14' - # postgres_port = find_next_free_port() - if check_if_docker_is_running(): 
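A note on this deleted file: every startup helper in it waits the same way, poll until the service's port answers, bounded by the SIGALRM-based Timeout class defined at the top. A condensed, Unix-only restatement of that pattern; wait_until_listening is a hypothetical name, and it probes with a raw TCP connect where the original polled with requests.get:

```python
import signal
import socket
import time


class Timeout:
    """Raise TimeoutError if the with-block runs longer than `seconds` (Unix only)."""

    def __init__(self, seconds: int = 1, error_message: str = "Timeout"):
        self.seconds = seconds
        self.error_message = error_message

    def handle_timeout(self, signum, frame):
        raise TimeoutError(self.error_message)

    def __enter__(self):
        signal.signal(signal.SIGALRM, self.handle_timeout)
        signal.alarm(self.seconds)

    def __exit__(self, type_, value, traceback):
        signal.alarm(0)


def wait_until_listening(port: int, timeout: int = 10) -> None:
    # TCP connect probe; the deadline comes from SIGALRM, not from the loop.
    with Timeout(timeout, f"Nothing listening on port {port} after {timeout}s"):
        while True:
            try:
                socket.create_connection(("localhost", port), timeout=1).close()
                return
            except OSError:
                time.sleep(0.1)
```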
- # docker run --rm --name postgres_test -p 5432:5432 --volume postgres_test:/data/postgres -e POSTGRES_USER=postgres -e POSTGRES_PASSWORD=changeme postgres:9.6.23-alpine3.14 - command = [ - # TODO add volume to save db, or should that not be active while testing? - 'docker', - 'run', - '--rm', - '-d', - '--name', - postgres_container_name, - '-p', - f'{postgres_port}:{postgres_port}', - '--volume', - f'{postgres_volume_name}:/data/postgres', - '-e', - f'POSTGRES_USER={postgres_username}', - '-e', - f'POSTGRES_PASSWORD={postgres_password}', - postgres_image, - ] - logger.info(f"Starting postgres with command: {' '.join(command)}") - _process = subprocess.Popen(command) - else: - raise NotImplementedError() - return postgres_port - - -def kill_processes(processes: Set[int]): - # Soft kill - for pid in processes: - logger.info(f'Killing {pid}') - try: - os.kill(pid, signal.SIGTERM) - except ProcessLookupError: - pass - time.sleep(.1) - - # Force kill - for pid in processes: - logger.info(f'Force killing {pid}') - try: - os.kill(pid, signal.SIGKILL) - except ProcessLookupError: - pass - - -if __name__ == '__main__': - logger.info(f'Docker running: {check_if_docker_is_running()}') - logger.info(f'Postgres running: {check_if_postgres_is_running()}') - logger.info(f'MongoDB running: {check_if_mongodb_is_running()}') - # start_postgres() - # start_mongodb() - free_frontend_port = find_next_free_port() - free_backend_port = find_next_free_port(exclude_ports={free_frontend_port}) - start_svelte_dev_server(free_frontend_port, set(), backend_proxy=f'localhost:{free_backend_port}') - start_fastapi_dev_server(free_backend_port, set(), set()) - while 1: - time.sleep(1) diff --git a/burny_common/poetry.lock b/burny_common/poetry.lock new file mode 100644 index 00000000..d25666e0 --- /dev/null +++ b/burny_common/poetry.lock @@ -0,0 +1,862 @@ +# This file is automatically @generated by Poetry 1.5.1 and should not be changed by hand. 
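Before the pinned dependency list: the database bootstrap removed above reduces to two steps, probe whether the docker daemon answers, then `docker run --rm -d` a disposable container. A minimal sketch under that reading, reusing the helper's own image, container name, and credentials:

```python
import subprocess


def docker_is_running() -> bool:
    # `docker ps` prints a header starting with CONTAINER ID when the daemon is up
    result = subprocess.run(["docker", "ps"], capture_output=True, text=True, check=False)
    return result.stdout.startswith("CONTAINER ID")


def start_disposable_postgres(port: int = 5432) -> None:
    if not docker_is_running():
        raise RuntimeError("docker daemon not reachable")
    subprocess.run(
        [
            "docker", "run", "--rm", "-d",
            "--name", "postgres_test",
            "-p", f"{port}:5432",
            "-e", "POSTGRES_USER=postgres",
            "-e", "POSTGRES_PASSWORD=changeme",
            "postgres:9.6.23-alpine3.14",
        ],
        check=True,
    )
```

With `--rm`, the container and its state disappear when stopped, which is the property the integration tests relied on.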
+ +[[package]] +name = "bcrypt" +version = "4.0.1" +description = "Modern password hashing for your software and your servers" +optional = false +python-versions = ">=3.6" +files = [ + {file = "bcrypt-4.0.1-cp36-abi3-macosx_10_10_universal2.whl", hash = "sha256:b1023030aec778185a6c16cf70f359cbb6e0c289fd564a7cfa29e727a1c38f8f"}, + {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:08d2947c490093a11416df18043c27abe3921558d2c03e2076ccb28a116cb6d0"}, + {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0eaa47d4661c326bfc9d08d16debbc4edf78778e6aaba29c1bc7ce67214d4410"}, + {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ae88eca3024bb34bb3430f964beab71226e761f51b912de5133470b649d82344"}, + {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_24_x86_64.whl", hash = "sha256:a522427293d77e1c29e303fc282e2d71864579527a04ddcfda6d4f8396c6c36a"}, + {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:fbdaec13c5105f0c4e5c52614d04f0bca5f5af007910daa8b6b12095edaa67b3"}, + {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:ca3204d00d3cb2dfed07f2d74a25f12fc12f73e606fcaa6975d1f7ae69cacbb2"}, + {file = "bcrypt-4.0.1-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:089098effa1bc35dc055366740a067a2fc76987e8ec75349eb9484061c54f535"}, + {file = "bcrypt-4.0.1-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:e9a51bbfe7e9802b5f3508687758b564069ba937748ad7b9e890086290d2f79e"}, + {file = "bcrypt-4.0.1-cp36-abi3-win32.whl", hash = "sha256:2caffdae059e06ac23fce178d31b4a702f2a3264c20bfb5ff541b338194d8fab"}, + {file = "bcrypt-4.0.1-cp36-abi3-win_amd64.whl", hash = "sha256:8a68f4341daf7522fe8d73874de8906f3a339048ba406be6ddc1b3ccb16fc0d9"}, + {file = "bcrypt-4.0.1-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf4fa8b2ca74381bb5442c089350f09a3f17797829d958fad058d6e44d9eb83c"}, + {file = "bcrypt-4.0.1-pp37-pypy37_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:67a97e1c405b24f19d08890e7ae0c4f7ce1e56a712a016746c8b2d7732d65d4b"}, + {file = "bcrypt-4.0.1-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:b3b85202d95dd568efcb35b53936c5e3b3600c7cdcc6115ba461df3a8e89f38d"}, + {file = "bcrypt-4.0.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cbb03eec97496166b704ed663a53680ab57c5084b2fc98ef23291987b525cb7d"}, + {file = "bcrypt-4.0.1-pp38-pypy38_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:5ad4d32a28b80c5fa6671ccfb43676e8c1cc232887759d1cd7b6f56ea4355215"}, + {file = "bcrypt-4.0.1-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:b57adba8a1444faf784394de3436233728a1ecaeb6e07e8c22c8848f179b893c"}, + {file = "bcrypt-4.0.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:705b2cea8a9ed3d55b4491887ceadb0106acf7c6387699fca771af56b1cdeeda"}, + {file = "bcrypt-4.0.1-pp39-pypy39_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:2b3ac11cf45161628f1f3733263e63194f22664bf4d0c0f3ab34099c02134665"}, + {file = "bcrypt-4.0.1-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:3100851841186c25f127731b9fa11909ab7b1df6fc4b9f8353f4f1fd952fbf71"}, + {file = "bcrypt-4.0.1.tar.gz", hash = "sha256:27d375903ac8261cfe4047f6709d16f7d18d39b1ec92aaf72af989552a650ebd"}, +] + +[package.extras] +tests = ["pytest (>=3.2.1,!=3.3.0)"] +typecheck = ["mypy"] + +[[package]] +name = "certifi" +version = "2023.5.7" +description = "Python 
package for providing Mozilla's CA Bundle." +optional = false +python-versions = ">=3.6" +files = [ + {file = "certifi-2023.5.7-py3-none-any.whl", hash = "sha256:c6c2e98f5c7869efca1f8916fed228dd91539f9f1b444c314c06eef02980c716"}, + {file = "certifi-2023.5.7.tar.gz", hash = "sha256:0f0d56dc5a6ad56fd4ba36484d6cc34451e1c6548c61daad8c320169f91eddc7"}, +] + +[[package]] +name = "cffi" +version = "1.15.1" +description = "Foreign Function Interface for Python calling C code." +optional = false +python-versions = "*" +files = [ + {file = "cffi-1.15.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2"}, + {file = "cffi-1.15.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:470c103ae716238bbe698d67ad020e1db9d9dba34fa5a899b5e21577e6d52ed2"}, + {file = "cffi-1.15.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:9ad5db27f9cabae298d151c85cf2bad1d359a1b9c686a275df03385758e2f914"}, + {file = "cffi-1.15.1-cp27-cp27m-win32.whl", hash = "sha256:b3bbeb01c2b273cca1e1e0c5df57f12dce9a4dd331b4fa1635b8bec26350bde3"}, + {file = "cffi-1.15.1-cp27-cp27m-win_amd64.whl", hash = "sha256:e00b098126fd45523dd056d2efba6c5a63b71ffe9f2bbe1a4fe1716e1d0c331e"}, + {file = "cffi-1.15.1-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:d61f4695e6c866a23a21acab0509af1cdfd2c013cf256bbf5b6b5e2695827162"}, + {file = "cffi-1.15.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:ed9cb427ba5504c1dc15ede7d516b84757c3e3d7868ccc85121d9310d27eed0b"}, + {file = "cffi-1.15.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:39d39875251ca8f612b6f33e6b1195af86d1b3e60086068be9cc053aa4376e21"}, + {file = "cffi-1.15.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:285d29981935eb726a4399badae8f0ffdff4f5050eaa6d0cfc3f64b857b77185"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3eb6971dcff08619f8d91607cfc726518b6fa2a9eba42856be181c6d0d9515fd"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:21157295583fe8943475029ed5abdcf71eb3911894724e360acff1d61c1d54bc"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5635bd9cb9731e6d4a1132a498dd34f764034a8ce60cef4f5319c0541159392f"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2012c72d854c2d03e45d06ae57f40d78e5770d252f195b93f581acf3ba44496e"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd86c085fae2efd48ac91dd7ccffcfc0571387fe1193d33b6394db7ef31fe2a4"}, + {file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:fa6693661a4c91757f4412306191b6dc88c1703f780c8234035eac011922bc01"}, + {file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:59c0b02d0a6c384d453fece7566d1c7e6b7bae4fc5874ef2ef46d56776d61c9e"}, + {file = "cffi-1.15.1-cp310-cp310-win32.whl", hash = "sha256:cba9d6b9a7d64d4bd46167096fc9d2f835e25d7e4c121fb2ddfc6528fb0413b2"}, + {file = "cffi-1.15.1-cp310-cp310-win_amd64.whl", hash = "sha256:ce4bcc037df4fc5e3d184794f27bdaab018943698f4ca31630bc7f84a7b69c6d"}, + {file = "cffi-1.15.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3d08afd128ddaa624a48cf2b859afef385b720bb4b43df214f85616922e6a5ac"}, + {file = "cffi-1.15.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3799aecf2e17cf585d977b780ce79ff0dc9b78d799fc694221ce814c2c19db83"}, + {file = 
"cffi-1.15.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a591fe9e525846e4d154205572a029f653ada1a78b93697f3b5a8f1f2bc055b9"}, + {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3548db281cd7d2561c9ad9984681c95f7b0e38881201e157833a2342c30d5e8c"}, + {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:91fc98adde3d7881af9b59ed0294046f3806221863722ba7d8d120c575314325"}, + {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94411f22c3985acaec6f83c6df553f2dbe17b698cc7f8ae751ff2237d96b9e3c"}, + {file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:03425bdae262c76aad70202debd780501fabeaca237cdfddc008987c0e0f59ef"}, + {file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cc4d65aeeaa04136a12677d3dd0b1c0c94dc43abac5860ab33cceb42b801c1e8"}, + {file = "cffi-1.15.1-cp311-cp311-win32.whl", hash = "sha256:a0f100c8912c114ff53e1202d0078b425bee3649ae34d7b070e9697f93c5d52d"}, + {file = "cffi-1.15.1-cp311-cp311-win_amd64.whl", hash = "sha256:04ed324bda3cda42b9b695d51bb7d54b680b9719cfab04227cdd1e04e5de3104"}, + {file = "cffi-1.15.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50a74364d85fd319352182ef59c5c790484a336f6db772c1a9231f1c3ed0cbd7"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e263d77ee3dd201c3a142934a086a4450861778baaeeb45db4591ef65550b0a6"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cec7d9412a9102bdc577382c3929b337320c4c4c4849f2c5cdd14d7368c5562d"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4289fc34b2f5316fbb762d75362931e351941fa95fa18789191b33fc4cf9504a"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:173379135477dc8cac4bc58f45db08ab45d228b3363adb7af79436135d028405"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6975a3fac6bc83c4a65c9f9fcab9e47019a11d3d2cf7f3c0d03431bf145a941e"}, + {file = "cffi-1.15.1-cp36-cp36m-win32.whl", hash = "sha256:2470043b93ff09bf8fb1d46d1cb756ce6132c54826661a32d4e4d132e1977adf"}, + {file = "cffi-1.15.1-cp36-cp36m-win_amd64.whl", hash = "sha256:30d78fbc8ebf9c92c9b7823ee18eb92f2e6ef79b45ac84db507f52fbe3ec4497"}, + {file = "cffi-1.15.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:198caafb44239b60e252492445da556afafc7d1e3ab7a1fb3f0584ef6d742375"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5ef34d190326c3b1f822a5b7a45f6c4535e2f47ed06fec77d3d799c450b2651e"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8102eaf27e1e448db915d08afa8b41d6c7ca7a04b7d73af6514df10a3e74bd82"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5df2768244d19ab7f60546d0c7c63ce1581f7af8b5de3eb3004b9b6fc8a9f84b"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8c4917bd7ad33e8eb21e9a5bbba979b49d9a97acb3a803092cbc1133e20343c"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2642fe3142e4cc4af0799748233ad6da94c62a8bec3a6648bf8ee68b1c7426"}, + {file = "cffi-1.15.1-cp37-cp37m-win32.whl", hash = 
"sha256:e229a521186c75c8ad9490854fd8bbdd9a0c9aa3a524326b55be83b54d4e0ad9"}, + {file = "cffi-1.15.1-cp37-cp37m-win_amd64.whl", hash = "sha256:a0b71b1b8fbf2b96e41c4d990244165e2c9be83d54962a9a1d118fd8657d2045"}, + {file = "cffi-1.15.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:320dab6e7cb2eacdf0e658569d2575c4dad258c0fcc794f46215e1e39f90f2c3"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e74c6b51a9ed6589199c787bf5f9875612ca4a8a0785fb2d4a84429badaf22a"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5c84c68147988265e60416b57fc83425a78058853509c1b0629c180094904a5"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b926aa83d1edb5aa5b427b4053dc420ec295a08e40911296b9eb1b6170f6cca"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:87c450779d0914f2861b8526e035c5e6da0a3199d8f1add1a665e1cbc6fc6d02"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f2c9f67e9821cad2e5f480bc8d83b8742896f1242dba247911072d4fa94c192"}, + {file = "cffi-1.15.1-cp38-cp38-win32.whl", hash = "sha256:8b7ee99e510d7b66cdb6c593f21c043c248537a32e0bedf02e01e9553a172314"}, + {file = "cffi-1.15.1-cp38-cp38-win_amd64.whl", hash = "sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5"}, + {file = "cffi-1.15.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:54a2db7b78338edd780e7ef7f9f6c442500fb0d41a5a4ea24fff1c929d5af585"}, + {file = "cffi-1.15.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fcd131dd944808b5bdb38e6f5b53013c5aa4f334c5cad0c72742f6eba4b73db0"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7473e861101c9e72452f9bf8acb984947aa1661a7704553a9f6e4baa5ba64415"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c9a799e985904922a4d207a94eae35c78ebae90e128f0c4e521ce339396be9d"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3bcde07039e586f91b45c88f8583ea7cf7a0770df3a1649627bf598332cb6984"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:33ab79603146aace82c2427da5ca6e58f2b3f2fb5da893ceac0c42218a40be35"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d598b938678ebf3c67377cdd45e09d431369c3b1a5b331058c338e201f12b27"}, + {file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db0fbb9c62743ce59a9ff687eb5f4afbe77e5e8403d6697f7446e5f609976f76"}, + {file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:98d85c6a2bef81588d9227dde12db8a7f47f639f4a17c9ae08e773aa9c697bf3"}, + {file = "cffi-1.15.1-cp39-cp39-win32.whl", hash = "sha256:40f4774f5a9d4f5e344f31a32b5096977b5d48560c5592e2f3d2c4374bd543ee"}, + {file = "cffi-1.15.1-cp39-cp39-win_amd64.whl", hash = "sha256:70df4e3b545a17496c9b3f41f5115e69a4f2e77e94e1d2a8e1070bc0c38c8a3c"}, + {file = "cffi-1.15.1.tar.gz", hash = "sha256:d400bfb9a37b1351253cb402671cea7e89bdecc294e8016a707f6d1d8ac934f9"}, +] + +[package.dependencies] +pycparser = "*" + +[[package]] +name = "charset-normalizer" +version = "3.1.0" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
+optional = false +python-versions = ">=3.7.0" +files = [ + {file = "charset-normalizer-3.1.0.tar.gz", hash = "sha256:34e0a2f9c370eb95597aae63bf85eb5e96826d81e3dcf88b8886012906f509b5"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e0ac8959c929593fee38da1c2b64ee9778733cdf03c482c9ff1d508b6b593b2b"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d7fc3fca01da18fbabe4625d64bb612b533533ed10045a2ac3dd194bfa656b60"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:04eefcee095f58eaabe6dc3cc2262f3bcd776d2c67005880894f447b3f2cb9c1"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20064ead0717cf9a73a6d1e779b23d149b53daf971169289ed2ed43a71e8d3b0"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1435ae15108b1cb6fffbcea2af3d468683b7afed0169ad718451f8db5d1aff6f"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c84132a54c750fda57729d1e2599bb598f5fa0344085dbde5003ba429a4798c0"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75f2568b4189dda1c567339b48cba4ac7384accb9c2a7ed655cd86b04055c795"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:11d3bcb7be35e7b1bba2c23beedac81ee893ac9871d0ba79effc7fc01167db6c"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:891cf9b48776b5c61c700b55a598621fdb7b1e301a550365571e9624f270c203"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:5f008525e02908b20e04707a4f704cd286d94718f48bb33edddc7d7b584dddc1"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:b06f0d3bf045158d2fb8837c5785fe9ff9b8c93358be64461a1089f5da983137"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:49919f8400b5e49e961f320c735388ee686a62327e773fa5b3ce6721f7e785ce"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:22908891a380d50738e1f978667536f6c6b526a2064156203d418f4856d6e86a"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-win32.whl", hash = "sha256:12d1a39aa6b8c6f6248bb54550efcc1c38ce0d8096a146638fd4738e42284448"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:65ed923f84a6844de5fd29726b888e58c62820e0769b76565480e1fdc3d062f8"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9a3267620866c9d17b959a84dd0bd2d45719b817245e49371ead79ed4f710d19"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6734e606355834f13445b6adc38b53c0fd45f1a56a9ba06c2058f86893ae8017"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f8303414c7b03f794347ad062c0516cee0e15f7a612abd0ce1e25caf6ceb47df"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aaf53a6cebad0eae578f062c7d462155eada9c172bd8c4d250b8c1d8eb7f916a"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3dc5b6a8ecfdc5748a7e429782598e4f17ef378e3e272eeb1340ea57c9109f41"}, + {file = 
"charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e1b25e3ad6c909f398df8921780d6a3d120d8c09466720226fc621605b6f92b1"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ca564606d2caafb0abe6d1b5311c2649e8071eb241b2d64e75a0d0065107e62"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b82fab78e0b1329e183a65260581de4375f619167478dddab510c6c6fb04d9b6"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:bd7163182133c0c7701b25e604cf1611c0d87712e56e88e7ee5d72deab3e76b5"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:11d117e6c63e8f495412d37e7dc2e2fff09c34b2d09dbe2bee3c6229577818be"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:cf6511efa4801b9b38dc5546d7547d5b5c6ef4b081c60b23e4d941d0eba9cbeb"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:abc1185d79f47c0a7aaf7e2412a0eb2c03b724581139193d2d82b3ad8cbb00ac"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cb7b2ab0188829593b9de646545175547a70d9a6e2b63bf2cd87a0a391599324"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-win32.whl", hash = "sha256:c36bcbc0d5174a80d6cccf43a0ecaca44e81d25be4b7f90f0ed7bcfbb5a00909"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:cca4def576f47a09a943666b8f829606bcb17e2bc2d5911a46c8f8da45f56755"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0c95f12b74681e9ae127728f7e5409cbbef9cd914d5896ef238cc779b8152373"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fca62a8301b605b954ad2e9c3666f9d97f63872aa4efcae5492baca2056b74ab"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ac0aa6cd53ab9a31d397f8303f92c42f534693528fafbdb997c82bae6e477ad9"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c3af8e0f07399d3176b179f2e2634c3ce9c1301379a6b8c9c9aeecd481da494f"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a5fc78f9e3f501a1614a98f7c54d3969f3ad9bba8ba3d9b438c3bc5d047dd28"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:628c985afb2c7d27a4800bfb609e03985aaecb42f955049957814e0491d4006d"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:74db0052d985cf37fa111828d0dd230776ac99c740e1a758ad99094be4f1803d"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:1e8fcdd8f672a1c4fc8d0bd3a2b576b152d2a349782d1eb0f6b8e52e9954731d"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:04afa6387e2b282cf78ff3dbce20f0cc071c12dc8f685bd40960cc68644cfea6"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:dd5653e67b149503c68c4018bf07e42eeed6b4e956b24c00ccdf93ac79cdff84"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d2686f91611f9e17f4548dbf050e75b079bbc2a82be565832bc8ea9047b61c8c"}, + {file = 
"charset_normalizer-3.1.0-cp37-cp37m-win32.whl", hash = "sha256:4155b51ae05ed47199dc5b2a4e62abccb274cee6b01da5b895099b61b1982974"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:322102cdf1ab682ecc7d9b1c5eed4ec59657a65e1c146a0da342b78f4112db23"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e633940f28c1e913615fd624fcdd72fdba807bf53ea6925d6a588e84e1151531"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3a06f32c9634a8705f4ca9946d667609f52cf130d5548881401f1eb2c39b1e2c"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7381c66e0561c5757ffe616af869b916c8b4e42b367ab29fedc98481d1e74e14"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3573d376454d956553c356df45bb824262c397c6e26ce43e8203c4c540ee0acb"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e89df2958e5159b811af9ff0f92614dabf4ff617c03a4c1c6ff53bf1c399e0e1"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:78cacd03e79d009d95635e7d6ff12c21eb89b894c354bd2b2ed0b4763373693b"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:de5695a6f1d8340b12a5d6d4484290ee74d61e467c39ff03b39e30df62cf83a0"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1c60b9c202d00052183c9be85e5eaf18a4ada0a47d188a83c8f5c5b23252f649"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:f645caaf0008bacf349875a974220f1f1da349c5dbe7c4ec93048cdc785a3326"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:ea9f9c6034ea2d93d9147818f17c2a0860d41b71c38b9ce4d55f21b6f9165a11"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:80d1543d58bd3d6c271b66abf454d437a438dff01c3e62fdbcd68f2a11310d4b"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:73dc03a6a7e30b7edc5b01b601e53e7fc924b04e1835e8e407c12c037e81adbd"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6f5c2e7bc8a4bf7c426599765b1bd33217ec84023033672c1e9a8b35eaeaaaf8"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-win32.whl", hash = "sha256:12a2b561af122e3d94cdb97fe6fb2bb2b82cef0cdca131646fdb940a1eda04f0"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:3160a0fd9754aab7d47f95a6b63ab355388d890163eb03b2d2b87ab0a30cfa59"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:38e812a197bf8e71a59fe55b757a84c1f946d0ac114acafaafaf21667a7e169e"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6baf0baf0d5d265fa7944feb9f7451cc316bfe30e8df1a61b1bb08577c554f31"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8f25e17ab3039b05f762b0a55ae0b3632b2e073d9c8fc88e89aca31a6198e88f"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3747443b6a904001473370d7810aa19c3a180ccd52a7157aacc264a5ac79265e"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:b116502087ce8a6b7a5f1814568ccbd0e9f6cfd99948aa59b0e241dc57cf739f"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d16fd5252f883eb074ca55cb622bc0bee49b979ae4e8639fff6ca3ff44f9f854"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21fa558996782fc226b529fdd2ed7866c2c6ec91cee82735c98a197fae39f706"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6f6c7a8a57e9405cad7485f4c9d3172ae486cfef1344b5ddd8e5239582d7355e"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ac3775e3311661d4adace3697a52ac0bab17edd166087d493b52d4f4f553f9f0"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:10c93628d7497c81686e8e5e557aafa78f230cd9e77dd0c40032ef90c18f2230"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:6f4f4668e1831850ebcc2fd0b1cd11721947b6dc7c00bf1c6bd3c929ae14f2c7"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:0be65ccf618c1e7ac9b849c315cc2e8a8751d9cfdaa43027d4f6624bd587ab7e"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:53d0a3fa5f8af98a1e261de6a3943ca631c526635eb5817a87a59d9a57ebf48f"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-win32.whl", hash = "sha256:a04f86f41a8916fe45ac5024ec477f41f886b3c435da2d4e3d2709b22ab02af1"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:830d2948a5ec37c386d3170c483063798d7879037492540f10a475e3fd6f244b"}, + {file = "charset_normalizer-3.1.0-py3-none-any.whl", hash = "sha256:3d9098b479e78c85080c98e1e35ff40b4a31d8953102bb0fd7d1b6f8a2111a3d"}, +] + +[[package]] +name = "click" +version = "8.1.3" +description = "Composable command line interface toolkit" +optional = false +python-versions = ">=3.7" +files = [ + {file = "click-8.1.3-py3-none-any.whl", hash = "sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48"}, + {file = "click-8.1.3.tar.gz", hash = "sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "cryptography" +version = "41.0.1" +description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "cryptography-41.0.1-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:f73bff05db2a3e5974a6fd248af2566134d8981fd7ab012e5dd4ddb1d9a70699"}, + {file = "cryptography-41.0.1-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:1a5472d40c8f8e91ff7a3d8ac6dfa363d8e3138b961529c996f3e2df0c7a411a"}, + {file = "cryptography-41.0.1-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7fa01527046ca5facdf973eef2535a27fec4cb651e4daec4d043ef63f6ecd4ca"}, + {file = "cryptography-41.0.1-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b46e37db3cc267b4dea1f56da7346c9727e1209aa98487179ee8ebed09d21e43"}, + {file = "cryptography-41.0.1-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:d198820aba55660b4d74f7b5fd1f17db3aa5eb3e6893b0a41b75e84e4f9e0e4b"}, + {file = "cryptography-41.0.1-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:948224d76c4b6457349d47c0c98657557f429b4e93057cf5a2f71d603e2fc3a3"}, + {file = "cryptography-41.0.1-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:059e348f9a3c1950937e1b5d7ba1f8e968508ab181e75fc32b879452f08356db"}, + {file = "cryptography-41.0.1-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:b4ceb5324b998ce2003bc17d519080b4ec8d5b7b70794cbd2836101406a9be31"}, + {file = "cryptography-41.0.1-cp37-abi3-win32.whl", hash = "sha256:8f4ab7021127a9b4323537300a2acfb450124b2def3756f64dc3a3d2160ee4b5"}, + {file = "cryptography-41.0.1-cp37-abi3-win_amd64.whl", hash = "sha256:1fee5aacc7367487b4e22484d3c7e547992ed726d14864ee33c0176ae43b0d7c"}, + {file = "cryptography-41.0.1-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:9a6c7a3c87d595608a39980ebaa04d5a37f94024c9f24eb7d10262b92f739ddb"}, + {file = "cryptography-41.0.1-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:5d092fdfedaec4cbbffbf98cddc915ba145313a6fdaab83c6e67f4e6c218e6f3"}, + {file = "cryptography-41.0.1-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:1a8e6c2de6fbbcc5e14fd27fb24414507cb3333198ea9ab1258d916f00bc3039"}, + {file = "cryptography-41.0.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:cb33ccf15e89f7ed89b235cff9d49e2e62c6c981a6061c9c8bb47ed7951190bc"}, + {file = "cryptography-41.0.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5f0ff6e18d13a3de56f609dd1fd11470918f770c6bd5d00d632076c727d35485"}, + {file = "cryptography-41.0.1-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:7bfc55a5eae8b86a287747053140ba221afc65eb06207bedf6e019b8934b477c"}, + {file = "cryptography-41.0.1-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:eb8163f5e549a22888c18b0d53d6bb62a20510060a22fd5a995ec8a05268df8a"}, + {file = "cryptography-41.0.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:8dde71c4169ec5ccc1087bb7521d54251c016f126f922ab2dfe6649170a3b8c5"}, + {file = "cryptography-41.0.1.tar.gz", hash = "sha256:d34579085401d3f49762d2f7d6634d6b6c2ae1242202e860f4d26b046e3a1006"}, +] + +[package.dependencies] +cffi = ">=1.12" + +[package.extras] +docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.1.1)"] +docstest = ["pyenchant (>=1.6.11)", "sphinxcontrib-spelling (>=4.0.1)", "twine (>=1.12.0)"] +nox = ["nox"] +pep8test = ["black", "check-sdist", "mypy", "ruff"] +sdist = ["build"] +ssh = ["bcrypt (>=3.1.5)"] +test = ["pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] +test-randomorder = ["pytest-randomly"] + +[[package]] +name = "dataclasses-json" +version = "0.5.8" +description = "Easily serialize dataclasses to and from JSON" 
+optional = false +python-versions = ">=3.6" +files = [ + {file = "dataclasses-json-0.5.8.tar.gz", hash = "sha256:6572ac08ad9340abcb74fd8c4c8e9752db2a182a402c8e871d0a8aa119e3804e"}, + {file = "dataclasses_json-0.5.8-py3-none-any.whl", hash = "sha256:65b167c15fdf9bde27569c09ac18dd39bf1cc5b7998525024cb4678d2653946c"}, +] + +[package.dependencies] +marshmallow = ">=3.3.0,<4.0.0" +marshmallow-enum = ">=1.5.1,<2.0.0" +typing-inspect = ">=0.4.0" + +[package.extras] +dev = ["flake8", "hypothesis", "ipython", "mypy (>=0.710)", "portray", "pytest (>=7.2.0)", "simplejson", "types-dataclasses"] + +[[package]] +name = "idna" +version = "3.4" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.5" +files = [ + {file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"}, + {file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"}, +] + +[[package]] +name = "intervaltree" +version = "3.1.0" +description = "Editable interval tree data structure for Python 2 and 3" +optional = false +python-versions = "*" +files = [ + {file = "intervaltree-3.1.0.tar.gz", hash = "sha256:902b1b88936918f9b2a19e0e5eb7ccb430ae45cde4f39ea4b36932920d33952d"}, +] + +[package.dependencies] +sortedcontainers = ">=2.0,<3.0" + +[[package]] +name = "libcst" +version = "1.0.1" +description = "A concrete syntax tree with AST-like properties for Python 3.5, 3.6, 3.7, 3.8, 3.9, and 3.10 programs." +optional = false +python-versions = ">=3.7" +files = [ + {file = "libcst-1.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:80423311f09fc5fc3270ede44d30d9d8d3c2d3dd50dbf703a581ca7346949fa6"}, + {file = "libcst-1.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9d6dec2a3c443792e6af7c36fadc256e4ea586214c76b52f0d18118811dbe351"}, + {file = "libcst-1.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4840a3de701778f0a19582bb3085c61591329153f801dc25da84689a3733960b"}, + {file = "libcst-1.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0138068baf09561268c7f079373bda45f0e2b606d2d19df1307ca8a5134fc465"}, + {file = "libcst-1.0.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9a4931feceab171e6fce73de94e13880424367247dad6ff2b49cabfec733e144"}, + {file = "libcst-1.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:47dba43855e9c7b06d8b256ee81f0ebec6a4f43605456519577e09dfe4b4288c"}, + {file = "libcst-1.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8c50541c3fd6b1d5a3765c4bb5ee8ecbba9d0e798e48f79fd5adf3b6752de4d0"}, + {file = "libcst-1.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5599166d5fec40e18601fb8868519dde99f77b6e4ad6074958018f9545da7abd"}, + {file = "libcst-1.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:600c4d3a9a2f75d5a055fed713a5a4d812709947909610aa6527abe08a31896f"}, + {file = "libcst-1.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a6b5aea04c35e13109edad3cf83bc6dcd74309b150a781d2189eecb288b73a87"}, + {file = "libcst-1.0.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ddd4e0eeec499d1c824ab545e62e957dbbd69a16bc4273208817638eb7d6b3c6"}, + {file = "libcst-1.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:414350df5e334ddf0db1732d63da44e81b734d45abe1c597b5e5c0dd46aa4156"}, + {file = 
"libcst-1.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:1adcfa7cafb6a0d39a1a0bec541355608038b45815e0c5019c95f91921d42884"}, + {file = "libcst-1.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8d31ce2790eab59c1bd8e33fe72d09cfc78635c145bdc3f08296b360abb5f443"}, + {file = "libcst-1.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2cb687e1514625e91024e50a5d2e485c0ad3be24f199874ebf32b5de0346150"}, + {file = "libcst-1.0.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6caa33430c0c7a0fcad921b0deeec61ddb96796b6f88dca94966f6db62065f4f"}, + {file = "libcst-1.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:b97f652b15c50e91df411a9c8d5e6f75882b30743a49b387dcedd3f68ed94d75"}, + {file = "libcst-1.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:967c66fabd52102954207bf1541312b467afc210fdf7033f32da992fb6c2372c"}, + {file = "libcst-1.0.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b666a605f4205c8357696f3b6571a38f6a8537cdcbb8f357587d35168298af34"}, + {file = "libcst-1.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ae49dcbfadefb82e830d41d9f0a1db0af3b771224768f431f1b7b3a9803ed7e3"}, + {file = "libcst-1.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c90c74a8a314f0774f045122323fb60bacba79cbf5f71883c0848ecd67179541"}, + {file = "libcst-1.0.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b0533de4e35396c61aeb3a6266ac30369a855910c2385aaa902ff4aabd60d409"}, + {file = "libcst-1.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:5e3293e77657ba62533553bb9f0c5fb173780e164c65db1ea2a3e0d03944a284"}, + {file = "libcst-1.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:119ba709f1dcb785a4458cf36cedb51d6f9cb2eec0acd7bb171f730eac7cb6ce"}, + {file = "libcst-1.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4b4e336f6d68456017671cdda8ddebf9caebce8052cc21a3f494b03d7bd28386"}, + {file = "libcst-1.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8420926791b0b6206cb831a7ec73d26ae820e65bdf07ce9813c7754c7722c07a"}, + {file = "libcst-1.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d237e9164a43caa7d6765ee560412264484e7620c546a2ee10a8d01bd56884e0"}, + {file = "libcst-1.0.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:440887e5f82efb299f2e98d4bfa5663851a878cfc0efed652ab8c50205191436"}, + {file = "libcst-1.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:ae7f4e71d714f256b5f2ff98b5a9effba0f9dff4d779d8f35d7eb157bef78f59"}, + {file = "libcst-1.0.1.tar.gz", hash = "sha256:37187337f979ba426d8bfefc08008c3c1b09b9e9f9387050804ed2da88107570"}, +] + +[package.dependencies] +pyyaml = ">=5.2" +typing-extensions = ">=3.7.4.2" +typing-inspect = ">=0.4.0" + +[package.extras] +dev = ["Sphinx (>=5.1.1)", "black (==23.3.0)", "build (>=0.10.0)", "coverage (>=4.5.4)", "fixit (==0.1.1)", "flake8 (>=3.7.8,<5)", "hypothesis (>=4.36.0)", "hypothesmith (>=0.0.4)", "jinja2 (==3.1.2)", "jupyter (>=1.0.0)", "maturin (>=0.8.3,<0.16)", "nbsphinx (>=0.4.2)", "prompt-toolkit (>=2.0.9)", "pyre-check (==0.9.10)", "setuptools-rust (>=1.5.2)", "setuptools-scm (>=6.0.1)", "slotscheck (>=0.7.1)", "sphinx-rtd-theme (>=0.4.3)", "ufmt (==2.1.0)", "usort (==1.0.7)"] + +[[package]] +name = "loguru" +version = "0.7.0" +description = "Python logging made (stupidly) simple" +optional = false +python-versions 
= ">=3.5" +files = [ + {file = "loguru-0.7.0-py3-none-any.whl", hash = "sha256:b93aa30099fa6860d4727f1b81f8718e965bb96253fa190fab2077aaad6d15d3"}, + {file = "loguru-0.7.0.tar.gz", hash = "sha256:1612053ced6ae84d7959dd7d5e431a0532642237ec21f7fd83ac73fe539e03e1"}, +] + +[package.dependencies] +colorama = {version = ">=0.3.4", markers = "sys_platform == \"win32\""} +win32-setctime = {version = ">=1.0.0", markers = "sys_platform == \"win32\""} + +[package.extras] +dev = ["Sphinx (==5.3.0)", "colorama (==0.4.5)", "colorama (==0.4.6)", "freezegun (==1.1.0)", "freezegun (==1.2.2)", "mypy (==v0.910)", "mypy (==v0.971)", "mypy (==v0.990)", "pre-commit (==3.2.1)", "pytest (==6.1.2)", "pytest (==7.2.1)", "pytest-cov (==2.12.1)", "pytest-cov (==4.0.0)", "pytest-mypy-plugins (==1.10.1)", "pytest-mypy-plugins (==1.9.3)", "sphinx-autobuild (==2021.3.14)", "sphinx-rtd-theme (==1.2.0)", "tox (==3.27.1)", "tox (==4.4.6)"] + +[[package]] +name = "marshmallow" +version = "3.19.0" +description = "A lightweight library for converting complex datatypes to and from native Python datatypes." +optional = false +python-versions = ">=3.7" +files = [ + {file = "marshmallow-3.19.0-py3-none-any.whl", hash = "sha256:93f0958568da045b0021ec6aeb7ac37c81bfcccbb9a0e7ed8559885070b3a19b"}, + {file = "marshmallow-3.19.0.tar.gz", hash = "sha256:90032c0fd650ce94b6ec6dc8dfeb0e3ff50c144586462c389b81a07205bedb78"}, +] + +[package.dependencies] +packaging = ">=17.0" + +[package.extras] +dev = ["flake8 (==5.0.4)", "flake8-bugbear (==22.10.25)", "mypy (==0.990)", "pre-commit (>=2.4,<3.0)", "pytest", "pytz", "simplejson", "tox"] +docs = ["alabaster (==0.7.12)", "autodocsumm (==0.2.9)", "sphinx (==5.3.0)", "sphinx-issues (==3.0.1)", "sphinx-version-warning (==1.1.2)"] +lint = ["flake8 (==5.0.4)", "flake8-bugbear (==22.10.25)", "mypy (==0.990)", "pre-commit (>=2.4,<3.0)"] +tests = ["pytest", "pytz", "simplejson"] + +[[package]] +name = "marshmallow-enum" +version = "1.5.1" +description = "Enum field for Marshmallow" +optional = false +python-versions = "*" +files = [ + {file = "marshmallow-enum-1.5.1.tar.gz", hash = "sha256:38e697e11f45a8e64b4a1e664000897c659b60aa57bfa18d44e226a9920b6e58"}, + {file = "marshmallow_enum-1.5.1-py2.py3-none-any.whl", hash = "sha256:57161ab3dbfde4f57adeb12090f39592e992b9c86d206d02f6bd03ebec60f072"}, +] + +[package.dependencies] +marshmallow = ">=2.0.0" + +[[package]] +name = "mypy-extensions" +version = "1.0.0" +description = "Type system extensions for programs checked with the mypy type checker." 
+optional = false +python-versions = ">=3.5" +files = [ + {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, + {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, +] + +[[package]] +name = "packaging" +version = "23.1" +description = "Core utilities for Python packages" +optional = false +python-versions = ">=3.7" +files = [ + {file = "packaging-23.1-py3-none-any.whl", hash = "sha256:994793af429502c4ea2ebf6bf664629d07c1a9fe974af92966e4b8d2df7edc61"}, + {file = "packaging-23.1.tar.gz", hash = "sha256:a392980d2b6cffa644431898be54b0045151319d1e7ec34f0cfed48767dd334f"}, +] + +[[package]] +name = "paramiko" +version = "2.12.0" +description = "SSH2 protocol library" +optional = false +python-versions = "*" +files = [ + {file = "paramiko-2.12.0-py2.py3-none-any.whl", hash = "sha256:b2df1a6325f6996ef55a8789d0462f5b502ea83b3c990cbb5bbe57345c6812c4"}, + {file = "paramiko-2.12.0.tar.gz", hash = "sha256:376885c05c5d6aa6e1f4608aac2a6b5b0548b1add40274477324605903d9cd49"}, +] + +[package.dependencies] +bcrypt = ">=3.1.3" +cryptography = ">=2.5" +pynacl = ">=1.0.1" +six = "*" + +[package.extras] +all = ["bcrypt (>=3.1.3)", "gssapi (>=1.4.1)", "invoke (>=1.3)", "pyasn1 (>=0.1.7)", "pynacl (>=1.0.1)", "pywin32 (>=2.1.8)"] +ed25519 = ["bcrypt (>=3.1.3)", "pynacl (>=1.0.1)"] +gssapi = ["gssapi (>=1.4.1)", "pyasn1 (>=0.1.7)", "pywin32 (>=2.1.8)"] +invoke = ["invoke (>=1.3)"] + +[[package]] +name = "portpicker" +version = "1.5.2" +description = "A library to choose unique available network ports." +optional = false +python-versions = ">=3.6" +files = [ + {file = "portpicker-1.5.2-py3-none-any.whl", hash = "sha256:01113f51c3cc63290a44dd7ae6e3eb9f8fe1b8a1f9d7988a897944230c39cd52"}, + {file = "portpicker-1.5.2.tar.gz", hash = "sha256:c55683ad725f5c00a41bc7db0225223e8be024b1fa564d039ed3390e4fd48fb3"}, +] + +[package.dependencies] +psutil = "*" + +[[package]] +name = "psutil" +version = "5.9.5" +description = "Cross-platform lib for process and system monitoring in Python." 
+optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "psutil-5.9.5-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:be8929ce4313f9f8146caad4272f6abb8bf99fc6cf59344a3167ecd74f4f203f"}, + {file = "psutil-5.9.5-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:ab8ed1a1d77c95453db1ae00a3f9c50227ebd955437bcf2a574ba8adbf6a74d5"}, + {file = "psutil-5.9.5-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:4aef137f3345082a3d3232187aeb4ac4ef959ba3d7c10c33dd73763fbc063da4"}, + {file = "psutil-5.9.5-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:ea8518d152174e1249c4f2a1c89e3e6065941df2fa13a1ab45327716a23c2b48"}, + {file = "psutil-5.9.5-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:acf2aef9391710afded549ff602b5887d7a2349831ae4c26be7c807c0a39fac4"}, + {file = "psutil-5.9.5-cp27-none-win32.whl", hash = "sha256:5b9b8cb93f507e8dbaf22af6a2fd0ccbe8244bf30b1baad6b3954e935157ae3f"}, + {file = "psutil-5.9.5-cp27-none-win_amd64.whl", hash = "sha256:8c5f7c5a052d1d567db4ddd231a9d27a74e8e4a9c3f44b1032762bd7b9fdcd42"}, + {file = "psutil-5.9.5-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:3c6f686f4225553615612f6d9bc21f1c0e305f75d7d8454f9b46e901778e7217"}, + {file = "psutil-5.9.5-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7a7dd9997128a0d928ed4fb2c2d57e5102bb6089027939f3b722f3a210f9a8da"}, + {file = "psutil-5.9.5-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89518112647f1276b03ca97b65cc7f64ca587b1eb0278383017c2a0dcc26cbe4"}, + {file = "psutil-5.9.5-cp36-abi3-win32.whl", hash = "sha256:104a5cc0e31baa2bcf67900be36acde157756b9c44017b86b2c049f11957887d"}, + {file = "psutil-5.9.5-cp36-abi3-win_amd64.whl", hash = "sha256:b258c0c1c9d145a1d5ceffab1134441c4c5113b2417fafff7315a917a026c3c9"}, + {file = "psutil-5.9.5-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:c607bb3b57dc779d55e1554846352b4e358c10fff3abf3514a7a6601beebdb30"}, + {file = "psutil-5.9.5.tar.gz", hash = "sha256:5410638e4df39c54d957fc51ce03048acd8e6d60abc0f5107af51e5fb566eb3c"}, +] + +[package.extras] +test = ["enum34", "ipaddress", "mock", "pywin32", "wmi"] + +[[package]] +name = "pycparser" +version = "2.21" +description = "C parser in Python" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"}, + {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, +] + +[[package]] +name = "pygments" +version = "2.15.1" +description = "Pygments is a syntax highlighting package written in Python." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "Pygments-2.15.1-py3-none-any.whl", hash = "sha256:db2db3deb4b4179f399a09054b023b6a586b76499d36965813c71aa8ed7b5fd1"}, + {file = "Pygments-2.15.1.tar.gz", hash = "sha256:8ace4d3c1dd481894b2005f560ead0f9f19ee64fe983366be1a21e171d12775c"}, +] + +[package.extras] +plugins = ["importlib-metadata"] + +[[package]] +name = "pynacl" +version = "1.5.0" +description = "Python binding to the Networking and Cryptography (NaCl) library" +optional = false +python-versions = ">=3.6" +files = [ + {file = "PyNaCl-1.5.0-cp36-abi3-macosx_10_10_universal2.whl", hash = "sha256:401002a4aaa07c9414132aaed7f6836ff98f59277a234704ff66878c2ee4a0d1"}, + {file = "PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:52cb72a79269189d4e0dc537556f4740f7f0a9ec41c1322598799b0bdad4ef92"}, + {file = "PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a36d4a9dda1f19ce6e03c9a784a2921a4b726b02e1c736600ca9c22029474394"}, + {file = "PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:0c84947a22519e013607c9be43706dd42513f9e6ae5d39d3613ca1e142fba44d"}, + {file = "PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:06b8f6fa7f5de8d5d2f7573fe8c863c051225a27b61e6860fd047b1775807858"}, + {file = "PyNaCl-1.5.0-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:a422368fc821589c228f4c49438a368831cb5bbc0eab5ebe1d7fac9dded6567b"}, + {file = "PyNaCl-1.5.0-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:61f642bf2378713e2c2e1de73444a3778e5f0a38be6fee0fe532fe30060282ff"}, + {file = "PyNaCl-1.5.0-cp36-abi3-win32.whl", hash = "sha256:e46dae94e34b085175f8abb3b0aaa7da40767865ac82c928eeb9e57e1ea8a543"}, + {file = "PyNaCl-1.5.0-cp36-abi3-win_amd64.whl", hash = "sha256:20f42270d27e1b6a29f54032090b972d97f0a1b0948cc52392041ef7831fee93"}, + {file = "PyNaCl-1.5.0.tar.gz", hash = "sha256:8ac7448f09ab85811607bdd21ec2464495ac8b7c66d146bf545b0f08fb9220ba"}, +] + +[package.dependencies] +cffi = ">=1.4.1" + +[package.extras] +docs = ["sphinx (>=1.6.5)", "sphinx-rtd-theme"] +tests = ["hypothesis (>=3.27.0)", "pytest (>=3.2.1,!=3.3.0)"] + +[[package]] +name = "pyre-check" +version = "0.9.18" +description = "A performant type checker for Python" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pyre-check-0.9.18.tar.gz", hash = "sha256:d5eb6db9011a7207189ecd0eaf32951e46cb0769c0f96a78fd0b90e633c9df2c"}, + {file = "pyre_check-0.9.18-py3-none-macosx_10_11_x86_64.whl", hash = "sha256:22633f5af3b986d266451a9e386a32414f8868de0a94226c7766f81eb080c59d"}, + {file = "pyre_check-0.9.18-py3-none-manylinux1_x86_64.whl", hash = "sha256:5659d4dbd6d1dd3052359861d828419f07d1ced1dad4ce4ca79071d252699c26"}, +] + +[package.dependencies] +click = ">=8.0" +dataclasses-json = "*" +intervaltree = "*" +libcst = "*" +psutil = "*" +pyre-extensions = ">=0.0.29" +tabulate = "*" +testslide = ">=2.7.0" +typing-extensions = "*" + +[[package]] +name = "pyre-extensions" +version = "0.0.30" +description = "Type system extensions for use with the pyre type checker" +optional = false +python-versions = "*" +files = [ + {file = "pyre-extensions-0.0.30.tar.gz", hash = "sha256:ba7923c486e089afb37a10623a8f4ae82d73cff42426d711c48af070e5bc31b2"}, + {file = "pyre_extensions-0.0.30-py3-none-any.whl", hash = "sha256:32b37ede4eed0ea879fdd6d84e0c7811e129f19b76614f1be3a6b47f9a4b1fa0"}, +] + +[package.dependencies] +typing-extensions = "*" 
+typing-inspect = "*" + +[[package]] +name = "pyyaml" +version = "6.0" +description = "YAML parser and emitter for Python" +optional = false +python-versions = ">=3.6" +files = [ + {file = "PyYAML-6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53"}, + {file = "PyYAML-6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9df7ed3b3d2e0ecfe09e14741b857df43adb5a3ddadc919a2d94fbdf78fea53c"}, + {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77f396e6ef4c73fdc33a9157446466f1cff553d979bd00ecb64385760c6babdc"}, + {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a80a78046a72361de73f8f395f1f1e49f956c6be882eed58505a15f3e430962b"}, + {file = "PyYAML-6.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f84fbc98b019fef2ee9a1cb3ce93e3187a6df0b2538a651bfb890254ba9f90b5"}, + {file = "PyYAML-6.0-cp310-cp310-win32.whl", hash = "sha256:2cd5df3de48857ed0544b34e2d40e9fac445930039f3cfe4bcc592a1f836d513"}, + {file = "PyYAML-6.0-cp310-cp310-win_amd64.whl", hash = "sha256:daf496c58a8c52083df09b80c860005194014c3698698d1a57cbcfa182142a3a"}, + {file = "PyYAML-6.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d4b0ba9512519522b118090257be113b9468d804b19d63c71dbcf4a48fa32358"}, + {file = "PyYAML-6.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:81957921f441d50af23654aa6c5e5eaf9b06aba7f0a19c18a538dc7ef291c5a1"}, + {file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:afa17f5bc4d1b10afd4466fd3a44dc0e245382deca5b3c353d8b757f9e3ecb8d"}, + {file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dbad0e9d368bb989f4515da330b88a057617d16b6a8245084f1b05400f24609f"}, + {file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:432557aa2c09802be39460360ddffd48156e30721f5e8d917f01d31694216782"}, + {file = "PyYAML-6.0-cp311-cp311-win32.whl", hash = "sha256:bfaef573a63ba8923503d27530362590ff4f576c626d86a9fed95822a8255fd7"}, + {file = "PyYAML-6.0-cp311-cp311-win_amd64.whl", hash = "sha256:01b45c0191e6d66c470b6cf1b9531a771a83c1c4208272ead47a3ae4f2f603bf"}, + {file = "PyYAML-6.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:897b80890765f037df3403d22bab41627ca8811ae55e9a722fd0392850ec4d86"}, + {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50602afada6d6cbfad699b0c7bb50d5ccffa7e46a3d738092afddc1f9758427f"}, + {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:48c346915c114f5fdb3ead70312bd042a953a8ce5c7106d5bfb1a5254e47da92"}, + {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:98c4d36e99714e55cfbaaee6dd5badbc9a1ec339ebfc3b1f52e293aee6bb71a4"}, + {file = "PyYAML-6.0-cp36-cp36m-win32.whl", hash = "sha256:0283c35a6a9fbf047493e3a0ce8d79ef5030852c51e9d911a27badfde0605293"}, + {file = "PyYAML-6.0-cp36-cp36m-win_amd64.whl", hash = "sha256:07751360502caac1c067a8132d150cf3d61339af5691fe9e87803040dbc5db57"}, + {file = "PyYAML-6.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:819b3830a1543db06c4d4b865e70ded25be52a2e0631ccd2f6a47a2822f2fd7c"}, + {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:473f9edb243cb1935ab5a084eb238d842fb8f404ed2193a915d1784b5a6b5fc0"}, + {file 
= "PyYAML-6.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0ce82d761c532fe4ec3f87fc45688bdd3a4c1dc5e0b4a19814b9009a29baefd4"}, + {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:231710d57adfd809ef5d34183b8ed1eeae3f76459c18fb4a0b373ad56bedcdd9"}, + {file = "PyYAML-6.0-cp37-cp37m-win32.whl", hash = "sha256:c5687b8d43cf58545ade1fe3e055f70eac7a5a1a0bf42824308d868289a95737"}, + {file = "PyYAML-6.0-cp37-cp37m-win_amd64.whl", hash = "sha256:d15a181d1ecd0d4270dc32edb46f7cb7733c7c508857278d3d378d14d606db2d"}, + {file = "PyYAML-6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0b4624f379dab24d3725ffde76559cff63d9ec94e1736b556dacdfebe5ab6d4b"}, + {file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:213c60cd50106436cc818accf5baa1aba61c0189ff610f64f4a3e8c6726218ba"}, + {file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9fa600030013c4de8165339db93d182b9431076eb98eb40ee068700c9c813e34"}, + {file = "PyYAML-6.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:277a0ef2981ca40581a47093e9e2d13b3f1fbbeffae064c1d21bfceba2030287"}, + {file = "PyYAML-6.0-cp38-cp38-win32.whl", hash = "sha256:d4eccecf9adf6fbcc6861a38015c2a64f38b9d94838ac1810a9023a0609e1b78"}, + {file = "PyYAML-6.0-cp38-cp38-win_amd64.whl", hash = "sha256:1e4747bc279b4f613a09eb64bba2ba602d8a6664c6ce6396a4d0cd413a50ce07"}, + {file = "PyYAML-6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:055d937d65826939cb044fc8c9b08889e8c743fdc6a32b33e2390f66013e449b"}, + {file = "PyYAML-6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e61ceaab6f49fb8bdfaa0f92c4b57bcfbea54c09277b1b4f7ac376bfb7a7c174"}, + {file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d67d839ede4ed1b28a4e8909735fc992a923cdb84e618544973d7dfc71540803"}, + {file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cba8c411ef271aa037d7357a2bc8f9ee8b58b9965831d9e51baf703280dc73d3"}, + {file = "PyYAML-6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:40527857252b61eacd1d9af500c3337ba8deb8fc298940291486c465c8b46ec0"}, + {file = "PyYAML-6.0-cp39-cp39-win32.whl", hash = "sha256:b5b9eccad747aabaaffbc6064800670f0c297e52c12754eb1d976c57e4f74dcb"}, + {file = "PyYAML-6.0-cp39-cp39-win_amd64.whl", hash = "sha256:b3d267842bf12586ba6c734f89d1f5b871df0273157918b0ccefa29deb05c21c"}, + {file = "PyYAML-6.0.tar.gz", hash = "sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2"}, +] + +[[package]] +name = "requests" +version = "2.31.0" +description = "Python HTTP for Humans." +optional = false +python-versions = ">=3.7" +files = [ + {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, + {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, +] + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<3" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "ruff" +version = "0.0.247" +description = "An extremely fast Python linter, written in Rust." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "ruff-0.0.247-py3-none-macosx_10_7_x86_64.whl", hash = "sha256:0151face9ef0e09c0d09166eae5f6df9d61ed7b1686086092d56164b790d1adf"}, + {file = "ruff-0.0.247-py3-none-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:0abffda0039dc0eec18d624a48a725c414587c816194d1c9889eceba82e87ad0"}, + {file = "ruff-0.0.247-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1e34ce0a12a9c7ac25fcfd8a9a25ade778f4e54df37f7ce58c406c36f9d5a1e3"}, + {file = "ruff-0.0.247-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c31adc9f08e1652acb6c1b6d494a3e52895e341398b5dcaffe3325688f70de87"}, + {file = "ruff-0.0.247-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ebc3b3077a880ea8af9f17c5614f606d6c1a15db6823501f4b8d3daf51f78782"}, + {file = "ruff-0.0.247-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:403f67452655923d0775c6c3854750e77c9c97eb875ea979ad515d3c75a45cff"}, + {file = "ruff-0.0.247-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:53dd6124c6b822c27ee23965ce9d8c5fbc76a97ecc209daef0bbfbe8f905cb18"}, + {file = "ruff-0.0.247-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f1483c7435db4926da3793a89f6bbb68dedf2990aeddef01407d8c47953403e0"}, + {file = "ruff-0.0.247-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ce619be01206ab71054c9f492a803cc81be678222379c69a0d60aa66c30e4a2"}, + {file = "ruff-0.0.247-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:172c0a8fb295259d9e12e43c39cf3bd006ae85eae89b8e9ca6ece7252241b603"}, + {file = "ruff-0.0.247-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:0cda3a13e67adaf5198c69847a2f04011434bdfbfdca05ac32c101991dd56162"}, + {file = "ruff-0.0.247-py3-none-musllinux_1_2_i686.whl", hash = "sha256:4481b5b6103dffc09156f2fea79a9a9282a72c0109ca4ab74828ae1089ec8c7e"}, + {file = "ruff-0.0.247-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:8c835b703cebb0f23d59ec3d83ff498c5290fae51f98df548aacbb9ff85cc93c"}, + {file = "ruff-0.0.247-py3-none-win32.whl", hash = "sha256:3695f5fd2f4ad44030799a6021b2626442e8d92e432d646aadeefd4a1fceab12"}, + {file = "ruff-0.0.247-py3-none-win_amd64.whl", hash = "sha256:3e22f08bc403d3b4f32488ea52cd69fc3cb343b2c99431fd969cda1c83f4bc2f"}, + {file = "ruff-0.0.247-py3-none-win_arm64.whl", hash = "sha256:737b7fd25d2523b7c526830a3670364a953cb6c6bbf9912c78cba06bbf0ca125"}, + {file = "ruff-0.0.247.tar.gz", hash = "sha256:cce9566cea1cb348bb2dec99f810d846d112627fa52bf3a554773ce4737a061b"}, +] + +[[package]] +name = "six" +version = "1.16.0" +description = "Python 2 and 3 compatibility utilities" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, +] + +[[package]] +name = "sortedcontainers" +version = "2.4.0" +description = "Sorted Containers -- Sorted List, Sorted Dict, Sorted Set" +optional = false +python-versions = "*" +files = [ + {file = "sortedcontainers-2.4.0-py2.py3-none-any.whl", hash = "sha256:a163dcaede0f1c021485e957a39245190e74249897e2ae4b2aa38595db237ee0"}, + {file = "sortedcontainers-2.4.0.tar.gz", hash = "sha256:25caa5a06cc30b6b83d11423433f65d1f9d76c4c6a0c90e3379eaa43b9bfdb88"}, +] + +[[package]] +name = "tabulate" +version = "0.9.0" 
+description = "Pretty-print tabular data" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tabulate-0.9.0-py3-none-any.whl", hash = "sha256:024ca478df22e9340661486f85298cff5f6dcdba14f3813e8830015b9ed1948f"}, + {file = "tabulate-0.9.0.tar.gz", hash = "sha256:0095b12bf5966de529c0feb1fa08671671b3368eec77d7ef7ab114be2c068b3c"}, +] + +[package.extras] +widechars = ["wcwidth"] + +[[package]] +name = "testslide" +version = "2.7.1" +description = "A test framework for Python that makes mocking and iterating over code with tests a breeze" +optional = false +python-versions = "*" +files = [ + {file = "TestSlide-2.7.1.tar.gz", hash = "sha256:d25890d5c383f673fac44a5f9e2561b7118d04f29f2c2b3d4f549e6db94cb34d"}, +] + +[package.dependencies] +psutil = ">=5.6.7" +Pygments = ">=2.2.0" +typeguard = "<3.0" + +[package.extras] +build = ["black", "coverage", "coveralls", "flake8", "ipython", "isort (>=5.1,<6.0)", "mypy (==0.991)", "sphinx", "sphinx-autobuild", "sphinx-kr-theme", "twine"] + +[[package]] +name = "toml" +version = "0.10.2" +description = "Python Library for Tom's Obvious, Minimal Language" +optional = false +python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, + {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, +] + +[[package]] +name = "typeguard" +version = "2.13.3" +description = "Run-time type checker for Python" +optional = false +python-versions = ">=3.5.3" +files = [ + {file = "typeguard-2.13.3-py3-none-any.whl", hash = "sha256:5e3e3be01e887e7eafae5af63d1f36c849aaa94e3a0112097312aabfa16284f1"}, + {file = "typeguard-2.13.3.tar.gz", hash = "sha256:00edaa8da3a133674796cf5ea87d9f4b4c367d77476e185e80251cc13dfbb8c4"}, +] + +[package.extras] +doc = ["sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] +test = ["mypy", "pytest", "typing-extensions"] + +[[package]] +name = "types-requests" +version = "2.31.0.1" +description = "Typing stubs for requests" +optional = false +python-versions = "*" +files = [ + {file = "types-requests-2.31.0.1.tar.gz", hash = "sha256:3de667cffa123ce698591de0ad7db034a5317457a596eb0b4944e5a9d9e8d1ac"}, + {file = "types_requests-2.31.0.1-py3-none-any.whl", hash = "sha256:afb06ef8f25ba83d59a1d424bd7a5a939082f94b94e90ab5e6116bd2559deaa3"}, +] + +[package.dependencies] +types-urllib3 = "*" + +[[package]] +name = "types-urllib3" +version = "1.26.25.13" +description = "Typing stubs for urllib3" +optional = false +python-versions = "*" +files = [ + {file = "types-urllib3-1.26.25.13.tar.gz", hash = "sha256:3300538c9dc11dad32eae4827ac313f5d986b8b21494801f1bf97a1ac6c03ae5"}, + {file = "types_urllib3-1.26.25.13-py3-none-any.whl", hash = "sha256:5dbd1d2bef14efee43f5318b5d36d805a489f6600252bb53626d4bfafd95e27c"}, +] + +[[package]] +name = "typing-extensions" +version = "4.6.3" +description = "Backported and Experimental Type Hints for Python 3.7+" +optional = false +python-versions = ">=3.7" +files = [ + {file = "typing_extensions-4.6.3-py3-none-any.whl", hash = "sha256:88a4153d8505aabbb4e13aacb7c486c2b4a33ca3b3f807914a9b4c844c471c26"}, + {file = "typing_extensions-4.6.3.tar.gz", hash = "sha256:d91d5919357fe7f681a9f2b5b4cb2a5f1ef0a1e9f59c4d8ff0d3491e05c0ffd5"}, +] + +[[package]] +name = "typing-inspect" +version = "0.9.0" +description = "Runtime inspection utilities for typing module." 
+optional = false +python-versions = "*" +files = [ + {file = "typing_inspect-0.9.0-py3-none-any.whl", hash = "sha256:9ee6fc59062311ef8547596ab6b955e1b8aa46242d854bfc78f4f6b0eff35f9f"}, + {file = "typing_inspect-0.9.0.tar.gz", hash = "sha256:b23fc42ff6f6ef6954e4852c1fb512cdd18dbea03134f91f856a95ccc9461f78"}, +] + +[package.dependencies] +mypy-extensions = ">=0.3.0" +typing-extensions = ">=3.7.4" + +[[package]] +name = "urllib3" +version = "2.0.3" +description = "HTTP library with thread-safe connection pooling, file post, and more." +optional = false +python-versions = ">=3.7" +files = [ + {file = "urllib3-2.0.3-py3-none-any.whl", hash = "sha256:48e7fafa40319d358848e1bc6809b208340fafe2096f1725d05d67443d0483d1"}, + {file = "urllib3-2.0.3.tar.gz", hash = "sha256:bee28b5e56addb8226c96f7f13ac28cb4c301dd5ea8a6ca179c0b9835e032825"}, +] + +[package.extras] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +secure = ["certifi", "cryptography (>=1.9)", "idna (>=2.0.0)", "pyopenssl (>=17.1.0)", "urllib3-secure-extra"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["zstandard (>=0.18.0)"] + +[[package]] +name = "win32-setctime" +version = "1.1.0" +description = "A small Python utility to set file creation time on Windows" +optional = false +python-versions = ">=3.5" +files = [ + {file = "win32_setctime-1.1.0-py3-none-any.whl", hash = "sha256:231db239e959c2fe7eb1d7dc129f11172354f98361c4fa2d6d2d7e278baa8aad"}, + {file = "win32_setctime-1.1.0.tar.gz", hash = "sha256:15cf5750465118d6929ae4de4eb46e8edae9a5634350c01ba582df868e932cb2"}, +] + +[package.extras] +dev = ["black (>=19.3b0)", "pytest (>=4.6.2)"] + +[[package]] +name = "yapf" +version = "0.32.0" +description = "A formatter for Python code." +optional = false +python-versions = "*" +files = [ + {file = "yapf-0.32.0-py2.py3-none-any.whl", hash = "sha256:8fea849025584e486fd06d6ba2bed717f396080fd3cc236ba10cb97c4c51cf32"}, + {file = "yapf-0.32.0.tar.gz", hash = "sha256:a3f5085d37ef7e3e004c4ba9f9b3e40c54ff1901cd111f05145ae313a7c67d1b"}, +] + +[metadata] +lock-version = "2.0" +python-versions = ">=3.8, <3.12" +content-hash = "2cf22908337e32c82f922d811207d26caa80b75fafa87922efa5ae1b80eb07bf" diff --git a/burny_common/pyproject.toml b/burny_common/pyproject.toml new file mode 100644 index 00000000..70b77956 --- /dev/null +++ b/burny_common/pyproject.toml @@ -0,0 +1,70 @@ +[tool.poetry] +name = "burny_common" +version = "0.0.10" +description = "" +authors = ["BurnySc2 "] +packages = [ + { include = "burny_common/*.py" }, +] + +[tool.poetry.dependencies] +python = ">=3.8, <3.12" +loguru = "^0.7" +click = "^8.1" +paramiko = "^2.11" +psutil = "^5.9" +requests = "^2.28" +types-requests = "^2.27" +portpicker = "^1.5" + +[tool.poetry.group.dev.dependencies] +toml = "^0.10.2" +# Autoformat +yapf = "^0.32.0" +# Linter +ruff = "^0.0.247" +# Type checker +pyre-check = "^0.9.18" + +[build-system] +requires = ["poetry-core>=1.0.0"] +build-backend = "poetry.core.masonry.api" + +[tool.yapf] +based_on_style = "pep8" +column_limit = 120 +split_arguments_when_comma_terminated = true +dedent_closing_brackets = true +allow_split_before_dict_value = false + +[tool.ruff] +target-version = 'py38' +line-length = 120 +# Allow unused variables when underscore-prefixed. 
+dummy-variable-rgx = "^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$" +select = [ + "C4", # flake8-comprehensions + "E", # Error + "F", # pyflakes + "BLE", # flake8-blind-except + "I", # isort + "ISC", # flake8-implicit-str-concat + "N", # pep8-naming + "PGH", # pygrep-hooks + "PTH", # flake8-use-pathlib + "SIM", # flake8-simplify + "W", # Warning + "Q", # flake8-quotes + "YTT", # flake8-2020 + "UP", # pyupgrade + "A", # flake8-builtins +] + +[tool.ruff.pyupgrade] +# Preserve types, even if a file imports `from __future__ import annotations`. +# Remove once support for py3.8 and 3.9 is dropped +keep-runtime-typing = true + +[tool.ruff.pep8-naming] +# Allow Pydantic's `@validator` decorator to trigger class method treatment. +classmethod-decorators = ["pydantic.validator", "classmethod"] diff --git a/discord_bot/.dockerignore b/discord_bot/.dockerignore new file mode 100644 index 00000000..63083eac --- /dev/null +++ b/discord_bot/.dockerignore @@ -0,0 +1,2 @@ +*test*/* +SECRETS.toml diff --git a/discord_bot/.earthlyignore b/discord_bot/.earthlyignore new file mode 100644 index 00000000..c5b0d55b --- /dev/null +++ b/discord_bot/.earthlyignore @@ -0,0 +1,8 @@ +**/__pycache__ +.benchmarks +.hypothesis +.idea +.pyre +.pytest_cache +.ruff_cache +data \ No newline at end of file diff --git a/discord_bot/.gitignore b/discord_bot/.gitignore new file mode 100644 index 00000000..e0422b27 --- /dev/null +++ b/discord_bot/.gitignore @@ -0,0 +1,2 @@ +SECRETS.toml +data/ diff --git a/discord_bot/.pyre_configuration b/discord_bot/.pyre_configuration new file mode 100644 index 00000000..7c512daa --- /dev/null +++ b/discord_bot/.pyre_configuration @@ -0,0 +1,6 @@ +{ + "site_package_search_strategy": "pep561", + "source_directories": [ + "." + ] +} diff --git a/discord_bot/.vscode/extensions.json b/discord_bot/.vscode/extensions.json new file mode 100644 index 00000000..20c088c4 --- /dev/null +++ b/discord_bot/.vscode/extensions.json @@ -0,0 +1,11 @@ +{ + "recommendations": [ + "charliermarsh.ruff", + "ms-python.vscode-pylance", + "ms-python.python", + "tamasfe.even-better-toml", + "earthly.earthfile-syntax-highlighting", + "quicktype.quicktype", + "eeyore.yapf" + ] +} \ No newline at end of file diff --git a/discord_bot/.vscode/launch.json b/discord_bot/.vscode/launch.json new file mode 100644 index 00000000..227f03ed --- /dev/null +++ b/discord_bot/.vscode/launch.json @@ -0,0 +1,38 @@ +{ + // Use IntelliSense to learn about possible attributes. + // Hover to view descriptions of existing attributes. 
+ // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387 + "version": "0.2.0", + "configurations": [ + { + "name": "Discord Bot: Run Current File", + "type": "python", + "request": "launch", + "program": "${file}", + "console": "integratedTerminal", + "justMyCode": true, + "env": { + "PYTHONPATH": "${workspaceFolder}" + } + }, + { + "name": "Discord Bot: Start", + "type": "python", + "request": "launch", + "cwd": "${workspaceFolder}", + "program": "main.py", + "console": "integratedTerminal", + "justMyCode": true, + }, + { + "name": "Discord Bot: Run Tests", + "type": "python", + "request": "launch", + "cwd": "${workspaceFolder}", + "module": "pytest", + "args": [], + "console": "integratedTerminal", + "justMyCode": true + } + ] +} \ No newline at end of file diff --git a/discord_bot/.vscode/settings.json b/discord_bot/.vscode/settings.json new file mode 100644 index 00000000..d9569d4e --- /dev/null +++ b/discord_bot/.vscode/settings.json @@ -0,0 +1,4 @@ +{ + "python.formatting.provider": "yapf", + "editor.formatOnSave": true, +} \ No newline at end of file diff --git a/discord_bot/Dockerfile b/discord_bot/Dockerfile new file mode 100644 index 00000000..35da9fba --- /dev/null +++ b/discord_bot/Dockerfile @@ -0,0 +1,17 @@ +FROM python:3.10-slim + +WORKDIR /root/discord_bot + +ADD poetry.lock pyproject.toml ./ + +RUN pip install poetry --no-cache-dir \ + && poetry install --no-dev + +ADD . /root/discord_bot + +# Allow imports like 'from discord_bot.commands.public_remind import Remind' +ENV PYTHONPATH "${PYTHONPATH}:/root" + +CMD ["poetry", "run", "python", "main.py"] + +# See run.sh diff --git a/discord_bot/Earthfile b/discord_bot/Earthfile new file mode 100644 index 00000000..9a13f784 --- /dev/null +++ b/discord_bot/Earthfile @@ -0,0 +1,61 @@ +VERSION 0.6 +# earthly +all --PYTHONVERSION=3.11 +ARG PYTHONVERSION=3.11 +FROM python:${PYTHONVERSION}-slim +WORKDIR /root/discord_bot + +# Start bot locally for development +start-dev: + # Run on host system instead of inside a container + LOCALLY + RUN sh run.sh + +install-dev: + RUN pip install poetry --no-cache-dir + COPY poetry.lock pyproject.toml ./ + RUN poetry install + COPY . /root/discord_bot + +format: + # Run on host system instead of inside a container + LOCALLY + # Requirements: + # pip install poetry + # poetry install + + # Convert single to double quotes + RUN poetry run ruff check . --select Q --fix + # Remove unused imports + RUN poetry run ruff check . --select F --fix + # Sort imports + RUN poetry run ruff check . --select I --fix + # Format code + RUN poetry run yapf -ir . + +# Check if files are correctly formatted +format-check: + FROM +install-dev + RUN poetry run yapf -dr . + +# Ignore errors via "# noqa: F841" +lint: + FROM +install-dev + RUN poetry run ruff check . 
+ +# Ignore errors via "# pyre-fixme[11]" +pyre: + FROM +install-dev + RUN poetry run pyre + +pytest: + FROM +install-dev + RUN poetry run pytest + +pre-commit: + BUILD +format-check + BUILD +lint + BUILD +pyre + +all: + BUILD +pre-commit + BUILD +pytest diff --git a/discord_bot/README.md b/discord_bot/README.md new file mode 100644 index 00000000..cb7cc52c --- /dev/null +++ b/discord_bot/README.md @@ -0,0 +1,77 @@ +# Python Discord Sc2 Bot + +### Installation +- Install python 3.8 or newer (32 or 64 bit) +- Run commands + ``` + pip install poetry --user + poetry install + ``` +- Required private file: DISCORDKEY, SUPABASEKEY, SUPABASEURL (the error messages should display if certain keys are missing) + +### Development +Open this project folder `discord_bot` with VSCode via command `code discord_bot`. Configure the python interpreter to point to your venv location, which can be found via `poetry env info --path`. Now the debugger options from the project's launch.json and the `testing` tab should be available in VSCode. Consider installing the recommended VSCode extensions. + +You can run and debug the bot and tests via the debug config, or manually via terminal `poetry run python main.py` and the tests via `poetry run pytest` + +### Running + +Start the bot in `cwd=discord_bot/` with command + +`poetry run python main.py` + +or inside docker via + +`sh run.sh` + +### Commands +**Public commands:** +```markdown +# Uses nephest.com to grab mmr of the player +!mmr + +# Remind the user in a certain time in the same channel of a text message +!reminder +!reminder 2m this will remind me in 2 minutes +!reminder 2h this will remind me in 2 hours +!reminder 2h 2m this will remind me in 2 hours and 2 minutes + +# Remind the user at a certain time in the same channel of a text message +!remindat